What's the best way to consume messages in RabbitMQ?

I have about 100 different types of messages and I'd like to know the correct way to consume them in RabbitMQ.
I have two candidate solutions and I don't know which one is best:
1: Implement 100 consumers for the 100 different types of messages.
2: Implement one consumer and dispatch on the message type, for example with a switch case.
In your opinion, what's the best way?

If you are going to be calling 100 different messaging services then you will need to set up 100 different consumers, but use only one RabbitMQ connection.
Here is how I implemented my RabbitMQ consumer:
// Assumed imports: amqplib and winston, plus the app's own Emails and Sms modules
// (their paths here are placeholders).
const amqplib = require('amqplib');
const winston = require('winston');
const Emails = require('./emails');
const Sms = require('./sms');

module.exports.start = async () => {
  try {
    // One connection and one channel are shared by both consumers.
    const connection = await amqplib.connect(process.env.QUEUE_SERVICE);
    const channel = await connection.createChannel();
    await channel.prefetch(1); // deliver one unacked message at a time

    const queue1 = 'email';
    await channel.assertQueue(queue1, { durable: true });
    console.log('Listening for emails...');
    channel.consume(queue1, async (msg) => {
      if (msg !== null) {
        winston.info(`Got message ${msg.content.toString()}`);
        const queueMessage = JSON.parse(msg.content.toString());
        Emails.Mailer(queueMessage)
          .then(() => channel.ack(msg))
          .catch((err) => winston.error(err)); // log failures instead of swallowing them
      }
    });

    const queue2 = 'sms';
    await channel.assertQueue(queue2, { durable: true });
    console.log('Listening for SMS...');
    channel.consume(queue2, async (msg) => {
      if (msg !== null) {
        const queueMessage = JSON.parse(msg.content.toString());
        Sms.sendPaymentSms(queueMessage).then(() => channel.ack(msg));
      }
    });
  } catch (error) {
    return error;
  }
};
Another thing you can do to maintain code readability is to modularize the various services you want to call and invoke them from your consumer using conditionals or the strategy design pattern, as sketched below.
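For example, a minimal sketch of that dispatch idea, assuming messages carry a type field and arrive on a single shared queue (the queue name and handler modules here are hypothetical):

// Hypothetical handler modules keyed by message type (strategy pattern).
const handlers = {
  createStudent: require('./handlers/createStudent'),
  sendEmail: require('./handlers/sendEmail'),
  // ...one entry per message type
};

channel.consume('all-messages', async (msg) => {
  if (msg === null) return;
  const { type, payload } = JSON.parse(msg.content.toString());
  const handler = handlers[type];
  if (!handler) {
    // Unknown type: reject without requeueing so it doesn't loop forever.
    return channel.nack(msg, false, false);
  }
  await handler(payload);
  channel.ack(msg);
});

A lookup table like this keeps each handler in its own module and avoids a 100-arm switch.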

Related

Keep client connected to WebSocket in React Native and Express server

I have a React Native application with two types of users of the app (customer and restaurant).
On checkout I connect the customer to the WebSocket on the Express server, and once the order is placed I send a message to the restaurant, which is supposed to be connected to the WebSocket at all times.
However, sometimes the restaurant gets disconnected somehow, so I am trying to keep the restaurant connected, and if it disconnects, reconnect again automatically.
On the React Native restaurant side I have the following code.
This is the useWebSocketLite hook that handles connecting, sending and receiving messages, and retrying the connection to the server when it closes:
function useWebSocketLite({ socketUrl, retry: defaultRetry = 3, retryInterval = 1000 }) {
  const [data, setData] = useState();
  const [send, setSend] = useState(() => () => undefined);
  const [retry, setRetry] = useState(defaultRetry);
  const [readyState, setReadyState] = useState(false);

  useEffect(() => {
    const ws = new WebSocket(socketUrl);

    ws.onopen = () => {
      setReadyState(true);
      setSend(() => {
        return (data) => {
          try {
            const d = JSON.stringify(data);
            ws.send(d);
            return true;
          } catch (err) {
            return false;
          }
        };
      });
      ws.onmessage = (event) => {
        const msg = formatMessage(event.data);
        setData({ message: msg, timestamp: getTimestamp() });
      };
    };

    ws.onclose = () => {
      setReadyState(false);
      if (retry > 0) {
        setTimeout(() => {
          setRetry((retry) => retry - 1);
        }, retryInterval);
      }
    };

    return () => {
      ws.close();
    };
  }, [retry]);

  return { send, data, readyState };
}
So based on this, every time the connection is closed, the client will retry the connection.
Also, when a restaurant launches the app, the following code runs:
const ws = useWebSocketLite({
  socketUrl: `wss://${url}/id=${user.user_id}&role=restaurant`
});
This useEffect establishes the connection:
useEffect(() => {
  if (ws.readyState === true) {
    setConnectionOpen(true);
  }
}, [ws.readyState]);
and this useEffect handles incoming messages:
useEffect(() => {
  if (ws.data) {
    const message = ws.data;
    // dispatch...
  }
}, [ws.data]);
Express server implementation:
This is the code where I handle socket connections and messages on the Express server:
var webSockets = {}

function setupWebSocket(server) {
  server.on('connection', (socket, req) => {
    if (req) {
      var clientId = req.url
      let regexReplace = /[\[\]/]/g
      let regex = /([^=#&]+)=([^?&#]*)/g,
        params = {},
        match;
      while ((match = regex.exec(clientId))) {
        params[decodeURIComponent(match[1]).replace(regexReplace, '')] = decodeURIComponent(match[2])
      }
      if (params.role === 'restaurant') {
        webSockets[params.id] = socket
      }
    }

    socket.on('message', data => {
      let sData = JSON.parse(JSON.parse(data))
      let { id, data: orderData } = sData.data // renamed: destructuring into `data` would shadow the parameter and throw
      sendToClient(id, 'order', orderData)
    })

    socket.on('error', (err) => {
      console.log(err)
    })

    socket.on('close', (code, req) => {
      var clientId = req.url
      let regexReplace = /[\[\]/]/g
      let regex = /([^=#&]+)=([^?&#]*)/g,
        params = {},
        match;
      while ((match = regex.exec(clientId))) {
        params[decodeURIComponent(match[1]).replace(regexReplace, '')] = decodeURIComponent(match[2])
      }
      if (params.role === 'restaurant') {
        delete webSockets[clientId]
        console.log(`${webSockets[clientId]} disconnected with code ${code} !`);
      }
    });
  });

  // sends a message to a specific client
  const sendToClient = (clientId, type, data = {}) => {
    const payload = { type, data }
    const messageToSend = JSON.stringify({ error: false, message: payload })
    if (webSockets[clientId]) {
      webSockets[clientId].send(messageToSend)
      console.log(`${clientId} client notified with this order`)
    } else {
      console.log(`${clientId} websocket client is not connected.`)
    }
  }
}
Most of the time I get `13 websocket client is not connected.`, which means the restaurant has already been deleted from the webSockets object and its connection has already closed.
Apologies for the long question, and I hope someone can help me with this.
First of all, you should know that this is not good WebSocket practice, forcing the client (the restaurant) to stay connected.
Regardless, in the current state of your code there is an illogical behavior: at the end of the useEffect in your useWebSocketLite function, you are closing the socket connection:
return () => {
  ws.close();
};
Remember that the useEffect hook runs after the first render of the component and then again after every change of its dependencies (the retry state in your case), and that its cleanup function runs before each of those re-runs. So your code can be read like so: every time the retry state changes, we close the socket! For me, that is why you keep getting the client disconnected.
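A minimal sketch of one way to keep this design but break that loop (my suggestion, not code from the question): flag cleanup-initiated closes so that onclose only schedules a retry for genuine failures:

useEffect(() => {
  const ws = new WebSocket(socketUrl);
  let closedByCleanup = false;

  ws.onclose = () => {
    setReadyState(false);
    // Only schedule a retry when the socket closed on its own,
    // not when the effect's cleanup closed it deliberately.
    if (!closedByCleanup && retry > 0) {
      setTimeout(() => setRetry((r) => r - 1), retryInterval);
    }
  };

  return () => {
    closedByCleanup = true; // mark this close as intentional
    ws.close();
  };
}, [retry]);

This way a retry still replaces the socket (the effect re-runs and its cleanup closes the old one), but that deliberate close no longer triggers yet another retry against a healthy connection.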

WebRTC successfully signalled offer and answer, but not getting any ICE candidates

I'm trying to establish a WebRTC connection between two browsers. I have a node.js server for them to communicate through, which essentially just forwards the messages from one client to the other. I am running the server and two tabs all on my laptop, but I have not been able to make a connection. I have been able to send the offers and answers between the two tabs successfully resulting in pc.signalingState = 'stable' in both tabs. I believe once this is done then the RTCPeerConnection objects should start producing icecandidate events, but this is not happening and I do not know why. Here is my code (I've omitted the server code):
'use strict';
// This is mostly copy pasted from webrtc.org/getting-started/peer-connections.
import { io } from 'socket.io-client';

const configuration = {
  'iceServers': [
    { 'urls': 'stun:stun4.l.google.com:19302' },
    { 'urls': 'stun:stunserver.stunprotocol.org:3478' },
  ]
};

// Returns a promise for an RTCDataChannel
function join() {
  const socket = io('ws://localhost:8090');
  const pc = new RTCPeerConnection(configuration);

  socket.on('error', error => {
    socket.close();
    throw error;
  });

  pc.addEventListener('signalingstatechange', event => {
    // Prints 'have-local-offer' then 'stable' in one tab,
    // 'have-remote-offer' then 'stable' in the other.
    console.log(pc.signalingState);
  });

  pc.addEventListener('icegatheringstatechange', event => {
    console.log(pc.iceGatheringState); // This line is never reached.
  });

  // Listen for local ICE candidates on the local RTCPeerConnection
  pc.addEventListener('icecandidate', event => {
    if (event.candidate) {
      console.log('Sending ICE candidate'); // This line is never reached.
      socket.emit('icecandidate', event.candidate);
    }
  });

  // Listen for remote ICE candidates and add them to the local RTCPeerConnection
  socket.on('icecandidate', async candidate => {
    try {
      await pc.addIceCandidate(candidate);
    } catch (e) {
      console.error('Error adding received ice candidate', e);
    }
  });

  // Listen for connectionstatechange on the local RTCPeerConnection
  pc.addEventListener('connectionstatechange', event => {
    if (pc.connectionState === 'connected') {
      socket.close();
    }
  });

  // When both browsers send this signal they will both receive the 'matched' signal,
  // one with the payload true and the other with false.
  socket.emit('join');

  return new Promise((res, rej) => {
    socket.on('matched', async first => {
      if (first) {
        // caller side
        socket.on('answer', async answer => {
          await pc.setRemoteDescription(new RTCSessionDescription(answer))
            .catch(console.error);
        });
        const offer = await pc.createOffer();
        await pc.setLocalDescription(offer)
          .catch(console.error);
        socket.emit('offer', offer);
        // Listen for connectionstatechange on the local RTCPeerConnection
        pc.addEventListener('connectionstatechange', event => {
          if (pc.connectionState === 'connected') {
            res(pc.createDataChannel('data'));
          }
        });
      } else {
        // recipient side
        socket.on('offer', async offer => {
          pc.setRemoteDescription(new RTCSessionDescription(offer))
            .catch(console.error);
          const answer = await pc.createAnswer();
          await pc.setLocalDescription(answer)
            .catch(console.error);
          socket.emit('answer', answer);
        });
        pc.addEventListener('datachannel', event => {
          res(event.channel);
        });
      }
    });
  });
}

join().then(dc => {
  dc.addEventListener('open', event => {
    dc.send('Hello');
  });
  dc.addEventListener('message', event => {
    console.log(event.data);
  });
});
The behavior is the same in both Firefox and Chrome. That behavior is, again, that the offers and answers are signalled successfully, but no ICE candidates are ever created. Does anyone know what I'm missing?
Okay, I found the problem. I have to create the RTCDataChannel before creating the offer. Here's a before and after comparison of the SDP offers:
# offer created before data channel:
{
  type: 'offer',
  sdp: 'v=0\r\n' +
    'o=- 9150577729961293316 2 IN IP4 127.0.0.1\r\n' +
    's=-\r\n' +
    't=0 0\r\n' +
    'a=extmap-allow-mixed\r\n' +
    'a=msid-semantic: WMS\r\n'
}

# data channel created before offer:
{
  type: 'offer',
  sdp: 'v=0\r\n' +
    'o=- 1578211649345353372 2 IN IP4 127.0.0.1\r\n' +
    's=-\r\n' +
    't=0 0\r\n' +
    'a=group:BUNDLE 0\r\n' +
    'a=extmap-allow-mixed\r\n' +
    'a=msid-semantic: WMS\r\n' +
    'm=application 9 UDP/DTLS/SCTP webrtc-datachannel\r\n' +
    'c=IN IP4 0.0.0.0\r\n' +
    'a=ice-ufrag:MZWR\r\n' +
    'a=ice-pwd:LfptE6PDVughzmQBPoOtvaU8\r\n' +
    'a=ice-options:trickle\r\n' +
    'a=fingerprint:sha-256 1B:C4:38:9A:CD:7F:34:20:B8:8D:78:CA:4A:3F:81:AE:C5:55:B3:27:6A:BD:E5:49:5A:F9:07:AE:0C:F6:6F:C8\r\n' +
    'a=setup:actpass\r\n' +
    'a=mid:0\r\n' +
    'a=sctp-port:5000\r\n' +
    'a=max-message-size:262144\r\n'
}
In both cases the answer looked similar to the offer. You can see the offer is much longer and mentions webrtc-datachannel in the second case. And sure enough, once I made that change I started getting icecandidate events, and everything works now.
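For concreteness, a sketch of the caller side with the fix applied (the same APIs as the question's code, just reordered into a hypothetical startCall wrapper):

// Caller side: the data channel now exists before the offer is created.
async function startCall(pc, socket) {
  const dc = pc.createDataChannel('data'); // create BEFORE createOffer()

  const offer = await pc.createOffer();    // SDP now includes the webrtc-datachannel m-line
  await pc.setLocalDescription(offer);     // setLocalDescription kicks off ICE gathering
  socket.emit('offer', offer);

  return new Promise((resolve) => {
    pc.addEventListener('connectionstatechange', () => {
      if (pc.connectionState === 'connected') {
        resolve(dc); // hand back the channel created up front
      }
    });
  });
}

With no media section or data channel in the SDP, the connection has no transport to gather candidates for, which is why the icecandidate events never fired.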

Change RabbitMQ exchange with NestJS

I am using RabbitMQ with NestJS. I need to replicate a message from one queue to another, so I set up an exchange in RabbitMQ to make it work. But how can I change the exchange RabbitMQ uses from inside NestJS?
My API gateway, with my current RabbitMQ configuration inside NestJS:
constructor() {
  this.rabbitmq = ClientProxyFactory.create({
    transport: Transport.RMQ,
    options: {
      urls: [`amqp://${this.configService.get<string>('RABBITMQ_USER')}:${this.configService.get<string>('RABBITMQ_PASSWORD')}@${this.configService.get<string>('RABBITMQ_URL')}`],
      queue: 'students'
    }
  })
}
createStudent(@Body() body: CreateStudentDto): Observable<any> {
  return this.rabbitmq.send('createStudent', body)
}
My client:
@MessagePattern('createStudent')
async createStudent(@Payload() student: Student, @Ctx() context: RmqContext) {
  const channel = context.getChannelRef()
  const originalMsg = context.getMessage()
  try {
    let response = await this.studentService.createStudent(student)
    await channel.ack(originalMsg)
    return response;
  } catch (error) {
    this.logger.log(`error: ${JSON.stringify(error.message)}`)
    const filterAckError = ackErrors.filter(ackError => error.message.includes(ackError))
    if (filterAckError.length > 0) {
      await channel.ack(originalMsg)
    }
  }
}
I need the message to be sent to two queues.
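For what it's worth, the broker-side piece of this, replicating one message to two queues, is exactly what a fanout exchange does, independent of NestJS. A minimal amqplib sketch of that setup (the exchange and queue names are hypothetical, and wiring it into ClientProxyFactory, which as far as I know publishes to a queue directly, is a separate question):

// Hypothetical names: one fanout exchange copying every message to two queues.
const amqplib = require('amqplib');

async function setupFanout() {
  const connection = await amqplib.connect(process.env.RABBITMQ_URL);
  const channel = await connection.createChannel();

  await channel.assertExchange('students-exchange', 'fanout', { durable: true });
  await channel.assertQueue('students', { durable: true });
  await channel.assertQueue('students-replica', { durable: true });

  // A fanout exchange ignores routing keys and delivers each message
  // to every queue bound to it.
  await channel.bindQueue('students', 'students-exchange', '');
  await channel.bindQueue('students-replica', 'students-exchange', '');

  // Publishing once reaches both queues.
  channel.publish('students-exchange', '', Buffer.from(JSON.stringify({ name: 'Ada' })));
}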

How to remove subscription callbacks in Redis?

I have a subscriber Redis client instance that performs a callback when entries in the db expire. I tried adding an initial unsubscribe call to remove any previously existing listeners, but it does not seem to be working:
const setOnExpire = (onExpire) => {
  client.config('set', 'notify-keyspace-events', 'Ex', () => {
    subscriber.unsubscribe('__keyevent@0__:expired', 0); // <-- this does not seem to be doing what I was hoping it would...
    subscriber.subscribe('__keyevent@0__:expired', () => {
      subscriber.on('message', function (channel, key) {
        onExpire(key);
      });
    });
  });
};

setOnExpire(() => { console.log('foo'); });
setOnExpire(() => { console.log('bar'); }); // my intention is to replace the callback that logs "foo"

client.hmsetAsync(someKey, someAttrs).then(() => {
  client.expireAsync(someKey, 5);
});
I run this hoping to see only "bar" logged when the record expires after 5 seconds; instead, I see both "foo" and "bar".
How can I properly remove the pre-existing subscriber.on('message') listeners?
If I understand your question correctly, this is not a Redis-related problem; it's an application-level problem. You only need to call subscriber.subscribe once to set up a subscription. Since you want to support only one callback, store that callback internally, and every time setOnExpire gets called, just replace the stored callback with the new one. I'm not a JavaScript expert, but the code snippet below works fine on my computer:
var redis = require("redis");
var bluebird = require('bluebird');
bluebird.promisifyAll(redis);

var client = redis.createClient();
var subscriber = redis.createClient();

const setOnExpire = function () {
  var notify_on = false;
  var promise;
  var callback = function (key) { };
  return (onExpire) => {
    if (notify_on) {
      promise.then(() => {
        callback = onExpire;
      });
    } else {
      promise = new Promise((resolve, reject) => {
        notify_on = true;
        client.config('set', 'notify-keyspace-events', 'Ex', () => {
          resolve();
        });
      });
      promise.then(() => {
        callback = onExpire; // store the first callback too, otherwise a single call would never fire
        subscriber.subscribe('__keyevent@0__:expired', () => {
          subscriber.on('message', function (channel, key) {
            callback(key); // always invokes the most recently stored callback
          });
        });
      });
    }
  };
}();

setOnExpire(() => { console.log('foo'); });
setOnExpire(() => { console.log('bar'); }); // my intention is to replace the callback that logs "foo"

client.hmsetAsync('hello', 'yesl', 'thankyou').then(() => {
  client.expireAsync('hello', 5);
});

How to send data from a Redis subscriber to an Express route

I have a Redis pub/sub setup where the publisher is in one file and the subscriber is in another file, working perfectly.
I have two controllers: a home controller that handles the '/' route and a data controller that handles the '/data' route.
Inside my Redis subscriber I want to update the state of a variable that I continuously get from the publisher.
How do I send this state to both controllers when they receive a request?
I was doing:
app.get('/', (req, res) => {
  c = redis.createClient()
  c.on("message", (channel, message) => {
    // Send data here
  })
})
This does not look like a good idea, since it creates a new client for every request to the '/' endpoint.
I want to be able to do:
// home controller file
app.get('/', (req, res) => {
  res.json(state)
})

// data controller file
app.get('/data', (req, res) => {
  res.json(state)
})
How can I implement this state?
After doing some research, I decided to use Node's native events module to solve this. This example uses ioredis rather than node_redis, but the principle is the same.
First, I instantiate three Redis clients: one for regular db work, a publisher, and a subscriber.
/* redis.js */
const Redis = require('ioredis');

const redis = new Redis();
const publisher = new Redis();
const subscriber = new Redis();

// redis is the default export
// publisher and subscriber are "named" exports
const client = (module.exports = redis);
client.publisher = publisher;
client.subscriber = subscriber;
Next, we create an EventEmitter that emits an event every time the subscriber receives a message from a channel in Redis.
/* emitter.js */
const EventEmitter = require('events');
const { subscriber } = require('./redis');

const eventEmitter = new EventEmitter();

subscriber.subscribe('my-channel', err => {
  if (err) { return console.log('Unable to subscribe to my-event channel') };
  console.log('Subscription to my-event channel successful');
});

subscriber.on('message', (channel, message) => {
  eventEmitter.emit('my-event', message);
});

module.exports = eventEmitter;
Here we have two routes. The first handles a PUT request that sets a field in Redis and then publishes a message to a channel with the key of the hash that was updated. The second handles a GET request that stays open (as an EventSource for an SSE connection, for example); it listens for an event from the emitter and then sends the data of the updated key from Redis.
/* route.js */
const express = require('express');
const redis = require('./redis');
const { publisher } = require('./redis');
const eventEmitter = require('./emitter'); // default export, so no destructuring

const router = express.Router();

router.put('/content', async (req, res) => {
  const { key, field, content } = req.body;
  try {
    await redis.hset(key, field, content);
    res.sendStatus(200);
    return publisher.publish('my-channel', key);
  } catch (err) {
    res.status(500).send(err.message);
  }
});

router.get('/content-stream', (req, res) => {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive'
  });
  res.write('\n');

  const handleEvent = async key => {
    try {
      const query = await redis.hgetall(key);
      res.write(`data: ${JSON.stringify(query)}\n\n`);
    } catch (err) {
      console.log('Something went wrong');
    }
  };

  eventEmitter.addListener('my-event', handleEvent);
  // Wrap in a function: otherwise removeListener would run immediately.
  req.on('close', () => eventEmitter.removeListener('my-event', handleEvent));
});

module.exports = router;
This effectively allows you to avoid instantiating new Redis clients with every connection. There may be better ways of doing this, but this is what worked for me.
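For completeness, a minimal sketch of how these modules might be mounted in an app (the file name, port, and json middleware are my assumptions, not part of the answer above):

/* app.js — hypothetical wiring of the modules above */
const express = require('express');
const router = require('./route');

const app = express();
app.use(express.json()); // so req.body is populated for the PUT route
app.use(router);

app.listen(3000, () => console.log('Listening on port 3000'));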