How to send data from redis subscriber to express route - express

I have a redis pub/sub setup where the publisher is in one file and the subscriber is in another, and it works perfectly.
I have two controllers: a home controller that handles the '/' route and a data controller that handles the '/data' route.
Inside my redis subscriber I want to update the state of a variable that I continuously receive from the publisher.
How do I send this state to both controllers when they handle a request?
I was doing:
app.get('/', (req, res) => {
  const c = redis.createClient();
  c.on("message", (channel, message) => {
    // Send data here
  });
});
This does not look like a good idea: it creates a new client for every request to the '/' endpoint.
I want to be able to do:
// home controller file
app.get('/', (req, res) => {
  res.json(state);
});
// data controller file
app.get('/data', (req, res) => {
  res.json(state);
});
How do I implement this shared state?
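One way to get that shared state (just a sketch, not the approach taken in the answer below) is to keep a single subscriber client in its own module, update a module-level variable there, and expose it through a getter that both controllers can require. The file name state.js, the channel name, and the payload handling are illustrative; the callback-style API matches the node_redis usage above.
/* state.js (hypothetical) -- one subscriber for the whole process */
const redis = require('redis');
const subscriber = redis.createClient();
// latest value received from the publisher
let state = null;
subscriber.subscribe('my-channel');
subscriber.on('message', (channel, message) => {
  state = message; // or JSON.parse(message) if the publisher sends JSON
});
// controllers call getState() on each request to read the current value
module.exports = { getState: () => state };
Both controller files could then require('./state') and respond with res.json(getState()).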

After doing some research, I decided to use Node's native events module to solve this. This example uses ioredis rather than node_redis, but the principle is the same.
First, I instantiate three redis clients: one for regular db work, one publisher, and one subscriber.
/* redis.js */
const Redis = require('ioredis');
const redis = new Redis();
const publisher = new Redis();
const subscriber = new Redis();
// redis is the default export
// publisher and subscriber are "named" exports
const client = (module.exports = redis);
client.publisher = publisher;
client.subscriber = subscriber;
Next we create an EventEmitter in node that will emit an event every time the subscriber receives a message from a channel in redis.
/* emitter.js */
const EventEmitter = require('events');
const { subscriber } = require('./redis');
const eventEmitter = new EventEmitter();
subscriber.subscribe('my-channel', err => {
  if (err) { return console.log('Unable to subscribe to my-channel'); }
  console.log('Subscription to my-channel successful');
});
subscriber.on('message', (channel, message) => {
  eventEmitter.emit('my-event', message);
});
module.exports = eventEmitter;
Here we have two routes. The first handles a PUT request that sets a field in redis and then publishes a message to a channel with the key of the hash that was updated. The second handles a GET request that stays open (as an EventSource for an SSE connection, for example); it listens for an event from the emitter and then sends the data of the updated key from redis.
/* route.js*/
const express = require('express');
const redis = require('./redis');
const { publisher } = require('./redis');
const eventEmitter = require('./emitter');
const router = express.Router();
router.put('/content', async (req, res) => {
  const { key, field, content } = req.body;
  try {
    await redis.hset(key, field, content);
    res.sendStatus(200);
    return publisher.publish('my-channel', key);
  } catch (err) {
    res.status(500).send(err.message);
  }
});
router.get('/content-stream', (req, res) => {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive'
  });
  res.write('\n');
  const handleEvent = async key => {
    try {
      const query = await redis.hgetall(key);
      res.write(`data: ${JSON.stringify(query)}\n\n`);
    } catch (err) {
      console.log('Something went wrong');
    }
  };
  eventEmitter.addListener('my-event', handleEvent);
  // remove the listener when the client disconnects
  req.on('close', () => eventEmitter.removeListener('my-event', handleEvent));
});
module.exports = router;
This will effectively allow you to avoid instantiating new redis clients with every connection. There may be better ways of doing this, but this is what worked for me.
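On the browser side, the /content-stream route can be consumed with a plain EventSource; a minimal sketch (the parsing assumes the JSON written by handleEvent above):
// browser-side consumer for the SSE route above
const source = new EventSource('/content-stream');
source.onmessage = event => {
  // each message is the stringified hash written by handleEvent
  const data = JSON.parse(event.data);
  console.log('hash updated:', data);
};
source.onerror = () => {
  console.log('SSE connection dropped; the browser will retry automatically');
};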

Related

How to emit an event in express js and handle it in discord.js

I have a simple express.js backend that handles a GET request from Discord, which runs a heavy process in the background using bull and throng.
I got it all working: Discord sends the GET request, the backend receives the request and adds it to the job queue, and once it's done I receive the returned value in the Queue.on() listener.
My problem is how to send the result I get in the queue.on() listener back to Discord as a message. How can I make Discord listen for this event and post the result?
Note: discord and express are 2 separate apps; I can't integrate them into one project.
Index.js file:
const express = require('express')
const scrapTikTok = require('./tiktokScraping')
const Queue = require('bull')
const app = express()
// Initialize BULL
const REDIS_URL = process.env.REDIS_URL || 'redis://127.0.0.1:6379'
const workQueue = new Queue('work', REDIS_URL, {
defaultJobOptions: {
removeOnComplete: true,
removeOnFail: true
}
})
app.get('/scrap', async (req, res) => {
res.send('Working on it...')
await scrapTikTok()
})
app.get('/url', async (req, res) => {
const url = 'https://www.tiktok.com/@tristanvincentt'
let job = await workQueue.add({url: url})
res.json({id: job.id})
})
workQueue.on('global:completed', (jobId, result) => {
console.log(`Job completed with result ${result}`)
// SEND RESULT BACK TO DISCORD
})
app.listen(process.env.PORT || 3000, () => {
console.log('App is running...')
})
Worker file:
const throng = require('throng')
const Queue = require("bull")
const REDIS_URL = process.env.REDIS_URL || "redis://127.0.0.1:6379"
const workers = process.env.WEB_CONCURRENCY || 1
const maxJobsPerWorker = 50
const sleep = ms => new Promise(r => setTimeout(r, ms));
function start() {
const workQueue = new Queue('work', REDIS_URL)
workQueue.process(maxJobsPerWorker, async job => {
console.log(job.data.url)
await sleep(10000)
return {value: job.id}
})
}
throng({workers, start})
You can create another express server in your discord app and add a POST endpoint:
const express = require('express')
const app = express()
// parse JSON bodies so req.body is populated
app.use(express.json())
app.post('/secreturl', async (req, res) => {
  let jobId = req.body.jobId
  let result = req.body.result
  // now you have the results here!
  res.send('Discordjs received the data!')
})
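To actually post the result as a Discord message from inside that handler, the discord.js client the bot is already logged in with can look up a channel and send to it; a sketch, assuming a logged-in discord.js Client named client and an illustrative channel ID:
// inside the /secreturl handler, assuming `client` is the bot's logged-in discord.js Client
const channel = client.channels.cache.get('YOUR_CHANNEL_ID'); // channel ID is illustrative
if (channel) {
  await channel.send(`Job ${jobId} finished with result: ${JSON.stringify(result)}`);
}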
then when the queue is done (based on the code you provided):
workQueue.on('global:completed', (jobId, result) => {
console.log(`Job completed with result ${result}`)
// SEND RESULT BACK TO DISCORD:
axios.post('(your-discordjs-server-ip)/secreturl', {
jobId: jobId,
result: result
})
})
just make sure you add the axios package to the queue app
npm install axios
and require it in the index.js file
const axios = require("axios")
tick this as the answer if it solved your problem!

Market data routing to frontend: Alpaca WebSocket -> Node.js WebSocket -> React Frontend

I'm trying to use the websocket example from:
https://github.com/alpacahq/alpaca-trade-api-js/blob/master/examples/websocket_example_datav2.js
In order to connect to the Alpaca V2 data stream. Currently my stream is working, but I'm trying to route the data to the client side using Server-Sent Events. My data flow should be:
Alpaca Data Stream API -> My Node.js server -> React Frontend.
The issue I have is using the DataStream object from the example to route the data to the frontend. Since, with the object alone, I don't have any route to subscribe to via Server-Sent Events, does this mean that I should also be using express, socket.io, or ws? Since all of the ".on_xyz" methods are defined within the DataStream object, I'm not sure how to set up the endpoint properly to allow my frontend to subscribe to it. If anyone knows how to route this data stream forward it would be greatly appreciated; I'm particularly trying to work with the .onStockQuote method, but any of them is fine! I'm simply trying to use Node as an in-between router so that I don't have to subscribe directly from the frontend (and not use the SDK), because that limits the scalability of the API's use.
"use strict";
/**
 * This example shows how to use the alpaca data v2 websocket to subscribe to events.
 * You should use the alpaca api's data_stream_v2, and also pass feed along with the other parameters.
 * For subscribing (and unsubscribing) to trades, quotes and bars you should call
 * a function for each, like below.
 */
const express = require("express");
const app = express();
const Alpaca = require("@alpacahq/alpaca-trade-api");
const API_KEY = "XYZ_Key";
const API_SECRET = "XYZ_Secret";
const PORT = 3000;
// Add a new message and send it to all subscribed clients
const addMessage = (req, res) => {
const message = req.body;
// Return the message as a response for the "/message" call
res.json(message);
return ;
};
class DataStream {
constructor({ apiKey, secretKey, feed }) {
this.alpaca = new Alpaca({
keyId: apiKey,
secretKey,
feed,
});
const socket = this.alpaca.data_stream_v2;
socket.onConnect(function () {
console.log("Connected");
socket.subscribeForQuotes(["AAPL"]);
// socket.subscribeForTrades(["FB"]);
// socket.subscribeForBars(["SPY"]);
// socket.subscribeForStatuses(["*"]);
});
socket.onError((err) => {
console.log(err);
});
socket.onStockTrade((trade) => {
console.log(trade);
});
socket.onStockQuote((quote) => {
console.log(quote);
});
socket.onStockBar((bar) => {
console.log(bar);
});
socket.onStatuses((s) => {
console.log(s);
});
socket.onStateChange((state) => {
console.log(state);
});
socket.onDisconnect(() => {
console.log("Disconnected");
});
socket.connect();
// unsubscribe from FB after a second
// setTimeout(() => {
// socket.unsubscribeFromTrades(["FB"]);
// }, 1000);
}
}
app.post("/message", addMessage);
let stream = new DataStream({
apiKey: API_KEY,
secretKey: API_SECRET,
feed: "sip",
paper: false,
});
app.listen(PORT, () => {
console.log(`App listening on port ${PORT}`);
});
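One way to expose this stream to the React frontend (a sketch only, not part of the original example) is to keep the DataStream class as-is, forward each quote to a Node EventEmitter, and fan it out to SSE clients through an express route; the emitter name and the /quotes path are illustrative:
// sketch: fan quotes out to SSE clients via a Node EventEmitter (names are illustrative)
const EventEmitter = require("events");
const quoteEmitter = new EventEmitter();
// inside the DataStream constructor (where `socket` is in scope), forward each quote
// instead of only logging it:
//   socket.onStockQuote((quote) => quoteEmitter.emit("quote", quote));
// SSE endpoint the React app can subscribe to with new EventSource("/quotes")
app.get("/quotes", (req, res) => {
  res.writeHead(200, {
    "Content-Type": "text/event-stream",
    "Cache-Control": "no-cache",
    Connection: "keep-alive",
  });
  const onQuote = (quote) => res.write(`data: ${JSON.stringify(quote)}\n\n`);
  quoteEmitter.on("quote", onQuote);
  req.on("close", () => quoteEmitter.removeListener("quote", onQuote));
});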

Slackbot not able to pick up messages

I'm trying to create a simple slackbot, and I can get the thing to run on start and do almost anything else, but it will not pick up incoming messages from any channel. It's not generating any errors whatsoever, which makes it very frustrating.
things I've tried so far:
adding the bot to the channel
console logging inside the message block
adding a catch
No matter what I do the bot seems to just outright ignore the "message" event handler. I don't get it. My code is below.
require("dotenv").config();
const { WebClient } = require("@slack/web-api");
const { createEventAdapter } = require("@slack/events-api");
const slackEvents = createEventAdapter(process.env.SLACK_SIGNING_SECRET);
const { RTMClient } = require("@slack/rtm-api");
const axios = require('axios')
const sgMail = require("@sendgrid/mail");
sgMail.setApiKey(process.env.SENDGRID_API_KEY);
const express = require("express");
const bodyParser = require("body-parser");
const pool = require("./pool");
const app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.use(express.static("build"));
let config = {
headers: {
"X-Auth-Client": process.env.BG_AUTH_CLIENT,
"X-Auth-Token": process.env.BG_AUTH_TOKEN,
},
};
const PORT = process.env.PORT || 5000;
app.listen(PORT, () => {
console.log("server running on: ", PORT);
});
const token = process.env.SLACK_TOKEN;
const web = new WebClient(token);
const conversationId = "C0139RJPUEM";
// The client is initialized and then started to get an active connection to the platform
const rtm = new RTMClient(token);
slackEvents.on('error', console.error);
slackEvents.start().then(() => {
(async () => {
const res = await web.chat.postMessage({
icon_emoji: ":email:",
channel: conversationId,
text: "Testing connection",
});
console.log("Message sent: ", res.ts);
})();
console.log("bot listening on port", PORT);
});
slackEvents.on("message", (event) => {
console.log(
`Received a message event: user ${event.user} in channel ${event.channel} says ${event.text}`
);
(async () => {
// See: https://api.slack.com/methods/chat.postMessage
const res = await web.chat.postMessage({
icon_emoji: ":email:",
channel: conversationId,
text: "Testing message",
});
// `res` contains information about the posted message
console.log("Message sent: ", res.ts);
})();
// }
});
I think I got it: I was using a library that was not compatible, plus I hadn't set up API event subscriptions, and a few other dumb things. Oh well. Hopefully this will help someone who casually comes across this.
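For anyone hitting the same wall, a minimal sketch of a message listener using Slack's Bolt framework, which bundles the Events API handling; the env variable names and reply text are illustrative, and the Slack app still needs the relevant event subscriptions (e.g. message.channels) enabled:
// minimal Bolt listener, assuming SLACK_BOT_TOKEN and SLACK_SIGNING_SECRET are set
const { App } = require("@slack/bolt");
const app = new App({
  token: process.env.SLACK_BOT_TOKEN,
  signingSecret: process.env.SLACK_SIGNING_SECRET,
});
// fires for every message event the app is subscribed to
app.message(async ({ message, say }) => {
  console.log(`user ${message.user} said ${message.text} in ${message.channel}`);
  await say("Testing message");
});
(async () => {
  await app.start(process.env.PORT || 5000);
  console.log("bolt app listening");
})();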

Trying to use express middleware to serve third party API requests

Sorry if I posted this incorrectly, it is my first question on Stack Overflow. I am currently trying to use express to serve third-party API requests to my React front-end, because the steam api I use throws a CORS error when I request it from the client side. I tried to route the requests to my controller, which makes the axios call, but I have had no luck. I'm not sure if I am doing something completely incorrect.
here is my server.js
const app = require('./app.js');
const PORT = process.env.PORT || 3005;
app.listen(PORT, () => console.log(`Listening on port: ${PORT}`))
and here is my app.js
const express = require('express');
const cors = require('cors')
const app = express();
app.use(cors())
module.exports = app
my routes:
const { Router } = require('express')
const controllers = require('../controllers')
const router = Router()
router.get('/', (req, res) => res.send('This is root!'))
router.get('/applist', controllers.getAllSteamGames)
router.get('/game/:id', controllers.getSingleGameSteam )
router.get('/gameSpy/:id', controllers.getSingleGameSpy)
module.exports = router
and lastly my controller:
const axios = require('axios');
const getAllSteamGames = async () => {
try {
const resp = await axios.get('https://api.steampowered.com/ISteamApps/GetAppList/v2?applist')
return resp.data
} catch (error) {
console.log(error)
throw error
}
}
Thank you for your help and time.
You aren't doing anything to send a response back to the client. If we look at the /applist route:
router.get('/applist', controllers.getAllSteamGames)
const axios = require('axios');
const getAllSteamGames = async () => {
try {
const resp = await axios.get('https://api.steampowered.com/ISteamApps/GetAppList/v2?applist');
return resp.data
} catch (error) {
console.log(error)
throw error
}
}
All your getAllSteamGames() function does is return a promise that resolves to a value (remember, all async functions return a promise). It doesn't send a response back to the client.
Then, if you look at the actual route handler, it doesn't send a response back to the client either. So the request comes in, but no response ever goes out, and the client never gets one.
What I would suggest is that you just send the response in your controller. It's already passed (req, res) as arguments to the function, so you can use them.
router.get('/applist', controllers.getAllSteamGames)
const axios = require('axios');
const getAllSteamGames = async (req, res) => {
try {
const resp = await axios.get('https://api.steampowered.com/ISteamApps/GetAppList/v2?applist');
res.json(resp.data);
} catch (error) {
// send error status upon error
console.log(error);
res.sendStatus(500);
}
}
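One more thing worth checking, since the app.js shown above never mounts the router: the routes file also has to be wired into the app, or the /applist handler will never run. A sketch, assuming the router module lives at ./routes:
// app.js -- mount the router so /applist is actually reachable (the ./routes path is an assumption)
const express = require('express');
const cors = require('cors');
const routes = require('./routes');
const app = express();
app.use(cors());
app.use('/', routes);
module.exports = app;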

socket emit an event on http PUT

I am using expressjs, nedb, and socket.io. Various (non-browser) clients are able to PUT new values into the db successfully. When that happens, I want a message emitted to all browsers connected to the server. I have the following code which is currently not sending a message back to the browser.
// on the server
//***************************************************************
// reachable to the world at http://server/foo
// clients can PUT data into the db
app.put('/foo', jsonParser, function(req, res, next) {
if (!req.body) return res.sendStatus(400);
db.insert(req.body, function (err, newDoc) {
io.sockets.emit('PUT a new value', { added: newDoc._id });
res.send('Success! Find it again with id: ' + newDoc._id);
});
});
// reachable to the world at http://server/
// browser shows a dashboard of events
app.get('/', function(req, res, next) {
// code to serve the dashboard here
});
io.sockets.on('connection', function (socket) {
socket.on('foo', function (data) {
io.sockets.emit('PUT a new value', data);
})
});
// in the browser
//***************************************************************
var socket = io.connect('/');
socket.on('PUT a new value', function (data) {
console.log(data);
});
Data gets inserted into the db successfully from different non-browser clients, but the connected browser doesn't receive an update.
What am I doing wrong?
I found a solution which I don't like at all, but it works. We can add the io object to req or to res in a middleware, like this:
app.use(function (req, res, next) {
req.io = io;
next();
});
before app.use('/', routes) and then in our router module we "import" the io object:
app.put('/foo', jsonParser, function(req, res, next) {
if (!req.body) return res.sendStatus(400);
db.insert(req.body, function (err, newDoc) {
var io = req.io; // HERE !!!
io.sockets.emit('PUT a new value', { added: newDoc._id });
res.send('Success! Find it again with id: ' + newDoc._id);
});
});
I know, I know... let's find something else :-)
I have the following app structure generated by express generator. I start the app with $ DEBUG=foo:* npm start
.
|____app.js
|____bin
| |____www
|____data
|____LICENSE
|____node_modules
|____package.json
|____public
| |____stylesheets
| |____javascripts
| |____images
|____README.md
|____routes
| |____index.js
| |____readings.js
| |____sensors.js
| |____users.js
|____views
| |____error.hjs
| |____index.hjs
In app.js
var express = require('express');
var app = express();
var io = require('socket.io')();
app.io = io;
// notice the `(io)` for the routes that need to be socket-aware
var routes = require('./routes/index');
var users = require('./routes/users');
var sensors = require('./routes/sensors');
var readings = require('./routes/readings')(io);
…
// start listening with socket.io
app.io.on('connection', function(socket){
console.log('a user connected');
});
module.exports = app;
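For the connection handler above to fire, socket.io also has to be attached to the HTTP server that express-generator creates in bin/www; a minimal sketch of the relevant part (the generated file contains more than this):
// bin/www (excerpt) -- attach the socket.io instance exported on app
var app = require('../app');
var http = require('http');
var server = http.createServer(app);
app.io.attach(server); // socket.io now shares the same HTTP server
server.listen(process.env.PORT || 3000);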
Then in ./routes/readings.js
var express = require('express');
var router = express.Router();
module.exports = function(io) {
router.put('/', jsonParser, function(req, res, next) {
if (!req.body) return res.sendStatus(400);
db.insert(req.body, function (err, newDoc) {
io.emit("reading", {id: newDoc._id});
res.send('Success PUTting data! id: ' + newDoc._id);
});
});
return router;
}
Finally, in the index.hjs template for the client-side
<script src="/socket.io/socket.io.js"></script>
<script>
var socket = io();
socket.on('reading', function (data) {
console.log(data);
});
</script>
The above works. When data are inserted into the db via an http PUT (see readings.js), an event is emitted by io.emit('reading', data) and the browser receives that event and shows it in the console with socket.on('reading', function (data) { … });