Unable to emit after RabbitMQ channel.start_consuming() call in Flask-SocketIO handler

I'm trying to listen to a rabbitmq queue from within a flask-socketio event handler so I can send realtime notifications to a web app. My setup so far:
Server
import pika
import sys
from flask import Flask, request
from flask_socketio import SocketIO, emit, disconnect

app = Flask(__name__)
app.config['SECRET_KEY'] = 'not-so-secret'
socketio = SocketIO(app)

def is_authenticated():
    return True

def rabbit_callback(ch, method, properties, body):
    socketio.emit('connect', {'data': 'yes'})
    print "body: ", body

@socketio.on('connect')
def connected():
    emit('notification', {'data': 'Connected'})

creds = pika.PlainCredentials(
    username="username",
    password="password")
params = pika.ConnectionParameters(
    host="localhost",
    credentials=creds,
    virtual_host="/")

connection = pika.BlockingConnection(params)

# This is one channel inside the connection
channel = connection.channel()

# Declare the exchange we're going to use
exchange_name = 'user'
channel.exchange_declare(exchange=exchange_name,
                         type='topic')
channel.queue_declare(queue='notifications')
channel.queue_bind(exchange='user',
                   queue='notifications',
                   routing_key='#')
channel.basic_consume(rabbit_callback,
                      queue='notifications',
                      no_ack=True)
channel.start_consuming()

if __name__ == '__main__':
    socketio.run(app, port=8082)
Browser
<script type="text/javascript" charset="utf-8">
  var socket = io.connect('http://' + document.domain + ':8082');
  socket.on('connect', function(resp) {
    console.log(resp);
  });
  socket.on('disconnect', function(resp) {
    console.log(resp);
  });
  socket.on('error', function(resp) {
    console.log(resp);
  });
  socket.on('notification', function(resp) {
    console.log(resp);
  });
</script>
If I comment out the "channel.start_consuming()" line at the bottom of the server code and load the browser page, I connect successfully to flask-socketio and I see {data: "Connected"} in my console.
When I uncomment the line, I do not see {data: "Connected"} in my console. Nevertheless, when I send a message to the notifications queue, the rabbit_callback function fires. I see my message printed to the server console, but the emit call doesn't seem to work. There are no errors on the server or in the browser. Any advice is much appreciated.
Thanks!

I had the same problem using eventlet, and I solved it by adding:
import eventlet
eventlet.monkey_patch()
at the beginning of my source code.
My code is a bit different anyway and uses the start_background_task method:
import eventlet
eventlet.monkey_patch()  # patch blocking calls so pika cooperates with eventlet

import pika
from threading import Lock
from flask import Flask, render_template, session, request, copy_current_request_context
from flask_socketio import SocketIO, emit, join_room, leave_room, \
    close_room, rooms, disconnect

async_mode = 'eventlet'  # matching the eventlet setup described above

app = Flask(__name__, static_url_path='/static')
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, async_mode=async_mode)
thread = None
thread_lock = Lock()

@socketio.on('connect', namespace='/test')
def test_connect():
    global thread
    with thread_lock:
        if thread is None:
            thread = socketio.start_background_task(target=get_messages)
    emit('my_response', {'data': 'Connected', 'count': 0})
    print('connected')

def get_messages():
    channel = connect_rabbitmq()
    channel.start_consuming()

def callback(ch, method, properties, body):
    # message handler (definition assumed; forward each RabbitMQ message to the clients)
    socketio.emit('my_response', {'data': body}, namespace='/test')

def connect_rabbitmq():
    cred = pika.credentials.PlainCredentials('username', 'password')
    conn_param = pika.ConnectionParameters(host='yourhostname',
                                           credentials=cred)
    connection = pika.BlockingConnection(conn_param)
    channel = connection.channel()
    channel.exchange_declare(exchange='ncs', exchange_type='fanout')
    result = channel.queue_declare(exclusive=True)
    queue_name = result.method.queue
    channel.queue_bind(exchange='ncs', queue=queue_name)
    channel.basic_consume(callback, queue=queue_name, no_ack=True)
    return channel
Hope this helps...
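Applied to the original code in the question, the same idea looks roughly like this. It is only a sketch under this answer's assumptions (eventlet as the async mode, and the older pika basic_consume signature used in the question); the key change is that channel.start_consuming() runs in a Socket.IO background task instead of blocking at import time, and the callback emits the 'notification' event the browser already listens for instead of reusing 'connect':
import eventlet
eventlet.monkey_patch()  # patch blocking calls so they cooperate with eventlet

import pika
from flask import Flask
from flask_socketio import SocketIO, emit

app = Flask(__name__)
app.config['SECRET_KEY'] = 'not-so-secret'
socketio = SocketIO(app, async_mode='eventlet')

def rabbit_callback(ch, method, properties, body):
    # push the RabbitMQ message out to all connected Socket.IO clients
    socketio.emit('notification', {'data': body})

def consume():
    # blocking pika consumer, run inside a background task
    creds = pika.PlainCredentials(username='username', password='password')
    params = pika.ConnectionParameters(host='localhost', credentials=creds,
                                       virtual_host='/')
    connection = pika.BlockingConnection(params)
    channel = connection.channel()
    channel.queue_declare(queue='notifications')
    channel.basic_consume(rabbit_callback, queue='notifications', no_ack=True)
    channel.start_consuming()

@socketio.on('connect')
def connected():
    emit('notification', {'data': 'Connected'})

if __name__ == '__main__':
    socketio.start_background_task(consume)
    socketio.run(app, port=8082)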

Related

stomp.js cannot receive message sent from pika (RabbitMQ version: 3.11.7)

I have a web page that should receive messages from RabbitMQ using STOMP:
<body>
  <script src="stomp.js"></script>
  <script src="https://cdn.jsdelivr.net/npm/sockjs-client@1.1/dist/sockjs.min.js"></script>
  <script>
    var client = Stomp.client('ws://localhost:15674/ws');
    client.debug = null;
    var sub = function(d) {
      // print_first(d.body);
      console.log("got the message! ", d.body)
    }
    var on_connect = function(x) {
      id = client.subscribe("/topic/test", sub);
      console.log("connected")
    };
    var on_error = function() {
      console.log('error');
    };
    client.connect('guest', 'guest', on_connect, on_error, '/');
  </script>
</body>
When I run this code, it shows "connected" in the console (so far, so good).
I also have a python backend, which should send messages to the queue (send.py):
import pika

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.queue_declare("/topic/test")
properties = pika.BasicProperties(
    content_type='application/json',
    content_encoding='utf-8',
    delivery_mode=2,
)
channel.basic_publish(exchange='',
                      routing_key='/topic/test',
                      body='Hello World!',
                      properties=properties)
The messages are sent (I ran the script with py -m send, and the messages appear in the RabbitMQ Management UI).
However the console.log in sub isn't running. Any idea how I can fix this?
Thanks!
Ended up using stomp.py instead of pika:
import stomp
PORT = 61613
LOCALHOST = '0.0.0.0'
conn = stomp.Connection11([(LOCALHOST, PORT)])
conn.connect('guest','guest')
conn.send(body="start",destination='/queue/test')
conn.send(body="end",destination='/queue/test')
conn.disconnect()
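For reference, RabbitMQ's STOMP plugin maps /topic/&lt;name&gt; destinations onto the amq.topic exchange by default, so the original pika script should also be able to reach the STOMP subscriber by publishing to that exchange with routing key test rather than declaring a queue literally named /topic/test. A minimal sketch, assuming the Web-STOMP setup from the question is enabled on the broker:
# Sketch: publish with pika so a STOMP client subscribed to /topic/test
# receives the message. The STOMP plugin maps /topic/<name> to the
# amq.topic exchange with routing key <name>.
import pika

connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.basic_publish(exchange='amq.topic',
                      routing_key='test',  # corresponds to the /topic/test destination
                      body='Hello World!',
                      properties=pika.BasicProperties(content_type='text/plain'))
connection.close()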

asyncio stream vs synchronous stream in socket communication with a react native app

The objective: an ESP32 running MicroPython acts as the server, while an Android app acts as the client. With the synchronous version I was able to communicate successfully, but after switching to asyncio streams I fail: sending from the app to the ESP32 still works, but the app fails to retrieve the JSON output from the server (I even tried plain text strings). The app-side code is unchanged between the synchronous and asyncio versions.
Desired output:
response = {
    'error': 'invalid request',
    'status': 'retry'
}
synchronous side:
conn.send('HTTP/1.1 200 OK\n')
conn.send('Content-Type: application/json\n')
conn.send('Connection: close\n\n')
conn.sendall(ujson.dumps(response))
asyncio side:
swriter.write(ujson.dumps(response))
await swriter.drain()
react native side:
fetch('http://192.168.0.110')
  .then(response => response.json())
  .then((responseJson) => {
    const data1 = responseJson;
    console.log('getting data from fetch', data1)
    setData({ data1 });
    onConnectionMessage(data1);
  })
With the synchronous version I was able to retrieve the JSON output sent from the ESP32 to the Android app (React Native), but the same flow using asyncio failed. What am I doing wrong?
The sample asyncio server-side code is:
import usocket as socket
import uasyncio as asyncio
import uselect as select
import ujson
from heartbeat import heartbeat  # Optional LED flash

class Server:
    def __init__(self, host='0.0.0.0', port=80, backlog=5, timeout=10):
        self.host = host
        self.port = port
        self.backlog = backlog
        self.timeout = timeout

    async def run(self):
        print('Awaiting client connection.')
        self.cid = 0
        asyncio.create_task(heartbeat(100))
        self.server = await asyncio.start_server(self.run_client, self.host, self.port, self.backlog)
        while True:
            await asyncio.sleep(100)

    async def run_client(self, sreader, swriter):
        self.cid += 1
        print('Got connection from client', self.cid)
        try:
            while True:
                try:
                    res = await asyncio.wait_for(sreader.readline(), self.timeout)
                except asyncio.TimeoutError:
                    res = b''
                if res == b'':
                    raise OSError
                print('Received {} from client {}'.format(ujson.loads(res.rstrip()), self.cid))
                response = {
                    'error': 'invalid request',
                    'status': 'retry'
                }
                swriter.write(ujson.dumps(response))
                await swriter.drain()  # Echo back
        except OSError:
            pass
        print('Client {} disconnect.'.format(self.cid))
        await sreader.wait_closed()
        print('Client {} socket closed.'.format(self.cid))

    async def close(self):
        print('Closing server')
        self.server.close()
        await self.server.wait_closed()
        print('Server closed.')

server = Server()
try:
    asyncio.run(server.run())
except KeyboardInterrupt:
    print('Interrupted')  # This mechanism doesn't work on Unix build.
finally:
    asyncio.run(server.close())
    _ = asyncio.new_event_loop()
The problem was the line asyncio.wait_for(sreader.readline(), self.timeout). I changed it to asyncio.wait_for(sreader.read(2048), self.timeout), and now the client receives the JSON output immediately after the socket is closed.

Error while trying to run the Client Code

Please, I am following a guide to create a server that responds to the text a client sends to it, but in reversed order. The server code runs without any issue, but the client code is not working; it gives the error message: "OSError: [WinError 10022] An invalid argument was supplied".
I need help to resolve this.
Below is the code for both the Server and the Client:
# server.py
import asyncio
import socket

# Get the default "event loop" that we will run
loop = asyncio.get_event_loop()

# notice our new "async" before the function definition
async def server_method():
    server = socket.socket()
    server.bind(('localhost', 6789))
    server.listen(1)
    # await a new client
    # The event loop can run other code while we wait here!
    client, _ = await loop.sock_accept(server)
    # await some data
    data = await loop.sock_recv(client, 1024)
    data = data[::-1]
    # await sending the data
    await loop.sock_sendall(client, data)
    # Close both the server and client connections
    server.close()
    client.close()

if __name__ == '__main__':
    # run the loop until "server_method" is complete
    loop.run_until_complete(server_method())
# client.py
import asyncio
import socket

loop = asyncio.get_event_loop()
addr = ('localhost', 6789)

async def client_method():
    message = b'Hello Server!\n'
    client = socket.socket()
    client.setblocking(False)
    # await to establish a connection
    await loop.sock_connect(client, ('localhost', 6789))
    # await asyncio.get_event_loop().sock_connect(client, addr)
    # await to send the message
    print('Sending', message)
    await loop.sock_sendall(client, message)
    # await to receive a response
    response = loop.sock_recv(client, 1024)
    print('Server replied', response)
    client.close()

if __name__ == '__main__':
    loop.run_until_complete(client_method())

Client not receiving events from Flask-SocketIO server with Redis message queue

I want to add multiprocessing to my Flask-SocketIO server so I am trying to add a Redis message queue as per the Flask-SocketIO docs. Even without adding multiprocessing, the client is not receiving any events. Everything else is working fine (e.g. the web page is being served, HTTP requests are being made, database calls are being made). There are no error messages on the front or back end. Before I added the Redis queue it was working. I verified that the 'addname' SocketIO route is being hit and that the request.sid looks right. What am I doing wrong?
Very simplified server code:
external_sio = SocketIO(message_queue='redis://')

def requester(user, sid):
    global external_sio
    external_sio.emit('addname', {'data': 'hello'}, room=sid)
    # do some stuff with requests and databases
    external_sio.emit('addname', {'data': 'goodbye'}, room=sid)

def main():
    app = Flask(__name__,
                static_url_path='',
                static_folder='dist',
                template_folder='dist')
    socketio = SocketIO(app)

    @socketio.on('addname')
    def add_name(user):
        global external_sio
        external_sio.emit('addname', {'data': 'test'}, room=request.sid)
        requester(user.data, request.sid)

    socketio.run(app, host='0.0.0.0', port=8000)

if __name__ == '__main__':
    main()
Simplified client code (React Javascript):
const socket = SocketIOClient('ipaddress:8000')
socket.emit('addname', {data: 'somename'})
socket.on('addname', ({data}) => console.log(data))
The main server also needs to be connected to the message queue. In your main server do this:
socketio = SocketIO(app, message_queue='redis://')
In your external process do this:
external_sio = SocketIO(message_queue='redis://') # <--- no app on this one
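Applied to the simplified server above, the main process might look roughly like this (a sketch, assuming a local Redis instance reachable at redis://):
# Sketch: main Flask-SocketIO server attached to the Redis message queue,
# so emits made by external processes are relayed to connected clients.
from flask import Flask, request
from flask_socketio import SocketIO

app = Flask(__name__, static_url_path='', static_folder='dist', template_folder='dist')
socketio = SocketIO(app, message_queue='redis://')  # app AND message queue here

@socketio.on('addname')
def add_name(user):
    # emits from this process still work as before
    socketio.emit('addname', {'data': 'test'}, room=request.sid)

if __name__ == '__main__':
    socketio.run(app, host='0.0.0.0', port=8000)
The external process keeps only the SocketIO(message_queue='redis://') instance shown above and never touches the app.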

How do you reply to a RabbitMQ RPC client with multiple messages?

I'm trying to use RabbitMQ in an RPC environment where each remote call will take a significant amount of time, producing results continually. I want the results to be delivered to the client as they are generated.
I started with the standard tutorial RPC example, then modified it to use "Direct Reply-to". I publish all the intermediate results back to an anonymous exclusive callback queue without acknowledging the original request. When processing is complete, I send a final message back to the client and then acknowledge the original request. But the client only sees the first intermediate message. My client happens to be in PHP and my server is in Python, but I suspect that is not relevant. Does anyone have the magic to make this work? I can post code, but it is pretty basic cookbook stuff.
Answering my own question. The following worked:
php client:
#!/usr/bin/php
<?php
require_once __DIR__ . '/vendor/autoload.php';
use PhpAmqpLib\Connection\AMQPStreamConnection;
use PhpAmqpLib\Message\AMQPMessage;
class RpcClient {
private $connection;
private $channel;
private $callback_queue;
private $response;
private $corr_id;
public function __construct() {
$this->connection = new AMQPStreamConnection(
'localhost', 5672, 'guest', 'guest'
);
$this->channel = $this->connection->channel();
list($this->callback_queue, ,) = $this->channel->queue_declare(
"", false, false, true, false
);
# For direct reply-to, need to consume amq.rabbitmq.repy-to, a special queue name
# Unclear what happens to the declare above
$this->channel->basic_consume(
$this->callback_queue, '', false, true,
false, false, array($this, 'onResponse')
);
}
# This is going to be called once for each message coming back
public function onResponse($rep) {
if ($rep->get('correlation_id') == $this->corr_id) {
$response = json_decode($rep->body, true);
echo print_r($response['line'], true);
if ($response['type'] == 'final') {
$this->response = $rep->body;
}
}
}
public function call($message_array) {
$this->response = null;
$this->corr_id = uniqid();
$jsonm = json_encode($message_array);
$msg = new AMQPMessage(
$jsonm,
array(
'correlation_id' => $this->corr_id,
### Not sure which of the next two lines is the correct one... if either....
##'reply_to' => 'amq.rabbitmq.reply-to' # This is when using direct reply-to
'reply_to' => $this->callback_queue
)
);
$this->channel->basic_publish($msg, '', 'ansiblePB_rpc_queue');
while (!$this->response) {
$this->channel->wait();
}
return intval($this->response);
}
}
$ansiblepb_rpc = new RpcClient();
$response = $ansiblepb_rpc->call(array('userID' => 'jb1234',
'user_display_name' => 'Joe Bloe',
'limit' => '24000'));
echo ' [.] Got ', $response, "\n";
?>
Python server:
#!/usr/bin/env python
""" 1 """
import glob
import json
import platform
import os
import re
import shutil
import subprocess
import time
import yaml
import pika
class RMQmultireply(object):
""" Generic class to support ansible_playbook on a Rabbit MQ RPC queue"""
def __init__(self, channel, method, props):
#""" Constructor.... duh """
self.channel = channel
self.method = method
self.props = props
def run(self, userID, username, limit):
""" Run the main guts of the service """
cmd = ['/home/dhutchin/devel/rmq/multilineoutput']
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for line in proc.stdout.readlines():
intermediate_json_result = json.dumps({'type': 'intermediate', 'line': line})
self.channel.basic_publish(exchange='',
routing_key=self.props.reply_to,
properties=pika.BasicProperties(
correlation_id=self.props.correlation_id),
body=str(intermediate_json_result))
#self.channel.basic_ack(delivery_tag=self.method.delivery_tag)
proc.wait()
return proc.returncode
def on_request(channel, method, props, jsonstring):
""" Request has just come in to run ansible_playbook """
playbook = RMQmultireply(channel, method, props)
# fork and exec a playbook
# Recieve each line of output and send them as received back
# to the requestor.
# .run does not return until playbook exits.
# Use "Direct Reply-to" mechanism to return multiple messages to
# our client.
request = yaml.load(jsonstring) # Yes, yaml works better than JSON
returncode = playbook.run(request['userID'], request['user_display_name'], request['limit'])
final_json_result = json.dumps({'type': "final", 'line': '', 'rc': returncode})
channel.basic_publish(exchange='',
routing_key=props.reply_to,
properties=pika.BasicProperties(correlation_id=
props.correlation_id),
body=str(final_json_result))
# Acknowlege the original message so that RabbitMQ can remove it
# from the ansiblePB_rpc_queue queue
channel.basic_ack(delivery_tag=method.delivery_tag)
def main():
""" Its kinda obvious what this does """
try:
connection = pika.BlockingConnection(
pika.ConnectionParameters(host='localhost'))
except Exception:
print "pika.BlockingConnection.... failed... maybe RabbitMQ is not running"
quit()
channel = connection.channel()
channel.queue_declare(queue='ansiblePB_rpc_queue')
channel.basic_qos(prefetch_count=1)
# auto_ack is turned off by default, so we don't need to specify auto_ack=False
channel.basic_consume(queue='ansiblePB_rpc_queue', on_message_callback=on_request)
print " [x] Awaiting RPC requests"
channel.start_consuming()
if __name__ == '__main__':
main()
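The commented-out lines in the PHP client hint at the pure Direct Reply-to variant. A minimal pika sketch of such a client is below (an assumption, not code from the answer above): the client consumes the pseudo-queue amq.rabbitmq.reply-to with automatic acknowledgement on the same channel it publishes on, and sets reply_to to that same name; the server above then works unchanged, since it already publishes to props.reply_to on the default exchange.
# Sketch (pika 1.x API assumed): Direct Reply-to RPC client that prints the
# intermediate lines and stops consuming when the final message arrives.
import json
import uuid
import pika

connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
corr_id = str(uuid.uuid4())

def on_response(ch, method, props, body):
    if props.correlation_id != corr_id:
        return
    reply = json.loads(body)
    print(reply['line'])
    if reply['type'] == 'final':
        ch.stop_consuming()

# The reply consumer must be registered (with auto_ack) before publishing.
channel.basic_consume(queue='amq.rabbitmq.reply-to',
                      on_message_callback=on_response,
                      auto_ack=True)
channel.basic_publish(exchange='',
                      routing_key='ansiblePB_rpc_queue',
                      properties=pika.BasicProperties(
                          reply_to='amq.rabbitmq.reply-to',
                          correlation_id=corr_id),
                      body=json.dumps({'userID': 'jb1234',
                                       'user_display_name': 'Joe Bloe',
                                       'limit': '24000'}))
channel.start_consuming()
connection.close()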