Can Kombu do publish/subscribe to multiple consumers? (RabbitMQ)

I'm using Kombu with RabbitMQ to implement a classic publish/subscribe design pattern. I have created a producer that declares a topic exchange:
from kombu import Connection, Exchange, Queue

media_exchange = Exchange('media', 'topic', durable=False)
video_queue = Queue('video', exchange=media_exchange, routing_key='video')

with Connection('amqp://guest:guest@localhost//') as conn:
    producer = conn.Producer(serializer='json')
    producer.publish('Hello World!',
                     exchange=media_exchange, routing_key='video',
                     declare=[video_queue])
I then created a consumer to consume messages from the producer:
from kombu import Connection, Exchange, Queue

media_exchange = Exchange('media', type='topic', durable=False)
video_queue = Queue('video', exchange=media_exchange, routing_key='video')

def process_media(body, message):
    print(body)
    # message.ack()

with Connection('amqp://guest:guest@localhost//') as conn:
    with conn.Consumer(video_queue, callbacks=[process_media]) as consumer:
        # Process messages and handle events on all channels
        while True:
            conn.drain_events()
I then launch two consumers, each in a separate terminal; both wait for a message:
terminal 1: python consumer.py
terminal 2: python consumer.py
When I run the producer, only one consumer receives the message.

The producer publishes to an exchange, not to a queue. Queues are defined by the consumers. When each consumer uses a different queue name, all of them get the message.
When many consumers share the same queue, the broker load-balances between them; that is why only one of your consumers gets the message.

To clarify: messages in a queue are 'consumed', i.e. the first consumer consumes a message and it is no longer in the queue, which is why the second consumer gets nothing. To have two separate consumers receive the same message, use two separate queues, i.e. video_queue1 and video_queue2, each declared and bound to the exchange media_exchange using the same routing key video.
producer.py
from kombu import Connection, Exchange, Queue

media_exchange = Exchange('media', 'topic', durable=False)
video_queue1 = Queue('video1', exchange=media_exchange, routing_key='video')
video_queue2 = Queue('video2', exchange=media_exchange, routing_key='video')

with Connection('amqp://guest:guest@localhost//') as conn:
    producer = conn.Producer(serializer='json')
    producer.publish('Hello World!',
                     exchange=media_exchange, routing_key='video',
                     declare=[video_queue1, video_queue2])
consumer1.py
from kombu import Connection, Exchange, Queue

media_exchange = Exchange('media', type='topic', durable=False)
video_queue = Queue('video1', exchange=media_exchange, routing_key='video')

def process_media(body, message):
    print(body)
    # message.ack()

with Connection('amqp://guest:guest@localhost//') as conn:
    with conn.Consumer(video_queue, callbacks=[process_media]) as consumer:
        # Process messages and handle events on all channels
        while True:
            conn.drain_events()
consumer2.py
from kombu import Connection, Exchange, Queue

media_exchange = Exchange('media', type='topic', durable=False)
video_queue = Queue('video2', exchange=media_exchange, routing_key='video')

def process_media(body, message):
    print(body)
    # message.ack()

with Connection('amqp://guest:guest@localhost//') as conn:
    with conn.Consumer(video_queue, callbacks=[process_media]) as consumer:
        # Process messages and handle events on all channels
        while True:
            conn.drain_events()
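If you'd rather not hard-code a queue name for every consumer, each consumer can declare its own broker-named queue bound to the same exchange. This variant is not from the answer above; it is a minimal sketch assuming kombu's anonymous-queue support, where an empty queue name lets RabbitMQ generate one (amq.gen-...):

from kombu import Connection, Exchange, Queue

media_exchange = Exchange('media', type='topic', durable=False)

# Empty name: the broker generates one; exclusive queues are removed
# when their consumer disconnects, so every running consumer gets its
# own private copy of each published message.
private_queue = Queue('', exchange=media_exchange, routing_key='video',
                      exclusive=True, auto_delete=True)

def process_media(body, message):
    print(body)
    message.ack()

with Connection('amqp://guest:guest@localhost//') as conn:
    with conn.Consumer(private_queue, callbacks=[process_media]):
        while True:
            conn.drain_events()

With this layout the producer only needs to declare the exchange; binding happens on the consumer side, which is the usual AMQP publish/subscribe arrangement.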

Related

How to get and send all messages from one channel to another?

I have code that copies a message from one channel to another channel, but it only works for new messages (events.NewMessage). How can I make this code send all existing messages from one channel to another?
from telethon import TelegramClient, events
import asyncio

api_id =
api_hash = ''
my_channel_id = -1001247324182
channels = [-100129537617]

client = TelegramClient('myGrab', api_id, api_hash)
print("GRAB - Started")

@client.on(events.NewMessage(chats=channels))
async def my_event_handler(event):
    if event.message:
        await client.send_message(my_channel_id, event.message)

client.start()
client.run_until_disconnected()
So, is it possible to make this code send all existing messages from the channel? Maybe with another library?
You can use iter_messages and forward each message to the new channel.
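A minimal sketch of that suggestion, reusing the client, channels and my_channel_id variables from the question; this is an illustration of the idea, not code from the answer:

async def copy_history():
    # Walk the source channel's history; reverse=True yields oldest first,
    # so messages are re-sent in their original order.
    async for message in client.iter_messages(channels[0], reverse=True):
        await client.forward_messages(my_channel_id, message)

client.start()
client.loop.run_until_complete(copy_history())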

Disable global QoS in RabbitMQ from Celery

I have created a RabbitMQ 3-node cluster using three AWS EC2 servers. I'm trying to access the quorum queue I created, using Celery. When I connect, it gives the error:
raise error_for_code(reply_code, reply_text,
amqp.exceptions.AMQPNotImplementedError: Basic.consume: (540) NOT_IMPLEMENTED - queue 'Replica_que' in vhost '/' does not support global qos
I suppose it would work if I disabled global QoS, but I couldn't find where to do that. How do I disable global QoS in Celery?
My Celery code:
from celery import Celery
from time import sleep
import kombu

broker_uri = ['amqp://xxxx:5672/', 'amqp://xxxx:5672/', 'amqp://xxx:5672/']
backend_uri = "mongodb+srv://xxxxx"

app = Celery('TestApp', broker=broker_uri, backend=backend_uri)
app.config_from_object('celeryconfig')
app.conf.task_default_exchange = 'Replica_que'
app.conf.task_default_routing_key = 'Replica'

@app.task
def reverse(text):
    sleep(10)
    return text[:-1]
and the config code:
from kombu import Queue

task_queues = [Queue(name="Replica_que", queue_arguments={"x-queue-type": "quorum"})]
task_routes = {
    'tasks.add': 'Replica_que',
}
This was possible by adding a celeryconfig.py file,
from kombu import Queue

task_queues = [Queue(name="Replica_que", queue_arguments={"x-queue-type": "quorum"})]
task_routes = {
    'tasks.add': 'Replica_que',
}
and creating a custom QoS class, as described in https://github.com/celery/celery/issues/6067. So I added the QoS bootstep class:
from celery import bootsteps
from kombu.common import QoS

class NoChannelGlobalQoS(bootsteps.StartStopStep):
    requires = {'celery.worker.consumer.tasks:Tasks'}

    def start(self, c):
        qos_global = False

        c.connection.default_channel.basic_qos(
            0, c.initial_prefetch_count, qos_global,
        )

        def set_prefetch_count(prefetch_count):
            return c.task_consumer.qos(
                prefetch_count=prefetch_count,
                apply_global=qos_global,
            )

        c.qos = QoS(set_prefetch_count, c.initial_prefetch_count)

app.steps['consumer'].add(NoChannelGlobalQoS)
Currently this is an open issue in Celery related to quorum queues, but this workaround works.
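As a sanity check (not part of the original answer): start a worker against the quorum queue, e.g. celery -A tasks worker -Q Replica_que --loglevel=info (assuming the tasks live in a module named tasks.py), and Basic.consume should no longer fail with the NOT_IMPLEMENTED error.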

Celery consumer (only) with an external producer

I'm using Celery 4.4.7 with Redis as my message broker.
I want to use Celery as a consumer only, as the external producer is a Java application.
The Java application pushes messages to a channel on Redis, but my Celery application is not picking up the messages.
I have simulated the Java producer in Python using redis-py (redis_producer.py) to publish to a channel. redis_consumer.py is able to pick up the messages from this producer, but my celery_consumer.py seems to be blind to them.
Messages from redis_producer.py are picked up by redis_consumer.py, but not by Celery.
Messages from kombu_producer.py are picked up by the Celery worker, but not by my redis_consumer.py.
redis_producer.py
import json

import redis

r = redis.Redis(host='localhost', port=6379)

for i in range(10):
    body = {
        'id': i,
        'message': f'Hello {i}',
    }
    r.publish(channel='redis.test.topic', message=json.dumps(body))
redis_consumer.py
import json
import multiprocessing
import os
import signal

import redis

redis_conn = redis.Redis(charset='utf-8', decode_responses=True)

def sub(name: str):
    pubsub = redis_conn.pubsub()
    pubsub.subscribe('redis.test.topic')
    for message in pubsub.listen():
        print(message)
        if message.get('type') == 'message':
            data = message.get('data')
            print('%s: %s' % (name, data))

def on_terminate(signum, stack):
    # helper of the asker's, not shown in the question
    wait_for_current_scp_operation()

if __name__ == '__main__':
    multiprocessing.Process(target=sub, args=('consumer',)).start()
    signal.signal(signal.SIGTERM, on_terminate)
celery_consumer.py
import os
import ssl

from celery import Celery
from celery import shared_task
from celery.utils.log import get_task_logger
from kombu import Exchange
from kombu import Queue

logger = get_task_logger(__name__)

# Celery broker URL
broker_url = os.environ.get('CELERY_BROKER_URL', None)
if broker_url is None:
    broker_url = 'redis://localhost:6379/0'

# store to use for task results, default=None
result_backend = os.environ.get('CELERY_RESULT_BACKEND', None)
if result_backend is None:
    result_backend = 'redis://localhost:6379/0'

config = dict(
    broker_url=broker_url,
    result_backend=result_backend,
    # maximum number of connections that can be open in the connection pool
    broker_pool_limit=20,
    broker_transport_options={
        'visibility_timeout': 3600,
        'confirm_publish': True,
    },
    # Serializer method
    task_serializer='json',
    result_serializer='json',
    accept_content=['json'],
    # The two settings below reserve just one task at a time
    # Task acknowledgement mode, default=False
    task_acks_late=True,
    # How many messages to prefetch, default=4, value=1 disables prefetch
    worker_prefetch_multiplier=1,
    # Dates and times in messages will be converted to use the UTC timezone
    timezone='UTC',
    enable_utc=True,
    # if true, store the task return values
    task_ignore_result=False,
    # if true, result messages will be persistent, i.e. not lost after a broker restart
    result_persistent=False,
    # Celery task result expiry (in secs), default=1d, value=0/None never expires
    result_expires=900,
    # Message compression setting
    task_compression='gzip',
    # Task execution marker, default=False
    task_track_started=True,
    # rate limits on tasks (disable all rate limits, even if tasks have explicit rate limits set)
    worker_disable_rate_limits=True,
    # if True, all tasks will be executed locally by blocking until the task returns
    task_always_eager=False,
    # Send events so the worker can be monitored by tools like celerymon
    worker_send_task_events=False,
    # Expiry time in seconds for when a monitor client's event queue will be deleted, default=never
    event_queue_expires=60,
    # Default queue, exchange, routing key configuration
    # task_default_queue = 'default.queue',
    # task_default_exchange = 'default.exchange',
    # task_default_exchange_type='topic',
    # task_default_routing_key = 'default.route',
    # task_create_missing_queues = True
    task_queues=(
        # Default configuration
        Queue('redis.test.topic',
              Exchange('redis.test.topic'),
              routing_key='redis.test.topic'),
    ),
)

def create_celery_app() -> Celery:
    logger.info('Initializing Celery...')
    celery_app = Celery(name=__name__)
    celery_app.config_from_object(config)
    return celery_app

# create a celery app
app = create_celery_app()

@app.task(name='task_process_message', bind=True, max_retries=3)
def task_process_message(self, message):
    try:
        logger.info(f'{message}: Triggered task: task_process_message')
    except Exception as e:
        logger.exception(
            f'Error executing task_process_message({message})')
        # We do not have to reset the timestamp since the job always looks back by 1 hr
        # ('utils' is a helper module of the asker's, not shown in the question)
        self.retry(exc=e, countdown=utils.get_retry_delay(self.request.retries))

@shared_task(name='shared_task_process_message', bind=True, max_retries=3)
def shared_task_process_message(self, message):
    try:
        logger.info(f'{message}: Triggered task: shared_task_process_message')
    except Exception as e:
        logger.exception(
            f'Error executing shared_task_process_message({message})')
        self.retry(exc=e, countdown=utils.get_retry_delay(self.request.retries))
kombu_producer.py
from kombu import Producer, Consumer, Queue, Connection
import json

redis_url = 'redis://localhost:6379/0'
conn = Connection(redis_url)
producer = Producer(conn.channel())
channel = 'redis.test.topic'

for i in range(10):
    body = {
        'task': 'task_process_message',
        'id': f'{i}',
        'kwargs': {'message': f'Hello {i}'},
    }
    producer.publish(body=body, routing_key='redis.test.topic')
(Two screenshots accompanied the question but are not reproduced here: one showed Redis activity with the regular Redis producer/consumer, the other showed Redis activity while running the kombu producer and Celery consumer.)
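The observed asymmetry matches how kombu's Redis transport works: ordinary queue messages are pushed onto a Redis list keyed by the queue name and popped by the worker, while redis_producer.py uses Redis pub/sub (PUBLISH/SUBSCRIBE), which Celery never subscribes to. A small inspection sketch, assuming the default key layout of kombu's Redis transport and no worker running (so messages stay queued):

import redis

r = redis.Redis(host='localhost', port=6379, db=0)

# Messages published via kombu/Celery sit in a Redis list named after
# the queue; messages sent with PUBLISH never appear here.
print(r.llen('redis.test.topic'))          # queued kombu/Celery messages
print(r.lrange('redis.test.topic', 0, 0))  # peek at the first raw payload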

RabbitMQ Exchanges vs Queues

I'm trying to do some basic testing with AMQP using RabbitMQ. I wrote a simple Python script with pika to create an AMQP client. I first sent messages directly to a queue. This worked well, and I was able to monitor performance with the RabbitMQ web management tool as well as with "rabbitmqctl list_queues". I next tried to send messages to an exchange, using multiple queues with the receiver listening on all of the queues. I'm confused because when I use the exchange I can no longer see the queues in the management tool or rabbitmqctl. How do I monitor queues when using exchanges?
###########################################
queue send code:
###########################################
channel.queue_declare(queue='hello')

msgCnt = 1
while True:
    msg = "'Hello World! Msg Count: " + str(msgCnt) + "'"
    channel.basic_publish(exchange='',
                          routing_key='hello',
                          body=msg)
    print(" [x] Sent " + msg)
    msgCnt = msgCnt + 1
    time.sleep(1)
###########################################
exchange send code:
###########################################
queue = sys.argv[1]  # passing test.0001, test.0002 in diff clients.

channel.exchange_declare(exchange='test', type='topic')

msgCnt = 1
while True:
    msg = "'Hello World! Msg Count: " + str(msgCnt) + "'"
    channel.basic_publish(exchange='test',
                          routing_key=queue,
                          body=msg)
    print(" [x] Sent " + msg)
    msgCnt = msgCnt + 1
    time.sleep(1)
###########################################
consumer code:
###########################################
channel.exchange_declare(exchange='test',
                         type='topic')

result = channel.queue_declare(exclusive=True)
queue_name = result.method.queue

binding_keys = sys.argv[1:]
for binding_key in binding_keys:
    channel.queue_bind(exchange='test',
                       queue=queue_name,
                       routing_key=binding_key)

def callback(ch, method, properties, body):
    print(" [x] Received %r" % body)

channel.basic_consume(callback,
                      queue=queue_name,
                      no_ack=True)

print(' [*] Waiting for messages. To exit press CTRL+C')
channel.start_consuming()
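For reference, the consumer above is started with the binding keys as arguments, e.g. python consumer.py test.0001 test.0002, or with a topic wildcard such as "test.*". Note also that queue_declare(exclusive=True) with no name asks the broker for an auto-generated queue name (amq.gen-...), so in the management tool and in rabbitmqctl list_queues the queues appear under those generated names rather than under the routing keys.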

RabbitMQ prefetch ignored when consumer goes down and comes back up

My basicQos setting is ignored when the consumer is down and later comes back up. For instance, suppose the consumer is down and 5 messages arrive from a producer. If the consumer is not running, these messages will be stored on disk (I think!) as long as the exchange/queue is durable.
If I set basicQos as channel.basicQos(0, 3, true), my consumer receives more than 3 messages when it comes back up. Why?!?
On the other hand, everything works properly (only 3 messages are read from the queue) if the consumer is already running when the messages arrive... My code is as follows:
factory = new ConnectionFactory();
factory.setHost(mRabbitMQHost); // may get server address from file configuration.
factory.setUsername(mRabbitMQUsername);
factory.setPassword(mRabbitMQPassword);

connection = factory.newConnection();
channel = connection.createChannel();
channel.exchangeDeclare("exchangeName", "direct", true); // true enables durability

consumer = new QueueingConsumer(channel);
for (QGQueues queue : QGQueues.values()) {
    String queueName = queue.getQueueName();
    channel.queueDeclare(queueName, true, false, false, null);
    channel.queueBind(queueName, "exchangeName", queue.getRoutingKey());
    channel.basicConsume(queueName, false, consumer); // autoAck=false: messages must be ACKed explicitly
}
channel.basicQos(0, 3, true);
Thanks!
My bet would be that you need to set the QoS before you do anything else.
Change your code to this order:
channel = connection.createChannel();

// set QoS immediately
channel.basicQos(0, 3, true);

channel.exchangeDeclare("exchangeName", "direct", true); // true enables durability

consumer = new QueueingConsumer(channel);
for (QGQueues queue : QGQueues.values()) {
    String queueName = queue.getQueueName();
    channel.queueDeclare(queueName, true, false, false, null);
    channel.queueBind(queueName, "exchangeName", queue.getRoutingKey());
    channel.basicConsume(queueName, false, consumer); // autoAck=false: messages must be ACKed explicitly
}
This will ensure the prefetch limit is set before you try to consume any messages.
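One more detail worth keeping in mind: with auto-ack disabled (the false argument to basicConsume), RabbitMQ counts unacknowledged deliveries against the prefetch limit, so each message must be acked before deliveries beyond the limit of 3 continue.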