How do I send a delayed message in RabbitMQ using the rabbitmq-delayed-message-exchange plugin?

I have enabled the plugin with
rabbitmq-plugins enable rabbitmq_delayed_message_exchange
I tried to create a delayed exchange, attach an x-delay header with an integer value of 5000 ms, and bind it to a queue, but it didn't work.
So I tried it with Pika in Python:
import pika

credentials = pika.PlainCredentials('admin', 'admin')
connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='127.0.0.1', port=5673,
                              virtual_host='/', credentials=credentials))
channel = connection.channel()
channel.exchange_declare(exchange='test-exchange',
                         exchange_type='x-delayed-message',
                         arguments={'x-delayed-type': 'direct'},
                         durable=True, auto_delete=True)
channel.queue_declare(queue='task_queue', durable=True)
channel.queue_bind(queue='task_queue', exchange='test-exchange',
                   routing_key='task_queue')
for i in range(100):
    channel.basic_publish(
        exchange='test-exchange',
        routing_key='task_queue',
        body='gooogle',
        properties=pika.BasicProperties(headers={'x-delay': 5000},
                                        delivery_mode=1))
    print(i)
How can I get the delayed exchange to actually delay messages?
Error report:
ERROR REPORT==== 10-Mar-2017::13:08:09 ===
Error on AMQP connection <0.683.0> (127.0.0.1:42052 -> 127.0.0.1:5673, vhost: '/', user: 'admin', state: running), channel 1:
{{{undef,
[{erlang,system_time,[milli_seconds],[]},
{rabbit_delayed_message,internal_delay_message,4,
[{file,"src/rabbit_delayed_message.erl"},{line,179}]},
{rabbit_delayed_message,handle_call,3,
[{file,"src/rabbit_delayed_message.erl"},{line,122}]},
{gen_server,handle_msg,5,[{file,"gen_server.erl"},{line,585}]},
{proc_lib,init_p_do_apply,3,[{file,"proc_lib.erl"},{line,239}]}]},
{gen_server,call,
[rabbit_delayed_message,
{delay_message,
{exchange,
{resource,<<"/">>,exchange,<<"test-exchange">>},
'x-delayed-message',true,true,false,
[{<<"x-delayed-type">>,longstr,<<"direct">>}],
undefined,undefined,
{[],[]}},
{delivery,false,false,<0.691.0>,
{basic_message,
{resource,<<"/">>,exchange,<<"test-exchange">>},
[<<"task_queue">>],
{content,60,
{'P_basic',undefined,undefined,
[{<<"x-delay">>,signedint,5000}],
1,undefined,undefined,undefined,undefined,
undefined,undefined,undefined,undefined,undefined,
undefined},
<<48,0,0,0,0,13,7,120,45,100,101,108,97,121,73,0,0,19,
136,1>>,
rabbit_framing_amqp_0_9_1,
[<<"gooogle">>]},
<<80,125,217,116,181,47,214,41,203,179,7,85,150,76,35,2>>,
false},
undefined,noflow},
5000},
infinity]}},
[{gen_server,call,3,[{file,"gen_server.erl"},{line,188}]},
{rabbit_exchange_type_delayed_message,route,2,
[{file,"src/rabbit_exchange_type_delayed_message.erl"},{line,53}]},
{rabbit_exchange,route1,3,[{file,"src/rabbit_exchange.erl"},{line,381}]},
{rabbit_exchange,route,2,[{file,"src/rabbit_exchange.erl"},{line,371}]},
{rabbit_channel,handle_method,3,
[{file,"src/rabbit_channel.erl"},{line,949}]},
{rabbit_channel,handle_cast,2,[{file,"src/rabbit_channel.erl"},{line,457}]},
{gen_server2,handle_msg,2,[{file,"src/gen_server2.erl"},{line,1032}]},
{proc_lib,init_p_do_apply,3,[{file,"proc_lib.erl"},{line,239}]}]}

Here is working code with RabbitMQ 3.7.7. (The undef on erlang:system_time in the report above suggests the broker was running on an Erlang release older than OTP 18, where that function does not exist; upgrading Erlang/RabbitMQ is the underlying fix.)
send.py
#!/usr/bin/env python
import pika
import sys

connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.exchange_declare(exchange='test-exchange',
                         exchange_type='x-delayed-message',
                         arguments={'x-delayed-type': 'direct'})
severity = sys.argv[1] if len(sys.argv) > 1 else 'info'
message = ' '.join(sys.argv[2:]) or 'Hello World!'
channel.basic_publish(exchange='test-exchange',
                      routing_key=severity,
                      properties=pika.BasicProperties(
                          headers={'x-delay': 5000}  # delay in milliseconds
                      ),
                      body=message)
print(" [x] Sent %r:%r" % (severity, message))
connection.close()
receive.py
#!/usr/bin/env python
import pika
import sys

connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.exchange_declare(exchange='test-exchange',
                         exchange_type='x-delayed-message',
                         arguments={'x-delayed-type': 'direct'})
result = channel.queue_declare(exclusive=True)
queue_name = result.method.queue

binding_keys = sys.argv[1:]
if not binding_keys:
    sys.stderr.write("Usage: %s [binding_key]...\n" % sys.argv[0])
    sys.exit(1)

for binding_key in binding_keys:
    channel.queue_bind(exchange='test-exchange',
                       queue=queue_name,
                       routing_key=binding_key)

print(' [*] Waiting for logs. To exit press CTRL+C')

def callback(ch, method, properties, body):
    print(" [x] %r:%r" % (method.routing_key, body))

channel.basic_consume(callback,
                      queue=queue_name,
                      no_ack=True)
channel.start_consuming()
python send.py error aaaabbbb
python receive.py error
[*] Waiting for logs. To exit press CTRL+C
[x] 'error':'aaaabbbb'
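To confirm the delay is applied per message, a quick check (a minimal sketch, not from the original post; it assumes the exchange declared above already exists and receive.py is bound to the 'info' key) is to publish several messages with increasing x-delay values and watch them arrive in delay order:

import time
import pika

connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
for delay_ms in (1000, 3000, 5000):
    # each message carries its own x-delay header, so delays are per message
    channel.basic_publish(
        exchange='test-exchange',
        routing_key='info',
        properties=pika.BasicProperties(headers={'x-delay': delay_ms}),
        body='published at %.0f, delay %d ms' % (time.time(), delay_ms))
connection.close()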

Related

Telegram bot not responding after uploading files and database to Heroku

I'm a beginner at Python. Below is my Python code for a Telegram bot. It works on XAMPP, but I would like to host the bot in the cloud so that I don't need to start XAMPP's Apache & MySQL every time I use the bot. However, it stopped working after I uploaded it to Heroku. How can I fix this? Thank you in advance.
Modified for uploading to Heroku
import logging
import os

import mysql.connector
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters

PORT = int(os.environ.get('PORT', 5000))

# Enable logging
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                    level=logging.INFO)

mydb = mysql.connector.connect(
    host='us-cdbr-east-05.cleardb.net',
    user='b081bd520f9623',
    passwd='557dad71',
    database='heroku_26b1a208f24f1fa')
query = mydb.cursor()

logger = logging.getLogger(__name__)

TOKEN = '5333685233:AAFr4-1nB6_I8ZMdt25Y4zBotHRA9I_qtMI'

# Define a few command handlers. These usually take the two arguments update and
# context. Error handlers also receive the raised TelegramError object in error.
def start(update, context):
    """Send a message when the command /start is issued."""
    update.message.reply_text('Hi! This is start')

def help(update, context):
    """Send a message when the command /help is issued."""
    update.message.reply_text('Help!')

def sql(update, context):
    sql = "SELECT nama_item, jumlah_dalam_kg FROM data_penjualan_harian WHERE nama_item = 'Lemon'"
    query.execute(sql)
    sql_result = query.fetchall()
    pesan_balasan = ''
    for x in sql_result:
        pesan_balasan = pesan_balasan + str(x) + '\n'
    # tidy up the bot's reply:
    # strip quotes
    pesan_balasan = pesan_balasan.replace("'", "")
    # strip parentheses
    pesan_balasan = pesan_balasan.replace("(", "")
    pesan_balasan = pesan_balasan.replace(")", "")
    # strip commas
    pesan_balasan = pesan_balasan.replace(",", "")
    update.message.reply_text(pesan_balasan)

def main():
    updater = Updater(TOKEN, use_context=True)
    # Get the dispatcher to register handlers
    dp = updater.dispatcher
    # on different commands - answer in Telegram
    dp.add_handler(CommandHandler("start", start))
    dp.add_handler(CommandHandler("help", help))
    # on noncommand i.e. message - echo the message on Telegram
    dp.add_handler(MessageHandler(Filters.text, echo))  # NOTE: echo is never defined in this script
    # log all errors
    dp.add_error_handler(error)  # NOTE: error is never defined in this script
    # Start the bot
    updater.start_webhook(listen="0.0.0.0",
                          port=PORT,
                          url_path=TOKEN,
                          webhook_url='https://powerful-lowlands-14039.herokuapp.com/' + TOKEN)
    # Block until the process receives SIGINT, SIGTERM or SIGABRT
    updater.idle()

if __name__ == '__main__':
    main()
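One likely culprit, independent of Heroku: main() registers echo and error, but neither function exists in the script, so the process dies with a NameError at startup, and on Heroku that surfaces as the bot silently not responding. A minimal sketch of the two missing handlers (modeled on the python-telegram-bot v13 tutorial; the bodies here are assumptions, not the poster's code):

def echo(update, context):
    """Echo the user's message back."""
    update.message.reply_text(update.message.text)

def error(update, context):
    """Log errors raised while handling updates."""
    logger.warning('Update "%s" caused error "%s"', update, context.error)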

Celery consumer (only) with an external producer

I'm using Celery 4.4.7 with Redis as my message broker.
I want to use Celery as a consumer only, since the producer is an external Java application.
The Java application pushes messages to a channel on Redis, but my Celery application is not picking them up.
I have simulated the Java producer in Python using redis-py (redis_producer.py), publishing to a channel. redis_consumer.py is able to pick up the messages from this producer.
But celery_consumer.py seems to be blind to these messages.
Messages from redis_producer.py are picked up by redis_consumer.py, but not by Celery.
Messages from kombu_producer.py are picked up by the Celery worker, but not by redis_consumer.py.
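That symptom pattern suggests a transport mismatch: Celery's Redis transport (kombu) moves tasks through Redis lists with BRPOP, while PUBLISH/SUBSCRIBE is a separate channel mechanism that no Celery worker reads. A quick way to see the difference (a sketch, assuming a local Redis and the queue name used below):

import redis

r = redis.Redis(host='localhost', port=6379)
# Tasks queued for Celery sit in a list named after the queue; the worker
# pops them with BRPOP. Anything sent with r.publish() goes to a pub/sub
# channel instead and is invisible to the worker.
print(r.lrange('redis.test.topic', 0, -1))  # pending Celery messages, if any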
redis_producer.py
import json

import redis

r = redis.Redis(host='localhost', port=6379)
for i in range(10):
    body = {
        'id': i,
        'message': f'Hello {i}',
    }
    r.publish(channel='redis.test.topic', message=json.dumps(body))
redis_consumer.py
import multiprocessing
import signal

import redis

redis_conn = redis.Redis(charset='utf-8', decode_responses=True)

def sub(name: str):
    pubsub = redis_conn.pubsub()
    pubsub.subscribe('redis.test.topic')
    for message in pubsub.listen():
        print(message)
        if message.get('type') == 'message':
            data = message.get('data')
            print('%s: %s' % (name, data))

def on_terminate(signum, stack):
    wait_for_current_scp_operation()  # NOTE: not defined in this snippet

if __name__ == '__main__':
    multiprocessing.Process(target=sub, args=('consumer',)).start()
    signal.signal(signal.SIGTERM, on_terminate)
celery_consumer.py
import os

from celery import Celery
from celery import shared_task
from celery.utils.log import get_task_logger
from kombu import Exchange
from kombu import Queue

logger = get_task_logger(__name__)

# Celery broker URL
broker_url = os.environ.get('CELERY_BROKER_URL', None)
if broker_url is None:
    broker_url = 'redis://localhost:6379/0'

# store used for task results, default=None
result_backend = os.environ.get('CELERY_RESULT_BACKEND', None)
if result_backend is None:
    result_backend = 'redis://localhost:6379/0'

config = dict(
    broker_url=broker_url,
    result_backend=result_backend,
    # maximum number of connections that can be open in the connection pool
    broker_pool_limit=20,
    broker_transport_options={
        'visibility_timeout': 3600,
        'confirm_publish': True,
    },
    # serializer settings
    task_serializer='json',
    result_serializer='json',
    accept_content=['json'],
    # the next two settings together reserve one task at a time
    # task acknowledgement mode, default=False
    task_acks_late=True,
    # how many messages to prefetch, default=4, value=1 disables prefetch
    worker_prefetch_multiplier=1,
    # dates and times in messages will be converted to the UTC timezone
    timezone='UTC',
    enable_utc=True,
    # if False, store the task return values
    task_ignore_result=False,
    # if True, result messages are persistent and survive a broker restart
    result_persistent=False,
    # task result expiry (in secs), default=1d, value=0/None never expires
    result_expires=900,
    # message compression setting
    task_compression='gzip',
    # task execution marker, default=False
    task_track_started=True,
    # disable all rate limits, even if tasks set explicit ones
    worker_disable_rate_limits=True,
    # if True, all tasks are executed locally, blocking until they return
    task_always_eager=False,
    # send events so the worker can be monitored by tools like celerymon
    worker_send_task_events=False,
    # expiry in seconds for monitor clients' event queues, default=never
    event_queue_expires=60,
    # default queue, exchange, routing key configuration
    # task_default_queue='default.queue',
    # task_default_exchange='default.exchange',
    # task_default_exchange_type='topic',
    # task_default_routing_key='default.route',
    # task_create_missing_queues=True,
    task_queues=(
        # default configuration
        Queue('redis.test.topic',
              Exchange('redis.test.topic'),
              routing_key='redis.test.topic'),
    ),
)

def create_celery_app() -> Celery:
    logger.info('Initializing Celery...')
    celery_app = Celery(name=__name__)
    celery_app.config_from_object(config)
    return celery_app

# create a celery app
app = create_celery_app()

@app.task(name='task_process_message', bind=True, max_retries=3)
def task_process_message(self, message):
    try:
        logger.info(f'{message}: Triggered task: task_process_message')
    except Exception as e:
        logger.exception(f'Error executing task_process_message({message})')
        # no need to reset the timestamp; the job always looks back by 1 hr
        self.retry(exc=e, countdown=utils.get_retry_delay(self.request.retries))  # NOTE: utils is not defined in this snippet

@shared_task(name='shared_task_process_message', bind=True, max_retries=3)
def shared_task_process_message(self, message):
    try:
        logger.info(f'{message}: Triggered task: shared_task_process_message')
    except Exception as e:
        logger.exception(f'Error executing shared_task_process_message({message})')
        self.retry(exc=e, countdown=utils.get_retry_delay(self.request.retries))  # NOTE: utils is not defined in this snippet
kombu_producer.py
from kombu import Connection, Producer

redis_url = 'redis://localhost:6379/0'
conn = Connection(redis_url)
producer = Producer(conn.channel())

for i in range(10):
    body = {
        'task': 'task_process_message',
        'id': f'{i}',
        'kwargs': {'message': f'Hello {i}'},
    }
    producer.publish(body=body, routing_key='redis.test.topic')
(The original post included two screenshots: one showing Redis activity with the plain redis producer/consumer pair, and one showing activity while running the kombu producer with the Celery consumer. The images are not reproduced here.)
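Assuming the goal is a Celery-only consumer, the producer side has to speak Celery's message protocol. With the decorators actually applied (note @app.task above; the pasted code appeared to have turned them into comments), a Python producer can enqueue work through the Celery API itself, and an external Java producer would need a client that writes the same protocol to the queue's Redis list. A sketch:

from celery_consumer import app

# send_task writes a Celery-protocol message onto the 'redis.test.topic'
# list in Redis, which is what the worker's BRPOP loop actually consumes
app.send_task('task_process_message',
              kwargs={'message': 'Hello from an external producer'},
              queue='redis.test.topic')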

How do you reply to a RabbitMQ RPC client with multiple messages?

I'm trying to use RabbitMQ in an RPC environment where each remote call will take a significant amount of time, producing results continually. I want the results to be delivered to the client as they are generated.
I started with the standard tutorial RPC example, then modified it to use "Direct Reply-to". I publish all the intermediate results back to an anonymous exclusive callback queue, without acknowledging the original request. When processing is complete, I send a final message back to the client and then acknowledge the original request. But the client only sees the first intermediate message. My client happens to be in PHP and my server in Python, but I suspect that is not relevant. Does anyone have the magic to make this work? I can post code, but it's pretty basic cookbook stuff.
Answering my own question. The following worked:
PHP client:
#!/usr/bin/php
<?php
require_once __DIR__ . '/vendor/autoload.php';

use PhpAmqpLib\Connection\AMQPStreamConnection;
use PhpAmqpLib\Message\AMQPMessage;

class RpcClient {
    private $connection;
    private $channel;
    private $callback_queue;
    private $response;
    private $corr_id;

    public function __construct() {
        $this->connection = new AMQPStreamConnection(
            'localhost', 5672, 'guest', 'guest'
        );
        $this->channel = $this->connection->channel();
        list($this->callback_queue, ,) = $this->channel->queue_declare(
            "", false, false, true, false
        );
        # For direct reply-to, consume amq.rabbitmq.reply-to, a special queue name.
        # Unclear what happens to the declare above.
        $this->channel->basic_consume(
            $this->callback_queue, '', false, true,
            false, false, array($this, 'onResponse')
        );
    }

    # This is called once for each message coming back
    public function onResponse($rep) {
        if ($rep->get('correlation_id') == $this->corr_id) {
            $response = json_decode($rep->body, true);
            echo print_r($response['line'], true);
            if ($response['type'] == 'final') {
                $this->response = $rep->body;
            }
        }
    }

    public function call($message_array) {
        $this->response = null;
        $this->corr_id = uniqid();
        $jsonm = json_encode($message_array);
        $msg = new AMQPMessage(
            $jsonm,
            array(
                'correlation_id' => $this->corr_id,
                ### Not sure which of the next two lines is the correct one... if either....
                ##'reply_to' => 'amq.rabbitmq.reply-to' # This is when using direct reply-to
                'reply_to' => $this->callback_queue
            )
        );
        $this->channel->basic_publish($msg, '', 'ansiblePB_rpc_queue');
        while (!$this->response) {
            $this->channel->wait();
        }
        return intval($this->response);
    }
}

$ansiblepb_rpc = new RpcClient();
$response = $ansiblepb_rpc->call(array('userID' => 'jb1234',
                                       'user_display_name' => 'Joe Bloe',
                                       'limit' => '24000'));
echo ' [.] Got ', $response, "\n";
?>
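On the commented-out question in call(): the two variants are different mechanisms. With a declared exclusive callback queue (what this client actually uses), reply_to must be the server-generated queue name. With true Direct Reply-to there is no queue declaration at all: the client consumes the pseudo-queue amq.rabbitmq.reply-to in no-ack mode before publishing, and sets reply_to to that exact literal. A minimal client-side sketch in Python/pika (pika 1.x API; queue and payload names are taken from the code above):

import pika

connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = connection.channel()

def on_response(ch, method, props, body):
    print(body)

# consuming amq.rabbitmq.reply-to must start before publishing,
# and only works in no-ack (auto_ack) mode
channel.basic_consume(queue='amq.rabbitmq.reply-to',
                      on_message_callback=on_response,
                      auto_ack=True)
channel.basic_publish(
    exchange='',
    routing_key='ansiblePB_rpc_queue',
    properties=pika.BasicProperties(reply_to='amq.rabbitmq.reply-to'),
    body='{"userID": "jb1234", "user_display_name": "Joe Bloe", "limit": "24000"}')
channel.start_consuming()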
Python server:
#!/usr/bin/env python
import json
import subprocess

import pika
import yaml

class RMQmultireply(object):
    """Generic class to support ansible_playbook on a RabbitMQ RPC queue."""
    def __init__(self, channel, method, props):
        self.channel = channel
        self.method = method
        self.props = props

    def run(self, userID, username, limit):
        """Run the main guts of the service."""
        cmd = ['/home/dhutchin/devel/rmq/multilineoutput']
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, universal_newlines=True)
        for line in proc.stdout.readlines():
            intermediate_json_result = json.dumps({'type': 'intermediate', 'line': line})
            self.channel.basic_publish(exchange='',
                                       routing_key=self.props.reply_to,
                                       properties=pika.BasicProperties(
                                           correlation_id=self.props.correlation_id),
                                       body=str(intermediate_json_result))
            # self.channel.basic_ack(delivery_tag=self.method.delivery_tag)
        proc.wait()
        return proc.returncode

def on_request(channel, method, props, jsonstring):
    """A request has just come in to run ansible_playbook."""
    playbook = RMQmultireply(channel, method, props)
    # Fork and exec a playbook. Receive each line of output and send it
    # back to the requestor as received. .run() does not return until the
    # playbook exits. Use the "Direct Reply-to" mechanism to return
    # multiple messages to our client.
    request = yaml.load(jsonstring)  # Yes, yaml works better than JSON here
    returncode = playbook.run(request['userID'], request['user_display_name'], request['limit'])
    final_json_result = json.dumps({'type': 'final', 'line': '', 'rc': returncode})
    channel.basic_publish(exchange='',
                          routing_key=props.reply_to,
                          properties=pika.BasicProperties(
                              correlation_id=props.correlation_id),
                          body=str(final_json_result))
    # Acknowledge the original message so that RabbitMQ can remove it
    # from the ansiblePB_rpc_queue queue
    channel.basic_ack(delivery_tag=method.delivery_tag)

def main():
    """It's kinda obvious what this does."""
    try:
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host='localhost'))
    except Exception:
        print("pika.BlockingConnection failed... maybe RabbitMQ is not running")
        quit()
    channel = connection.channel()
    channel.queue_declare(queue='ansiblePB_rpc_queue')
    channel.basic_qos(prefetch_count=1)
    # auto_ack is off by default, so auto_ack=False need not be specified
    channel.basic_consume(queue='ansiblePB_rpc_queue', on_message_callback=on_request)
    print(" [x] Awaiting RPC requests")
    channel.start_consuming()

if __name__ == '__main__':
    main()

RabbitMQ Exchanges vs Queues

I'm trying to do some basic testing with AMQP using RabbitMQ. I wrote a simple Python script with Pika to create an AMQP client. I first sent messages directly to a queue. This worked well, and I was able to monitor performance with the RabbitMQ web management tool as well as "rabbitmqctl list_queues". I next tried to send messages to an exchange with multiple queues, with the receiver listening on all queues. I'm confused because when I use the exchange I can no longer see the queues in the management tool or rabbitmqctl. How do I monitor queues when using exchanges?
###########################################
queue send code:
###########################################
channel.queue_declare(queue='hello')
msgCnt = 1
while True:
    msg = "'Hello World! Msg Count: " + str(msgCnt) + "'"
    channel.basic_publish(exchange='',
                          routing_key='hello',
                          body=msg)
    print(" [x] Sent " + msg)
    msgCnt = msgCnt + 1
    time.sleep(1)
###########################################
exchange send code:
###########################################
queue = sys.argv[1]  # passing test.0001, test.0002 in different clients
channel.exchange_declare(exchange='test', exchange_type='topic')
msgCnt = 1
while True:
    msg = "'Hello World! Msg Count: " + str(msgCnt) + "'"
    channel.basic_publish(exchange='test',
                          routing_key=queue,
                          body=msg)
    print(" [x] Sent " + msg)
    msgCnt = msgCnt + 1
    time.sleep(1)
###########################################
consumer code:
###########################################
channel.exchange_declare(exchange='test', exchange_type='topic')
result = channel.queue_declare(exclusive=True)
queue_name = result.method.queue

binding_keys = sys.argv[1:]
for binding_key in binding_keys:
    channel.queue_bind(exchange='test',
                       queue=queue_name,
                       routing_key=binding_key)

def callback(ch, method, properties, body):
    print(" [x] Received %r" % body)

channel.basic_consume(callback,
                      queue=queue_name,
                      no_ack=True)
print(' [*] Waiting for messages. To exit press CTRL+C')
channel.start_consuming()
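The queues are still there; the catch is that the consumer declares a server-named exclusive queue, which shows up as amq.gen-… only while the consumer is connected and is deleted when it disconnects, and exchanges themselves never store messages. To keep something permanently visible in "rabbitmqctl list_queues" and the management UI, a sketch using a named, durable queue bound to the same exchange (the queue name here is an example):

channel.exchange_declare(exchange='test', exchange_type='topic')
# a named, durable queue survives consumer restarts and always shows up
# in "rabbitmqctl list_queues", unlike a server-named exclusive queue
channel.queue_declare(queue='test.0001', durable=True)
channel.queue_bind(exchange='test', queue='test.0001', routing_key='test.0001')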

RabbitMQ and Pika

I'm using the Python library Pika to work with RabbitMQ.
RabbitMQ is running and listening on 0.0.0.0:5672. When I try to connect to it from another server, I get an exception:
socket.timeout: timed out
The Python code is from the official RabbitMQ docs (Hello, World).
I tried disabling iptables.
But if I run the script with host "localhost", everything works fine.
My /etc/rabbitmq/rabbitmq.config:
[
{rabbit, [
{tcp_listeners,[{"0.0.0.0",5672}]}
]}
].
Code:
#!/usr/bin/env python
import pika

connection = pika.BlockingConnection(pika.ConnectionParameters(
    host='192.168.10.150', port=5672, virtual_host='/',
    credentials=pika.credentials.PlainCredentials('user', '123456')))
channel = connection.channel()
channel.queue_declare(queue='task_queue', durable=True)

message = "Hello World!"
channel.basic_publish(exchange='',
                      routing_key='task_queue',
                      body=message,
                      properties=pika.BasicProperties(
                          delivery_mode=2,  # make message persistent
                      ))
print(" [x] Sent %r" % (message,))
connection.close()
Since you are connecting from another server, you should check your machine's firewall settings.
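A quick way to separate a firewall/network problem from an AMQP-level one (a sketch; the IP and port are the ones used above):

import socket

# if this times out, the problem is network/firewall, not RabbitMQ itself;
# if it connects but pika still times out, look at broker-side auth/limits
sock = socket.create_connection(('192.168.10.150', 5672), timeout=5)
print('port 5672 is reachable')
sock.close()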