How can I remove messages from an ActiveMQ queue using Python?

I have an ActiveMQ queue which has several messages that were sent using persistent set to true. When I create a subscriber in Python to read the queue, I get all of the messages in the queue. The next time I open the subscriber, I get all of the same messages. I adjusted the code that writes to the queue to set persistent to false, but the message remains in the queue. Have I neglected to send an acknowledgement?
The code is written using Python 2.7 because that's what our customer is using. I'd love to upgrade them, but I don't have the time.
Here's the script that reads the queue:
import socket
import threading
import xml.etree.ElementTree as etree
from xml.dom import minidom # for pretty printing
# import SampleXML
import sys
import os
import math
import time
from time import monotonic
import string
import stomp # for queue support
import platform

class ConnectionListener(stomp.ConnectionListener):
    def __init__(self, connection):
        self.connection = connection
        print("Listener created")

    def on_message(self, message):
        print("Received message with body " + message.body)

class Reader:
    def __init__(self):
        pass

    def ConnectToQueue(self):
        # For Production
        user = os.getenv("ACTIVEMQ_USER") or "worthington"
        # user = os.getenv("ACTIVEMQ_USER") or "worthington_test"
        password = os.getenv("ACTIVEMQ_PASSWORD") or "level3"
        host = os.getenv("ACTIVEMQ_HOST") or "localhost"
        port = os.getenv("ACTIVEMQ_PORT") or 61613
        # destination = sys.argv[1:2] or ["/topic/event"]
        # destination = destination[0]
        dest = "from_entec_test"
        # For Production
        # dest = "from_entec"
        try:
            conn = stomp.Connection10(host_and_ports=[(host, port)])
            conn.set_listener('message', ConnectionListener(conn))
            # conn.start()
            # subscribe_id = '-'.join(map(str, (platform.node(), os.getppid(), os.getpid())))
            conn.connect(login=user, passcode=password)
            subscribe_id = "Queue Test Listener"
            conn.subscribe(destination=dest, id=subscribe_id, ack='client-individual')
            conn.unsubscribe(id=subscribe_id)
            conn.disconnect()
        except Exception as error:
            reason = str(error)
            print("Exception when reading data from queue: " + str(error))

if __name__ == "__main__":
    try:
        UploadData = Reader()
        UploadData.ConnectToQueue()
        print("Reader finished.")
    except Exception as Value:
        reason = str(Value)
And here's the code that writes to it:
import socket
import threading
import xml.etree.ElementTree as etree
from xml.dom import minidom # for pretty printing
# import SampleXML
import sys
import os
import math
import time
from time import monotonic
import string
import stomp # for queue support
import platform

class ConnectionListener(stomp.ConnectionListener):
    def __init__(self, connection):
        self.connection = connection
        print "Listener created"

    def on_message(self, message):
        print "Received message with body " + message.body

class UploadData:
    def __init__(self):
        pass

    def ConnectToQueue(self):
        # For Production
        user = os.getenv("ACTIVEMQ_USER") or "worthington"
        # user = os.getenv("ACTIVEMQ_USER") or "worthington_test"
        password = os.getenv("ACTIVEMQ_PASSWORD") or "level3"
        host = os.getenv("ACTIVEMQ_HOST") or "localhost"
        port = os.getenv("ACTIVEMQ_PORT") or 61613
        # destination = sys.argv[1:2] or ["/topic/event"]
        # destination = destination[0]
        dest = "from_entec_test"
        # For Production
        # dest = "from_entec"
        try:
            conn = stomp.Connection10(host_and_ports=[(host, port)])
            # conn.start()
            # subscribe_id = '-'.join(map(str, (platform.node(), os.getppid(), os.getpid())))
            subscribe_id = "Queue Test Listener"
            conn.connect(login=user, passcode=password)
            message = "This is a test message."
            conn.send(dest, message, persistent='true')
            print "Sent message containing: " + message
            conn.disconnect()
        except Exception, error:
            reason = str(error)
            print "Exception when writing data to queue: " + str(error)

if __name__ == "__main__":
    try:
        UploadData = UploadData()
        UploadData.ConnectToQueue()
    except Exception, Value:
        reason = str(Value)
        print "Main routine exception: " + str(Value)

I'm not very familiar with Python STOMP clients, but from the code you appear to be subscribing using the 'client-individual' mode of STOMP, which means that for each message you receive you must send an ACK frame back carrying the message id so that the remote can mark it as consumed. Since you are not doing that, the messages are never removed from the queue.
As an alternative, you can use the 'auto' acknowledgement mode, which marks each message as consumed as soon as the broker dispatches it. To understand the STOMP subscription model, please refer to the STOMP specification.
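For reference, here is a minimal sketch of a listener that sends the ACK. One assumption on my part: it follows the question's listener signature, where on_message receives a single frame-like object carrying headers and body; stomp.py 4.x listeners instead receive headers and body as two separate arguments.

import stomp

class AckingListener(stomp.ConnectionListener):
    def __init__(self, connection):
        self.connection = connection

    def on_message(self, message):
        print("Received message with body " + message.body)
        # ACK with the message-id so the broker marks the message as
        # consumed and removes it from the queue. STOMP 1.1+ connections
        # also require the subscription id to be passed to ack().
        self.connection.ack(message.headers['message-id'])

Alternatively, subscribe with ack='auto' and no ACK frames are needed at all:

conn.subscribe(destination=dest, id=subscribe_id, ack='auto')

Either way, the reader has to stay connected long enough for the broker to dispatch the messages, since delivery is asynchronous; unsubscribing immediately after subscribing, as the script above does, gives the listener little chance to run.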

Related

What if I produce 3 messages for 3 different queues but consume only from 2 queues in RabbitMQ?

In the following Python program I am configuring RabbitMQ. I am creating an exchange named "order" and publishing 3 messages with routing keys "order.notify", "order.report", and "order.test".
import pika
import json
import uuid

con = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = con.channel()

channel.exchange_declare(
    exchange='order',
    exchange_type='direct'
)

channel.basic_publish(
    exchange='order',
    routing_key='order.notify',
    body=json.dumps({'user_email': 'First'})
    # body=json.dumps({'user_email': order['user_email']})
)
print('[x] Sent notify message')

channel.basic_publish(
    exchange='order',
    routing_key='order.report',
    body=json.dumps({'user_email': 'Second'})
)
print('[x] Sent report message')

channel.basic_publish(
    exchange='order',
    routing_key='order.test',
    body=json.dumps({'user_email': 'third'})
)
print('[x] Sent test message')

con.close()
Now on the consumer side I have created only 2 queues, with binding keys order.notify and order.report.
report.py
import pika
import json

con = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = con.channel()

queue = channel.queue_declare('order_notify')
queue_name = queue.method.queue

channel.queue_bind(
    exchange='order',
    queue=queue_name,
    routing_key='order.report'  # binding key
)

def callback(ch, method, properties, body):
    payload = json.loads(body)
    # print(' [x] Notifying {}'.format(payload['user_email']))
    print('Report Queue')
    print(payload['user_email'])
    ch.basic_ack(delivery_tag=method.delivery_tag)

channel.basic_consume(on_message_callback=callback, queue=queue_name)
print(' [*] waiting for report messages. To exit press CTRL + C')
channel.start_consuming()
nortify.py
import pika
import json

con = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = con.channel()

queue = channel.queue_declare('order_notify')
queue_name = queue.method.queue

channel.queue_bind(
    exchange='order',
    queue=queue_name,
    routing_key='order.notify'  # binding key
)

def callback(ch, method, properties, body):
    payload = json.loads(body)
    # print(' [x] Notifying {}'.format(payload['user_email']))
    print('Nortify Queue')
    print(payload['user_email'])
    ch.basic_ack(delivery_tag=method.delivery_tag)

channel.basic_consume(on_message_callback=callback, queue=queue_name)
print(' [*] waiting for report messages. To exit press CTRL + C')
channel.start_consuming()
Now, which queue will consume which message? I tried running this, and the queues were consuming messages seemingly at random. Can someone please explain?
I ran the above program several times but got random results: each queue consumed different messages on different runs.

Celery publisher doesn't send messages to the intended RabbitMQ queue

I have a Flask app running with Celery in it. When a user sends input to an app URI, it needs to be published as a message to a given RabbitMQ queue. But it sends messages to a default queue instead. How do I send a message to a specific, defined queue?
publisher.py
from flask import Flask, request, jsonify
from celery import Celery, bootsteps
from time import sleep
from kombu.common import QoS

class NoChannelGlobalQoS(bootsteps.StartStopStep):
    requires = {'celery.worker.consumer.tasks:Tasks'}

    def start(self, c):
        qos_global = False
        c.connection.default_channel.basic_qos(0, c.initial_prefetch_count, qos_global)

        def set_prefetch_count(prefetch_count):
            return c.task_consumer.qos(
                prefetch_count=prefetch_count,
                apply_global=qos_global,
            )
        c.qos = QoS(set_prefetch_count, c.initial_prefetch_count)

server = Flask(__name__)

broker_uri1 = "amqp://"
backend = "mongodb+srv://"
app = Celery('TestApp', broker_uri=broker_uri1, backend=backend)
CELERY_TASK_ROUTES = {'app.tasks.*': {'queue': 'rabbit'}, 'app.task.*': {'queue': 'rabbit'}}
app.config_from_object('celeryconfig')
app.conf.task_default_exchange = 'rabbit'
app.conf.task_default_routing_key = 'rabbit'
app.steps['worker'].add(NoChannelGlobalQoS)

@server.route("/")
def print_hello():
    return "Flask server working"

@server.route("/sync-reverse")
def reverse_by_worker_with_results():
    input = request.args.get("text")
    app.select_queues(queues="rabbit")
    task = app.signature('tasks.reverse', kwargs={'text': input})
    result = task.delay()
    while result.status == 'PENDING':
        print(result.status)
        sleep(2)
    print(result.status)
    return "Reversed >><br />Input Text : " + input + "<br />Output Text : " + result.get()

@server.route("/async-reverse")
def reverse_by_worker():
    input = request.args.get("text")
    app.select_queues(queues="rabbit")  # supposed to add queues to the task
    task = app.signature('tasks.reverse', kwargs={'text': input})
    result = task.delay()
    if result.id:
        return 'Task add'
    else:
        return 'Failure in adding task'

# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    server.run(host='0.0.0.0')
worker.py
from celery import Celery, bootsteps
from time import sleep
from kombu.common import QoS

broker_uri = 'amqp:///'
backend_uri = "mongodb+srv://"

app = Celery('TestApp', broker=broker_uri, backend=backend_uri)
app.config_from_object('celeryconfig')
app.conf.task_default_exchange = 'rabbit'
app.conf.task_default_routing_key = 'rabbit'

class NoChannelGlobalQoS(bootsteps.StartStopStep):
    requires = {'celery.worker.consumer.tasks:Tasks'}

    def start(self, c):
        qos_global = False
        c.connection.default_channel.basic_qos(0, c.initial_prefetch_count, qos_global)

        def set_prefetch_count(prefetch_count):
            return c.task_consumer.qos(
                prefetch_count=prefetch_count,
                apply_global=qos_global,
            )
        c.qos = QoS(set_prefetch_count, c.initial_prefetch_count)

app.steps['consumer'].add(NoChannelGlobalQoS)

@app.task
def reverse(text):
    sleep(10)
    return text[:-1]
I have used a Celery config file to define the queue name and type, as suggested:
from kombu import Queue

task_queues = [Queue(name="rabbit", queue_arguments={"x-queue-type": "quorum"})]
task_routes = {
    'tasks.add': 'rabbit',
}

How to pass headers in the headers exchange method in RabbitMQ using Python

Consumer.py:
#!/usr/bin/env python
import pika, sys, os

def main():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()

    channel.exchange_declare(exchange='headers_logs', exchange_type='headers')
    channel.queue_declare(queue='', exclusive=True)
    queue_name = "HeadersQueue1"
    channel.queue_bind(exchange='headers_logs', queue=queue_name)

    def callback(ch, method, properties, body):
        print(" [x] %r" % body.decode())

    print(' [*] Waiting for logs. To exit press CTRL+C')
    channel.basic_consume(
        queue=queue_name, on_message_callback=callback, auto_ack=True)
    channel.start_consuming()

if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Interrupted')
        try:
            sys.exit(0)
        except SystemExit:
            os._exit(0)
Publish.py:
#!/usr/bin/env python
import pika
import sys

connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.exchange_declare(exchange='headers_logs', exchange_type='headers')

message = ' '.join(sys.argv[1:]) or "Hello World!"
channel.basic_publish(exchange='headers_logs', headers={"name": "ram"}, body=message)
print(" [x] Sent %r" % message)
connection.close()
I have written the consumer and publish programs as above. Can anyone please explain how to write a simple headers exchange program in RabbitMQ using Python?
To use a headers exchange, you need to declare the exchange type as headers, not fanout as in your question's text.
exchangeName = 'headers_logs'
channel.exchange_declare(exchangeName, exchange_type='headers', durable=True)
Then create the queue and bind it to the exchange using the headers. Note that 'x-match' here can be set to match any or all headers. The routing key is set to an empty string because it is not used for routing messages.
qName = 'queue_logs'
channel.queue_declare(queue=qName, durable=True)
channel.queue_bind(qName, exchangeName, routing_key='', arguments={'x-match': 'any', 'key1': 'one', 'key2': 'two'})
Now we can publish a message to the exchange with a set of headers:
channel.basic_publish(
    exchange=exchangeName,
    routing_key='',
    body='test message body',
    properties=pika.BasicProperties(
        delivery_mode=2,  # make message persistent
        headers={'key1': 'one', 'key2': 'three'}
    )
)
I have only matched 'key1' in this message to demonstrate that 'x-match' has been set to 'any'.
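To round this out, here is a minimal consuming sketch under the same assumptions (the qName queue and binding declared above); properties.headers carries whatever headers the publisher attached:

def callback(ch, method, properties, body):
    # Show the headers alongside the body to confirm the x-match routing
    print(" [x] headers=%r body=%r" % (properties.headers, body.decode()))
    ch.basic_ack(delivery_tag=method.delivery_tag)

channel.basic_consume(queue=qName, on_message_callback=callback)
channel.start_consuming()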

See all of the unique IDs related to a vmware virtual machine

I want to see all of the unique IDs that are specific to a virtual machine, such as:
hardware ID, CPU ID, UUID, MAC address, etc.
Could anybody please help me find these IDs?
I can help you find some of these; for the rest you will have to search the docs.
Install pyVmomi and run the following code.
EDIT: Changed the code to run against an ESX host. Simply run it with python <script>.py.
To understand how this code works, you need to learn about Managed Objects. For example, here we are working with the Managed Object vm, and this object has many properties listed in the doc. So to retrieve the UUID of a VM we read vm.config.uuid. For the other details, go through the VirtualMachine object and see which properties you need.
import sys
import atexit
import time

from pyVmomi import vim, vmodl
from pyVim.connect import Disconnect
from pyVim import connect

inputs = {'esx_ip': '15.22.10.10',
          'esx_password': 'Password123',
          'esx_user': 'root',
          'vm_name': 'ubuntu',
          }

def wait_for_task(task, actionName='job', hideResult=False):
    """
    Waits and provides updates on a vSphere task
    """
    while task.info.state == vim.TaskInfo.State.running:
        time.sleep(2)
    if task.info.state == vim.TaskInfo.State.success:
        if task.info.result is not None and not hideResult:
            out = '%s completed successfully, result: %s' % (actionName, task.info.result)
            print out
        else:
            out = '%s completed successfully.' % actionName
            print out
    else:
        out = '%s did not complete successfully: %s' % (actionName, task.info.error)
        print out
        raise task.info.error
    return task.info.result

def get_obj(content, vimtype, name):
    """
    Get the vsphere object associated with a given text name
    """
    obj = None
    container = content.viewManager.CreateContainerView(content.rootFolder, vimtype, True)
    for c in container.view:
        if c.name == name:
            obj = c
            break
    return obj

def main():
    si = None
    try:
        print "Trying to connect ..."
        si = connect.Connect(inputs['esx_ip'], 443, inputs['esx_user'], inputs['esx_password'])
    except IOError, e:
        pass
    if not si:
        print "Cannot connect to specified host using specified username and password"
        sys.exit()
    print "Connected to host!"
    atexit.register(Disconnect, si)

    content = si.RetrieveContent()

    # Get the VirtualMachine object
    vm = get_obj(content, [vim.VirtualMachine], inputs['vm_name'])

    print "GuestID: ", vm.config.guestId
    print "UUID: ", vm.config.uuid
    print "Version: ", vm.config.version

    for device in vm.config.hardware.device:
        if isinstance(device, vim.vm.device.VirtualEthernetCard):
            print "MAC Address: ", device.macAddress

    # Example of changing UUID:
    new_uuid = '423ffff0-5d62-d040-248c-4538ae2c734f'
    vmconf = vim.vm.ConfigSpec()
    vmconf.uuid = new_uuid
    task = vm.ReconfigVM_Task(vmconf)
    wait_for_task(task, 'ReconfigVM_Task')
    print "Successfully changed UUID"
    print "New UUID: ", vm.config.uuid

if __name__ == "__main__":
    main()

Tornado's AsyncHTTPTestCase is not available outside self.fetch

I have an AsyncHTTPTestCase and I want to access it from methods besides self.fetch. Specifically, I have a SockJS handler that I want a sockjs-client to attach to for my tests.
I've discovered that even though self.get_url('/foo') returns a valid url, that url does not respond to anything except for self.fetch(). What gives?
Is this just not possible with AsyncHttpTestCase? What is the best pattern for doing this?
Here's tests.py
import urllib2

from tornado.httpclient import AsyncHTTPClient
import tornado.testing
from tornado.testing import AsyncTestCase, AsyncHTTPTestCase

from app import DebugApp

class TestDebug(AsyncHTTPTestCase):
    def get_app(self):
        return DebugApp()

    def test_foo(self):
        response = self.fetch('/foo')
        print response.body
        assert response.body == 'derp'

    def test_foo_from_urllib(self):
        response = urllib2.urlopen(self.get_url('/foo'), None, 2)
        print response
        assert response.body == 'derp'

    def runTest(self):
        pass
and app.py
import tornado.httpserver
import tornado.ioloop
import tornado.web
from tornado.options import options

class FooHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("derp")

url_patterns = [
    (r"/foo", FooHandler),
]

class DebugApp(tornado.web.Application):
    def __init__(self):
        tornado.web.Application.__init__(self, url_patterns, debug=True, xsrf_cookies=False)

def main():
    app = DebugApp()
    http_server = tornado.httpserver.HTTPServer(app)
    http_server.listen(6006)
    tornado.ioloop.IOLoop.instance().start()

if __name__ == "__main__":
    main()
and runtest.py
#!/usr/bin/env python
import unittest
from os import path
import sys

import tornado.testing

PROJECT_PATH = path.dirname(path.abspath(__file__))
sys.path.append(PROJECT_PATH)

def all():
    suite = unittest.defaultTestLoader.discover('./', 'tests.py', path.dirname(path.abspath(__file__)))
    print suite
    return suite

if __name__ == '__main__':
    # Print a nice message so that we can differentiate between test runs
    print ''
    print '%s %s' % ('Debug app', '0.1.0')
    print '\033[92m' + '-------------- Running Test Suite --------------' + '\033[0m'
    print ''
    tornado.testing.main()
The problem is that the IOLoop is not running when you call urlopen (and it cannot be, because urlopen is a blocking function). You must run the IOLoop (with either @gen_test, self.wait(), or indirectly via methods like self.fetch()) for the server to respond, and you can only interact with it via non-blocking functions (or, if you must use blocking functions like urlopen, run them in a separate thread).
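For illustration, here is a minimal sketch of the thread approach, reusing the DebugApp from app.py above (the test name and thread plumbing are my own assumptions, not from the original post):

import threading
import urllib2

from tornado.testing import AsyncHTTPTestCase

from app import DebugApp

class TestDebugThreaded(AsyncHTTPTestCase):
    def get_app(self):
        return DebugApp()

    def test_foo_from_urllib_in_thread(self):
        url = self.get_url('/foo')

        def fetch_in_thread():
            # The blocking call happens off the IOLoop thread,
            # so the IOLoop stays free to serve the request.
            body = urllib2.urlopen(url, None, 2).read()
            # add_callback is the only thread-safe IOLoop method;
            # use it to hand the result back and wake up self.wait().
            self.io_loop.add_callback(lambda: self.stop(body))

        threading.Thread(target=fetch_in_thread).start()
        body = self.wait()  # runs the IOLoop until self.stop() fires
        assert body == 'derp'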