I need to write an Obj-C object (an NSString in this case) to a SQLite database, storing it in a BLOB column, using Python 2.7.
To that end I wrote this demo code, which fails with the traceback below.
from sqlite3 import connect
from Foundation import NSArchiver
conn = connect(':memory:')
create = "CREATE TABLE test(data BLOB)"
conn.execute(create)
conn.commit()
blob = NSArchiver.archivedDataWithRootObject_("Hello World").bytes()
print type(blob), blob
sql = "INSERT INTO test VALUES (?)"
data = [blob]
conn.execute(sql, data)
conn.commit()
This traces back with:
$ ./sqlite3_test.py
<type 'memoryview'> <memory at 0x104a5e218>
Traceback (most recent call last):
File "./sqlite3_test.py", line 16, in <module>
conn.execute(sql, data)
sqlite3.InterfaceError: Error binding parameter 0 - probably unsupported type.
No amount of magic using sqlite3.Binary (which is defined as Binary = buffer inside the sqlite3 module) or .tobytes() (from memoryview) made that work any better.
I also tried to create a buffer() object out of the NSArchiver blob, but a naïve approach:
b = buffer(blob, 0, len(blob))
traces back with TypeError: buffer object expected - understandably, since what NSArchiver returns is not a Python string.
I got a working example!
The memoryview returned by the Objective-C bytes() call needs .tobytes() called on it; the resulting byte string can then be wrapped in a buffer() via sqlite3.Binary.
#!/usr/bin/python -tt
from sqlite3 import connect, Binary
from Foundation import NSArchiver, NSString
conn = connect(':memory:')
create = "CREATE TABLE test(data BLOB)"
conn.execute(create)
conn.commit()
s = NSString.alloc().initWithString_("Hello World")  # avoid shadowing the built-in str
blob = NSArchiver.archivedDataWithRootObject_(s).bytes().tobytes()
print "Original (%s): %s" % (len(blob), blob)
sql = "INSERT INTO test VALUES (?)"
data = [Binary(blob)]
conn.execute(sql, data)
conn.commit()
cursor = conn.cursor()
cursor.execute("SELECT * FROM test")
rows = cursor.fetchall()
for r in rows:
    print "In database (%s): %s" % (len(r[0]), r[0])
This gives:
$ ./sqlite3_test.py
Original (84):
streamtyped???#???OC_PythonString?NSString?NSObject??i?+
Hello World?
In database (84):
streamtyped???#???OC_PythonString?NSString?NSObject??i?+
Hello World?
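For completeness, the stored blob can also be turned back into an object on the way out. A minimal sketch, assuming the same PyObjC environment (NSData and NSUnarchiver are the counterparts of the archiving call above; rows[0][0] is the buffer fetched from the table):

from Foundation import NSData, NSUnarchiver

# wrap the fetched bytes in an NSData, then unarchive the original object
raw = rows[0][0]
data = NSData.dataWithBytes_length_(str(raw), len(raw))
obj = NSUnarchiver.unarchiveObjectWithData_(data)
print "Unarchived: %s" % obj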
I have followed this article's setup and it all seems to work properly, but I would now like to perform a SQL query and load the result into a pandas data frame. How could I proceed?
This is what I have now:
import os
import urllib.parse
import databases
import sqlalchemy
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

host_server = os.environ.get('host_server', 'localhost')
db_server_port = urllib.parse.quote_plus(str(os.environ.get('db_server_port', '5432')))
database_name = os.environ.get('database_name', 'my_data_base123')
db_username = urllib.parse.quote_plus(str(os.environ.get('db_username', 'my_user_name123')))
db_password = urllib.parse.quote_plus(str(os.environ.get('db_password', 'my_password123')))
ssl_mode = urllib.parse.quote_plus(str(os.environ.get('ssl_mode', 'prefer')))
DATABASE_URL = 'postgresql://{}:{}@{}:{}/{}?sslmode={}'.format(db_username, db_password, host_server, db_server_port, database_name, ssl_mode)
database = databases.Database(DATABASE_URL)
metadata = sqlalchemy.MetaData()
engine = sqlalchemy.create_engine(
    DATABASE_URL, pool_size=3, max_overflow=0
)
metadata.create_all(engine)
app = FastAPI(title="REST API using FastAPI PostgreSQL Async EndPoints")
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"]
)
@app.on_event("startup")
async def startup():
    await database.connect()

@app.on_event("shutdown")
async def shutdown():
    await database.disconnect()
Trying to access the database with a SQL query:
from sqlalchemy import text

with engine.connect() as conn:
    result = conn.execute(text("select 'hello world'"))
    print(result.all())
as it says in the SQLAlchemy documentation, but I get errors like:
print(result.all())
AttributeError: 'ResultProxy' object has no attribute 'all'
Even if I try to access the tables of my database:
with engine.connect() as conn:
    result = conn.execute(text("select * FROM users"))
    print(result.all())
I get the same error.
It is solved: I had to upgrade SQLAlchemy (Result.all() only exists in 1.4 and later):
sudo pip install sqlalchemy --upgrade
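To then get a query result into a pandas data frame, pd.read_sql_query accepts a SQLAlchemy engine (or connection) directly; a minimal sketch, reusing the engine and the users table from the question:

import pandas as pd

# pandas runs the query through the engine and builds the frame from the rows
df = pd.read_sql_query("SELECT * FROM users", con=engine)
print(df.head())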
I'm trying to run the sample program from this RedisLabs page.
I chose Option A - which was to set up the free Redis cloud server.
(It seems that if you install it manually, you have to add JSON support as a plugin.)
I'm able to connect and use other "set" commands, but getting error on JSON:
File "C:\Users\nwalt\.virtualenvs\TDAmeritradeGetQuotes\lib\site-packages\redis\client.py", line 901, in execute_command
return self.parse_response(conn, command_name, **options)
File "C:\Users\nwalt\.virtualenvs\TDAmeritradeGetQuotes\lib\site-packages\redis\client.py", line 915, in parse_response
response = connection.read_response()
File "C:\Users\nwalt\.virtualenvs\TDAmeritradeGetQuotes\lib\site-packages\redis\connection.py", line 756, in read_response
raise response
redis.exceptions.ResponseError: unknown command 'JSON.SET'
My Python test program (except that I substituted the sample endpoint before posting):
import redis
import json
import pprint
host_info = "redis.us-east-1-1.ec2.cloud.redislabs.com"
redisObj = redis.Redis(host=host_info, port=18274, password='xxx')
print ("Normal call to Redis")
redisObj.set('foo', 'bar')
value = redisObj.get('foo')
print(value)
capitals = {
    "Lebanon": "Beirut",
    "Norway": "Oslo",
    "France": "Paris"
}
print ("capitals - before call to Redis")
pprint.pprint(capitals)
print("JSON call to Redis")
redisObj.execute_command('JSON.SET', 'doc', '.', json.dumps(capitals))
print("Data Saved, now fetch data back from redis")
reply = json.loads(redisObj.execute_command('JSON.GET', 'doc'))
print("reply from Redis get")
pprint.pprint(reply)
This is the screen shot from their website where I created the database. I didn't see any option to enable JSON or add any modules.
I'm not sure this was available when I created the Redis database, but it is now. When you create one on redislabs.com, you can turn on modules and pick one from the list.
Then use the "rejson" library from https://pypi.org/project/rejson/ to get the jsonset method, with code such as this:
from rejson import Client, Path

rj = Client(host=config_dict['REDIS_CONFIG_HOST'], port=config_dict['REDIS_CONFIG_PORT'], password=config_dict['REDIS_CONFIG_PASSWORD'], decode_responses=True)
out_doc = {}
out_doc['firstname'] = "John"
out_doc['lastname'] = "Doe"
rj.jsonset('config', Path.rootPath(), out_doc)
get_doc = rj.jsonget('config', Path.rootPath())
pprint.pprint(get_doc)
I'm not using the cloud Redis; on my local instance Python didn't load JSON.SET either.
I got it done by following this sample: https://onelinerhub.com/python-redis/save-json-to-redis
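As a further note, recent redis-py releases bundle the RedisJSON commands, so a separate rejson package isn't needed; a minimal sketch, assuming redis-py >= 4.0 and a server with the RedisJSON module loaded:

import redis

r = redis.Redis(host='localhost', port=6379, decode_responses=True)
# json().set(name, path, obj) issues JSON.SET under the hood
r.json().set('capitals', '$', {"Lebanon": "Beirut", "Norway": "Oslo"})
print(r.json().get('capitals', '$'))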
I am compressing a string using zlib and storing it in an Aerospike bin. On retrieval and decompression, I get "zlib.error: Error -5 while decompressing data: incomplete or truncated stream".
When I compared the original compressed data with the retrieved compressed data, something is missing at the end of the retrieved data.
I am using Aerospike 3.7.3 & Python client 2.0.1.
Please help. Thanks.
Update: Tried using bz2. It throws ValueError: couldn't find end of stream on retrieval and decompression. It looks like Aerospike is stripping off the last byte or something else from the blob.
Update: Posting the code
import aerospike
import bz2
config = {
    'hosts': [
        ('127.0.0.1', 3000)
    ],
    'policies': {
        'timeout': 1000  # milliseconds
    }
}
client = aerospike.client(config)
client.connect()
content = "An Aerospike Query"
content_bz2 = bz2.compress(content)
key = ('benchmark', 'myset', 55)
client.put(key, {'bin0': content_bz2})
(key, meta, bins) = client.get(key)
print bz2.decompress(bins['bin0'])
Getting Following Error:
Traceback (most recent call last):
File "asread.py", line 22, in <module>
print bz2.decompress(bins['bin0'])
ValueError: couldn't find end of stream
The bz2.compress method returns a string, and the client sees that type and tries to convert it to the server's as_str type. If it runs into a \0 in an unexpected position it will truncate the string, causing your error.
Instead, make sure to cast the binary data to a bytearray, which the client converts to the server's as_bytes type. On the read operation, bz2.decompress will work with the bytearray data and give you back the original string.
from __future__ import print_function
import aerospike
import bz2
config = {'hosts': [( '33.33.33.91', 3000 )]}
client = aerospike.client(config)
client.connect()
content = "An Aerospike Query"
content_bz2 = bytearray(bz2.compress(content))
key = ('test', 'bytesss', 1)
client.put(key, {'bin0':content_bz2})
(key, meta, bins) = client.get(key)
print(type(bins['bin0']))
bin0 = bz2.decompress(bins['bin0'])
print(type(bin0))
print(bin0)
Gives back
<type 'bytearray'>
<type 'str'>
An Aerospike Query
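The same bytearray cast should also fix the original zlib version of the code; a sketch under that assumption, reusing client and key from the example above ('bin1' is just an illustrative bin name):

import zlib

content_zlib = bytearray(zlib.compress("An Aerospike Query"))
client.put(key, {'bin1': content_zlib})
(key, meta, bins) = client.get(key)
print(zlib.decompress(bins['bin1']))  # the stream now comes back intact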
I am new to IBPy and really interested to know how to get account parameters for multiple accounts. The code below only gives me the output on the command line, and I couldn't figure out how to store that information in a dataframe. The function updateAccountValue() doesn't provide a unique ID that I can use as the dataframe index.
from ib.opt import Connection, message
import pandas as pd
import time
def error_handler(msg):
    """Handles the capturing of error messages"""
    print "Server Error: %s" % msg

def updateAccount_handler(msg):
    if msg.key in ['AccountType', 'NetLiquidation']:
        print msg.key, msg.value

if __name__ == "__main__":
    conn = Connection.create(port=7497, clientId=93)
    conn.connect()
    conn.register(error_handler, 'Error')
    conn.register(updateAccount_handler, message.updateAccountValue)
    # we can do a loop, i am just giving a simple example for 2 accounts
    conn.reqAccountUpdates(1, "Uxxxx008")
    time.sleep(0.5)
    conn.reqAccountUpdates(1, "Uxxxx765")
    time.sleep(0.5)
    conn.disconnect()
The output looks like this:
Server Version: 76
TWS Time at connection:20150729 12:46:56 EST
Server Error: <error id=-1, errorCode=2104, errorMsg=Market data farm connection is OK:usfuture>
Server Error: <error id=-1, errorCode=2104, errorMsg=Market data farm connection is OK:usfuture.us>
Server Error: <error id=-1, errorCode=2104, errorMsg=Market data farm connection is OK:usfarm>
Server Error: <error id=-1, errorCode=2106, errorMsg=HMDS data farm connection is OK:ushmds>
AccountType INDIVIDUAL
NetLiquidation 2625.24
AccountType IRA-ROTH NEW
NetLiquidation 11313.83
The end goal is to store this information in a pandas dataframe keyed by a unique ID: the account number.
OK. IBPy works in a sender/receiver architecture. What I would suggest is to have a shared variable (here a class attribute) to store the account information.
from ib.opt import Connection, message
import pandas as pd
import time
class account_update:
    acc_info = []  # list to store all your account info
    # you can have a list, tuple, pandas dataframe or any data structure; I am using a normal list to demonstrate

    def error_handler(self, msg):
        """Handles the capturing of error messages"""
        print "Server Error: %s" % msg

    def updateAccount_handler(self, msg):
        if msg.key in ['AccountType', 'NetLiquidation']:
            self.acc_info.append((msg.key, msg.value))

    def main(self):
        conn = Connection.create(port=7497, clientId=93)
        conn.connect()
        conn.register(self.error_handler, 'Error')
        conn.register(self.updateAccount_handler, message.updateAccountValue)
        conn.reqAccountUpdates(1, "Uxxxx008")
        time.sleep(0.5)
        conn.reqAccountUpdates(1, "Uxxxx765")
        time.sleep(0.5)
        conn.disconnect()

if __name__ == "__main__":
    account_update().main()
So the logic is simple: create a shared variable and make your response handler update it whenever it receives a response. Hope it works :)
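To reach the stated end goal (a dataframe keyed by account number), the updateAccountValue messages should also carry the account name alongside key and value, mirroring the underlying TWS callback; a sketch under that assumption (msg.accountName is the field I'm assuming here):

import pandas as pd

rows = []  # each handler call appends {'account': ..., 'key': ..., 'value': ...}

def updateAccount_handler(msg):
    if msg.key in ['AccountType', 'NetLiquidation']:
        rows.append({'account': msg.accountName, 'key': msg.key, 'value': msg.value})

# after the account requests have completed and the connection is closed:
df = pd.DataFrame(rows).pivot(index='account', columns='key', values='value')
print df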
Is it possible to use Redis's MOVE command to move all keys from one database to another? The MOVE command only moves one key, but I need to move all the keys in the database.
I would recommend taking a look at the following alpha-version app for backing up and restoring Redis databases (you can install it via gem install redis-dump). You could redis-dump your database and then redis-load it into another database via the --database argument.
redis-dump project
If this doesn't fit your purposes, you may need to make use of a scripting language's redis bindings (or alternatively throw something together using bash / redis-cli / xargs, etc). If you need assistance along these lines then we probably need more details first.
I've written a small Python script to move data between two Redis servers (it only supports list and string types, and you must have the Python Redis client installed):
'''
Created on 2011-11-9
@author: wuyi
'''
import redis
from optparse import OptionParser
import time

def mv_str(r_source, r_dest, quiet):
    keys = r_source.keys("*")
    for k in keys:
        if r_dest.keys(k):
            print "skipping %s" % k
            continue
        else:
            print "copying %s" % k
            r_dest.set(k, r_source.get(k))

def mv_list(r_source, r_dest, quiet):
    keys = r_source.keys("*")
    for k in keys:
        length = r_source.llen(k)
        i = 0
        while (i < length):
            print "add queue no.:%d" % i
            v = r_source.lindex(k, i)
            r_dest.rpush(k, v)
            i += 1

if __name__ == "__main__":
    usage = """usage: %prog [options] source dest"""
    parser = OptionParser(usage=usage)
    parser.add_option("-q", "--quiet", dest="quiet",
                      default=False, action="store_true",
                      help="quiet mode")
    parser.add_option("-p", "--port", dest="port",
                      default=6380,
                      help="port for both source and dest")
    parser.add_option("--dbs", dest="dbs",
                      default="0",
                      help="db list: 0 1 120 220...")
    parser.add_option("-t", "--type", dest="type",
                      default="normal",
                      help="available types: normal, lpoplist")
    parser.add_option("--tmpdb", dest="tmpdb",
                      default=0,
                      help="tmp db number to store tmp data")
    (options, args) = parser.parse_args()

    if not len(args) == 2:
        print usage
        exit(1)
    source = args[0]
    dest = args[1]
    if source == dest:
        print "dest must not be the same as source!"
        exit(2)

    dbs = options.dbs.split(' ')
    for db in dbs:
        r_source = redis.Redis(host=source, db=db, password="", port=int(options.port))
        r_dest = redis.Redis(host=dest, db=db, password="", port=int(options.port))
        print "______________db____________:%s" % db
        time.sleep(2)
        if options.type == "normal":
            mv_str(r_source, r_dest, options.quiet)
        elif options.type == "lpoplist":
            mv_list(r_source, r_dest, options.quiet)
        del r_source
        del r_dest
You can try my own tool, rdd.
It's a command-line utility that can dump a database to a file, work on it (filter, match, merge, ...), and load it back into a Redis instance.
Take care: it's at the alpha stage. https://github.com/r043v/rdd/
Now that redis has scripting using lua, you can easily write a command that loops through all the keys, checks their type and moves them accordingly to a new database.
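If you would rather drive that loop from the client instead of Lua, redis-py exposes MOVE directly, and MOVE works on any key type; a minimal sketch, assuming source db 0 and destination db 1 on the same server:

import redis

src = redis.Redis(host='localhost', port=6379, db=0)
# SCAN walks the keyspace incrementally instead of blocking the server like KEYS
for key in src.scan_iter('*'):
    src.move(key, 1)  # MOVE transfers the key to db 1 on the same instance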
I suggest you try it as below (a configuration sketch follows the list):
1. Copy the rdb file to another directory.
2. Rename the rdb file.
3. Modify the Redis configuration file of the new instance so that it points at the new rdb file.
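For step 3, the relevant redis.conf directives are dbfilename and dir; a sketch, assuming the copied file was renamed dump-new.rdb and placed under /var/lib/redis:

# redis.conf excerpt for the new instance (file name and path are illustrative)
dbfilename dump-new.rdb
dir /var/lib/redis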