Send file to serial port in Python - XMODEM

I am trying to send the file '105.8k' below to my energy meter.
I am using the XMODEM example from PyPI, but I get the following error:
Traceback (most recent call last):
  File "C:\Py\mainpy.py", line 68, in <module>
    status = modem.send(f, retry=3)
  File "C:\Users\admin\AppData\Local\Programs\Python\Python38-32\lib\site-packages\xmodem\__init__.py", line 270, in send
    char = self.getc(1)
  File "C:\py\mainpy.py", line 62, in getc
    return ser.read(size) or None
AttributeError: 'str' object has no attribute 'read'
The code I use:
### send file to port ###
ser = serialPortCombobox.get().split(" ")[0]

def getc(size, timeout=1):
    return ser.read(size) or None

def putc(data, timeout=1):
    return ser.write(data)

modem = XMODEM(getc, putc)
f = open('105.8k', 'rb')
status = modem.send(f, retry=3)
ser.close()
stream.close()
Thank you for your help.
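For reference, a minimal sketch of what the fix might look like, assuming pyserial is installed and the combobox entry starts with the port name (e.g. "COM3"). The AttributeError occurs because ser is just the port-name string taken from the combobox, so it has to be opened with serial.Serial() before getc/putc can read from or write to it (the baud rate below is only a guess):

import serial
from xmodem import XMODEM

port_name = serialPortCombobox.get().split(" ")[0]         # e.g. "COM3"
ser = serial.Serial(port_name, baudrate=9600, timeout=1)   # baud rate is an assumption

def getc(size, timeout=1):
    return ser.read(size) or None

def putc(data, timeout=1):
    return ser.write(data)

modem = XMODEM(getc, putc)
with open('105.8k', 'rb') as f:
    status = modem.send(f, retry=3)
ser.close()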

Related

How to convert imagenet/mobilenet_v2_130_224/classification into an mlmodel

I'm very new to this, but I am trying to solve the issue somehow. This is my convert.py file:
import numpy as n
import tensorflow as tf
import coremltools as ct
print(n.__version__)
print(tf.__version__)
print(ct.__version__)
loaded_model = tf.saved_model.load("mobilenet_v2_130_224.h5")
mlmodel = ct.convert(loaded_model, inputs=[ct.ImageType()], classifier_config=ct.ClassifierConfig("labels.txt"), source='tensorflow')
mlmodel.short_description = "My Classifier"
mlmodel.license = "Apache 2.0"
spec = mlmodel.get_spec()
ct.utils.rename_feature(spec, "input_1", "imageInput")
ct.utils.rename_feature(spec, "Identity", "classResult")
mlmodel = ct.models.MLModel(spec)
print(mlmodel)
mlmodel.save("model_299x299.mlmodel")
I downloaded the model from [here][1] and unzipped it on the desktop. I have an M1 iMac. Why do I get the error below? How can I convert this particular model into an mlmodel? If my convert file is written incorrectly, what would be the best practice?
Traceback (most recent call last):
  File "/Users/asduskun/Desktop/convert.py", line 20, in <module>
    mlmodel = ct.convert(loaded_model, inputs=[ct.ImageType()], classifier_config=ct.ClassifierConfig("labels.txt"), source='tensorflow')
  File "/Users/asduskun/miniconda3/lib/python3.10/site-packages/coremltools/converters/_converters_entry.py", line 444, in convert
    mlmodel = mil_convert(
  File "/Users/asduskun/miniconda3/lib/python3.10/site-packages/coremltools/converters/mil/converter.py", line 187, in mil_convert
    return _mil_convert(model, convert_from, convert_to, ConverterRegistry, MLModel, compute_units, **kwargs)
  File "/Users/asduskun/miniconda3/lib/python3.10/site-packages/coremltools/converters/mil/converter.py", line 211, in _mil_convert
    proto, mil_program = mil_convert_to_proto(
  File "/Users/asduskun/miniconda3/lib/python3.10/site-packages/coremltools/converters/mil/converter.py", line 281, in mil_convert_to_proto
    prog = frontend_converter(model, **kwargs)
  File "/Users/asduskun/miniconda3/lib/python3.10/site-packages/coremltools/converters/mil/converter.py", line 99, in __call__
    return tf2_loader.load()
  File "/Users/asduskun/miniconda3/lib/python3.10/site-packages/coremltools/converters/mil/frontend/tensorflow/load.py", line 61, in load
    self._graph_def = self._graph_def_from_model(output_names)
  File "/Users/asduskun/miniconda3/lib/python3.10/site-packages/coremltools/converters/mil/frontend/tensorflow2/load.py", line 133, in _graph_def_from_model
    cfs, graph_def = self._get_concrete_functions_and_graph_def()
  File "/Users/asduskun/miniconda3/lib/python3.10/site-packages/coremltools/converters/mil/frontend/tensorflow2/load.py", line 125, in _get_concrete_functions_and_graph_def
    raise NotImplementedError(msg.format(self.model))
NotImplementedError: Expected model format: [SavedModel | [concrete_function] | tf.keras.Model | .h5 | GraphDef], got <tensorflow.python.saved_model.load.Loader._recreate_base_user_object.<locals>._UserObject object at 0x1676455a0>
[1]: https://tfhub.dev/google/imagenet/mobilenet_v2_130_224/classification/5
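One possible direction, sketched under the assumption that the downloaded, unzipped TF Hub model is a SavedModel directory (the "mobilenet_v2_130_224.h5" path from the code above): the NotImplementedError lists the formats ct.convert accepts, and the generic object returned by tf.saved_model.load() is not one of them, so passing the directory path itself may work instead:

import coremltools as ct

# Pass the SavedModel directory path directly instead of the object
# returned by tf.saved_model.load(); "mobilenet_v2_130_224.h5" is the
# same path used above and is assumed to be a SavedModel directory.
mlmodel = ct.convert(
    "mobilenet_v2_130_224.h5",
    inputs=[ct.ImageType()],
    classifier_config=ct.ClassifierConfig("labels.txt"),
    source="tensorflow",
)
mlmodel.save("model_299x299.mlmodel")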

Scrapy Selenium geckodriver problem - error while trying to scrape

Unhandled error in Deferred:
2020-07-24 09:12:40 [twisted] CRITICAL: Unhandled error in Deferred:
Traceback (most recent call last):
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 192, in crawl
    return self._crawl(crawler, *args, **kwargs)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 196, in _crawl
    d = crawler.crawl(*args, **kwargs)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/twisted/internet/defer.py", line 1613, in unwindGenerator
    return _cancellableInlineCallbacks(gen)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/twisted/internet/defer.py", line 1529, in _cancellableInlineCallbacks
    _inlineCallbacks(None, g, status)
--- <exception caught here> ---
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
    result = g.send(result)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 87, in crawl
    self.engine = self._create_engine()
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 101, in _create_engine
    return ExecutionEngine(self, lambda _: self.stop())
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/core/engine.py", line 69, in __init__
    self.downloader = downloader_cls(crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/core/downloader/__init__.py", line 83, in __init__
    self.middleware = DownloaderMiddlewareManager.from_crawler(crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/middleware.py", line 53, in from_crawler
    return cls.from_settings(crawler.settings, crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/middleware.py", line 35, in from_settings
    mw = create_instance(mwcls, settings, crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/utils/misc.py", line 150, in create_instance
    instance = objcls.from_crawler(crawler, *args, **kwargs)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy_selenium/middlewares.py", line 67, in from_crawler
    middleware = cls(
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy_selenium/middlewares.py", line 43, in __init__
    for argument in driver_arguments:
builtins.TypeError: 'NoneType' object is not iterable
My settings.py:
from shutil import which
SELENIUM_DRIVER_NAME = 'firefox'
SELENIUM_DRIVER_EXECUTABLE_PATH = which('geckodriver')
SELENIUM_BROWSER_EXECUTABLE_PATH = which('firefox')
...
'scrapy_selenium.SeleniumMiddleware': 800,
It looks like the permissions for the driver are fine:
:/usr/local/bin$ ll | grep gecko
-rwxrwxrwx 1 baku baku 7008696 lip 24 09:09 geckodriver*
Crawler code:
class LinkedInProfileSeleniumSpider(scrapy.Spider):
    name = 'lips'
    allowed_domains = ['www.linkedin.com']

    def start_requests(self):
        yield SeleniumRequest(
            url="https://www.linkedin.com/login/",
            callback=self.proceed_login,
            wait_until=(
                EC.presence_of_element_located(
                    (By.CSS_SELECTOR, "#username")
                )
            ),
            script='window.scrollTo(0, document.body.scrollHeight);',
            wait_time=30
        )

    def proceed_login(self, response):
        # AFTER LOGIN
        driver = response.request.meta['driver']
        ...
Can you please help me figure out why it is failing? Thanks!
(By the way, it works with the Chrome driver but fails with gecko.)
I had the same problem on a Mac; this time I am trying on an Ubuntu machine.
I am not sure what the issue could be or where to start debugging.
It does not even reach self.proceed_login; it fails on the first request.
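A hedged guess based on the last frame of the traceback (for argument in driver_arguments: with a None value): scrapy-selenium's SeleniumMiddleware iterates over the SELENIUM_DRIVER_ARGUMENTS setting, so defining it in settings.py, even as an empty list, should avoid the TypeError. A sketch of what that settings block might look like ('-headless' is just an example argument):

from shutil import which

SELENIUM_DRIVER_NAME = 'firefox'
SELENIUM_DRIVER_EXECUTABLE_PATH = which('geckodriver')
SELENIUM_BROWSER_EXECUTABLE_PATH = which('firefox')
SELENIUM_DRIVER_ARGUMENTS = ['-headless']  # or [] if a visible browser window is wanted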

Python alternative for devices which do not support SSH exec_command

I am new to Python and want to know an alternative way of doing the following.
I am having an issue with paramiko's exec_command...
Here is the code:
sshdell = paramiko.SSHClient()
sshdell.set_missing_host_key_policy(paramiko.AutoAddPolicy())
sshdell.connect('ip', port=22, username='user', password='pwd')
stdin, stdout, stderr = sshdell.exec_command("ping 4.2.2.2 interface X1")
ping_check = stdout.readlines()
for line in ping_check:
    print(line)
The following error is thrown:
Traceback (most recent call last):
  File "delltest.py", line 36, in <module>
    stdin,stdout,stderr = sshdell.exec_command("ping 4.2.2.2 interface X1")
  File "C:\python35\lib\site-packages\paramiko\client.py", line 441, in exec_command
    chan.exec_command(command)
  File "C:\python35\lib\site-packages\paramiko\channel.py", line 60, in _check
    return func(self, *args, **kwds)
  File "C:\python35\lib\site-packages\paramiko\channel.py", line 234, in exec_command
    self._wait_for_event()
  File "C:\python35\lib\site-packages\paramiko\channel.py", line 1161, in _wait_for_event
    raise e
paramiko.ssh_exception.SSHException: Channel closed.
Please suggest an alternative, as my device may not support the exec_command() function.
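One common alternative when a device closes exec channels is to drive an interactive shell instead. A minimal sketch (untested against this particular device) using paramiko's invoke_shell():

import time
import paramiko

sshdell = paramiko.SSHClient()
sshdell.set_missing_host_key_policy(paramiko.AutoAddPolicy())
sshdell.connect('ip', port=22, username='user', password='pwd')

shell = sshdell.invoke_shell()              # interactive shell channel instead of exec_command()
shell.send("ping 4.2.2.2 interface X1\n")
time.sleep(5)                               # crude wait for the device to finish responding

output = ""
while shell.recv_ready():                   # drain whatever the device has sent so far
    output += shell.recv(4096).decode(errors="replace")
print(output)

sshdell.close()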

Kazoo package using Jython

Kazoo works fine under Python, but the project I'm working on requires it to run under Jython.
Here is the issue:
>>> from kazoo.client import KazooClient
>>> zk = KazooClient('127.0.0.1')
>>> zk.start()
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "C:\jython2.7.0\Lib\site-packages\kazoo\client.py", line 541, in start
    event = self.start_async()
  File "C:\jython2.7.0\Lib\site-packages\kazoo\client.py", line 576, in start_async
    self._connection.start()
  File "C:\jython2.7.0\Lib\site-packages\kazoo\protocol\connection.py", line 170, in start
    rw_sockets = self.handler.create_socket_pair()
  File "C:\jython2.7.0\Lib\site-packages\kazoo\handlers\threading.py", line 165, in create_socket_pair
    return utils.create_socket_pair(socket)
  File "C:\jython2.7.0\Lib\site-packages\kazoo\handlers\utils.py", line 148, in create_socket_pair
    temp_srv_sock.bind(('', port))
  File "C:\jython2.7.0\Lib\_socket.py", line 1367, in meth
    return getattr(self._sock,name)(*args)
  File "C:\jython2.7.0\Lib\_socket.py", line 812, in bind
    self.bind_addr = _get_jsockaddr(address, self.family, self.type, self.proto, AI_PASSIVE)
  File "C:\jython2.7.0\Lib\_socket.py", line 1565, in _get_jsockaddr
    addr = _get_jsockaddr2(address_object, family, sock_type, proto, flags)
  File "C:\jython2.7.0\Lib\_socket.py", line 1594, in _get_jsockaddr2
    hostname = {AF_INET: INADDR_ANY, AF_INET6: IN6ADDR_ANY_INIT}[family]
KeyError: 0
As I already said, there is no such issue when running under Python.
I'm pretty sure it is related to the Jython version of the _socket.py file, but I don't know of a workaround.
What can you recommend?
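For what it's worth, the failing call can probably be reproduced in isolation under Jython. This is only a sketch derived from the traceback; it assumes the bind call inside kazoo.handlers.utils.create_socket_pair is exactly what triggers the KeyError:

import socket

# Mirrors what kazoo's create_socket_pair() appears to do: create a
# temporary server socket with no explicit family and bind it to an
# ephemeral port. In the traceback above the family ends up as 0, which
# Jython's _get_jsockaddr2 cannot look up, producing KeyError: 0.
temp_srv_sock = socket.socket()
temp_srv_sock.bind(('', 0))
temp_srv_sock.close()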

AndroidViewClient serialno error

I'm testing various smartphones using AndroidViewClient.
To prevent connection errors, I used the connection options (kwargs1, kwargs2) as follows:
from com.dtmilano.android.viewclient import *
from com.dtmilano.android.adb.adbclient import *
kwargs1 = {'ignoresecuredevice': True}
kwargs2 = {'startviewserver': False, 'autodump': False}
vc = ViewClient(*ViewClient.connectToDeviceOrExit(**kwargs1), **kwargs2)
device, serialno = vc.device, vc.serialno
adb = AdbClient(serialno=serialno)
MODEL = adb.getProperty('ro.product.model')
print 'MODEL :', MODEL
With that, the connection errors disappeared.
But some phones with unusual serial numbers (such as 'LG-F160S-e0a852', 'EF47S01111100117300', ...) raised the following serialno error:
Traceback (most recent call last):
  File "D:\$Project\Eclipse\_Python\AutoTest\01_get_property4.py", line 43, in <module>
    adb = AdbClient(serialno=serialno)
  File "D:\$Project\Eclipse\AndroidViewClient-master\src\com\dtmilano\android\adb\adbclient.py", line 108, in __init__
    self.__setTransport()
  File "D:\$Project\Eclipse\AndroidViewClient-master\src\com\dtmilano\android\adb\adbclient.py", line 251, in __setTransport
    raise RuntimeError("ERROR: couldn't find device that matches '%s'" % self.serialno)
RuntimeError: ERROR: couldn't find device that matches '8b1ac56e'
How can I get the correct serialno or prevent this error?
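A hedged idea, not a confirmed fix: since the error comes from opening a second AdbClient with a serialno that may have been rewritten, it might be possible to skip that step and query the device object that connectToDeviceOrExit already returned (this assumes that object exposes getProperty(), as AdbClient does):

kwargs1 = {'ignoresecuredevice': True}
kwargs2 = {'startviewserver': False, 'autodump': False}

device, serialno = ViewClient.connectToDeviceOrExit(**kwargs1)
vc = ViewClient(device, serialno, **kwargs2)

# Ask the already-connected device object directly, instead of opening a
# second AdbClient with a serialno that may no longer match `adb devices`.
MODEL = device.getProperty('ro.product.model')
print 'MODEL :', MODEL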