"UnicodeDecodeError" running infer_detections script - tensorflow

Running the infer_detections script with my frozen graph and test set in the following way:
python -m infer_detections --input_tfrecord_paths=../data/coco_testdev.record --output_tfrecord_path=../data/inference --inference_graph=../model/fine_tuned_model/frozen_inference_graph.pb --discard_image_pixels
throws the error UnicodeDecodeError: 'utf-8' codec can't decode byte 0xff in position 394: invalid start byte.
The complete stack trace is:
Traceback (most recent call last):
File "C:\ProgramData\Anaconda3\lib\runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "C:\ProgramData\Anaconda3\lib\runpy.py", line 85, in _run_code
exec(code, run_globals)
File "C:\Users\me\Documents\GitHub\TransferLearningWithTensorflowAPI\scripts\infer_detections.py", line 96, in <module>
tf.app.run()
File "C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\platform\app.py", line 124, in run
_sys.exit(main(argv))
File "C:\Users\me\Documents\GitHub\TransferLearningWithTensorflowAPI\scripts\infer_detections.py", line 74, in main
image_tensor, FLAGS.inference_graph)
File "C:\ProgramData\Anaconda3\lib\site-packages\object_detection-0.1-py3.6.egg\object_detection\inference\detection_inference.py", line 69, in build_inference_graph
graph_content = graph_def_file.read()
File "C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\lib\io\file_io.py", line 126, in read
pywrap_tensorflow.ReadFromStream(self._read_buf, length, status))
File "C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\lib\io\file_io.py", line 94, in _prepare_value
return compat.as_str_any(val)
File "C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\util\compat.py", line 106, in as_str_any
return as_str(value)
File "C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\util\compat.py", line 84, in as_text
return bytes_or_text.decode(encoding)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xff in position 394: invalid start byte
What could be the problem?

Change lines 68-69 of object_detection/inference/detection_inference.py from
with tf.gfile.Open(inference_graph_path, 'r') as graph_def_file:
  graph_content = graph_def_file.read()
to
with tf.gfile.Open(inference_graph_path, 'rb') as graph_def_file:
  graph_content = graph_def_file.read()
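For background: a frozen .pb file is a binary-serialized GraphDef protobuf, so it has to be read in binary mode; opening it in text mode makes TensorFlow try to decode the bytes as UTF-8, which is exactly the UnicodeDecodeError above. A minimal sketch of loading such a graph manually (TF 1.x-style APIs, matching the script in the question; the path is the one from the command line):
import tensorflow as tf

# Read the frozen graph as raw bytes and parse it into a GraphDef.
graph_def = tf.GraphDef()
with tf.gfile.GFile('../model/fine_tuned_model/frozen_inference_graph.pb', 'rb') as f:
    graph_def.ParseFromString(f.read())

# Import the parsed GraphDef into a fresh graph for inference.
with tf.Graph().as_default() as graph:
    tf.import_graph_def(graph_def, name='')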

Related

pyshark "TypeError: sequence item 6: expected str instance, _io.TextIOWrapper found"

I am using pyshark for live packet capture. When I pass the parameter output_file=myFileObject to save the capture to a file, I get the following error on the sniff line. If the output_file parameter is removed, it works absolutely fine. Please suggest.
My sample code:
import pyshark

def capturePacket():
    outputF = open('capturepcap.pcap', 'w')
    cap = pyshark.LiveCapture(interface='Ethernet 8', output_file=outputF)
    cap.sniff(timeout=60)
    outputF.close()
Error:
Traceback (most recent call last):
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\runpy.py", line 196, in _run_module_as_main
return _run_code(code, main_globals, None,
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\runpy.py", line 86, in _run_code
exec(code, run_globals)
File "c:\Users\wxyz\.vscode\extensions\ms-python.python-2022.6.2\pythonFiles\lib\python\debugpy\__main__.py", line 45, in <module>
cli.main()
File "c:\Users\wxyz\.vscode\extensions\ms-python.python-2022.6.2\pythonFiles\lib\python\debugpy/..\debugpy\server\cli.py", line 444, in main
run()
File "c:\Users\wxyz\.vscode\extensions\ms-python.python-2022.6.2\pythonFiles\lib\python\debugpy/..\debugpy\server\cli.py", line 285, in run_file
runpy.run_path(target_as_str, run_name=compat.force_str("__main__"))
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\runpy.py", line 269, in run_path
return _run_module_code(code, init_globals, run_name,
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\runpy.py", line 96, in _run_module_code
_run_code(code, mod_globals, init_globals,
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\runpy.py", line 86, in _run_code
exec(code, run_globals)
File "c:\Users\wxyz\Documents\automation\practice_set_script\paket_capture\basic_packetCapture.py", line 29, in <module>
capturePacket()
File "c:\Users\wxyz\Documents\automation\practice_set_script\paket_capture\basic_packetCapture.py", line 22, in capturePacket
cap.sniff(timeout=60)
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\site-packages\pyshark\capture\capture.py", line 137, in load_packets
self.apply_on_packets(keep_packet, timeout=timeout, packet_count=packet_count)
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\site-packages\pyshark\capture\capture.py", line 274, in apply_on_packets
return self.eventloop.run_until_complete(coro)
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 641, in run_until_complete
return future.result()
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\asyncio\tasks.py", line 445, in wait_for
return fut.result()
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\site-packages\pyshark\capture\capture.py", line 283, in packets_from_tshark
tshark_process = await self._get_tshark_process(packet_count=packet_count)
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\site-packages\pyshark\capture\live_capture.py", line 94, in _get_tshark_process
tshark = await super(LiveCapture, self)._get_tshark_process(packet_count=packet_count, stdin=read)
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\site-packages\pyshark\capture\capture.py", line 399, in _get_tshark_process
self._log.debug("Creating TShark subprocess with parameters: " + " ".join(parameters))
TypeError: sequence item 6: expected str instance, _io.TextIOWrapper found
Error on reading from the event loop self pipe
loop: <ProactorEventLoop running=True closed=False debug=False>
Traceback (most recent call last):
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\asyncio\proactor_events.py", line 779, in _loop_self_reading
f = self._proactor.recv(self._ssock, 4096)
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 450, in recv
self._register_with_iocp(conn)
File "C:\Users\wxyz\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 723, in _register_with_iocp
_overlapped.CreateIoCompletionPort(obj.fileno(), self._iocp, 0, 0)
OSError: [WinError 87] The parameter is incorrect
PS C:\Users\wxyz\Documents\automation\practice_set_script\paket_capture>
The issue in your code is these lines:
outputF = open('capturepcap.pcap', 'w')
cap = pyshark.LiveCapture(interface='Ethernet 8', output_file=outputF)
The output_file parameter expects a string (a path to write to), not an io.TextIOWrapper. From the pyshark docstring:
:param output_file: A string of a file to write every read packet into (useful when filtering).
So this works:
import pyshark

def capturePacket():
    cap = pyshark.LiveCapture(interface='en0', output_file='capturepcap.pcap')
    cap.sniff(timeout=60)

capturePacket()
Here is a reference that I put together on using PyShark
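If you want to read the saved capture back afterwards, you can re-open the pcap with pyshark.FileCapture. A small sketch, assuming the filename used above:
import pyshark

# Re-open the pcap written by LiveCapture and walk through its packets.
cap = pyshark.FileCapture('capturepcap.pcap')
for packet in cap:
    print(packet.highest_layer)
cap.close()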

Blender throws keyframe error when exporting as gltf (glb) file with animations - Key.path_resolve could not be resolved

I am pretty new to Blender, animations and glTF. I have successfully created my 3D model and was able to export it as a glb file. Currently, I am trying to add a walking animation, but the export to glTF with animations doesn't work. This is the error that the exporter is throwing:
Python: Traceback (most recent call last):
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\__init__.py", line 575, in execute
return gltf2_blender_export.save(context, export_settings)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_export.py", line 46, in save
json, buffer = __export(export_settings)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_export.py", line 63, in __export
__gather_gltf(exporter, export_settings)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_export.py", line 72, in __gather_gltf
active_scene_idx, scenes, animations = gltf2_blender_gather.gather_gltf2(export_settings)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather.py", line 39, in gather_gltf2
animations += __gather_animations(blender_scene, export_settings)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather.py", line 83, in __gather_animations
animations_, merged_tracks = gltf2_blender_gather_animations.gather_animations(_blender_object, merged_tracks, len(animations), export_settings)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_animations.py", line 76, in gather_animations
animation = __gather_animation(blender_action, blender_object, export_settings)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_animations.py", line 114, in __gather_animation
channels=__gather_channels(blender_action, blender_object, export_settings),
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_animations.py", line 150, in __gather_channels
blender_action, blender_object, export_settings)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_cache.py", line 65, in wrapper_cached
result = func(*args)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_animation_channels.py", line 83, in gather_animation_channels
None)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_animation_channels.py", line 202, in __gather_animation_channel
sampler=__gather_sampler(channels, blender_object, export_settings, bake_bone, bake_channel, bake_range_start, bake_range_end, action_name, driver_obj),
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_animation_channels.py", line 263, in __gather_sampler
export_settings
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_cache.py", line 65, in wrapper_cached
result = func(*args)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_animation_samplers.py", line 65, in gather_animation_sampler
bake_bone, bake_channel, bake_range_start, bake_range_end, action_name, driver_obj, export_settings),
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_cache.py", line 65, in wrapper_cached
result = func(*args)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_animation_samplers.py", line 240, in __gather_input
export_settings)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_cache.py", line 65, in wrapper_cached
result = func(*args)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_animation_sampler_keyframes.py", line 240, in gather_keyframes
step
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_cache.py", line 90, in wrapper_bonecache
result = func(*args)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_animation_sampler_keyframes.py", line 177, in get_bone_matrix
drivers_to_manage = get_sk_drivers(obj_driver)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_cache.py", line 117, in wrapper_skdriverdiscover
result = func(*args)
File "E:\Applications\Blender\2.92\scripts\addons\io_scene_gltf2\blender\exp\gltf2_blender_gather_drivers.py", line 54, in get_sk_drivers
sk_name = child.data.shape_keys.path_resolve(get_target_object_path(sk_c.data_path)).name
ValueError: Key.path_resolve("key_blocks["Key 1"]") could not be resolved
location: <unknown location>:-1
I suppose that the glTF exporter can't find my keyframes, but I have no clue how to fix this issue.
Any help is much appreciated!
Thanks
I have submitted a bug report for the glTF exporter; you can look at it here: https://github.com/KhronosGroup/glTF-Blender-IO/issues/1401
Removing the invalid drivers fixes the issue.
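For anyone who wants to clean those up in bulk: the invalid drivers can be removed with a short script run from Blender's Scripting workspace. This is only a sketch, assuming that "invalid" means drivers whose data_path no longer resolves on the datablock that owns them (the same lookup that fails in the traceback above):
import bpy

def remove_invalid_drivers():
    for obj in bpy.data.objects:
        # Collect the datablocks that can carry drivers: the object itself,
        # its data, and the data's shape keys (if any).
        owners = [obj]
        if obj.data is not None:
            owners.append(obj.data)
            shape_keys = getattr(obj.data, "shape_keys", None)
            if shape_keys is not None:
                owners.append(shape_keys)
        for owner in owners:
            anim = getattr(owner, "animation_data", None)
            if anim is None:
                continue
            for fcurve in list(anim.drivers):
                try:
                    owner.path_resolve(fcurve.data_path)
                except ValueError:
                    # The driver points at something that no longer exists.
                    anim.drivers.remove(fcurve)

remove_invalid_drivers()
After running it, try the glTF export again.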

Python 3.8 Downloading Packages/Modules error using PIP

I am trying to install numpy, but it gives this error. Please help; what should I do?
ERROR: Exception:
Traceback (most recent call last):
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_vendor\urllib3\response.py", line 425, in _error_catcher
yield
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_vendor\urllib3\response.py", line 507, in read
data = self._fp.read(amt) if not fp_closed else b""
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_vendor\cachecontrol\filewrapper.py", line 62, in read
data = self.__fp.read(amt)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\http\client.py", line 454, in read
n = self.readinto(b)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\http\client.py", line 498, in readinto
n = self.fp.readinto(b)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\socket.py", line 669, in readinto
return self._sock.recv_into(b)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\ssl.py", line 1241, in recv_into
return self.read(nbytes, buffer)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\ssl.py", line 1099, in read
return self._sslobj.read(len, buffer)
socket.timeout: The read operation timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\cli\base_command.py", line 186, in _main
status = self.run(options, args)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\commands\install.py", line 331, in run
resolver.resolve(requirement_set)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\legacy_resolve.py", line 177, in resolve
discovered_reqs.extend(self._resolve_one(requirement_set, req))
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\legacy_resolve.py", line 333, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\legacy_resolve.py", line 282, in _get_abstract_dist_for
abstract_dist = self.preparer.prepare_linked_requirement(req)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\operations\prepare.py", line 480, in prepare_linked_requirement
local_path = unpack_url(
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\operations\prepare.py", line 282, in unpack_url
return unpack_http_url(
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\operations\prepare.py", line 158, in unpack_http_url
from_path, content_type = _download_http_url(
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\operations\prepare.py", line 303, in _download_http_url
for chunk in download.chunks:
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\utils\ui.py", line 160, in iter
for x in it:
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_internal\network\utils.py", line 15, in response_chunks
for chunk in response.raw.stream(
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_vendor\urllib3\response.py", line 564, in stream
data = self.read(amt=amt, decode_content=decode_content)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_vendor\urllib3\response.py", line 529, in read
raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\contextlib.py", line 131, in __exit__
self.gen.throw(type, value, traceback)
File "c:\users\cutea\appdata\local\programs\python\python38-32\lib\site-packages\pip\_vendor\urllib3\response.py", line 430, in _error_catcher
raise ReadTimeoutError(self._pool, None, "Read timed out.")
pip._vendor.urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Read timed out.
Look directly at the last line:
Read timed out
Connect to Wi-Fi or a faster internet connection and try again.
My internet connection was poor when I got this error; I retried with a faster connection and it worked for me.
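If the connection is merely slow rather than down, giving pip a longer timeout and more retries can also help (both are standard pip install options):
pip install numpy --timeout 120 --retries 5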

How to solve Tensorflow.js Converter error?

I'm trying to convert a frozen graph to a JSON file. I use this command:
tensorflowjs_converter --input_format=tf_frozen_model --output_node_names="SemanticPredictions" --saved_model_tags=serve frozen_inference_graph.pb mymodal
But it gives this error:
Traceback (most recent call last):
File "d:\programdata\anaconda3\envs\tensorflow0\lib\runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "d:\programdata\anaconda3\envs\tensorflow0\lib\runpy.py", line 85, in _run_code
exec(code, run_globals)
File "D:\ProgramData\Anaconda3\envs\tensorflow0\Scripts\tensorflowjs_converter.exe\__main__.py", line 7, in <module>
File "d:\programdata\anaconda3\envs\tensorflow0\lib\site-packages\tensorflowjs\converters\converter.py", line 645, in pip_main
main([' '.join(sys.argv[1:])])
File "d:\programdata\anaconda3\envs\tensorflow0\lib\site-packages\tensorflowjs\converters\converter.py", line 649, in main
convert(argv[0].split(' '))
File "d:\programdata\anaconda3\envs\tensorflow0\lib\site-packages\tensorflowjs\converters\converter.py", line 632, in convert
strip_debug_ops=args.strip_debug_ops)
File "d:\programdata\anaconda3\envs\tensorflow0\lib\site-packages\tensorflowjs\converters\tf_saved_model_conversion_v2.py", line 379, in convert_tf_frozen_model
strip_debug_ops=strip_debug_ops)
File "d:\programdata\anaconda3\envs\tensorflow0\lib\site-packages\tensorflowjs\converters\tf_saved_model_conversion_v2.py", line 133, in optimize_graph
graph.add_to_collection('train_op', graph.get_operation_by_name(name))
File "d:\programdata\anaconda3\envs\tensorflow0\lib\site-packages\tensorflow_core\python\framework\ops.py", line 3633, in get_operation_by_name
return self.as_graph_element(name, allow_tensor=False, allow_operation=True)
File "d:\programdata\anaconda3\envs\tensorflow0\lib\site-packages\tensorflow_core\python\framework\ops.py", line 3505, in as_graph_element
return self._as_graph_element_locked(obj, allow_tensor, allow_operation)
File "d:\programdata\anaconda3\envs\tensorflow0\lib\site-packages\tensorflow_core\python\framework\ops.py", line 3565, in _as_graph_element_locked
"graph." % repr(name))
KeyError: "The name 'SemanticPredictions' refers to an Operation not in the graph."
I don't know why it gives KeyError: "The name 'SemanticPredictions' refers to an Operation not in the graph."
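The KeyError means the converter cannot find an op literally named SemanticPredictions in that frozen graph. One way to see which node names the graph actually contains is to list them with TensorFlow; a sketch using the TF 1.x-compatible API, with the .pb path taken from the command above:
import tensorflow as tf

# Parse the frozen graph and print every node name so the right
# --output_node_names value can be identified.
graph_def = tf.compat.v1.GraphDef()
with tf.io.gfile.GFile('frozen_inference_graph.pb', 'rb') as f:
    graph_def.ParseFromString(f.read())

for node in graph_def.node:
    print(node.name)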

IndentationError: expected an indented block, Scrapy

career#careercrawler:~/stack/stack$ scrapy crawl stack
Traceback (most recent call last):
File "/home/career/.local/bin/scrapy", line 11, in <module>
sys.exit(execute())
File "/home/career/.local/lib/python2.7/site-packages/scrapy/cmdline.py", line 141, in execute
cmd.crawler_process = CrawlerProcess(settings)
File "/home/career/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 238, in __init__
super(CrawlerProcess, self).__init__(settings)
File "/home/career/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 129, in __init__
self.spider_loader = _get_spider_loader(settings)
File "/home/career/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 325, in _get_spider_loader
return loader_cls.from_settings(settings.frozencopy())
File "/home/career/.local/lib/python2.7/site-packages/scrapy/spiderloader.py", line 33, in from_settings
return cls(settings)
File "/home/career/.local/lib/python2.7/site-packages/scrapy/spiderloader.py", line 20, in __init__
self._load_all_spiders()
File "/home/career/.local/lib/python2.7/site-packages/scrapy/spiderloader.py", line 28, in _load_all_spiders
for module in walk_modules(name):
File "/home/career/.local/lib/python2.7/site-packages/scrapy/utils/misc.py", line 71, in walk_modules
submod = import_module(fullpath)
File "/usr/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/home/career/stack/stack/spiders/stack_spider.py", line 4, in <module>
from stack.items import StackItem
File "/home/career/stack/stack/items.py", line 13
title = scrapy.Field()
^
IndentationError: expected an indented block
This is my error, and I don't know what is happening there. Can someone help me, please?
This error is because of indentation.
As mentioned in the traceback:
File "/home/career/stack/stack/items.py", line 13: title = scrapy.Field()
Go to ~/stack/stack/items.py and check the indentation at line 13.
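For reference, the line named in the traceback must be indented one level inside the item class. A minimal sketch of what items.py is presumably meant to look like (class and field names taken from the traceback):
import scrapy

class StackItem(scrapy.Item):
    # Field definitions must be indented under the class body; otherwise
    # Python raises "IndentationError: expected an indented block".
    title = scrapy.Field()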