Django Compressor returns an error when applied - redis

I have been implementing active search with htmx. After I changed my settings, my app stopped working. Here is the relevant part of my settings:
COMPRESS_FILTERS = {'css': ['libman.admin.PostCSSFilter']}
COMPRESS_ROOT = BASE_DIR / 'static'
COMPRESS_ENABLED = True
STATICFILES_FINDERS = [
    'compressor.finders.CompressorFinder',
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = [os.path.join(BASE_DIR,'static')]
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
I already have compressor added to my INSTALLED_APPS.
This is what I have in my libman app's admin.py:
from compressor.filters import CompilerFilter
class PostCSSFilter(CompilerFilter):
    command = 'postcss'
When I applied these two tags to my base template, I got an error:
{% compress css %}
<link rel="stylesheet" href="{% static 'src/main.css' %}">
{% endcompress %}
<!-- new -->
{% compress js %}
<script type="text/javascript" src="{% static 'src/htmx.js' %}"></script>
{% endcompress %}
I got the error below. Could anyone help me debug it?
C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\views\debug.py:420: ExceptionCycleWarning: Cycle in the exception chain detected: exception 'Error 10061 connecting to 127.0.0.1:6379. No connection could be made because the target machine actively refused it.' encountered again.
warnings.warn(
Internal Server Error: /library/home
Traceback (most recent call last):
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django_redis\cache.py", line 31, in _decorator
return method(self, *args, **kwargs)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django_redis\cache.py", line 98, in _get
return self.client.get(key, default=default, version=version, client=client)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django_redis\client\default.py", line 260, in get
raise ConnectionInterrupted(connection=client) from e
django_redis.exceptions.ConnectionInterrupted: Redis ConnectionError: Error 10061 connecting to 127.0.0.1:6379. No connection could be made because the target machine actively refused it.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\core\handlers\exception.py", line 47, in inner
response = get_response(request)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\core\handlers\base.py", line 181, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\contrib\auth\decorators.py", line 21, in _wrapped_view
return view_func(request, *args, **kwargs)
File "D:\Python\Django\Completed Projects\lib_system\Library-System\libman\views.py", line 273, in index
return render(request, 'libman/home.html',{'books':books,'students':students,
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\shortcuts.py", line 19, in render
content = loader.render_to_string(template_name, context, request, using=using)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\template\loader.py", line 62, in render_to_string
return template.render(context, request)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\template\backends\django.py", line 61, in render
return self.template.render(context)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\template\base.py", line 170, in render
return self._render(context)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\template\base.py", line 162, in _render
return self.nodelist.render(context)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\template\base.py", line 938, in render
bit = node.render_annotated(context)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\template\base.py", line 905, in render_annotated
return self.render(context)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\template\loader_tags.py", line 150, in render
return compiled_parent._render(context)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\template\base.py", line 162, in _render
return self.nodelist.render(context)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\template\base.py", line 938, in render
bit = node.render_annotated(context)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\template\base.py", line 905, in render_annotated
return self.render(context)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\compressor\templatetags\compress.py", line 143, in render
return self.render_compressed(context, self.kind, self.mode, forced=forced)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\compressor\templatetags\compress.py", line 111, in render_compressed
cache_key, cache_content = self.render_cached(compressor, kind, mode)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\compressor\templatetags\compress.py", line 89, in render_cached
cache_key = get_templatetag_cachekey(compressor, mode, kind)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\compressor\cache.py", line 104, in get_templatetag_cachekey
"templatetag.%s.%s.%s" % (compressor.cachekey, mode, kind))
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\utils\functional.py", line 48, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\compressor\base.py", line 200, in cachekey
[self.content] + self.mtimes).encode(self.charset), 12)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django\utils\functional.py", line 48, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\compressor\base.py", line 193, in mtimes
return [str(get_mtime(value))
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\compressor\base.py", line 193, in <listcomp>
return [str(get_mtime(value))
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\compressor\cache.py", line 110, in get_mtime
mtime = cache.get(key)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django_redis\cache.py", line 91, in get
value = self._get(key, default, version, client)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django_redis\cache.py", line 38, in _decorator
raise e.__cause__
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\django_redis\client\default.py", line 258, in get
value = client.get(key)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\redis\client.py", line 1606, in get
return self.execute_command('GET', name)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\redis\client.py", line 898, in execute_command
conn = self.connection or pool.get_connection(command_name, **options)
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\redis\connection.py", line 1192, in get_connection
connection.connect()
File "C:\Users\Ptar\AppData\Local\Programs\Python\Python39\lib\site-packages\redis\connection.py", line 563, in connect
raise ConnectionError(self._error_message(e))
redis.exceptions.ConnectionError: Error 10061 connecting to 127.0.0.1:6379. No connection could be made because the target machine actively refused it.
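Both chained tracebacks bottom out in the same place: django-compressor asks Django's cache for file mtimes (get_mtime in compressor/cache.py), the configured cache backend is django-redis, and nothing is listening on 127.0.0.1:6379. So the compressor settings above are probably fine; the immediate fix is to start the Redis server that CACHES points at, or, for local development, to temporarily fall back to an in-memory cache. A minimal sketch of the latter, assuming django-compressor is using the default cache alias:
# settings.py -- local-development fallback while Redis is unavailable.
# Assumes nothing else relies on Redis-specific cache features.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    }
}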

Related

Odoo showing KeyError: 'last_path_node' after migrating from v12 to v13 with OpenUpgrade

I migrated my Odoo instance from v12 to v13 using OpenUpgrade without errors. I also made changes in my code and updated my custom modules using
python3 odoo-bin --addons-path="./addons, ./extra_addons, ./OCA/contract" -p 8069 -d database_name -u module_name
also without errors. My Odoo instance apparently loads without errors; I can even see the Odoo main interface, but when I click an element from the main menu (let's say "Contacts") it shows "Internal Server Error", and when I check the logs I can see the following errors: "path = options['last_path_node']
KeyError: 'last_path_node'"
Here is the full log of the error:
Error context:
View `App Drawer - Web Client`
[view_id: 941, xml_id: web_responsive.webclient_bootstrap, model: n/a, parent_id: 178]
2022-09-08 13:33:39,610 19400 INFO wimax13 werkzeug: 127.0.0.1 - - [08/Sep/2022 13:33:39] "GET /web HTTP/1.1" 500 - 386 0.534 1.152
2022-09-08 13:33:39,973 19400 ERROR wimax13 werkzeug: Error on request:
Traceback (most recent call last):
File "/home/ernesto/Programming/odoo/wimax13/lib/python3.6/site-packages/werkzeug/serving.py", line 270, in run_wsgi
execute(self.server.app)
File "/home/ernesto/Programming/odoo/wimax13/lib/python3.6/site-packages/werkzeug/serving.py", line 258, in execute
application_iter = app(environ, start_response)
File "/home/ernesto/Programming/odoo/wimax13/odoo/service/server.py", line 439, in app
return self.app(e, s)
File "/home/ernesto/Programming/odoo/wimax13/odoo/service/wsgi_server.py", line 142, in application
return application_unproxied(environ, start_response)
File "/home/ernesto/Programming/odoo/wimax13/odoo/service/wsgi_server.py", line 117, in application_unproxied
result = odoo.http.root(environ, start_response)
File "/home/ernesto/Programming/odoo/wimax13/odoo/http.py", line 1287, in __call__
return self.dispatch(environ, start_response)
File "/home/ernesto/Programming/odoo/wimax13/odoo/http.py", line 1257, in __call__
return self.app(environ, start_wrapped)
File "/home/ernesto/Programming/odoo/wimax13/lib/python3.6/site-packages/werkzeug/wsgi.py", line 766, in __call__
return self.app(environ, start_response)
File "/home/ernesto/Programming/odoo/wimax13/odoo/http.py", line 1457, in dispatch
result = ir_http._dispatch()
File "/home/ernesto/Programming/odoo/wimax13/addons/website/models/ir_http.py", line 172, in _dispatch
response = super(Http, cls)._dispatch()
File "/home/ernesto/Programming/odoo/wimax13/addons/auth_signup/models/ir_http.py", line 19, in _dispatch
return super(Http, cls)._dispatch()
File "/home/ernesto/Programming/odoo/wimax13/addons/web_editor/models/ir_http.py", line 21, in _dispatch
return super(IrHttp, cls)._dispatch()
File "/home/ernesto/Programming/odoo/wimax13/addons/utm/models/ir_http.py", line 29, in _dispatch
response = super(IrHttp, cls)._dispatch()
File "/home/ernesto/Programming/odoo/wimax13/addons/http_routing/models/ir_http.py", line 519, in _dispatch
result = super(IrHttp, cls)._dispatch()
File "/home/ernesto/Programming/odoo/wimax13/odoo/addons/base/models/ir_http.py", line 238, in _dispatch
return cls._handle_exception(e)
File "/home/ernesto/Programming/odoo/wimax13/addons/utm/models/ir_http.py", line 34, in _handle_exception
response = super(IrHttp, cls)._handle_exception(exc)
File "/home/ernesto/Programming/odoo/wimax13/addons/http_routing/models/ir_http.py", line 610, in _handle_exception
return super(IrHttp, cls)._handle_exception(exception)
File "/home/ernesto/Programming/odoo/wimax13/odoo/addons/base/models/ir_http.py", line 206, in _handle_exception
return request._handle_exception(exception)
File "/home/ernesto/Programming/odoo/wimax13/odoo/http.py", line 750, in _handle_exception
return super(HttpRequest, self)._handle_exception(exception)
File "/home/ernesto/Programming/odoo/wimax13/odoo/http.py", line 310, in _handle_exception
raise pycompat.reraise(type(exception), exception, sys.exc_info()[2])
File "/home/ernesto/Programming/odoo/wimax13/odoo/tools/pycompat.py", line 14, in reraise
raise value
File "/home/ernesto/Programming/odoo/wimax13/odoo/addons/base/models/ir_http.py", line 234, in _dispatch
result = request.dispatch()
File "/home/ernesto/Programming/odoo/wimax13/odoo/http.py", line 809, in dispatch
r = self._call_function(**self.params)
File "/home/ernesto/Programming/odoo/wimax13/odoo/http.py", line 350, in _call_function
return checked_call(self.db, *args, **kwargs)
File "/home/ernesto/Programming/odoo/wimax13/odoo/service/model.py", line 94, in wrapper
return f(dbname, *args, **kwargs)
File "/home/ernesto/Programming/odoo/wimax13/odoo/http.py", line 342, in checked_call
result.flatten()
File "/home/ernesto/Programming/odoo/wimax13/odoo/http.py", line 1236, in flatten
self.response.append(self.render())
File "/home/ernesto/Programming/odoo/wimax13/odoo/http.py", line 1229, in render
return env["ir.ui.view"].render_template(self.template, self.qcontext)
File "/home/ernesto/Programming/odoo/wimax13/odoo/addons/base/models/ir_ui_view.py", line 1191, in render_template
return self.browse(self.get_view_id(template)).render(values, engine)
File "/home/ernesto/Programming/odoo/wimax13/addons/website/models/ir_ui_view.py", line 336, in render
return super(View, self).render(values, engine=engine, minimal_qcontext=minimal_qcontext)
File "/home/ernesto/Programming/odoo/wimax13/addons/web_editor/models/ir_ui_view.py", line 27, in render
return super(IrUiView, self).render(values=values, engine=engine, minimal_qcontext=minimal_qcontext)
File "/home/ernesto/Programming/odoo/wimax13/odoo/addons/base/models/ir_ui_view.py", line 1199, in render
return self.env[engine].render(self.id, qcontext)
File "/home/ernesto/Programming/odoo/wimax13/odoo/addons/base/models/ir_qweb.py", line 58, in render
result = super(IrQWeb, self).render(id_or_xml_id, values=values, **context)
File "/home/ernesto/Programming/odoo/wimax13/odoo/addons/base/models/qweb.py", line 260, in render
self.compile(template, options)(self, body.append, values or {})
File "<decorator-gen-54>", line 2, in compile
File "/home/ernesto/Programming/odoo/wimax13/odoo/tools/cache.py", line 90, in lookup
value = d[key] = self.method(*args, **kwargs)
File "/home/ernesto/Programming/odoo/wimax13/odoo/addons/base/models/ir_qweb.py", line 113, in compile
return super(IrQWeb, self).compile(id_or_xml_id, options=options)
File "/home/ernesto/Programming/odoo/wimax13/odoo/addons/base/models/qweb.py", line 277, in compile
element, document = self.get_template(template, options)
File "/home/ernesto/Programming/odoo/wimax13/odoo/addons/base/models/qweb.py", line 364, in get_template
path = options['last_path_node']
KeyError: 'last_path_node'
Based on the third line of your log, your error seems to be related to this view:
view_id: 941, xml_id: web_responsive.webclient_bootstrap
You can access this view using its id (941) in this kind of URL:
https://your-oerp.odoo.com/web?#id=941&action=28&model=ir.ui.view&view_type=form
Could you provide its content so the error can be investigated?
Is this view (with its relatively high id of 941) non-native, i.e. part of your custom module?
On the other hand, this view name (id: 941), "web_responsive.webclient_bootstrap", doesn't exist in Odoo 13 and seems to have been replaced.
In Odoo 13, the existing bootstrap-related views are:
web._assets_bootstrap
web.webclient_bootstrap
web_enterprise.webclient_bootstrap
web_studio.webclient_bootstrap
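If you have shell access to the database, you can also inspect the offending record directly; a minimal sketch using odoo shell, with the view id 941 taken from the log above (name, active, and arch are standard ir.ui.view fields):
# Run inside `odoo-bin shell -d database_name`.
# If the view belongs to a module that no longer exists in v13 (as
# web_responsive.webclient_bootstrap appears to), archiving or removing
# it is a common cleanup step after an OpenUpgrade migration.
view = env['ir.ui.view'].browse(941)
print(view.name, view.active)
print(view.arch)  # the QWeb source the renderer failed on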

Solidity ConnectionError: HTTPConnectionPool(host='0.0.0.0', port=7545)

Macbook Pro : Monterey
Intel Core i7
Brownie v1.17.2
I am learning Solidity following this reference: https://www.youtube.com/watch?v=M576WGiDBdQ&t=25510s.
At 5:41:17 in the freeCodeCamp video, when I tried to deploy to ganache-local, this error popped up.
Before refactoring I deployed to the Ganache UI and to Rinkeby successfully; after I added ganache-local to the Brownie networks, this error appeared.
ConnectionError: HTTPConnectionPool(host='0.0.0.0', port=7545): Max retries exceeded with url: / (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fd62639d700>: Failed to establish a new connection: [Errno 61] Connection refused'))
Full error:
BronieFundMe2022Project is the active project.
Running 'scripts/deploy.py::main'...
The active network is ganache-local
Deploying Mocks....
File "brownie/_cli/run.py", line 50, in main
return_value, frame = run(
File "brownie/project/scripts.py", line 103, in run
return_value = f_locals[method_name](*args, **kwargs)
File "./scripts/deploy.py", line 27, in main
deploy_fund_me()
File "./scripts/deploy.py", line 16, in deploy_fund_me
deploy_mocks()
File "./scripts/helpful_scripts.py", line 20, in deploy_mocks
MockV3Aggregator.deploy(DECIMALS,Web3.toWei(STARTING_PRICE,"ether"),{"from":get_account()})
File "brownie/network/contract.py", line 528, in __call__
return tx["from"].deploy(
File "brownie/network/account.py", line 510, in deploy
receipt, exc = self._make_transaction(
File "brownie/network/account.py", line 720, in _make_transaction
gas_price, gas_strategy, gas_iter = self._gas_price(gas_price)
File "brownie/network/account.py", line 456, in _gas_price
return web3.eth.generate_gas_price(), None, None
File "web3/eth.py", line 877, in generate_gas_price
return self._generate_gas_price(transaction_params)
File "web3/eth.py", line 173, in _generate_gas_price
return self.gasPriceStrategy(self.web3, transaction_params)
File "web3/gas_strategies/rpc.py", line 20, in rpc_gas_price_strategy
return web3.manager.request_blocking(RPC.eth_gasPrice, [])
File "web3/manager.py", line 197, in request_blocking
response = self._make_request(method, params)
File "web3/manager.py", line 150, in _make_request
return request_func(method, params)
File "cytoolz/functoolz.pyx", line 250, in cytoolz.functoolz.curry.__call__
File "web3/middleware/formatting.py", line 76, in apply_formatters
response = make_request(method, params)
File "web3/middleware/gas_price_strategy.py", line 90, in middleware
return make_request(method, params)
File "cytoolz/functoolz.pyx", line 250, in cytoolz.functoolz.curry.__call__
File "web3/middleware/formatting.py", line 76, in apply_formatters
response = make_request(method, params)
File "web3/middleware/attrdict.py", line 33, in middleware
response = make_request(method, params)
File "cytoolz/functoolz.pyx", line 250, in cytoolz.functoolz.curry.__call__
File "web3/middleware/formatting.py", line 76, in apply_formatters
response = make_request(method, params)
File "cytoolz/functoolz.pyx", line 250, in cytoolz.functoolz.curry.__call__
File "web3/middleware/formatting.py", line 76, in apply_formatters
response = make_request(method, params)
File "cytoolz/functoolz.pyx", line 250, in cytoolz.functoolz.curry.__call__
File "web3/middleware/formatting.py", line 76, in apply_formatters
response = make_request(method, params)
File "web3/middleware/buffered_gas_estimate.py", line 40, in middleware
return make_request(method, params)
File "web3/middleware/exception_retry_request.py", line 105, in middleware
return make_request(method, params)
File "web3/providers/rpc.py", line 88, in make_request
raw_response = make_post_request(
File "web3/_utils/request.py", line 48, in make_post_request
response = session.post(endpoint_uri, data=data, *args, **kwargs) # type: ignore
File "requests/sessions.py", line 590, in post
return self.request('POST', url, data=data, json=json, **kwargs)
File "requests/sessions.py", line 542, in request
resp = self.send(prep, **send_kwargs)
File "requests/sessions.py", line 655, in send
r = adapter.send(request, **kwargs)
File "requests/adapters.py", line 516, in send
raise ConnectionError(e, request=request)
ConnectionError: HTTPConnectionPool(host='0.0.0.0', port=7545): Max retries exceeded with url: / (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fd62639d700>: Failed to establish a new connection: [Errno 61] Connection refused'))
Deploy script:
from brownie import FundMe, network, config, MockV3Aggregator
from scripts.helpful_scripts import (deploy_mocks, get_account, LOCAL_BLOCKCHAIN_ENVIRONMENT)

def deploy_fund_me():
    account = get_account()
    # pass the price feed address to our FundMe contract
    # if we are on a persistent network like rinkeby, use the associated address
    # otherwise, deploy mocks
    if network.show_active() not in LOCAL_BLOCKCHAIN_ENVIRONMENT:
        price_feed_address = config["networks"][network.show_active()]["eth_usd_price_feed"]
    else:
        deploy_mocks()
        price_feed_address = MockV3Aggregator[-1].address
    fund_me = FundMe.deploy(
        price_feed_address,
        {"from": account},
        publish_source=config["networks"][network.show_active()].get("verify"),
    )
    print(f"The contract deployed to {fund_me.address}")

def main():
    deploy_fund_me()
The problem was fixed by running brownie networks delete ganache-local, then adding the network again with the Ganache port set to 8545 and the chain id to 1337, and rebooting.
Run "brownie accounts delete (name of account you stored the Ganache address)
Then run your Ganache normally
If 0.0.0.0 doesn't work, try 127.0.0.1, because a server bound to 0.0.0.0 will accept connections on 127.0.0.1 too. For example:
brownie networks add Ethereum ganache-local host=http://127.0.0.1:8545 chainid=1337
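Before re-running the deploy script, it can also help to confirm that the RPC endpoint is actually reachable; a quick sketch with web3.py (v5-style API), assuming Ganache listens on 127.0.0.1:8545 as configured above:
# Quick connectivity check against the local Ganache RPC endpoint.
from web3 import Web3

w3 = Web3(Web3.HTTPProvider("http://127.0.0.1:8545"))
print(w3.isConnected())  # True only if something is listening and answering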

Why does a requests.exceptions.ReadTimeout error happen when executing (heavy?) code in Ganache using web3py?

I'm trying to generate a sorted list of random uint32 numbers. Generating the list is easily done:
for (uint24 i = 1; i < limit; i++) {
    seed = uint(keccak256(abi.encodePacked(seed)));
    sorted[i] = uint32(seed);
}
where limit is a uint24 indicating the number of samples, seed is an arbitrary uint, and sorted is a uint32[limit] array. However, if I try to generate a sorted array like this:
for (uint24 i = 1; i < limit; i++) {
    seed = uint(keccak256(abi.encodePacked(seed)));
    sorted[i] = uint32(seed);
    uint24 j = i;
    while (sorted[j - 1] > sorted[j]) {
        (sorted[j - 1], sorted[j]) = (sorted[j], sorted[j - 1]);
        j--;
        if (j == 0) {
            break;
        }
    }
}
then this yields the expected result for small values of limit (like 10), but web3py fails with the following error for bigger inputs (like 300) when I try to call the function associated with the previous code:
Traceback (most recent call last):
File "/home/personal/Python/venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 445, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "/home/personal/Python/venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 440, in _make_request
httplib_response = conn.getresponse()
File "/usr/lib/python3.9/http/client.py", line 1371, in getresponse
response.begin()
File "/usr/lib/python3.9/http/client.py", line 319, in begin
version, status, reason = self._read_status()
File "/usr/lib/python3.9/http/client.py", line 280, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/usr/lib/python3.9/socket.py", line 704, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/personal/Python/venv/lib/python3.9/site-packages/requests/adapters.py", line 439, in send
resp = conn.urlopen(
File "/home/personal/Python/venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 755, in urlopen
retries = retries.increment(
File "/home/personal/Python/venv/lib/python3.9/site-packages/urllib3/util/retry.py", line 532, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/home/personal/Python/venv/lib/python3.9/site-packages/urllib3/packages/six.py", line 770, in reraise
raise value
File "/home/personal/Python/venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 699, in urlopen
httplib_response = self._make_request(
File "/home/personal/Python/venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 447, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
File "/home/personal/Python/venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 336, in _raise_timeout
raise ReadTimeoutError(
urllib3.exceptions.ReadTimeoutError: HTTPConnectionPool(host='127.0.0.1', port=8545): Read timed out. (read timeout=10)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/contract.py", line 957, in call
return call_contract_function(
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/contract.py", line 1501, in call_contract_function
return_data = web3.eth.call(
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/module.py", line 57, in caller
result = w3.manager.request_blocking(method_str,
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/manager.py", line 186, in request_blocking
response = self._make_request(method, params)
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/manager.py", line 147, in _make_request
return request_func(method, params)
File "cytoolz/functoolz.pyx", line 250, in cytoolz.functoolz.curry.__call__
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/middleware/formatting.py", line 76, in apply_formatters
response = make_request(method, params)
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/middleware/gas_price_strategy.py", line 90, in middleware
return make_request(method, params)
File "cytoolz/functoolz.pyx", line 250, in cytoolz.functoolz.curry.__call__
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/middleware/formatting.py", line 74, in apply_formatters
response = make_request(method, formatted_params)
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/middleware/attrdict.py", line 33, in middleware
response = make_request(method, params)
File "cytoolz/functoolz.pyx", line 250, in cytoolz.functoolz.curry.__call__
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/middleware/formatting.py", line 74, in apply_formatters
response = make_request(method, formatted_params)
File "cytoolz/functoolz.pyx", line 250, in cytoolz.functoolz.curry.__call__
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/middleware/formatting.py", line 74, in apply_formatters
response = make_request(method, formatted_params)
File "cytoolz/functoolz.pyx", line 250, in cytoolz.functoolz.curry.__call__
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/middleware/formatting.py", line 74, in apply_formatters
response = make_request(method, formatted_params)
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/middleware/buffered_gas_estimate.py", line 40, in middleware
return make_request(method, params)
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/middleware/exception_retry_request.py", line 105, in middleware
return make_request(method, params)
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/providers/rpc.py", line 88, in make_request
raw_response = make_post_request(
File "/home/personal/Python/venv/lib/python3.9/site-packages/web3/_utils/request.py", line 48, in make_post_request
response = session.post(endpoint_uri, data=data, *args, **kwargs) # type: ignore
File "/home/personal/Python/venv/lib/python3.9/site-packages/requests/sessions.py", line 590, in post
return self.request('POST', url, data=data, json=json, **kwargs)
File "/home/personal/Python/venv/lib/python3.9/site-packages/requests/sessions.py", line 542, in request
resp = self.send(prep, **send_kwargs)
File "/home/personal/Python/venv/lib/python3.9/site-packages/requests/sessions.py", line 655, in send
r = adapter.send(request, **kwargs)
File "/home/personal/Python/venv/lib/python3.9/site-packages/requests/adapters.py", line 529, in send
raise ReadTimeout(e, request=request)
requests.exceptions.ReadTimeout: HTTPConnectionPool(host='127.0.0.1', port=8545): Read timed out. (read timeout=10)
I guess that Ganache is quite busy and that's why it can't answer fast enough for web3py, but the added code doesn't seem so heavy that it can't be dealt with. Or am I missing something else that makes this code too heavy for Ganache?
You can set a timeout in your web3.py HTTP provider:
Web3(Web3.HTTPProvider(endpoint_uri="http://127.0.0.1:8545", request_kwargs={'timeout': 600}))
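For context, a complete setup with the longer timeout might look like the sketch below; the endpoint and the 600-second value come from the snippet above, while the contract handle and function name are hypothetical placeholders:
# Sketch: raise the HTTP read timeout so a slow eth_call (like the
# sorting loop with limit=300) is not cut off at the 10 s default.
from web3 import Web3

w3 = Web3(Web3.HTTPProvider(
    endpoint_uri="http://127.0.0.1:8545",
    request_kwargs={'timeout': 600},
))
# contract = w3.eth.contract(address=..., abi=...)  # your deployed contract
# result = contract.functions.generateSorted(300).call()  # hypothetical name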

How to store crawled data from Scrapy to FTP as csv?

My Scrapy settings.py:
from datetime import datetime
file_name = datetime.today().strftime('%Y-%m-%d_%H%M_')
save_name = file_name + 'Mobile_Nshopping'
FEED_URI = 'ftp://myusername:mypassword#ftp.mymail.com/uploads/%(save_name)s.csv'
When I run my spider with scrapy crawl my_project_name, I get the error below.
Do I have to create a pipeline?
\scrapy\extensions\feedexport.py:247: ScrapyDeprecationWarning: The `FEED_URI` and `FEED_FORMAT` settings have been deprecated in favor of the `FEEDS` setting. Please see the `FEEDS` setting docs for more details
exporter = cls(crawler)
Traceback (most recent call last):
File "c:\users\viren\appdata\local\programs\python\python38\lib\runpy.py", line 194, in _run_module_as_main
return _run_code(code, main_globals, None,
File "c:\users\viren\appdata\local\programs\python\python38\lib\runpy.py", line 87, in _run_code
exec(code, run_globals)
File "C:\Users\viren\AppData\Local\Programs\Python\Python38\Scripts\scrapy.exe\__main__.py", line 7, in <module>
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\cmdline.py", line 145, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\cmdline.py", line 100, in _run_print_help
func(*a, **kw)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\cmdline.py", line 153, in _run_command
cmd.run(args, opts)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\commands\crawl.py", line 22, in run
crawl_defer = self.crawler_process.crawl(spname, **opts.spargs)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\crawler.py", line 191, in crawl
crawler = self.create_crawler(crawler_or_spidercls)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\crawler.py", line 224, in create_crawler
return self._create_crawler(crawler_or_spidercls)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\crawler.py", line 229, in _create_crawler
return Crawler(spidercls, self.settings)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\crawler.py", line 72, in __init__
self.extensions = ExtensionManager.from_crawler(self)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\middleware.py", line 53, in from_crawler
return cls.from_settings(crawler.settings, crawler)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\middleware.py", line 35, in from_settings
mw = create_instance(mwcls, settings, crawler)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\utils\misc.py", line 167, in create_instance
instance = objcls.from_crawler(crawler, *args, **kwargs)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\extensions\feedexport.py", line 247, in from_crawler
exporter = cls(crawler)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\extensions\feedexport.py", line 282, in __init__
if not self._storage_supported(uri, feed_options):
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\extensions\feedexport.py", line 427, in _storage_supported
self._get_storage(uri, feed_options)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\extensions\feedexport.py", line 458, in _get_storage
instance = build_instance(feedcls.from_crawler, crawler)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\extensions\feedexport.py", line 455, in build_instance
return build_storage(builder, uri, feed_options=feed_options, preargs=preargs)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\extensions\feedexport.py", line 46, in build_storage
return builder(*preargs, uri, *args, **kwargs)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\extensions\feedexport.py", line 201, in from_crawler
return build_storage(
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\extensions\feedexport.py", line 46, in build_storage
return builder(*preargs, uri, *args, **kwargs)
File "c:\users\viren\appdata\local\programs\python\python38\lib\site-packages\scrapy\extensions\feedexport.py", line 192, in __init__
self.port = int(u.port or '21')
File "c:\users\viren\appdata\local\programs\python\python38\lib\urllib\parse.py", line 174, in port
raise ValueError(message) from None
ValueError: Port could not be cast to integer value as 'Edh=)9sd'
I don't know how to store the CSV on FTP.
Is the error coming because my password is an int?
Is there anything I forgot to write?
Do I have to create a pipeline?
Yes, you probably should create a pipeline. As shown in the Scrapy Architecture Diagram, the basic concept is this: requests are sent, responses come back and are processed by the spider, and finally, the pipeline does something with the items returned by the spider. In your case, you could create a pipeline that saves the data in a CSV file and uploads it to an FTP server. See Scrapy's Item Pipeline documentation for more information.
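For illustration, a rough sketch of such a pipeline is below (not a drop-in solution): the class name, output file name, and FTP credentials are all placeholders, and you would still need to enable it via ITEM_PIPELINES in settings.py.
# pipelines.py -- sketch: buffer items into a local CSV, then upload it
# over FTP when the spider closes.
import csv
from ftplib import FTP

class CsvFtpPipeline:
    def open_spider(self, spider):
        self.file = open('items.csv', 'w', newline='')
        self.writer = None

    def process_item(self, item, spider):
        row = dict(item)
        if self.writer is None:  # write the header row from the first item
            self.writer = csv.DictWriter(self.file, fieldnames=list(row))
            self.writer.writeheader()
        self.writer.writerow(row)
        return item

    def close_spider(self, spider):
        self.file.close()
        with FTP('ftp.mymail.com', 'myusername', 'mypassword') as ftp:
            with open('items.csv', 'rb') as f:
                ftp.storbinary('STOR uploads/items.csv', f)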
I don't know how to store the CSV on FTP. Is the error coming because my password is an int? Is there anything I forgot to write?
I believe this is due to the deprecation warning below (shown at the top of the errors you provided):
ScrapyDeprecationWarning: The FEED_URI and FEED_FORMAT settings have been deprecated in favor of the FEEDS setting. Please see the FEEDS setting docs for more details.
Try replacing FEED_URI with FEEDS; see the Scrapy documentation on FEEDS.
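For example, the FEED_URI from the question might translate into something like this (same placeholder credentials; note the @ before the host, and that any special characters in the password must be percent-encoded in a URI):
# settings.py -- sketch of the newer FEEDS setting replacing FEED_URI.
from datetime import datetime

save_name = datetime.today().strftime('%Y-%m-%d_%H%M_') + 'Mobile_Nshopping'

FEEDS = {
    f'ftp://myusername:mypassword@ftp.mymail.com/uploads/{save_name}.csv': {
        'format': 'csv',
    },
}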
You need to specify the port as well; you can specify this in the settings.
See also the class definition from the Scrapy docs:
class FTPFilesStore:

    FTP_USERNAME = None
    FTP_PASSWORD = None
    USE_ACTIVE_MODE = None

    def __init__(self, uri):
        if not uri.startswith("ftp://"):
            raise ValueError(f"Incorrect URI scheme in {uri}, expected 'ftp'")
        u = urlparse(uri)
        self.port = u.port
        self.host = u.hostname
        self.port = int(u.port or 21)
        self.username = u.username or self.FTP_USERNAME
        self.password = u.password or self.FTP_PASSWORD
        self.basedir = u.path.rstrip('/')

Scrapy Selenium geckodriver problem - error while trying to scrape

Unhandled error in Deferred:
2020-07-24 09:12:40 [twisted] CRITICAL: Unhandled error in Deferred:
Traceback (most recent call last):
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 192, in crawl
    return self._crawl(crawler, *args, **kwargs)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 196, in _crawl
    d = crawler.crawl(*args, **kwargs)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/twisted/internet/defer.py", line 1613, in unwindGenerator
    return _cancellableInlineCallbacks(gen)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/twisted/internet/defer.py", line 1529, in _cancellableInlineCallbacks
    _inlineCallbacks(None, g, status)
--- <exception caught here> ---
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
    result = g.send(result)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 87, in crawl
    self.engine = self._create_engine()
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 101, in _create_engine
    return ExecutionEngine(self, lambda _: self.stop())
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/core/engine.py", line 69, in __init__
    self.downloader = downloader_cls(crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/core/downloader/__init__.py", line 83, in __init__
    self.middleware = DownloaderMiddlewareManager.from_crawler(crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/middleware.py", line 53, in from_crawler
    return cls.from_settings(crawler.settings, crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/middleware.py", line 35, in from_settings
    mw = create_instance(mwcls, settings, crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/utils/misc.py", line 150, in create_instance
    instance = objcls.from_crawler(crawler, *args, **kwargs)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy_selenium/middlewares.py", line 67, in from_crawler
    middleware = cls(
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy_selenium/middlewares.py", line 43, in __init__
    for argument in driver_arguments:
builtins.TypeError: 'NoneType' object is not iterable
My settings.py:
from shutil import which
SELENIUM_DRIVER_NAME = 'firefox'
SELENIUM_DRIVER_EXECUTABLE_PATH = which('geckodriver')
SELENIUM_BROWSER_EXECUTABLE_PATH = which('firefox')
...
'scrapy_selenium.SeleniumMiddleware': 800,
Permissions for the driver look fine:
:/usr/local/bin$ ll | grep gecko
-rwxrwxrwx 1 baku baku 7008696 lip 24 09:09 geckodriver*
Crawler code:
class LinkedInProfileSeleniumSpider(scrapy.Spider):
    name = 'lips'
    allowed_domains = ['www.linkedin.com']

    def start_requests(self):
        yield SeleniumRequest(
            url="https://www.linkedin.com/login/",
            callback=self.proceed_login,
            wait_until=(
                EC.presence_of_element_located(
                    (By.CSS_SELECTOR, "#username")
                )
            ),
            script='window.scrollTo(0, document.body.scrollHeight);',
            wait_time=30
        )

    def proceed_login(self, response):
        # AFTER LOGIN
        driver = response.request.meta['driver']
        ...
Can you please help me figure out why it's failing? Thanks!
(By the way, it works with the Chrome driver but fails with gecko.)
I had the same problem on a Mac; this time I am trying on an Ubuntu machine.
I am not sure what the issue is or where to debug.
It does not even reach self.proceed_login; it fails on the first request.
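Judging from the traceback, the crash happens before any request is made: scrapy-selenium's middleware runs for argument in driver_arguments: (scrapy_selenium/middlewares.py, line 43), and that iterable comes from the SELENIUM_DRIVER_ARGUMENTS setting, which appears to be missing here, so None is iterated. A minimal sketch of the settings with that key added (the -headless flag is just an example argument):
# settings.py -- scrapy-selenium iterates SELENIUM_DRIVER_ARGUMENTS, so it
# must at least be a (possibly empty) list rather than missing/None.
from shutil import which

SELENIUM_DRIVER_NAME = 'firefox'
SELENIUM_DRIVER_EXECUTABLE_PATH = which('geckodriver')
SELENIUM_BROWSER_EXECUTABLE_PATH = which('firefox')
SELENIUM_DRIVER_ARGUMENTS = ['-headless']  # or [] for no extra arguments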