In a project, I tried to implement the circuit breaker pattern on a Redis service. I found a project at Shopify that has an implementation:
https://github.com/Shopify/semian
https://github.com/Shopify/semian/blob/master/lib/semian/redis.rb
I followed its instructions and tried to deploy it to my production environment, and I got a lot of Connection Timeout errors at Sidekiq initialization.
It looks like the Redis adapter used by Semian is conflicting with the Redis calls made by Sidekiq.
Redis::TimeoutError: [redis_x.y.z.k:port/x] Connection timed out
74 non-project frames:
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/connection/ruby.rb" line 56 in block in _read_from_socket
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/connection/ruby.rb" line 52 in loop
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/connection/ruby.rb" line 52 in _read_from_socket
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/connection/ruby.rb" line 45 in gets
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/connection/ruby.rb" line 378 in read
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 280 in block in read
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 268 in io
File "/usr/local/bundle/gems/semian-0.16.0/lib/semian/redis.rb" line 95 in block in io
File "/usr/local/bundle/gems/semian-0.16.0/lib/semian/adapter.rb" line 74 in mark_resource_as_acquired
File "/usr/local/bundle/gems/semian-0.16.0/lib/semian/adapter.rb" line 38 in block in acquire_semian_resource
File "/usr/local/bundle/gems/semian-0.16.0/lib/semian/unprotected_resource.rb" line 27 in acquire
File "/usr/local/bundle/gems/semian-0.16.0/lib/semian/adapter.rb" line 37 in acquire_semian_resource
File "/usr/local/bundle/gems/semian-0.16.0/lib/semian/redis.rb" line 94 in io
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 279 in read
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 131 in block in call
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 248 in block (2 levels) in process
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 389 in ensure_connected
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 238 in block in process
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 325 in logging
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 237 in process
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 131 in call
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/contrib/redis/instrumentation.rb" line 30 in block in call
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/trace_operation.rb" line 172 in block in measure
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/span_operation.rb" line 153 in measure
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/trace_operation.rb" line 172 in measure
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/tracer.rb" line 376 in start_span
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/tracer.rb" line 159 in block in trace
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/context.rb" line 45 in activate!
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/tracer.rb" line 158 in trace
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing.rb" line 18 in trace
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/contrib/redis/instrumentation.rb" line 24 in call
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 226 in block in call_with_timeout
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 300 in with_socket_timeout
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis/client.rb" line 225 in call_with_timeout
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis.rb" line 1284 in block in brpoplpush
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis.rb" line 69 in block in synchronize
File "/usr/local/lib/ruby/2.7.0/monitor.rb" line 202 in synchronize
File "/usr/local/lib/ruby/2.7.0/monitor.rb" line 202 in mon_synchronize
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis.rb" line 69 in synchronize
File "/usr/local/bundle/gems/redis-4.2.5/lib/redis.rb" line 1281 in brpoplpush
File "/usr/local/bundle/gems/sidekiq-pro-5.2.1/lib/sidekiq/pro/super_fetch.rb" line 337 in block in strict
File "/usr/local/bundle/gems/sidekiq-6.1.3/lib/sidekiq.rb" line 98 in block in redis
File "/usr/local/bundle/gems/connection_pool-2.2.3/lib/connection_pool.rb" line 63 in block (2 levels) in with
File "/usr/local/bundle/gems/connection_pool-2.2.3/lib/connection_pool.rb" line 62 in handle_interrupt
File "/usr/local/bundle/gems/connection_pool-2.2.3/lib/connection_pool.rb" line 62 in block in with
File "/usr/local/bundle/gems/connection_pool-2.2.3/lib/connection_pool.rb" line 59 in handle_interrupt
File "/usr/local/bundle/gems/connection_pool-2.2.3/lib/connection_pool.rb" line 59 in with
File "/usr/local/bundle/gems/sidekiq-6.1.3/lib/sidekiq.rb" line 95 in redis
File "/usr/local/bundle/gems/sidekiq-pro-5.2.1/lib/sidekiq/pro/super_fetch.rb" line 323 in strict
File "/usr/local/bundle/gems/sidekiq-pro-5.2.1/lib/sidekiq/pro/super_fetch.rb" line 300 in call
File "/usr/local/bundle/gems/sidekiq-pro-5.2.1/lib/sidekiq/pro/super_fetch.rb" line 300 in get_job
File "/usr/local/bundle/gems/sidekiq-pro-5.2.1/lib/sidekiq/pro/super_fetch.rb" line 288 in block in retrieve_work
File "/usr/local/bundle/gems/connection_pool-2.2.3/lib/connection_pool.rb" line 63 in block (2 levels) in with
File "/usr/local/bundle/gems/connection_pool-2.2.3/lib/connection_pool.rb" line 62 in handle_interrupt
File "/usr/local/bundle/gems/connection_pool-2.2.3/lib/connection_pool.rb" line 62 in block in with
File "/usr/local/bundle/gems/connection_pool-2.2.3/lib/connection_pool.rb" line 59 in handle_interrupt
File "/usr/local/bundle/gems/connection_pool-2.2.3/lib/connection_pool.rb" line 59 in with
File "/usr/local/bundle/gems/sidekiq-pro-5.2.1/lib/sidekiq/pro/super_fetch.rb" line 287 in retrieve_work
File "/usr/local/bundle/gems/sidekiq-6.1.3/lib/sidekiq/processor.rb" line 83 in get_one
File "/usr/local/bundle/gems/sidekiq-6.1.3/lib/sidekiq/processor.rb" line 95 in fetch
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/contrib/sidekiq/server_internal_tracer/job_fetch.rb" line 26 in block in fetch
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/trace_operation.rb" line 172 in block in measure
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/span_operation.rb" line 153 in measure
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/trace_operation.rb" line 172 in measure
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/tracer.rb" line 376 in start_span
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/tracer.rb" line 159 in block in trace
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/context.rb" line 45 in activate!
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/tracer.rb" line 158 in trace
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing.rb" line 18 in trace
File "/usr/local/bundle/gems/ddtrace-1.0.0/lib/datadog/tracing/contrib/sidekiq/server_internal_tracer/job_fetch.rb" line 15 in fetch
File "/usr/local/bundle/gems/sidekiq-6.1.3/lib/sidekiq/processor.rb" line 77 in process_one
File "/usr/local/bundle/gems/sidekiq-6.1.3/lib/sidekiq/processor.rb" line 68 in run
File "/usr/local/bundle/gems/sidekiq-6.1.3/lib/sidekiq/util.rb" line 15 in watchdog
File "/usr/local/bundle/gems/sidekiq-6.1.3/lib/sidekiq/util.rb" line 24 in block in safe_thread
Has anyone tried using Semian's Redis adapter together with Sidekiq before?
Any clues? Thank you in advance.
The code below is an automated CookieClicker I wrote while experimenting with ActionChains. It's based on a tutorial video, at 9:42. (Link)
When I run this code, the for loop runs through 1000 iterations but only 1 click happens. Multiple clicks only happen if I remove the "#" from the commented line, so that actions.click(cookie) runs each time. In the video, that extra line of code is not necessary. What could be the cause of that?
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.action_chains import ActionChains
s = Service("C:\Program Files (x86)\chromedriver.exe")
driver = webdriver.Chrome(service=s)
driver.maximize_window()
driver.get("https://orteil.dashnet.org/cookieclicker/")
driver.implicitly_wait(5)
cookie = driver.find_element(By.ID, "bigCookie")
cookie_count = driver.find_element(By.ID, "cookies")
actions = ActionChains(driver)
actions.click(cookie)
for i in range(1000):
    #actions.click(cookie)
    actions.perform()
    count = int(cookie_count.text.split(" ")[0])
    print(i, count)
driver.quit()
The ActionChains implementation
ActionChains can be used in a chain pattern. When you call methods for actions on the ActionChains object, the actions are stored in a queue in the ActionChains object. When you call perform(), the events are fired in the order they are queued up.
perform()
Performs all stored actions.
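As a small illustration of the queueing behaviour (a minimal sketch, not from the original answer, reusing the bigCookie element from the question): several clicks queued on one chain all fire, in order, on a single perform().
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains

driver = webdriver.Chrome()
driver.get("https://orteil.dashnet.org/cookieclicker/")
driver.implicitly_wait(5)
cookie = driver.find_element(By.ID, "bigCookie")

# Queue three click actions on the same chain...
chain = ActionChains(driver).click(cookie).click(cookie).click(cookie)
# ...a single perform() fires all three queued clicks in order.
chain.perform()
driver.quit()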
Conclusion
perform() fires the events stored in the queue. In your use case, actions.click(cookie) is the queued event.
Your optimal code block will be:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

driver.get("https://orteil.dashnet.org/cookieclicker/")
cookie_count = WebDriverWait(driver, 20).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#cookies")))
cookie = WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.CSS_SELECTOR, "#bigCookie")))
for i in range(100):
    ActionChains(driver).click(cookie).perform()
    count = cookie_count.text.split(" ")[0]
    print(i, count)
driver.quit()
Console Output:
0 0
1 1
2 2
3 3
4 4
5 5
6 6
7 7
8 8
9 9
10 10
11 11
12 12
13 13
14 14
15 15
16 16
17 17
18 18
19 19
20 20
21 21
22 22
23 23
24 24
25 25
26 26
27 27
28 28
29 29
30 30
31 31
32 32
33 33
34 34
35 35
36 36
37 37
38 38
39 39
40 40
41 41
42 42
43 43
44 44
45 45
46 46
47 47
48 48
49 49
50 50
51 51
52 52
53 53
54 54
55 55
56 56
57 57
58 58
59 59
60 60
61 61
62 62
63 63
64 64
65 65
66 66
67 67
68 68
69 69
70 70
71 71
72 72
73 73
74 74
75 75
76 76
77 77
78 78
79 79
80 80
81 81
82 82
83 83
84 84
85 85
86 86
87 87
88 88
89 89
90 90
91 91
92 92
93 93
94 94
95 95
96 96
97 97
98 98
99 99
I assume you are using actions for the sake of using them, or to learn about them, since you could simply call cookie.click() to get the desired result.
Actions are used when you need to perform some "action" on an element other than finding it or clicking it, e.g. a right click, a click and hold, or a keystroke combination, and so on. Check Selenium Actions for more info.
As for using actions to click, you also need to understand that perform() takes the composite of your actions (it calls the build function of Actions) and executes them. Since your action is queued outside the for loop, after the first click the perform() call has no more actions to perform.
TLDR:
Either uncomment actions.click(cookie) inside your for loop, or use cookie.click() to get the same result without using actions.
for i in range(10):
    actions.click(cookie)
    actions.perform()
    #cookie.click()
    count = int(cookie_count.text.split(" ")[0])
    print(i, count)
driver.quit()
Colab Notebook of it working
I will ask my question with an example. I have 2 files:
File1-
TR100013|c0_g1
TR100013|c0_g2
TR10009|c0_g1
TR10009|c0_g2
File2-
TR100013|c0_g1 AT1G01360.1 78.79 165 35 0 301 795 19 183 2E-089 272
TR100013|c0_g2 AT1G01360.1 78.79 165 35 0 301 795 19 183 2E-089 272
TR10009|c0_g1 AT1G16240.3 77.42 62 14 0 261 76 113 174 4E-025 95.9
TR10009|c0_g2 AT1G16240.2 69.17 120 37 0 1007 648 113 232 2E-050 171
TR29295|c0_g1 AT1G22540.1 69.19 172 53 2 6 521 34 200 2E-053 180
TR49005|c5_g1 AT5G24530.1 69.21 302 90 1 909 13 39 340 5E-157 446
Expected Output:
TR100013|c0_g1 AT1G01360.1 78.79 165 35 0 301 795 19 183 2E-089 272
TR100013|c0_g2 AT1G01360.1 78.79 165 35 0 301 795 19 183 2E-089 272
TR10009|c0_g1 AT1G16240.3 77.42 62 14 0 261 76 113 174 4E-025 95.9
TR10009|c0_g2 AT1G16240.2 69.17 120 37 0 1007 648 113 232 2E-050 171
I want to compare the two files. If the first column is the same in both files, then print the whole line of the second file (i.e. the lines common to both).
Using awk:
awk 'NR==FNR{a[$1]++;next};a[$1]' file1 file2
While reading file1 (where NR==FNR), this records each first column in the array a and skips to the next line; while reading file2, it prints every line whose first column was recorded.
grep can do the same:
grep -wf file1 file2
-w matches whole words only.
-f reads the patterns from the given file.
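For comparison, the same join written as a short Python sketch (the file names file1 and file2 and the whitespace-delimited columns are taken from the example above):
# Collect the keys (first column) of file1.
with open("file1") as f1:
    keys = {line.split()[0] for line in f1 if line.strip()}

# Print every line of file2 whose first column appears in file1.
with open("file2") as f2:
    for line in f2:
        if line.strip() and line.split()[0] in keys:
            print(line, end="")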
So here is my first test for S3 buckets using boto:
import boto
user_name, access_key, secret_key = "testing-user", "xxxxxxxxxxxxx", "xxxxxxxx/xxxxxxxxxxxx/xxxxxxxxxx(xxxxx)"
conn = boto.connect_s3(access_key, secret_key)
buckets = conn.get_all_buckets()
I get the following error:
Traceback (most recent call last):
File "test-s3.py", line 9, in <module>
buckets = conn.get_all_buckets()
File "xxxxxx/lib/python2.7/site-packages/boto/s3/connection.py", line 440, in get_all_buckets
response.status, response.reason, body)
boto.exception.S3ResponseError: S3ResponseError: 403 Forbidden
<?xml version="1.0" encoding="UTF-8"?>
<Error><Code>SignatureDoesNotMatch</Code><Message>The request signature we calculated does not match the signature you provided. Check your key and signing method.</Message><AWSAccessKeyId>AKIAJMHSZXU6MORWA5GA</AWSAccessKeyId><StringToSign>GET
Mon, 18 May 2015 06:21:58 GMT
/</StringToSign><SignatureProvided>c/+YJAZVInsfmd5giMQmrh81DPA=</SignatureProvided><StringToSignBytes>47 45 54 0a 0a 0a 4d 6f 6e 2c 20 31 38 20 4d 61 79 20 32 30 31 35 20 30 36 3a 32 31 3a 35 38 20 47 4d 54 0a 2f</StringToSignBytes><RequestId>5733F9C8926497E6</RequestId><HostId>FXPejeYuvZ+oV2DJLh7HBpryOh4Ve3Mmj8g8bKA2f/4dTWDHJiG8Bpir8EykLYYW1OJMhZorbIQ=</HostId></Error>
How am I supposed to fix this?
Boto version is 2.38.0
Had the same issue. In my case, the generated secret key had a special character, '+', in it. So I deleted the key and regenerated a new one, and it worked with the new key, which had no '+'.
Source
Today, I saw a SignatureDoesNotMatch error response while playing around with an S3 API locally, and replacing localhost with 127.0.0.1 fixed the problem in my case.
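If the key itself is suspect (special characters such as '+', or stray whitespace picked up while copying), one way to rule out shell or config quoting issues is to pass the credentials to boto explicitly. A minimal sketch, assuming boto 2.x and credentials in the usual AWS_* environment variables:
import os
import boto

# Read the credentials from the environment and strip any accidental whitespace or newlines.
access_key = os.environ["AWS_ACCESS_KEY_ID"].strip()
secret_key = os.environ["AWS_SECRET_ACCESS_KEY"].strip()

conn = boto.connect_s3(aws_access_key_id=access_key,
                       aws_secret_access_key=secret_key)
print([bucket.name for bucket in conn.get_all_buckets()])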
I have an rsa.PublicKey object (retrieved from an rsa.PrivateKey). And I'm trying to export it into the OpenSSH format, to display it in a web page.
I've noticed the go.crypto/ssh library, which seems to be doing this.
And there's a discussion about its implementation (it's actually exactly what I need to do).
Unfortunately, I'm getting a bit stuck, as the byte array returned is in an unknown encoding and I can't just transform it to a string to display it.
func PublicKey(rsaKey rsa.PublicKey) string {
    key, _ := ssh.NewPublicKey(&rsaKey)
    marshalled := ssh.MarshalPublicKey(key)
    return string(marshalled)
}
This seems to work as it adds the ssh-rsa at the beginning of the string. However, most characters aren't recognized.
Here's the byte array I'm getting back for an arbitrary public key:
[0 0 0 7 115 115 104 45 114 115 97 0 0 0 3 1 0 1 0 0 0 65 0 178 153 15 73 196 125 250 140 212 0 174 106 77 27 138 59 106 19 100 43 35 242 139 0 59 251 151 121 10 222 154 76 200 43 139 42 129 116 125 222 192 139 98 150 229 58 8 195 49 104 126 242 92 75 244 147 107 161 192 230 4 30 157 21]
Any hint on how to properly display this byte array as a string?
Marshaling a key is for the wire format. You just need to base64 encode the bytes:
base64.StdEncoding.EncodeToString(marshalled) + "\n"
New to Python (very cool), first question. I am reading a 50+ MB ASCII file, scanning for property tags and parsing the data into a numpy array. I have placed timing reports throughout the loop and found the culprit: the while loop using np.append(). Wondering if there is a faster method.
This is a sample input file format with fake data for debugging:
...
tag parameter
char name "Poro"
array float data 100
1 2 3 4 5 6 7 8 9 10 11 12
13 14 15 16 17 18 19 20 21 22 23 24
25 26 27 28 29 30 31 32 33 34 35 36
37 38 39 40 41 42 43 44 45 46 47 48
49 50 51 52 53 54 55 56 56 58 59 60
61 62 63 64 65 66 67 68 69 70 71 72
73 74 75 76 77 78 79 80 81 82 83 84
85 86 87 88 89 90 91 92 93 94 95 96
97 98 99 100
endtag
...
and this is the code fragment, where the while loop alone is taking 70 seconds for a 350k-element array:
def readParameter(self, parameterName):
    startTime = time.time()
    intervalTime = time.time()
    token = "tag parameter"
    self.inputBuffer.seek(0)
    for lineno, line in enumerate(self.inputBuffer, 1):
        if token in line:
            line = self.inputBuffer.next().replace('"', '').split()
            elapsedTime = time.time() - intervalTime
            logging.debug(" Time to readParameter find token: " + str(elapsedTime))
            intervalTime = time.time()
            if line[2] == parameterName:
                line = self.inputBuffer.next()
                line = self.inputBuffer.next()
                np.parameterArray = np.fromstring(line, dtype=float, sep=" ")
                line = self.inputBuffer.next()
                while not "endtag" in line:
                    np.parameterArray = np.append(np.parameterArray, np.fromstring(line, dtype=float, sep=" "))
                    line = self.inputBuffer.next()
                elapsedTime = time.time() - startTime
                logging.debug(" Time to readParameter load array: " + str(elapsedTime))
                break
    elapsedTime = time.time() - startTime
    logging.debug(" Time to readParameter: " + str(elapsedTime))
    logging.debug(np.parameterArray)
    np.parameterArray = self.make3D(np.parameterArray)
    return np.parameterArray
Thanks, Jeff
Appending to an array requires resizing it, which usually means allocating a new block of memory big enough to hold the new array, copying the existing array to the new location, and freeing the memory it used to use. All of those operations are expensive, and you're doing them for each append. With 350k elements, it's basically a garbage-collector and memory-fragmentation stress test.
Pre-allocate your array. You've got the count parameter, so make an array that size, and inside your loop, just assign the newly-parsed element to the next spot in the array, instead of appending it. You'll have to keep your own counter of how many elements have been filled. (You could instead iterate over the elements of the blank array and replace them, but that would make error handling a bit trickier to add in.)
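A minimal sketch of that pre-allocation approach (the read_data_block helper and its names are illustrative, not from the original code; it assumes the count declared on the "array float data N" line is accurate):
import numpy as np

def read_data_block(f, count):
    """Fill a pre-allocated array from whitespace-separated lines until 'endtag'."""
    data = np.empty(count, dtype=float)  # allocate the full array once, up front
    filled = 0                           # how many slots have been filled so far
    for line in f:
        if "endtag" in line:
            break
        values = np.fromstring(line, dtype=float, sep=" ")
        data[filled:filled + len(values)] = values  # assign in place, no reallocation
        filled += len(values)
    return data[:filled]                 # trim in case fewer values were present
With 350k values this replaces the repeated reallocate-and-copy (one per input line) with a single allocation plus in-place slice assignments.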