Commit ace76d3

fix: remove dependencies on the thread local storage (#250)

1 parent daad2f0 · commit ace76d3
File tree: 10 files changed, +112 -123 lines changed
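
Every file in this commit applies the same refactoring: worker threads no longer smuggle results and errors out through thread-local storage (Thread.current[:key]) that the caller reads back after join; instead, the thread block's last expression becomes the thread's return value and the caller collects it with Thread#value, which joins implicitly. A minimal before/after sketch of the pattern (do_work and inputs are illustrative stand-ins, not taken from this diff):

    # Before: results are stashed in thread-local storage and read back after #join.
    threads = inputs.map do |input|
      Thread.new(input) do |i|
        Thread.current[:result] = do_work(i) # hypothetical unit of work
      rescue StandardError => e
        Thread.current[:error] = e
      end
    end
    threads.each(&:join)
    results = threads.map { |t| t[:result] }

    # After: the block's last expression is returned by Thread#value (which also joins).
    threads = inputs.map { |input| Thread.new(input) { |i| do_work(i) } }
    results = threads.map(&:value)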

.github/workflows/test.yaml (+1 -1)

@@ -194,7 +194,7 @@ jobs:
       - name: Print disk info
         run: df -h
       - name: Run minitest
-        run: bundle exec rake bench
+        run: bundle exec rake bench | grep BenchCommand | grep -v 'Envoy#bench_pipeline_echo\|Envoy#bench_single_echo' | sort
       - name: Reset qdisc
         run: |
           for i in {5..9..2}

lib/redis_client/cluster/node.rb (+43 -41)

@@ -94,28 +94,19 @@ def load_info(options, **kwargs) # rubocop:disable Metrics/AbcSize, Metrics/Cycl
         startup_options = options.to_a.sample(MAX_STARTUP_SAMPLE).to_h
         startup_nodes = ::RedisClient::Cluster::Node.new(startup_options, **kwargs)
         startup_nodes.each_slice(MAX_THREADS).with_index do |chuncked_startup_nodes, chuncked_idx|
-          threads = chuncked_startup_nodes.each_with_index.map do |raw_client, idx|
-            Thread.new(raw_client, (MAX_THREADS * chuncked_idx) + idx) do |cli, i|
-              Thread.current[:index] = i
-              reply = cli.call('CLUSTER', 'NODES')
-              Thread.current[:info] = parse_cluster_node_reply(reply)
-            rescue StandardError => e
-              Thread.current[:error] = e
-            ensure
-              cli&.close
+          chuncked_startup_nodes
+            .each_with_index
+            .map { |raw_client, idx| [(MAX_THREADS * chuncked_idx) + idx, build_thread_for_cluster_node(raw_client)] }
+            .each do |i, t|
+              case v = t.value
+              when StandardError
+                errors ||= Array.new(startup_size)
+                errors[i] = v
+              else
+                node_info_list ||= Array.new(startup_size)
+                node_info_list[i] = v
+              end
             end
-          end
-
-          threads.each do |t|
-            t.join
-            if t.key?(:info)
-              node_info_list ||= Array.new(startup_size)
-              node_info_list[t[:index]] = t[:info]
-            elsif t.key?(:error)
-              errors ||= Array.new(startup_size)
-              errors[t[:index]] = t[:error]
-            end
-          end
         end

         raise ::RedisClient::Cluster::InitialSetupError, errors if node_info_list.nil?

@@ -132,6 +123,17 @@ def load_info(options, **kwargs) # rubocop:disable Metrics/AbcSize, Metrics/Cycl

       private

+      def build_thread_for_cluster_node(raw_client)
+        Thread.new(raw_client) do |client|
+          reply = client.call('CLUSTER', 'NODES')
+          parse_cluster_node_reply(reply)
+        rescue StandardError => e
+          e
+        ensure
+          client&.close
+        end
+      end
+
       # @see https://redis.io/commands/cluster-nodes/
       # @see https://github.com/redis/redis/blob/78960ad57b8a5e6af743d789ed8fd767e37d42b8/src/cluster.c#L4660-L4683
       def parse_cluster_node_reply(reply) # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity

@@ -331,33 +333,33 @@ def call_multiple_nodes!(clients, method, command, args, &block)
         raise ::RedisClient::Cluster::ErrorCollection, errors
       end

-      def try_map(clients) # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
+      def try_map(clients, &block)
         results = errors = nil
         clients.each_slice(MAX_THREADS) do |chuncked_clients|
-          threads = chuncked_clients.map do |k, v|
-            Thread.new(k, v) do |node_key, client|
-              Thread.current[:node_key] = node_key
-              reply = yield(node_key, client)
-              Thread.current[:result] = reply
-            rescue StandardError => e
-              Thread.current[:error] = e
-            end
-          end
-
-          threads.each do |t|
-            t.join
-            if t.key?(:result)
-              results ||= {}
-              results[t[:node_key]] = t[:result]
-            elsif t.key?(:error)
-              errors ||= {}
-              errors[t[:node_key]] = t[:error]
+          chuncked_clients
+            .map { |node_key, client| [node_key, build_thread_for_command(node_key, client, &block)] }
+            .each do |node_key, thread|
+              case v = thread.value
+              when StandardError
+                errors ||= {}
+                errors[node_key] = v
+              else
+                results ||= {}
+                results[node_key] = v
+              end
             end
-          end
         end

         [results, errors]
       end
+
+      def build_thread_for_command(node_key, client)
+        Thread.new(node_key, client) do |nk, cli|
+          yield(nk, cli)
+        rescue StandardError => e
+          e
+        end
+      end
     end
   end
 end
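
One detail worth calling out in the new build_thread_* helpers above: Thread#value re-raises whatever exception terminated the thread, so each helper rescues inside the thread body and returns the exception object itself, and the caller then branches on the value with case/when StandardError. A standalone sketch of that shape, with hypothetical risky_call and inputs:

    def build_worker(arg)
      Thread.new(arg) do |a|
        risky_call(a)   # hypothetical call that may raise
      rescue StandardError => e
        e               # hand the exception back as the value so #value does not re-raise
      end
    end

    results = {}
    errors = {}
    inputs.each do |key, arg|
      case v = build_worker(arg).value
      when StandardError then errors[key] = v
      else results[key] = v
      end
    end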

lib/redis_client/cluster/node/latency_replica.rb (+18 -21)

@@ -39,30 +39,27 @@ def any_replica_node_key(seed: nil)

       private

-      def measure_latencies(clients) # rubocop:disable Metrics/AbcSize
+      def measure_latencies(clients)
         clients.each_slice(::RedisClient::Cluster::Node::MAX_THREADS).each_with_object({}) do |chuncked_clients, acc|
-          threads = chuncked_clients.map do |k, v|
-            Thread.new(k, v) do |node_key, client|
-              Thread.current[:node_key] = node_key
-
-              min = DUMMY_LATENCY_MSEC
-              MEASURE_ATTEMPT_COUNT.times do
-                starting = Process.clock_gettime(Process::CLOCK_MONOTONIC, :microsecond)
-                client.call_once('PING')
-                duration = Process.clock_gettime(Process::CLOCK_MONOTONIC, :microsecond) - starting
-                min = duration if duration < min
-              end
-
-              Thread.current[:latency] = min
-            rescue StandardError
-              Thread.current[:latency] = DUMMY_LATENCY_MSEC
-            end
-          end
+          chuncked_clients
+            .map { |node_key, client| [node_key, build_thread_for_measuring_latency(client)] }
+            .each { |node_key, thread| acc[node_key] = thread.value }
+        end
+      end

-          threads.each do |t|
-            t.join
-            acc[t[:node_key]] = t[:latency]
+      def build_thread_for_measuring_latency(client)
+        Thread.new(client) do |cli|
+          min = DUMMY_LATENCY_MSEC
+          MEASURE_ATTEMPT_COUNT.times do
+            starting = Process.clock_gettime(Process::CLOCK_MONOTONIC, :microsecond)
+            cli.call_once('PING')
+            duration = Process.clock_gettime(Process::CLOCK_MONOTONIC, :microsecond) - starting
+            min = duration if duration < min
           end
+
+          min
+        rescue StandardError
+          DUMMY_LATENCY_MSEC
         end
       end

lib/redis_client/cluster/node/replica_mixin.rb (+2 -2)

@@ -24,12 +24,12 @@ def any_primary_node_key(seed: nil)
       private

       def build_clients(primary_node_keys, options, pool, **kwargs)
-        options.filter_map do |node_key, option|
+        options.to_h do |node_key, option|
           option = option.merge(kwargs.reject { |k, _| ::RedisClient::Cluster::Node::IGNORE_GENERIC_CONFIG_KEYS.include?(k) })
           config = ::RedisClient::Cluster::Node::Config.new(scale_read: !primary_node_keys.include?(node_key), **option)
           client = pool.nil? ? config.new_client : config.new_pool(**pool)
           [node_key, client]
-        end.to_h
+        end
       end
     end
   end
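
The build_clients change above is a small idiom swap: Enumerable#to_h with a block (available since Ruby 2.6) builds the hash directly from the [key, value] pair each block call returns, so the filter_map-then-to_h detour (which never filtered anything) goes away. Roughly, with made-up option data:

    options = { 'node1:6379' => { timeout: 1.0 }, 'node2:6379' => { timeout: 1.0 } }

    # Each [key, value] pair returned by the block becomes one entry of the new hash.
    clients = options.to_h { |node_key, option| [node_key, option.merge(scale_read: true)] }
    # => { 'node1:6379' => { timeout: 1.0, scale_read: true }, 'node2:6379' => { timeout: 1.0, scale_read: true } }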

lib/redis_client/cluster/pipeline.rb (+27 -31)

@@ -148,38 +148,23 @@ def empty?
       def execute # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
         all_replies = errors = nil
         @pipelines&.each_slice(MAX_THREADS) do |chuncked_pipelines|
-          threads = chuncked_pipelines.map do |node_key, pipeline|
-            Thread.new(node_key, pipeline) do |nk, pl|
-              Thread.current[:node_key] = nk
-              replies = do_pipelining(@router.find_node(nk), pl)
-              raise ReplySizeError, "commands: #{pl._size}, replies: #{replies.size}" if pl._size != replies.size
-
-              Thread.current[:replies] = replies
-            rescue ::RedisClient::Cluster::Pipeline::RedirectionNeeded => e
-              Thread.current[:redirection_needed] = e
-            rescue StandardError => e
-              Thread.current[:error] = e
-            end
-          end
-
-          threads.each(&:join)
-          threads.each do |t|
-            if t.key?(:replies)
-              all_replies ||= Array.new(@size)
-              @pipelines[t[:node_key]]
-                .outer_indices
-                .each_with_index { |outer, inner| all_replies[outer] = t[:replies][inner] }
-            elsif t.key?(:redirection_needed)
-              all_replies ||= Array.new(@size)
-              pipeline = @pipelines[t[:node_key]]
-              err = t[:redirection_needed]
-              err.indices.each { |i| err.replies[i] = handle_redirection(err.replies[i], pipeline, i) }
-              pipeline.outer_indices.each_with_index { |outer, inner| all_replies[outer] = err.replies[inner] }
-            elsif t.key?(:error)
-              errors ||= {}
-              errors[t[:node_key]] = t[:error]
+          chuncked_pipelines
+            .map { |node_key, pipeline| [node_key, build_thread_for_pipeline(@router, node_key, pipeline)] }
+            .each do |node_key, thread|
+              case v = thread.value
+              when ::RedisClient::Cluster::Pipeline::RedirectionNeeded
+                all_replies ||= Array.new(@size)
+                pipeline = @pipelines[node_key]
+                v.indices.each { |i| v.replies[i] = handle_redirection(v.replies[i], pipeline, i) }
+                pipeline.outer_indices.each_with_index { |outer, inner| all_replies[outer] = v.replies[inner] }
+              when StandardError
+                errors ||= {}
+                errors[node_key] = v
+              else
+                all_replies ||= Array.new(@size)
+                @pipelines[node_key].outer_indices.each_with_index { |outer, inner| all_replies[outer] = v[inner] }
+              end
             end
-          end
         end

         raise ::RedisClient::Cluster::ErrorCollection, errors unless errors.nil?

@@ -197,6 +182,17 @@ def append_pipeline(node_key)
         @pipelines[node_key]
       end

+      def build_thread_for_pipeline(router, node_key, pipeline)
+        Thread.new(router, node_key, pipeline) do |rt, nk, pl|
+          replies = do_pipelining(rt.find_node(nk), pl)
+          raise ReplySizeError, "commands: #{pl._size}, replies: #{replies.size}" if pl._size != replies.size
+
+          replies
+        rescue StandardError => e
+          e
+        end
+      end
+
       def do_pipelining(client, pipeline)
         case client
         when ::RedisClient then send_pipeline(client, pipeline)

lib/redis_client/cluster/pub_sub.rb (+3 -3)

@@ -24,7 +24,7 @@ def take_message(timeout)
         @worker = subscribe(@client, timeout) if @worker.nil?
         return if @worker.alive?

-        message = @worker[:reply]
+        message = @worker.value
         @worker = nil
         message
       end

@@ -33,9 +33,9 @@ def take_message(timeout)

       def subscribe(client, timeout)
         Thread.new(client, timeout) do |pubsub, to|
-          Thread.current[:reply] = pubsub.next_event(to)
+          pubsub.next_event(to)
         rescue StandardError => e
-          Thread.current[:reply] = e
+          e
         end
       end
     end
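
In PubSub#take_message above, the worker is only asked for its value once @worker.alive? has returned false, so the otherwise blocking Thread#value call returns immediately; and as in the other files, subscribe rescues inside the thread and hands any exception back as a plain value. A minimal polling sketch along those lines (poll_for_event and its timeout are stand-ins):

    worker = Thread.new { poll_for_event(timeout: 0.1) } # hypothetical blocking poll

    # On each tick, check whether the worker finished before asking for its value:
    message =
      if worker.alive?
        nil            # still waiting; check again on the next tick
      else
        worker.value   # the thread has finished, so this returns without blocking
      end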

test/bench_command.rb (+1 -1)

@@ -69,7 +69,7 @@ class Envoy < BenchmarkWrapper
     include BenchmarkMixinForProxy

     # https://www.envoyproxy.io/docs/envoy/latest/intro/arch_overview/other_protocols/redis#supported-commands
-    def bench_echo
+    def bench_single_echo
       skip('Envoy does not support ECHO command.')
     end


test/benchmark_mixin.rb (+3 -3)

@@ -16,23 +16,23 @@ def teardown
     @client&.close
   end

-  def bench_echo
+  def bench_single_echo
     assert_performance_linear(MIN_THRESHOLD) do |n|
       n.times do
         @client.call('ECHO', 'Hello world')
       end
     end
   end

-  def bench_set
+  def bench_single_set
     assert_performance_linear(MIN_THRESHOLD) do |n|
       n.times do |i|
         @client.call('SET', "key#{i}", i)
       end
     end
   end

-  def bench_get
+  def bench_single_get
     assert_performance_linear(MIN_THRESHOLD) do |n|
       n.times do |i|
         @client.call('GET', "key#{i}")

test/redis_client/cluster/test_normalized_cmd_name.rb (+8 -14)

@@ -66,24 +66,18 @@ def test_thread_safety
     attempts = Array.new(100, 'dummy')

     threads = attempts.each_with_index.map do |_, i|
-      Thread.new do
-        Thread.current[:index] = i
-        got = if i.even?
-                @subject.get_by_command(%w[SET foo bar])
-              else
-                @subject.clear ? 'set' : 'clear failed'
-              end
-        Thread.current[:got] = got
+      Thread.new(i) do
+        if i.even?
+          @subject.get_by_command(%w[SET foo bar])
+        else
+          @subject.clear ? 'set' : 'clear failed'
+        end
       rescue StandardError => e
-        Thread.current[:got] = "#{e.class.name}: #{e.message}"
+        "#{e.class.name}: #{e.message}"
       end
     end

-    threads.each do |t|
-      t.join
-      attempts[t[:index]] = t[:got]
-    end
-
+    threads.each_with_index { |t, i| attempts[i] = t.value }
     attempts.each { |got| assert_equal('set', got) }
   end
 end

test/test_concurrency.rb (+6 -6)

@@ -59,13 +59,13 @@ def test_threading
       Thread.new do
         ATTEMPTS.times { MAX_THREADS.times { |i| @client.call('INCR', "key#{i}") } }
         ATTEMPTS.times { MAX_THREADS.times { |i| @client.call('DECR', "key#{i}") } }
+        nil
       rescue StandardError => e
-        Thread.current[:error] = e
+        e
       end
     end

-    threads.each(&:join)
-    threads.each { |t| assert_nil(t[:error]) }
+    threads.each { |t| assert_nil(t.value) }
     MAX_THREADS.times { |i| assert_equal(WANT, @client.call('GET', "key#{i}")) }
   end

@@ -74,13 +74,13 @@ def test_threading_with_pipelining
       Thread.new do
         @client.pipelined { |pi| ATTEMPTS.times { MAX_THREADS.times { |i| pi.call('INCR', "key#{i}") } } }
         @client.pipelined { |pi| ATTEMPTS.times { MAX_THREADS.times { |i| pi.call('DECR', "key#{i}") } } }
+        nil
       rescue StandardError => e
-        Thread.current[:error] = e
+        e
       end
     end

-    threads.each(&:join)
-    threads.each { |t| assert_nil(t[:error]) }
+    threads.each { |t| assert_nil(t.value) }
     MAX_THREADS.times { |i| assert_equal(WANT, @client.call('GET', "key#{i}")) }
   end
