concurrent-ruby 0.5.0 → 0.6.0.pre.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +88 -77
- data/lib/concurrent.rb +17 -2
- data/lib/concurrent/actor.rb +17 -0
- data/lib/concurrent/actor_context.rb +31 -0
- data/lib/concurrent/actor_ref.rb +39 -0
- data/lib/concurrent/agent.rb +12 -3
- data/lib/concurrent/async.rb +290 -0
- data/lib/concurrent/atomic.rb +5 -9
- data/lib/concurrent/cached_thread_pool.rb +39 -137
- data/lib/concurrent/channel/blocking_ring_buffer.rb +60 -0
- data/lib/concurrent/channel/buffered_channel.rb +83 -0
- data/lib/concurrent/channel/channel.rb +11 -0
- data/lib/concurrent/channel/probe.rb +19 -0
- data/lib/concurrent/channel/ring_buffer.rb +54 -0
- data/lib/concurrent/channel/unbuffered_channel.rb +34 -0
- data/lib/concurrent/channel/waitable_list.rb +38 -0
- data/lib/concurrent/configuration.rb +92 -0
- data/lib/concurrent/dataflow.rb +9 -3
- data/lib/concurrent/delay.rb +88 -0
- data/lib/concurrent/exchanger.rb +31 -0
- data/lib/concurrent/fixed_thread_pool.rb +28 -122
- data/lib/concurrent/future.rb +10 -5
- data/lib/concurrent/immediate_executor.rb +3 -2
- data/lib/concurrent/ivar.rb +2 -1
- data/lib/concurrent/java_cached_thread_pool.rb +45 -0
- data/lib/concurrent/java_fixed_thread_pool.rb +37 -0
- data/lib/concurrent/java_thread_pool_executor.rb +194 -0
- data/lib/concurrent/per_thread_executor.rb +23 -0
- data/lib/concurrent/postable.rb +2 -0
- data/lib/concurrent/processor_count.rb +125 -0
- data/lib/concurrent/promise.rb +42 -18
- data/lib/concurrent/ruby_cached_thread_pool.rb +37 -0
- data/lib/concurrent/ruby_fixed_thread_pool.rb +31 -0
- data/lib/concurrent/ruby_thread_pool_executor.rb +268 -0
- data/lib/concurrent/ruby_thread_pool_worker.rb +69 -0
- data/lib/concurrent/simple_actor_ref.rb +124 -0
- data/lib/concurrent/thread_local_var.rb +1 -1
- data/lib/concurrent/thread_pool_executor.rb +30 -0
- data/lib/concurrent/timer_task.rb +13 -10
- data/lib/concurrent/tvar.rb +212 -0
- data/lib/concurrent/utilities.rb +1 -0
- data/lib/concurrent/version.rb +1 -1
- data/spec/concurrent/actor_context_spec.rb +37 -0
- data/spec/concurrent/actor_ref_shared.rb +313 -0
- data/spec/concurrent/actor_spec.rb +9 -1
- data/spec/concurrent/agent_spec.rb +97 -96
- data/spec/concurrent/async_spec.rb +320 -0
- data/spec/concurrent/cached_thread_pool_shared.rb +137 -0
- data/spec/concurrent/channel/blocking_ring_buffer_spec.rb +149 -0
- data/spec/concurrent/channel/buffered_channel_spec.rb +151 -0
- data/spec/concurrent/channel/channel_spec.rb +37 -0
- data/spec/concurrent/channel/probe_spec.rb +49 -0
- data/spec/concurrent/channel/ring_buffer_spec.rb +126 -0
- data/spec/concurrent/channel/unbuffered_channel_spec.rb +132 -0
- data/spec/concurrent/configuration_spec.rb +134 -0
- data/spec/concurrent/dataflow_spec.rb +109 -27
- data/spec/concurrent/delay_spec.rb +77 -0
- data/spec/concurrent/exchanger_spec.rb +66 -0
- data/spec/concurrent/fixed_thread_pool_shared.rb +136 -0
- data/spec/concurrent/future_spec.rb +60 -51
- data/spec/concurrent/global_thread_pool_shared.rb +33 -0
- data/spec/concurrent/immediate_executor_spec.rb +4 -25
- data/spec/concurrent/ivar_spec.rb +36 -23
- data/spec/concurrent/java_cached_thread_pool_spec.rb +64 -0
- data/spec/concurrent/java_fixed_thread_pool_spec.rb +64 -0
- data/spec/concurrent/java_thread_pool_executor_spec.rb +71 -0
- data/spec/concurrent/obligation_shared.rb +32 -20
- data/spec/concurrent/{global_thread_pool_spec.rb → per_thread_executor_spec.rb} +9 -13
- data/spec/concurrent/processor_count_spec.rb +20 -0
- data/spec/concurrent/promise_spec.rb +29 -41
- data/spec/concurrent/ruby_cached_thread_pool_spec.rb +69 -0
- data/spec/concurrent/ruby_fixed_thread_pool_spec.rb +39 -0
- data/spec/concurrent/ruby_thread_pool_executor_spec.rb +183 -0
- data/spec/concurrent/simple_actor_ref_spec.rb +219 -0
- data/spec/concurrent/thread_pool_class_cast_spec.rb +40 -0
- data/spec/concurrent/thread_pool_executor_shared.rb +155 -0
- data/spec/concurrent/thread_pool_shared.rb +98 -36
- data/spec/concurrent/tvar_spec.rb +137 -0
- data/spec/spec_helper.rb +4 -0
- data/spec/support/functions.rb +4 -0
- metadata +85 -20
- data/lib/concurrent/cached_thread_pool/worker.rb +0 -91
- data/lib/concurrent/channel.rb +0 -63
- data/lib/concurrent/fixed_thread_pool/worker.rb +0 -54
- data/lib/concurrent/global_thread_pool.rb +0 -42
- data/spec/concurrent/cached_thread_pool_spec.rb +0 -101
- data/spec/concurrent/channel_spec.rb +0 -86
- data/spec/concurrent/fixed_thread_pool_spec.rb +0 -92
- data/spec/concurrent/uses_global_thread_pool_shared.rb +0 -64
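Read together, the spec changes below document the headline API shift in this release: work that implicitly ran on a global thread pool in 0.5.0 is now handed an explicit executor. A minimal sketch of that pattern, assuming the :executor option and PerThreadExecutor exactly as exercised by the updated promise_spec.rb below:

    require 'concurrent'

    # Hypothetical usage assembled from the spec changes below, not taken from
    # the gem's documentation: a Promise is given its own executor instead of
    # relying on an implicit global thread pool.
    executor = Concurrent::PerThreadExecutor.new

    promise = Concurrent::Promise.new(executor: executor) { 6 * 7 }.execute
    sleep(0.1)
    puts promise.value  # => 42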
data/spec/concurrent/processor_count_spec.rb
@@ -0,0 +1,20 @@
+require 'spec_helper'
+
+module Concurrent
+
+  describe '#processor_count' do
+
+    it 'retuns a positive integer' do
+      Concurrent::processor_count.should be_a Integer
+      Concurrent::processor_count.should >= 1
+    end
+  end
+
+  describe '#physical_processor_count' do
+
+    it 'retuns a positive integer' do
+      Concurrent::physical_processor_count.should be_a Integer
+      Concurrent::physical_processor_count.should >= 1
+    end
+  end
+end
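For reference, a minimal usage sketch of the two helpers covered by the new spec above (method names taken from the spec; behavior inferred from its assertions):

    require 'concurrent'

    # Count of logical processors; the spec above only guarantees an Integer >= 1.
    puts Concurrent.processor_count

    # Count of physical cores; likewise asserted to be an Integer >= 1.
    puts Concurrent.physical_processor_count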
data/spec/concurrent/promise_spec.rb
@@ -1,32 +1,26 @@
 require 'spec_helper'
 require_relative 'obligation_shared'
-require_relative 'uses_global_thread_pool_shared'

 module Concurrent

   describe Promise do

-    let
-    it_should_behave_like Concurrent::UsesGlobalThreadPool
+    let(:executor) { PerThreadExecutor.new }

-    let(:empty_root) { Promise.new { nil } }
+    let(:empty_root) { Promise.new(executor: executor){ nil } }
     let!(:fulfilled_value) { 10 }
     let!(:rejected_reason) { StandardError.new('mojo jojo') }

     let(:pending_subject) do
-      Promise.new{ sleep(0.3); fulfilled_value }.execute
+      Promise.new(executor: executor){ sleep(0.3); fulfilled_value }.execute
     end

     let(:fulfilled_subject) do
-      Promise.fulfill(fulfilled_value)
+      Promise.fulfill(fulfilled_value, executor: executor)
     end

     let(:rejected_subject) do
-      Promise.reject(
-    end
-
-    before(:each) do
-      Promise.thread_pool = FixedThreadPool.new(1)
+      Promise.reject(rejected_reason, executor: executor)
     end

     it_should_behave_like :obligation
@@ -74,28 +68,28 @@ module Concurrent

     describe '.new' do
       it 'should return an unscheduled Promise' do
-        p = Promise.new { nil }
+        p = Promise.new(executor: executor){ nil }
         p.should be_unscheduled
       end
     end

     describe '.execute' do
       it 'creates a new Promise' do
-        p = Promise.execute{ nil }
+        p = Promise.execute(executor: executor){ nil }
         p.should be_a(Promise)
       end

       it 'passes the block to the new Promise' do
-        p = Promise.execute { 20 }
+        p = Promise.execute(executor: executor){ 20 }
         sleep(0.1)
         p.value.should eq 20
       end

       it 'calls #execute on the new Promise' do
         p = double('promise')
-        Promise.stub(:new).with(
+        Promise.stub(:new).with({executor: executor}).and_return(p)
         p.should_receive(:execute).with(no_args)
-        Promise.execute{ nil }
+        Promise.execute(executor: executor){ nil }
       end
     end
   end
@@ -105,15 +99,14 @@ module Concurrent
     context 'unscheduled' do

       it 'sets the promise to :pending' do
-        p = Promise.new { sleep(0.1) }.execute
+        p = Promise.new(executor: executor){ sleep(0.1) }.execute
         p.should be_pending
       end

       it 'posts the block given in construction' do
-
-        Promise.new { nil }.execute
+        executor.should_receive(:post).with(any_args)
+        Promise.new(executor: executor){ nil }.execute
       end
-
     end

     context 'pending' do
@@ -124,19 +117,17 @@ module Concurrent
       end

       it 'does not posts again' do
-
+        executor.should_receive(:post).with(any_args).once
         pending_subject.execute
       end
-
     end

-
     describe 'with children' do

-      let(:root) { Promise.new { sleep(0.1); nil } }
-      let(:c1) { root.then { nil } }
-      let(:c2) { root.then { nil } }
-      let(:c2_1) { c2.then { nil } }
+      let(:root) { Promise.new(executor: executor){ sleep(0.1); nil } }
+      let(:c1) { root.then { sleep(0.1); nil } }
+      let(:c2) { root.then { sleep(0.1); nil } }
+      let(:c2_1) { c2.then { sleep(0.1); nil } }

       context 'when called on the root' do
         it 'should set all promises to :pending' do
@@ -156,7 +147,6 @@ module Concurrent
           [root, c1, c2, c2_1].each { |p| p.should be_pending }
         end
       end
-
     end
   end

@@ -186,7 +176,7 @@ module Concurrent

     context 'unscheduled' do

-      let(:p1) { Promise.new {nil} }
+      let(:p1) { Promise.new(executor: executor){nil} }
       let(:child) { p1.then{} }

       it 'returns a new promise' do
@@ -225,7 +215,6 @@ module Concurrent
         child = fulfilled_subject.then(Proc.new{ 7 }) { |v| v + 5 }
         child.value.should eq fulfilled_value + 5
       end
-
     end

     context 'rejected' do
@@ -240,7 +229,6 @@ module Concurrent
         child = rejected_subject.then(Proc.new{ 7 }) { |v| v + 5 }
         child.value.should eq 7
       end
-
     end

     it 'can be called more than once' do
@@ -276,13 +264,13 @@ module Concurrent

     it 'passes the result of each block to all its children' do
       expected = nil
-      Promise.new{ 20 }.then{ |result| expected = result }.execute
+      Promise.new(executor: executor){ 20 }.then{ |result| expected = result }.execute
       sleep(0.1)
       expected.should eq 20
     end

     it 'sets the promise value to the result if its block' do
-      root = Promise.new{ 20 }
+      root = Promise.new(executor: executor){ 20 }
       p = root.then{ |result| result * 2}.execute
       sleep(0.1)
       root.value.should eq 20
@@ -290,26 +278,26 @@ module Concurrent
     end

     it 'sets the promise state to :fulfilled if the block completes' do
-      p = Promise.new{ 10 * 2 }.then{|result| result * 2}.execute
+      p = Promise.new(executor: executor){ 10 * 2 }.then{|result| result * 2}.execute
       sleep(0.1)
       p.should be_fulfilled
     end

     it 'passes the last result through when a promise has no block' do
       expected = nil
-      Promise.new{ 20 }.then(Proc.new{}).then{|result| expected = result}.execute
+      Promise.new(executor: executor){ 20 }.then(Proc.new{}).then{|result| expected = result}.execute
       sleep(0.1)
       expected.should eq 20
     end

     it 'uses result as fulfillment value when a promise has no block' do
-      p = Promise.new{ 20 }.then(Proc.new{}).execute
+      p = Promise.new(executor: executor){ 20 }.then(Proc.new{}).execute
       sleep(0.1)
       p.value.should eq 20
     end

     it 'can manage long chain' do
-      root = Promise.new { 20 }
+      root = Promise.new(executor: executor){ 20 }
       p1 = root.then { |b| b * 3 }
       p2 = root.then { |c| c + 2 }
       p3 = p1.then { |d| d + 7 }
@@ -328,26 +316,26 @@ module Concurrent

     it 'passes the reason to all its children' do
       expected = nil
-      Promise.new{ raise ArgumentError }.then(Proc.new{ |reason| expected = reason }).execute
+      Promise.new(executor: executor){ raise ArgumentError }.then(Proc.new{ |reason| expected = reason }).execute
       sleep(0.1)
       expected.should be_a ArgumentError
     end

     it 'sets the promise value to the result if its block' do
-      root = Promise.new{ raise ArgumentError }
+      root = Promise.new(executor: executor){ raise ArgumentError }
       p = root.then(Proc.new{ |reason| 42 }).execute
       sleep(0.1)
       p.value.should eq 42
     end

     it 'sets the promise state to :rejected if the block completes' do
-      p = Promise.new{ raise ArgumentError }.execute
+      p = Promise.new(executor: executor){ raise ArgumentError }.execute
       sleep(0.1)
       p.should be_rejected
     end

     it 'uses reason as rejection reason when a promise has no rescue callable' do
-      p = Promise.new{ raise ArgumentError }.then
+      p = Promise.new(executor: ImmediateExecutor.new){ raise ArgumentError }.then{ |val| val }.execute
       sleep(0.1)
       p.should be_rejected
       p.reason.should be_a ArgumentError
data/spec/concurrent/ruby_cached_thread_pool_spec.rb
@@ -0,0 +1,69 @@
+require 'spec_helper'
+require_relative 'cached_thread_pool_shared'
+
+module Concurrent
+
+  describe RubyCachedThreadPool do
+
+    subject do
+      described_class.new(
+        max_threads: 5,
+        overflow_policy: :discard,
+        gc_interval: 0
+      )
+    end
+
+    after(:each) do
+      subject.kill
+      sleep(0.1)
+    end
+
+    it_should_behave_like :cached_thread_pool
+
+    context 'garbage collection' do
+
+      subject{ described_class.new(idletime: 1, max_threads: 5, gc_interval: 0) }
+
+      it 'removes from pool any thread that has been idle too long' do
+        3.times { subject << proc{ sleep(0.1) } }
+        sleep(0.1)
+        subject.length.should eq 3
+        sleep(2)
+        subject << proc{ nil }
+        sleep(0.1)
+        subject.length.should < 3
+      end
+
+      it 'removes from pool any dead thread' do
+        3.times { subject << proc{ sleep(0.1); raise Exception } }
+        sleep(0.1)
+        subject.length.should eq 3
+        sleep(2)
+        subject << proc{ nil }
+        sleep(0.1)
+        subject.length.should < 3
+      end
+    end
+
+    context 'worker creation and caching' do
+
+      subject{ described_class.new(idletime: 1, max_threads: 5) }
+
+      it 'creates new workers when there are none available' do
+        subject.length.should eq 0
+        5.times{ sleep(0.1); subject << proc{ sleep(1) } }
+        sleep(1)
+        subject.length.should eq 5
+      end
+
+      it 'uses existing idle threads' do
+        5.times{ subject << proc{ sleep(0.1) } }
+        sleep(1)
+        subject.length.should >= 5
+        3.times{ subject << proc{ sleep(1) } }
+        sleep(0.1)
+        subject.length.should >= 5
+      end
+    end
+  end
+end
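A small usage sketch of the cached pool exercised above, assuming the constructor options (:idletime, :max_threads, :gc_interval) and the #<<, #length, and #kill methods shown in the spec:

    require 'concurrent'

    # Sketch only: option names and methods are taken from the spec above,
    # not from separate documentation.
    pool = Concurrent::RubyCachedThreadPool.new(idletime: 1, max_threads: 5, gc_interval: 0)

    10.times { pool << proc { sleep(0.1) } }  # enqueue work
    sleep(0.5)
    puts pool.length                          # current number of worker threads
    pool.kill                                 # shut down, as the spec teardown does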
data/spec/concurrent/ruby_fixed_thread_pool_spec.rb
@@ -0,0 +1,39 @@
+require 'spec_helper'
+require_relative 'fixed_thread_pool_shared'
+
+module Concurrent
+
+  describe RubyFixedThreadPool do
+
+    subject { described_class.new(5, overflow_policy: :discard) }
+
+    after(:each) do
+      subject.kill
+      sleep(0.1)
+    end
+
+    it_should_behave_like :fixed_thread_pool
+
+    context 'exception handling' do
+
+      it 'restarts threads that experience exception' do
+        count = subject.length
+        count.times{ subject << proc{ raise StandardError } }
+        sleep(1)
+        subject.length.should eq count
+      end
+    end
+
+    context 'worker creation and caching' do
+
+      it 'creates new workers when there are none available' do
+        pool = described_class.new(5)
+        pool.current_length.should eq 0
+        5.times{ pool << proc{ sleep(1) } }
+        sleep(0.1)
+        pool.current_length.should eq 5
+        pool.kill
+      end
+    end
+  end
+end
data/spec/concurrent/ruby_thread_pool_executor_spec.rb
@@ -0,0 +1,183 @@
+require 'spec_helper'
+require_relative 'thread_pool_executor_shared'
+
+module Concurrent
+
+  describe RubyThreadPoolExecutor do
+
+    after(:each) do
+      subject.kill
+      sleep(0.1)
+    end
+
+    subject do
+      RubyThreadPoolExecutor.new(
+        min_threads: 2,
+        max_threads: 5,
+        idletime: 60,
+        max_queue: 10,
+        overflow_policy: :discard
+      )
+    end
+
+    it_should_behave_like :thread_pool_executor
+
+    context '#remaining_capacity' do
+
+      let!(:expected_max){ 100 }
+
+      subject do
+        RubyThreadPoolExecutor.new(
+          min_threads: 10,
+          max_threads: 20,
+          idletime: 60,
+          max_queue: expected_max,
+          overflow_policy: :discard
+        )
+      end
+
+      it 'returns :max_length when no tasks are enqueued' do
+        5.times{ subject.post{ nil } }
+        sleep(0.1)
+        subject.remaining_capacity.should eq expected_max
+      end
+
+      it 'returns the remaining capacity when tasks are enqueued' do
+        100.times{ subject.post{ sleep(0.5) } }
+        sleep(0.1)
+        subject.remaining_capacity.should < expected_max
+      end
+    end
+
+    context '#overload_policy' do
+
+      let!(:min_threads){ 1 }
+      let!(:max_threads){ 1 }
+      let!(:idletime){ 60 }
+      let!(:max_queue){ 1 }
+
+      context ':abort' do
+
+        subject do
+          described_class.new(
+            min_threads: min_threads,
+            max_threads: max_threads,
+            idletime: idletime,
+            max_queue: max_queue,
+            overflow_policy: :abort
+          )
+        end
+
+        specify '#post raises an error when the queue is at capacity' do
+          expect {
+            100.times{ subject.post{ sleep(1) } }
+          }.to raise_error(Concurrent::RejectedExecutionError)
+        end
+
+        specify '#<< raises an error when the queue is at capacity' do
+          expect {
+            100.times{ subject << proc { sleep(1) } }
+          }.to raise_error(Concurrent::RejectedExecutionError)
+        end
+
+        specify 'a #post task is never executed when the queue is at capacity' do
+          executed = Concurrent::AtomicFixnum.new(0)
+          100.times do
+            begin
+              subject.post{ executed.increment }
+            rescue
+            end
+          end
+          sleep(0.1)
+          executed.value.should < 100
+        end
+
+        specify 'a #<< task is never executed when the queue is at capacity' do
+          executed = Concurrent::AtomicFixnum.new(0)
+          100.times do
+            begin
+              subject << proc { executed.increment }
+            rescue
+            end
+          end
+          sleep(0.1)
+          executed.value.should < 100
+        end
+      end
+
+      context ':discard' do
+
+        subject do
+          described_class.new(
+            min_threads: min_threads,
+            max_threads: max_threads,
+            idletime: idletime,
+            max_queue: max_queue,
+            overflow_policy: :discard
+          )
+        end
+
+        specify 'a #post task is never executed when the queue is at capacity' do
+          executed = Concurrent::AtomicFixnum.new(0)
+          100.times do
+            subject.post{ executed.increment }
+          end
+          sleep(0.1)
+          executed.value.should < 100
+        end
+
+        specify 'a #<< task is never executed when the queue is at capacity' do
+          executed = Concurrent::AtomicFixnum.new(0)
+          100.times do
+            subject << proc { executed.increment }
+          end
+          sleep(0.1)
+          executed.value.should < 100
+        end
+      end
+
+      context ':caller_runs' do
+
+        subject do
+          described_class.new(
+            min_threads: 1,
+            max_threads: 1,
+            idletime: idletime,
+            max_queue: 1,
+            overflow_policy: :caller_runs
+          )
+        end
+
+        specify '#post does not create any new threads when the queue is at capacity' do
+          initial = Thread.list.length
+          5.times{ subject.post{ sleep(0.1) } }
+          Thread.list.length.should < initial + 5
+        end
+
+        specify '#post executes the task on the current thread when the queue is at capacity' do
+          subject.should_receive(:handle_overflow).with(any_args).at_least(:once)
+          5.times{ subject.post{ sleep(0.1) } }
+        end
+
+        specify '#post executes the task on the current thread when the queue is at capacity' do
+          bucket = []
+          5.times{|i| subject.post{ bucket.push(i) } }
+          sleep(0.1)
+          bucket.sort.should eq [0, 1, 2, 3, 4]
+        end
+
+        specify '#<< executes the task on the current thread when the queue is at capacity' do
+          subject.should_receive(:handle_overflow).with(any_args).at_least(:once)
+          5.times{ subject << proc { sleep(0.1) } }
+        end
+
+        specify '#post executes the task on the current thread when the queue is at capacity' do
+          bucket = []
+          5.times{|i| subject << proc { bucket.push(i) } }
+          sleep(0.1)
+          bucket.sort.should eq [0, 1, 2, 3, 4]
+        end
+      end
+    end
+  end
+end
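To summarize the overflow behavior the spec above pins down, here is a hedged configuration sketch; the option names (:min_threads, :max_threads, :idletime, :max_queue, :overflow_policy) and RejectedExecutionError come from the spec, not from separate documentation:

    require 'concurrent'

    # :abort raises once the queue is full; :discard silently drops tasks;
    # :caller_runs executes the task on the posting thread instead.
    pool = Concurrent::RubyThreadPoolExecutor.new(
      min_threads: 1,
      max_threads: 1,
      idletime: 60,
      max_queue: 1,
      overflow_policy: :abort
    )

    begin
      10.times { pool.post { sleep(1) } }
    rescue Concurrent::RejectedExecutionError => e
      puts "queue full: #{e.class}"
    end
    pool.kill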