concurrent-ruby 0.5.0 → 0.6.0.pre.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +88 -77
- data/lib/concurrent.rb +17 -2
- data/lib/concurrent/actor.rb +17 -0
- data/lib/concurrent/actor_context.rb +31 -0
- data/lib/concurrent/actor_ref.rb +39 -0
- data/lib/concurrent/agent.rb +12 -3
- data/lib/concurrent/async.rb +290 -0
- data/lib/concurrent/atomic.rb +5 -9
- data/lib/concurrent/cached_thread_pool.rb +39 -137
- data/lib/concurrent/channel/blocking_ring_buffer.rb +60 -0
- data/lib/concurrent/channel/buffered_channel.rb +83 -0
- data/lib/concurrent/channel/channel.rb +11 -0
- data/lib/concurrent/channel/probe.rb +19 -0
- data/lib/concurrent/channel/ring_buffer.rb +54 -0
- data/lib/concurrent/channel/unbuffered_channel.rb +34 -0
- data/lib/concurrent/channel/waitable_list.rb +38 -0
- data/lib/concurrent/configuration.rb +92 -0
- data/lib/concurrent/dataflow.rb +9 -3
- data/lib/concurrent/delay.rb +88 -0
- data/lib/concurrent/exchanger.rb +31 -0
- data/lib/concurrent/fixed_thread_pool.rb +28 -122
- data/lib/concurrent/future.rb +10 -5
- data/lib/concurrent/immediate_executor.rb +3 -2
- data/lib/concurrent/ivar.rb +2 -1
- data/lib/concurrent/java_cached_thread_pool.rb +45 -0
- data/lib/concurrent/java_fixed_thread_pool.rb +37 -0
- data/lib/concurrent/java_thread_pool_executor.rb +194 -0
- data/lib/concurrent/per_thread_executor.rb +23 -0
- data/lib/concurrent/postable.rb +2 -0
- data/lib/concurrent/processor_count.rb +125 -0
- data/lib/concurrent/promise.rb +42 -18
- data/lib/concurrent/ruby_cached_thread_pool.rb +37 -0
- data/lib/concurrent/ruby_fixed_thread_pool.rb +31 -0
- data/lib/concurrent/ruby_thread_pool_executor.rb +268 -0
- data/lib/concurrent/ruby_thread_pool_worker.rb +69 -0
- data/lib/concurrent/simple_actor_ref.rb +124 -0
- data/lib/concurrent/thread_local_var.rb +1 -1
- data/lib/concurrent/thread_pool_executor.rb +30 -0
- data/lib/concurrent/timer_task.rb +13 -10
- data/lib/concurrent/tvar.rb +212 -0
- data/lib/concurrent/utilities.rb +1 -0
- data/lib/concurrent/version.rb +1 -1
- data/spec/concurrent/actor_context_spec.rb +37 -0
- data/spec/concurrent/actor_ref_shared.rb +313 -0
- data/spec/concurrent/actor_spec.rb +9 -1
- data/spec/concurrent/agent_spec.rb +97 -96
- data/spec/concurrent/async_spec.rb +320 -0
- data/spec/concurrent/cached_thread_pool_shared.rb +137 -0
- data/spec/concurrent/channel/blocking_ring_buffer_spec.rb +149 -0
- data/spec/concurrent/channel/buffered_channel_spec.rb +151 -0
- data/spec/concurrent/channel/channel_spec.rb +37 -0
- data/spec/concurrent/channel/probe_spec.rb +49 -0
- data/spec/concurrent/channel/ring_buffer_spec.rb +126 -0
- data/spec/concurrent/channel/unbuffered_channel_spec.rb +132 -0
- data/spec/concurrent/configuration_spec.rb +134 -0
- data/spec/concurrent/dataflow_spec.rb +109 -27
- data/spec/concurrent/delay_spec.rb +77 -0
- data/spec/concurrent/exchanger_spec.rb +66 -0
- data/spec/concurrent/fixed_thread_pool_shared.rb +136 -0
- data/spec/concurrent/future_spec.rb +60 -51
- data/spec/concurrent/global_thread_pool_shared.rb +33 -0
- data/spec/concurrent/immediate_executor_spec.rb +4 -25
- data/spec/concurrent/ivar_spec.rb +36 -23
- data/spec/concurrent/java_cached_thread_pool_spec.rb +64 -0
- data/spec/concurrent/java_fixed_thread_pool_spec.rb +64 -0
- data/spec/concurrent/java_thread_pool_executor_spec.rb +71 -0
- data/spec/concurrent/obligation_shared.rb +32 -20
- data/spec/concurrent/{global_thread_pool_spec.rb → per_thread_executor_spec.rb} +9 -13
- data/spec/concurrent/processor_count_spec.rb +20 -0
- data/spec/concurrent/promise_spec.rb +29 -41
- data/spec/concurrent/ruby_cached_thread_pool_spec.rb +69 -0
- data/spec/concurrent/ruby_fixed_thread_pool_spec.rb +39 -0
- data/spec/concurrent/ruby_thread_pool_executor_spec.rb +183 -0
- data/spec/concurrent/simple_actor_ref_spec.rb +219 -0
- data/spec/concurrent/thread_pool_class_cast_spec.rb +40 -0
- data/spec/concurrent/thread_pool_executor_shared.rb +155 -0
- data/spec/concurrent/thread_pool_shared.rb +98 -36
- data/spec/concurrent/tvar_spec.rb +137 -0
- data/spec/spec_helper.rb +4 -0
- data/spec/support/functions.rb +4 -0
- metadata +85 -20
- data/lib/concurrent/cached_thread_pool/worker.rb +0 -91
- data/lib/concurrent/channel.rb +0 -63
- data/lib/concurrent/fixed_thread_pool/worker.rb +0 -54
- data/lib/concurrent/global_thread_pool.rb +0 -42
- data/spec/concurrent/cached_thread_pool_spec.rb +0 -101
- data/spec/concurrent/channel_spec.rb +0 -86
- data/spec/concurrent/fixed_thread_pool_spec.rb +0 -92
- data/spec/concurrent/uses_global_thread_pool_shared.rb +0 -64

data/spec/concurrent/channel/unbuffered_channel_spec.rb
@@ -0,0 +1,132 @@
+require 'spec_helper'
+
+module Concurrent
+
+  describe UnbufferedChannel do
+
+    let!(:channel) { subject }
+    let(:probe) { Probe.new }
+
+    context 'with one thread' do
+
+      context 'without timeout' do
+
+        describe '#push' do
+          it 'should block' do
+            t = Thread.new { channel.push 5 }
+            sleep(0.05)
+            t.status.should eq 'sleep'
+          end
+        end
+
+        describe '#pop' do
+          it 'should block' do
+            t = Thread.new { channel.pop }
+            sleep(0.05)
+            t.status.should eq 'sleep'
+          end
+        end
+
+      end
+
+    end
+
+    context 'cooperating threads' do
+
+      it 'passes the pushed value to thread waiting on pop' do
+        result = nil
+
+        Thread.new { channel.push 42 }
+        Thread.new { result = channel.pop; }
+
+        sleep(0.1)
+
+        result.should eq 42
+      end
+
+      it 'passes the pushed value to only one thread' do
+        result = []
+
+        Thread.new { channel.push 37 }
+        Thread.new { result << channel.pop }
+        Thread.new { result << channel.pop }
+
+        sleep(0.1)
+
+        result.should have(1).items
+      end
+
+      it 'gets the pushed value when ready' do
+        result = nil
+
+        Thread.new { result = channel.pop; }
+        Thread.new { channel.push 57 }
+
+        sleep(0.1)
+
+        result.should eq 57
+      end
+    end
+
+    describe 'select' do
+
+      it 'does not block' do
+        t = Thread.new { channel.select(probe) }
+
+        sleep(0.05)
+
+        t.status.should eq false
+      end
+
+      it 'gets notified by writer thread' do
+        channel.select(probe)
+
+        Thread.new { channel.push 82 }
+
+        probe.value.should eq 82
+      end
+
+      it 'ignores already set probes and waits for a new one' do
+        probe.set(27)
+
+        channel.select(probe)
+
+        t = Thread.new { channel.push 72 }
+
+        sleep(0.05)
+
+        t.status.should eq 'sleep'
+
+        new_probe = Probe.new
+
+        channel.select(new_probe)
+
+        sleep(0.05)
+
+        new_probe.value.should eq 72
+      end
+
+    end
+
+    describe 'probe set' do
+
+      it 'has size zero after creation' do
+        channel.probe_set_size.should eq 0
+      end
+
+      it 'increases size after a select' do
+        channel.select(probe)
+        channel.probe_set_size.should eq 1
+      end
+
+      it 'decreases size after a removal' do
+        channel.select(probe)
+        channel.remove_probe(probe)
+        channel.probe_set_size.should eq 0
+      end
+
+    end
+
+
+  end
+end
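
For context, the spec above exercises an unbuffered (synchronous) channel: `push` blocks until another thread calls `pop`, and a `Probe` registered via `select` receives a value asynchronously. A minimal usage sketch along those lines, assuming the 0.6.0.pre.1 API is loaded via `require 'concurrent'` and exposes the class as `Concurrent::UnbufferedChannel`:

```ruby
require 'concurrent'

# Rendezvous between two threads: push blocks until a reader pops,
# and pop blocks until a writer pushes (per the spec above).
channel = Concurrent::UnbufferedChannel.new

consumer = Thread.new { puts "got #{channel.pop}" }
producer = Thread.new { channel.push 42 }

[producer, consumer].each(&:join)
```
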

data/spec/concurrent/configuration_spec.rb
@@ -0,0 +1,134 @@
+require 'spec_helper'
+
+module Concurrent
+
+  describe OptionsParser do
+
+    subject do
+      Class.new{ include OptionsParser }.new
+    end
+
+    let(:executor){ ImmediateExecutor.new }
+
+    let(:task_pool){ ImmediateExecutor.new }
+    let(:operation_pool){ ImmediateExecutor.new }
+
+    context '#get_executor_from' do
+
+      it 'returns the given :executor' do
+        subject.get_executor_from(executor: executor).should eq executor
+      end
+
+      it 'returns the global operation pool when :operation is true' do
+        Concurrent.configuration.should_receive(:global_operation_pool).
+          and_return(:operation_pool)
+        subject.get_executor_from(operation: true)
+      end
+
+      it 'returns the global task pool when :operation is false' do
+        Concurrent.configuration.should_receive(:global_task_pool).
+          and_return(:task_pool)
+        subject.get_executor_from(operation: false)
+      end
+
+      it 'returns the global operation pool when :task is false' do
+        Concurrent.configuration.should_receive(:global_operation_pool).
+          and_return(:operation_pool)
+        subject.get_executor_from(task: false)
+      end
+
+      it 'returns the global task pool when :task is true' do
+        Concurrent.configuration.should_receive(:global_task_pool).
+          and_return(:task_pool)
+        subject.get_executor_from(task: true)
+      end
+
+      it 'returns the global task pool when :executor is nil' do
+        Concurrent.configuration.should_receive(:global_task_pool).
+          and_return(:task_pool)
+        subject.get_executor_from(executor: nil)
+      end
+
+      it 'returns the global task pool when no option is given' do
+        Concurrent.configuration.should_receive(:global_task_pool).
+          and_return(:task_pool)
+        subject.get_executor_from
+      end
+
+      specify ':executor overrides :operation' do
+        subject.get_executor_from(executor: executor, operation: true).
+          should eq executor
+      end
+
+      specify ':executor overrides :task' do
+        subject.get_executor_from(executor: executor, task: true).
+          should eq executor
+      end
+
+      specify ':operation overrides :task' do
+        Concurrent.configuration.should_receive(:global_operation_pool).
+          and_return(:operation_pool)
+        subject.get_executor_from(operation: true, task: true)
+      end
+    end
+  end
+
+  describe Configuration do
+
+    context 'global task pool' do
+
+      specify 'reader creates a default pool when first called if none exists' do
+        Concurrent.configuration.global_task_pool.should_not be_nil
+        Concurrent.configuration.global_task_pool.should respond_to(:post)
+      end
+
+      specify 'writer memoizes the given executor' do
+        executor = ImmediateExecutor.new
+        Concurrent.configure do |config|
+          config.global_task_pool = executor
+        end
+        Concurrent.configuration.global_task_pool.should eq executor
+      end
+
+      specify 'writer raises an exception if called twice' do
+        executor = ImmediateExecutor.new
+        Concurrent.configure do |config|
+          config.global_task_pool = executor
+        end
+        expect {
+          Concurrent.configure do |config|
+            config.global_task_pool = executor
+          end
+        }.to raise_error(ConfigurationError)
+      end
+    end
+
+    context 'global operation pool' do
+
+      specify 'reader creates a default pool when first called if none exists' do
+        Concurrent.configuration.global_operation_pool.should_not be_nil
+        Concurrent.configuration.global_operation_pool.should respond_to(:post)
+      end
+
+      specify 'writer memoizes the given executor' do
+        executor = ImmediateExecutor.new
+        Concurrent.configure do |config|
+          config.global_operation_pool = executor
+        end
+        Concurrent.configuration.global_operation_pool.should eq executor
+      end
+
+      specify 'writer raises an exception if called twice' do
+        executor = ImmediateExecutor.new
+        Concurrent.configure do |config|
+          config.global_operation_pool = executor
+        end
+        expect {
+          Concurrent.configure do |config|
+            config.global_operation_pool = executor
+          end
+        }.to raise_error(ConfigurationError)
+      end
+    end
+  end
+end
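
The configuration spec above implies the new global executor setup: `Concurrent.configure` yields a config object whose `global_task_pool` and `global_operation_pool` writers may each be assigned once (a second assignment raises `ConfigurationError`), and the readers lazily create default pools that respond to `#post`. A hedged sketch of that usage; the `FixedThreadPool` sizes are illustrative:

```ruby
require 'concurrent'

# Replace the lazily created default pools exactly once, before first use.
Concurrent.configure do |config|
  config.global_task_pool      = Concurrent::FixedThreadPool.new(2)
  config.global_operation_pool = Concurrent::FixedThreadPool.new(2)
end

# Both pools respond to #post, as the spec asserts.
Concurrent.configuration.global_task_pool.post { puts 'task pool job' }
```
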

data/spec/concurrent/dataflow_spec.rb
@@ -4,116 +4,198 @@ module Concurrent
 
   describe 'dataflow' do
 
-
-
-    end
+    let(:executor) { ImmediateExecutor.new }
+    let(:root_executor) { PerThreadExecutor.new }
 
     it 'raises an exception when no block given' do
       expect { Concurrent::dataflow }.to raise_error(ArgumentError)
+      expect { Concurrent::dataflow_with(root_executor) }.to raise_error(ArgumentError)
+    end
+
+    specify '#dataflow uses the global task pool' do
+      input = Future.execute{0}
+      Concurrent.should_receive(:dataflow_with).once.
+        with(Concurrent.configuration.global_task_pool, input)
+      Concurrent::dataflow(input){0}
+    end
+
+    specify '#dataflow_with uses the given executor' do
+      input = Future.execute{0}
+      result = Future.new{0}
+
+      Future.should_receive(:new).with(executor: root_executor).and_return(result)
+      Concurrent::dataflow_with(root_executor, input){0}
+    end
+
+    specify '#dataflow_with raises an exception when no executor given' do
+      expect {
+        Concurrent::dataflow_with(nil){ nil }
+      }.to raise_error(ArgumentError)
     end
 
     it 'accepts zero or more dependencies' do
       Concurrent::dataflow(){0}
       Concurrent::dataflow(Future.execute{0}){0}
       Concurrent::dataflow(Future.execute{0}, Future.execute{0}){0}
+
+      Concurrent::dataflow_with(root_executor, ){0}
+      Concurrent::dataflow_with(root_executor, Future.execute{0}){0}
+      Concurrent::dataflow_with(root_executor, Future.execute{0}, Future.execute{0}){0}
     end
 
     it 'accepts uncompleted dependencies' do
-      d = Future.new{0}
+      d = Future.new(executor: executor){0}
       Concurrent::dataflow(d){0}
       d.execute
+
+      d = Future.new(executor: executor){0}
+      Concurrent::dataflow_with(root_executor, d){0}
+      d.execute
     end
 
     it 'accepts completed dependencies' do
-      d = Future.new{0}
+      d = Future.new(executor: executor){0}
       d.execute
       Concurrent::dataflow(d){0}
+
+      d = Future.new(executor: executor){0}
+      d.execute
+      Concurrent::dataflow_with(root_executor, d){0}
     end
 
     it 'raises an exception if any dependencies are not IVars' do
       expect { Concurrent::dataflow(nil) }.to raise_error(ArgumentError)
       expect { Concurrent::dataflow(Future.execute{0}, nil) }.to raise_error(ArgumentError)
       expect { Concurrent::dataflow(nil, Future.execute{0}) }.to raise_error(ArgumentError)
+
+      expect { Concurrent::dataflow_with(root_executor, nil) }.to raise_error(ArgumentError)
+      expect { Concurrent::dataflow_with(root_executor, Future.execute{0}, nil) }.to raise_error(ArgumentError)
+      expect { Concurrent::dataflow_with(root_executor, nil, Future.execute{0}) }.to raise_error(ArgumentError)
     end
 
     it 'returns a Future' do
       Concurrent::dataflow{0}.should be_a(Future)
+      Concurrent::dataflow{0}.should be_a(Future)
     end
 
     context 'does not schedule the Future' do
 
       specify 'if no dependencies are completed' do
-        d = Future.new{0}
+        d = Future.new(executor: executor){0}
         f = Concurrent::dataflow(d){0}
         f.should be_unscheduled
         d.execute
+
+        d = Future.new(executor: executor){0}
+        f = Concurrent::dataflow_with(root_executor, d){0}
+        f.should be_unscheduled
+        d.execute
       end
 
       specify 'if one dependency of two is completed' do
-        d1 = Future.new{0}
-        d2 = Future.new{0}
+        d1 = Future.new(executor: executor){0}
+        d2 = Future.new(executor: executor){0}
         f = Concurrent::dataflow(d1, d2){0}
         d1.execute
         f.should be_unscheduled
         d2.execute
+
+        d1 = Future.new(executor: executor){0}
+        d2 = Future.new(executor: executor){0}
+        f = Concurrent::dataflow_with(root_executor, d1, d2){0}
+        d1.execute
+        f.should be_unscheduled
+        d2.execute
       end
     end
 
     context 'schedules the Future when all dependencies are available' do
 
       specify 'if there is just one' do
-        d = Future.new{0}
+        d = Future.new(executor: executor){0}
         f = Concurrent::dataflow(d){0}
         d.execute
         f.value.should eq 0
+
+        d = Future.new(executor: executor){0}
+        f = Concurrent::dataflow_with(root_executor, d){0}
+        d.execute
+        f.value.should eq 0
       end
 
       specify 'if there is more than one' do
-        d1 = Future.new{0}
-        d2 = Future.new{0}
+        d1 = Future.new(executor: executor){0}
+        d2 = Future.new(executor: executor){0}
         f = Concurrent::dataflow(d1, d2){0}
         d1.execute
         d2.execute
         f.value.should eq 0
+
+        d1 = Future.new(executor: executor){0}
+        d2 = Future.new(executor: executor){0}
+        f = Concurrent::dataflow_with(root_executor, d1, d2){0}
+        d1.execute
+        d2.execute
+        f.value.should eq 0
       end
     end
 
     context 'counts already executed dependencies' do
 
       specify 'if there is just one' do
-        d = Future.new{0}
+        d = Future.new(executor: executor){0}
         d.execute
         f = Concurrent::dataflow(d){0}
         f.value.should eq 0
+
+        d = Future.new(executor: executor){0}
+        d.execute
+        f = Concurrent::dataflow_with(root_executor, d){0}
+        f.value.should eq 0
      end
 
       specify 'if there is more than one' do
-        d1 = Future.new{0}
-        d2 = Future.new{0}
+        d1 = Future.new(executor: executor){0}
+        d2 = Future.new(executor: executor){0}
         d1.execute
         d2.execute
         f = Concurrent::dataflow(d1, d2){0}
         f.value.should eq 0
+
+        d1 = Future.new(executor: executor){0}
+        d2 = Future.new(executor: executor){0}
+        d1.execute
+        d2.execute
+        f = Concurrent::dataflow_with(root_executor, d1, d2){0}
+        f.value.should eq 0
       end
     end
 
     context 'passes the values of dependencies into the block' do
 
       specify 'if there is just one' do
-        d = Future.new{14}
-        f = Concurrent::dataflow(d)
-
-
+        d = Future.new(executor: executor){14}
+        f = Concurrent::dataflow(d){|v| v }
+        d.execute
+        f.value.should eq 14
+
+        d = Future.new(executor: executor){14}
+        f = Concurrent::dataflow_with(root_executor, d){|v| v }
         d.execute
         f.value.should eq 14
       end
 
       specify 'if there is more than one' do
-        d1 = Future.new{14}
-        d2 = Future.new{2}
-        f = Concurrent::dataflow(d1, d2)
-
-
+        d1 = Future.new(executor: executor){14}
+        d2 = Future.new(executor: executor){2}
+        f = Concurrent::dataflow(d1, d2) {|v1, v2| v1 + v2}
+        d1.execute
+        d2.execute
+        f.value.should eq 16
+
+        d1 = Future.new(executor: executor){14}
+        d2 = Future.new(executor: executor){2}
+        f = Concurrent::dataflow_with(root_executor, d1, d2) {|v1, v2| v1 + v2}
         d1.execute
         d2.execute
         f.value.should eq 16
@@ -122,7 +204,7 @@ module Concurrent
 
     context 'module function' do
 
-      it 'can be called as Concurrent.dataflow' do
+      it 'can be called as Concurrent.dataflow and Concurrent.dataflow_with' do
 
         def fib_with_dot(n)
           if n < 2
@@ -130,7 +212,7 @@ module Concurrent
           else
             n1 = fib_with_dot(n - 1)
             n2 = fib_with_dot(n - 2)
-            Concurrent.
+            Concurrent.dataflow_with(root_executor, n1, n2) { n1.value + n2.value }
           end
         end
 
@@ -139,7 +221,7 @@ module Concurrent
         expected.value.should eq 377
       end
 
-      it 'can be called as Concurrent::dataflow' do
+      it 'can be called as Concurrent::dataflow and Concurrent::dataflow_with' do
 
         def fib_with_colons(n)
           if n < 2
@@ -147,7 +229,7 @@ module Concurrent
           else
            n1 = fib_with_colons(n - 1)
            n2 = fib_with_colons(n - 2)
-            Concurrent::
+            Concurrent::dataflow_with(root_executor, n1, n2) { n1.value + n2.value }
           end
         end
 
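
Taken together, the dataflow changes above distinguish `Concurrent.dataflow`, which schedules the result on the global task pool, from the new `Concurrent.dataflow_with(executor, *inputs)`, which takes an explicit executor as its first argument. A small sketch of the new form, assuming a `PerThreadExecutor` like the spec's `root_executor`:

```ruby
require 'concurrent'

executor = Concurrent::PerThreadExecutor.new

a = Concurrent::Future.execute { 14 }
b = Concurrent::Future.execute { 2 }

# The block runs once both dependencies are fulfilled; their values are
# passed in as block arguments, mirroring the spec above.
sum = Concurrent.dataflow_with(executor, a, b) { |x, y| x + y }
puts sum.value  # => 16
```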