concurrent-ruby 0.1.1 → 0.2.0

Files changed (47)
  1. checksums.yaml +4 -4
  2. data/README.md +48 -1
  3. data/lib/concurrent.rb +8 -1
  4. data/lib/concurrent/agent.rb +19 -40
  5. data/lib/concurrent/cached_thread_pool.rb +10 -11
  6. data/lib/concurrent/defer.rb +8 -12
  7. data/lib/concurrent/executor.rb +95 -0
  8. data/lib/concurrent/fixed_thread_pool.rb +12 -6
  9. data/lib/concurrent/functions.rb +120 -0
  10. data/lib/concurrent/future.rb +8 -20
  11. data/lib/concurrent/global_thread_pool.rb +13 -0
  12. data/lib/concurrent/goroutine.rb +5 -1
  13. data/lib/concurrent/null_thread_pool.rb +22 -0
  14. data/lib/concurrent/obligation.rb +10 -64
  15. data/lib/concurrent/promise.rb +38 -60
  16. data/lib/concurrent/reactor.rb +166 -0
  17. data/lib/concurrent/reactor/drb_async_demux.rb +83 -0
  18. data/lib/concurrent/reactor/tcp_sync_demux.rb +131 -0
  19. data/lib/concurrent/supervisor.rb +100 -0
  20. data/lib/concurrent/thread_pool.rb +16 -5
  21. data/lib/concurrent/utilities.rb +8 -0
  22. data/lib/concurrent/version.rb +1 -1
  23. data/md/defer.md +4 -4
  24. data/md/executor.md +187 -0
  25. data/md/promise.md +2 -0
  26. data/md/thread_pool.md +27 -0
  27. data/spec/concurrent/agent_spec.rb +8 -27
  28. data/spec/concurrent/cached_thread_pool_spec.rb +14 -1
  29. data/spec/concurrent/defer_spec.rb +17 -21
  30. data/spec/concurrent/event_machine_defer_proxy_spec.rb +159 -149
  31. data/spec/concurrent/executor_spec.rb +200 -0
  32. data/spec/concurrent/fixed_thread_pool_spec.rb +2 -3
  33. data/spec/concurrent/functions_spec.rb +217 -0
  34. data/spec/concurrent/future_spec.rb +4 -11
  35. data/spec/concurrent/global_thread_pool_spec.rb +38 -0
  36. data/spec/concurrent/goroutine_spec.rb +15 -0
  37. data/spec/concurrent/null_thread_pool_spec.rb +54 -0
  38. data/spec/concurrent/obligation_shared.rb +127 -116
  39. data/spec/concurrent/promise_spec.rb +16 -14
  40. data/spec/concurrent/reactor/drb_async_demux_spec.rb +196 -0
  41. data/spec/concurrent/reactor/tcp_sync_demux_spec.rb +410 -0
  42. data/spec/concurrent/reactor_spec.rb +364 -0
  43. data/spec/concurrent/supervisor_spec.rb +258 -0
  44. data/spec/concurrent/thread_pool_shared.rb +156 -161
  45. data/spec/concurrent/utilities_spec.rb +30 -1
  46. data/spec/spec_helper.rb +13 -0
  47. metadata +38 -9
@@ -35,7 +35,7 @@ module Concurrent
   end
 
   def shutdown?
- return ! running?
+ return @status == :shutdown
   end
 
   def killed?
@@ -43,10 +43,14 @@ module Concurrent
   end
 
   def shutdown
- atomic {
-   @pool.size.times{ @queue << :stop }
-   @status = :shuttingdown
- }
+ mutex.synchronize do
+   if @pool.empty?
+     @status = :shutdown
+   else
+     @status = :shuttingdown
+     @pool.size.times{ @queue << :stop }
+   end
+ end
   end
 
   def wait_for_termination(timeout = nil)
@@ -61,5 +65,12 @@ module Concurrent
   self.post(&block)
   return self
   end
+
+ protected
+
+ # @private
+ def mutex # :nodoc:
+   @mutex || Mutex.new
+ end
   end
   end
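Below is a minimal usage sketch of the thread-pool lifecycle touched by this hunk, assuming the `FixedThreadPool` API shown elsewhere in this release (`<<`/`post`, `shutdown`, `shutdown?`, `wait_for_termination`); the pool size and sleep durations are arbitrary:

```ruby
require 'concurrent'

pool = Concurrent::FixedThreadPool.new(2)

pool << proc{ sleep(0.1); puts 'task 1 done' }
pool << proc{ sleep(0.1); puts 'task 2 done' }

pool.shutdown                 # queues a :stop message per worker, or goes straight
                              # to :shutdown when the pool is already empty
pool.wait_for_termination(1)  # block for up to 1 second while the workers exit
pool.shutdown?                #=> true once every worker has stopped
```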
@@ -20,5 +20,13 @@ module Kernel
   }.resume
   end
   module_function :atomic
+ end
+
+ class Mutex
 
+ def sync_with_timeout(timeout, &block)
+   Timeout::timeout(timeout) {
+     synchronize(&block)
+   }
+ end
   end
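A brief sketch of the new `Mutex#sync_with_timeout` helper, assuming `require 'concurrent'` loads the extension shown in this hunk; the timeout value and the block body are illustrative:

```ruby
require 'timeout'
require 'concurrent'

mutex = Mutex.new

begin
  # Acquire the lock and run the block, but give up after 0.5 seconds;
  # Timeout::timeout raises Timeout::Error if the whole operation runs long.
  answer = mutex.sync_with_timeout(0.5) { sleep(0.1); 42 }
  puts answer #=> 42
rescue Timeout::Error
  puts 'lock acquisition or the block itself took too long'
end
```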
@@ -1,3 +1,3 @@
   module Concurrent
- VERSION = '0.1.1'
+ VERSION = '0.2.0'
   end
@@ -2,14 +2,14 @@
 
   In the pantheon of concurrency objects a `Defer` sits somewhere between `Future` and `Promise`.
   Inspired by [EventMachine's *defer* method](https://github.com/eventmachine/eventmachine/wiki/EM::Deferrable-and-EM.defer),
- a `Defer` can be considered a non-blocking `Future` or a simplified, non-blocking `Promise`.
+ a `Defer` can be considered a non-blocking `Future` or a simplified, non-blocking `Promise`. Defers run on the global thread pool.
 
   Unlike `Future` and `Promise` a defer is non-blocking. The deferred *operation* is performed on another
   thread. If the *operation* is successful an optional *callback* is called on the same thread as the *operation*.
   The result of the *operation* is passed to the *callback*. If the *operation* fails (by raising an exception)
- then an optional *errorback* (error callback) is called on
- the same thread as the *operation*. The raised exception is passed to the *errorback*. The calling thread is
- never aware of the result of the *operation*. This approach fits much more cleanly within an
+ then an optional *errorback* (error callback) is called on the same thread as the *operation*. The raised
+ exception is passed to the *errorback*. The calling thread is never aware of the result of the *operation*.
+ This approach fits much more cleanly within an
   [event-driven](http://en.wikipedia.org/wiki/Event-driven_programming) application.
 
   The operation of a `Defer` can easily be simulated using either `Future` or `Promise` and traditional branching
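A minimal sketch of the semantics described above, using the fluent `then`/`rescue`/`go` style exercised by the specs later in this diff; the operation body and the trailing sleep are illustrative:

```ruby
require 'concurrent'

# The operation, callback, and errorback all run on a background thread
# (the global thread pool); the calling thread never sees the result.
Concurrent::Defer.new{ 10 * 10 }.
  then{|result| puts "operation returned #{result}" }.
  rescue{|ex| puts "operation failed: #{ex}" }.
  go

sleep(0.1) # give the deferred operation a moment to finish
```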
@@ -0,0 +1,187 @@
+ # Being of Sound Mind
+
+ A very common concurrency pattern is to run a thread that performs a task at regular
+ intervals. The thread that performs the task sleeps for the given interval then
+ wakes up and performs the task. Lather, rinse, repeat... This pattern causes two
+ problems. First, it is difficult to test the business logic of the task because the
+ task itself is tightly coupled with the threading. Second, an exception in the task
+ can cause the entire thread to abend. In a long-running application where the task
+ thread is intended to run for days/weeks/years a crashed task thread can pose a real
+ problem. The `Executor` class alleviates both problems.
+
+ When an executor is launched it starts a thread for monitoring the execution interval.
+ The executor thread does not perform the task, however. Instead, the executor
+ launches the task on a separate thread. The advantage of this approach is that if
+ the task crashes it will only kill the task thread, not the executor thread. The
+ executor thread can then log the success or failure of the task. The executor
+ can even be configured with a timeout value allowing it to kill a task that runs
+ too long and then log the error.
+
+ One other advantage of the `Executor` class is that it forces the business logic to
+ be completely decoupled from the threading logic. The business logic can be tested
+ separately then passed to an executor for scheduling and running.
+
+ Unlike some of the other concurrency objects in the library, executors do not
+ run on the global thread pool. In my experience the types of tasks that will benefit from
+ the `Executor` class tend to also be long running. For this reason they get their
+ own thread every time the task is executed.
+
+ ## ExecutionContext
+
+ When an executor is run the return value is an `ExecutionContext` object. An
+ `ExecutionContext` object has several attribute readers (`#name`, `#execution_interval`,
+ and `#timeout_interval`). It also provides several `Thread` operations which can
+ be performed against the internal thread. These include `#status`, `#join`, and
+ `#kill`.
+
+ ## Custom Logging
+
+ An executor will write a log message to standard out at the completion of every
+ task run. When the task is successful the log message is tagged at the `:info`
+ level. When the task times out the log message is tagged at the `:warn` level.
+ When the task fails to complete (most likely because of an exception) the log
+ message is tagged at the `:error` level.
+
+ The default logging behavior can be overridden by passing a `proc` to the executor
+ on creation. The `proc` will be passed three (3) arguments every time it is called:
+ the executor `name`, the log `level`, and the log `msg` (message). The `proc` can do
+ whatever it wants with these arguments.
+
+ ## Examples
+
+ A basic example:
+
+ ```ruby
+ require 'concurrent'
+
+ ec = Concurrent::Executor.run('Foo'){ puts 'Boom!' }
+
+ ec.name #=> "Foo"
+ ec.execution_interval #=> 60 == Concurrent::Executor::EXECUTION_INTERVAL
+ ec.timeout_interval #=> 30 == Concurrent::Executor::TIMEOUT_INTERVAL
+ ec.status #=> "sleep"
+
+ # wait 60 seconds...
+ #=> 'Boom!'
+ #=> ' INFO (2013-08-02 23:20:15) Foo: execution completed successfully'
+
+ ec.kill #=> true
+ ```
+
+ Both the `execution_interval` and the `timeout_interval` can be configured:
+
+ ```ruby
+ ec = Concurrent::Executor.run('Foo', execution_interval: 5, timeout_interval: 5) do
+   puts 'Boom!'
+ end
+
+ ec.execution_interval #=> 5
+ ec.timeout_interval #=> 5
+ ```
+
+ By default an `Executor` will wait for `:execution_interval` seconds before running the block.
+ To run the block immediately set the `:run_now` option to `true`:
+
+ ```ruby
+ ec = Concurrent::Executor.run('Foo', run_now: true){ puts 'Boom!' }
+ #=> 'Boom!'
+ #=> ' INFO (2013-08-15 21:35:14) Foo: execution completed successfully'
+ ec.status #=> "sleep"
+ ```
+
+ A simple example with timeout and task exception:
+
+ ```ruby
+ ec = Concurrent::Executor.run('Foo', execution_interval: 1, timeout_interval: 1){ sleep(10) }
+
+ #=> WARN (2013-08-02 23:45:26) Foo: execution timed out after 1 seconds
+ #=> WARN (2013-08-02 23:45:28) Foo: execution timed out after 1 seconds
+ #=> WARN (2013-08-02 23:45:30) Foo: execution timed out after 1 seconds
+
+ ec = Concurrent::Executor.run('Foo', execution_interval: 1){ raise StandardError }
+
+ #=> ERROR (2013-08-02 23:47:31) Foo: execution failed with error 'StandardError'
+ #=> ERROR (2013-08-02 23:47:32) Foo: execution failed with error 'StandardError'
+ #=> ERROR (2013-08-02 23:47:33) Foo: execution failed with error 'StandardError'
+ ```
+
+ For custom logging, simply provide a `proc` when creating an executor:
+
+ ```ruby
+ file_logger = proc do |name, level, msg|
+   open('executor.log', 'a') do |f|
+     f << ("%5s (%s) %s: %s\n" % [level.upcase, Time.now.strftime("%F %T"), name, msg])
+   end
+ end
+
+ ec = Concurrent::Executor.run('Foo', execution_interval: 5, logger: file_logger) do
+   puts 'Boom!'
+ end
+
+ # the log file contains
+ # INFO (2013-08-02 23:30:19) Foo: execution completed successfully
+ # INFO (2013-08-02 23:30:24) Foo: execution completed successfully
+ # INFO (2013-08-02 23:30:29) Foo: execution completed successfully
+ # INFO (2013-08-02 23:30:34) Foo: execution completed successfully
+ # INFO (2013-08-02 23:30:39) Foo: execution completed successfully
+ # INFO (2013-08-02 23:30:44) Foo: execution completed successfully
+ ```
+
+ It is also possible to access the default stdout logger from within a logger `proc`:
+
+ ```ruby
+ file_logger = proc do |name, level, msg|
+   Concurrent::Executor::STDOUT_LOGGER.call(name, level, msg)
+   open('executor.log', 'a') do |f|
+     f << ("%5s (%s) %s: %s\n" % [level.upcase, Time.now.strftime("%F %T"), name, msg])
+   end
+ end
+
+ ec = Concurrent::Executor.run('Foo', execution_interval: 5, logger: file_logger) do
+   puts 'Boom!'
+ end
+
+ # wait...
+
+ #=> Boom!
+ #=> INFO (2013-08-02 23:40:49) Foo: execution completed successfully
+ #=> Boom!
+ #=> INFO (2013-08-02 23:40:54) Foo: execution completed successfully
+ #=> Boom!
+ #=> INFO (2013-08-02 23:40:59) Foo: execution completed successfully
+
+ # and the log file contains
+ # INFO (2013-08-02 23:39:52) Foo: execution completed successfully
+ # INFO (2013-08-02 23:39:57) Foo: execution completed successfully
+ # INFO (2013-08-02 23:40:49) Foo: execution completed successfully
+ ```
+
+ ## Copyright
+
+ *Concurrent Ruby* is Copyright &copy; 2013 [Jerry D'Antonio](https://twitter.com/jerrydantonio).
+ It is free software and may be redistributed under the terms specified in the LICENSE file.
+
+ ## License
+
+ Released under the MIT license.
+
+ http://www.opensource.org/licenses/mit-license.php
+
+ > Permission is hereby granted, free of charge, to any person obtaining a copy
+ > of this software and associated documentation files (the "Software"), to deal
+ > in the Software without restriction, including without limitation the rights
+ > to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ > copies of the Software, and to permit persons to whom the Software is
+ > furnished to do so, subject to the following conditions:
+ >
+ > The above copyright notice and this permission notice shall be included in
+ > all copies or substantial portions of the Software.
+ >
+ > THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ > IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ > FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ > AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ > LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ > OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ > THE SOFTWARE.
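To make the decoupling argument above concrete, the task can be written as a plain callable, exercised directly in a spec, and only then handed to the executor. A sketch under that assumption; the `heartbeat` proc and its name are hypothetical:

```ruby
require 'concurrent'

# Business logic: a plain proc with no threading or scheduling concerns.
heartbeat = proc{ puts "alive at #{Time.now}" }

# A spec can simply call it: heartbeat.call

# Scheduling, timeout handling, and logging are left to the executor.
ec = Concurrent::Executor.run('Heartbeat', execution_interval: 5, &heartbeat)
ec.name #=> "Heartbeat"
```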
@@ -28,6 +28,8 @@ A *timeout* value can be passed to `value` to limit how long the call will block
   block indefinitely. If `0` the call will not block. Any other integer or float value will indicate the
   maximum number of seconds to block.
 
+ Promises run on the global thread pool.
+
   ## Examples
 
   Start by requiring promises
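A small illustration of the `#value` timeout behavior described at the top of this hunk, assuming a `Concurrent::Promise` that begins processing as soon as it is created; the sleep duration and the results shown are illustrative:

```ruby
require 'concurrent'

p = Concurrent::Promise.new{ sleep(0.5); 42 }

p.value(0)    #=> most likely nil: returns immediately while still pending
p.value(0.1)  #=> most likely nil: gives up after 0.1 seconds
p.value       #=> 42: blocks until the promise is fulfilled
```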
@@ -151,6 +151,12 @@ $GLOBAL_THREAD_POOL = Concurrent::FixedThreadPool.new(10)
   old_global_pool.shutdown
   ```
 
+ ### NullThreadPool
+
+ If for some reason an application would be better served by *not* having a global thread pool, the
+ `NullThreadPool` is provided. The `NullThreadPool` is compatible with the global thread pool but
+ it is not an actual thread pool. Instead it spawns a new thread on every call to the `post` method.
+
   ### EventMachine
 
   The [EventMachine](http://rubyeventmachine.com/) library (source [online](https://github.com/eventmachine/eventmachine))
@@ -167,6 +173,27 @@ require 'functional/concurrency'
   $GLOBAL_THREAD_POOL = EventMachineDeferProxy.new
   ```
 
+ ## Per-class Thread Pools
+
+ Many of the classes in this library use the global thread pool rather than creating new threads.
+ Classes such as `Agent`, `Defer`, and others follow this pattern. There may be cases where a
+ program would be better served by having one or more of these classes use a different thread pool.
+ All classes that use the global thread pool support a class-level `thread_pool` attribute accessor.
+ This property defaults to the global thread pool but can be changed at any time. Once changed, all
+ new instances of that class will use the new thread pool.
+
+ ```ruby
+ Concurrent::Agent.thread_pool == $GLOBAL_THREAD_POOL #=> true
+
+ $GLOBAL_THREAD_POOL = Concurrent::FixedThreadPool.new(10) #=> #<Concurrent::FixedThreadPool:0x007fe31130f1f0 ...
+
+ Concurrent::Agent.thread_pool == $GLOBAL_THREAD_POOL #=> false
+
+ Concurrent::Defer.thread_pool = Concurrent::CachedThreadPool.new #=> #<Concurrent::CachedThreadPool:0x007fef1c6b6b48 ...
+ Concurrent::Defer.thread_pool == Concurrent::Agent.thread_pool #=> false
+ Concurrent::Defer.thread_pool == $GLOBAL_THREAD_POOL #=> false
+ ```
+
   ## Copyright
 
   *Concurrent Ruby* is Copyright &copy; 2013 [Jerry D'Antonio](https://twitter.com/jerrydantonio).
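A short sketch of the `NullThreadPool` option described in this hunk, assuming it is a drop-in replacement for the global pool's `post` interface and that its constructor takes no arguments:

```ruby
require 'concurrent'

# Opt out of pooling entirely: every #post spawns a fresh thread.
$GLOBAL_THREAD_POOL = Concurrent::NullThreadPool.new

$GLOBAL_THREAD_POOL.post{ puts "running on #{Thread.current}" }
$GLOBAL_THREAD_POOL.post{ puts "and on another brand-new thread: #{Thread.current}" }
```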
@@ -16,7 +16,7 @@ module Concurrent
   end
 
   before(:each) do
- $GLOBAL_THREAD_POOL = CachedThreadPool.new
+ Agent.thread_pool = FixedThreadPool.new(1)
   end
 
   context '#initialize' do
@@ -38,7 +38,7 @@ module Concurrent
   end
 
   it 'spawns the worker thread' do
- $GLOBAL_THREAD_POOL.should_receive(:post).once.with(any_args())
+ Agent.thread_pool.should_receive(:post).once.with(any_args())
   Agent.new(0)
   end
   end
@@ -92,6 +92,8 @@ module Concurrent
   context '#post' do
 
   it 'adds the given block to the queue' do
+ subject.post{ sleep(100) }
+ sleep(0.1)
   before = subject.length
   subject.post{ nil }
   subject.post{ nil }
@@ -99,6 +101,8 @@ module Concurrent
   end
 
   it 'does not add to the queue when no block is given' do
+ subject.post{ sleep(100) }
+ sleep(0.1)
   before = subject.length
   subject.post
   subject.post{ nil }
@@ -113,6 +117,8 @@ module Concurrent
   end
 
   it 'should increase by one for each #post' do
+ subject.post{ sleep(100) }
+ sleep(0.1)
   subject.post{ sleep }
   subject.post{ sleep }
   subject.post{ sleep }
@@ -375,31 +381,6 @@ module Concurrent
   sleep(0.1)
   observer.value.should eq 10
   end
-
- it 'aliases #<< for Agent#post' do
-   subject << proc{ 100 }
-   sleep(0.1)
-   subject.value.should eq 100
-
-   subject << lambda{ 100 }
-   sleep(0.1)
-   subject.value.should eq 100
- end
-
- it 'aliases Kernel#agent for Agent.new' do
-   agent(10).should be_a(Agent)
- end
-
- it 'aliases Kernel#deref for #deref' do
-   deref(Agent.new(10)).should eq 10
-   deref(Agent.new(10), 10).should eq 10
- end
-
- it 'aliases Kernel:post for Agent#post' do
-   post(subject){ 100 }
-   sleep(0.1)
-   subject.value.should eq 100
- end
   end
   end
   end
@@ -10,6 +10,7 @@ module Concurrent
   it_should_behave_like 'Thread Pool'
 
   context '#initialize' do
+
   it 'aliases Concurrent#new_cached_thread_pool' do
   pool = Concurrent.new_cached_thread_pool
   pool.should be_a(CachedThreadPool)
@@ -20,7 +21,7 @@ module Concurrent
   context '#kill' do
 
   it 'kills all threads' do
- Thread.should_receive(:kill).exactly(5).times
+ Thread.should_receive(:kill).at_least(5).times
   pool = CachedThreadPool.new
   5.times{ sleep(0.1); pool << proc{ sleep(1) } }
   sleep(1)
@@ -108,5 +109,17 @@ module Concurrent
   subject.instance_variable_get(:@collector).status.should be_false
   end
   end
+
+ context '#status' do
+
+   it 'returns an empty collection when the pool is empty' do
+     subject.status.should be_empty
+   end
+
+   it 'returns one status object for each thread in the pool' do
+     3.times{ sleep(0.1); subject << proc{ sleep(0.5) } }
+     subject.status.length.should eq 3
+   end
+ end
   end
   end
@@ -4,11 +4,11 @@ module Concurrent
 
   describe Defer do
 
- context '#initialize' do
+ before(:each) do
+   Defer.thread_pool = FixedThreadPool.new(1)
+ end
 
- before(:each) do
-   $GLOBAL_THREAD_POOL = FixedThreadPool.new(1)
- end
+ context '#initialize' do
 
   it 'raises an exception if no block or operation given' do
   lambda {
@@ -19,23 +19,19 @@ module Concurrent
   it 'raises an exception if both a block and an operation given' do
   lambda {
   operation = proc{ nil }
- Defer.new(operation, nil, nil){ nil }
+ Defer.new(op: operation){ nil }
   }.should raise_error(ArgumentError)
   end
 
   it 'starts the thread if an operation is given' do
- $GLOBAL_THREAD_POOL.should_receive(:post).once.with(any_args())
+ Defer.thread_pool.should_receive(:post).once.with(any_args())
   operation = proc{ nil }
- Defer.new(operation, nil, nil)
+ Defer.new(op: operation)
   end
 
   it 'does not start the thread if neither a callback or errorback is given' do
- $GLOBAL_THREAD_POOL.should_not_receive(:post)
- Defer.new(nil, nil, nil){ nil }
- end
-
- it 'aliases Kernel#defer' do
-   defer{ nil }.should be_a(Defer)
+ Defer.thread_pool.should_not_receive(:post)
+ Defer.new{ nil }
   end
   end
 
@@ -56,14 +52,14 @@ module Concurrent
   it 'raises an exception if an operation was provided at construction' do
   lambda {
   operation = proc{ nil }
- Defer.new(operation, nil, nil).then{|result| nil }
+ Defer.new(op: operation).then{|result| nil }
   }.should raise_error(IllegalMethodCallError)
   end
 
   it 'raises an exception if a callback was provided at construction' do
   lambda {
   callback = proc{|result|nil }
- Defer.new(nil, callback, nil){ nil }.then{|result| nil }
+ Defer.new(callback: callback){ nil }.then{|result| nil }
   }.should raise_error(IllegalMethodCallError)
   end
 
@@ -90,14 +86,14 @@ module Concurrent
   it 'raises an exception if an operation was provided at construction' do
   lambda {
   operation = proc{ nil }
- Defer.new(operation, nil, nil).rescue{|ex| nil }
+ Defer.new(op: operation).rescue{|ex| nil }
   }.should raise_error(IllegalMethodCallError)
   end
 
   it 'raises an exception if an errorback was provided at construction' do
   lambda {
   errorback = proc{|ex| nil }
- Defer.new(nil, nil, errorback){ nil }.rescue{|ex| nil }
+ Defer.new(errorback: errorback){ nil }.rescue{|ex| nil }
   }.should raise_error(IllegalMethodCallError)
   end
 
@@ -123,14 +119,14 @@
 
   it 'starts the thread if not started' do
   deferred = Defer.new{ nil }
- $GLOBAL_THREAD_POOL.should_receive(:post).once.with(any_args())
+ Defer.thread_pool.should_receive(:post).once.with(any_args())
   deferred.go
   end
 
   it 'does nothing if called more than once' do
   deferred = Defer.new{ nil }
   deferred.go
- $GLOBAL_THREAD_POOL.should_not_receive(:post)
+ Defer.thread_pool.should_not_receive(:post)
   deferred.go
   end
 
@@ -138,8 +134,8 @@
   operation = proc{ nil }
   callback = proc{|result| nil }
   errorback = proc{|ex| nil }
- deferred = Defer.new(operation, callback, errorback)
- $GLOBAL_THREAD_POOL.should_not_receive(:post)
+ deferred = Defer.new(op: operation, callback: callback, errorback: errorback)
+ Defer.thread_pool.should_not_receive(:post)
   deferred.go
   end
   end
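For readers skimming the spec changes: the `Defer` constructor now takes keyword arguments (`op:`, `callback:`, `errorback:`) instead of positional ones. A hedged sketch of that style; the proc bodies are illustrative:

```ruby
require 'concurrent'

op        = proc{ 10 * 10 }
callback  = proc{|result| puts "got #{result}" }
errorback = proc{|ex| puts "failed: #{ex}" }

# Supplying the operation at construction posts it immediately to Defer.thread_pool;
# a later call to #go is then a no-op.
Concurrent::Defer.new(op: op, callback: callback, errorback: errorback)
```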