concurrent-ruby 0.5.0 → 0.6.0.pre.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +88 -77
- data/lib/concurrent.rb +17 -2
- data/lib/concurrent/actor.rb +17 -0
- data/lib/concurrent/actor_context.rb +31 -0
- data/lib/concurrent/actor_ref.rb +39 -0
- data/lib/concurrent/agent.rb +12 -3
- data/lib/concurrent/async.rb +290 -0
- data/lib/concurrent/atomic.rb +5 -9
- data/lib/concurrent/cached_thread_pool.rb +39 -137
- data/lib/concurrent/channel/blocking_ring_buffer.rb +60 -0
- data/lib/concurrent/channel/buffered_channel.rb +83 -0
- data/lib/concurrent/channel/channel.rb +11 -0
- data/lib/concurrent/channel/probe.rb +19 -0
- data/lib/concurrent/channel/ring_buffer.rb +54 -0
- data/lib/concurrent/channel/unbuffered_channel.rb +34 -0
- data/lib/concurrent/channel/waitable_list.rb +38 -0
- data/lib/concurrent/configuration.rb +92 -0
- data/lib/concurrent/dataflow.rb +9 -3
- data/lib/concurrent/delay.rb +88 -0
- data/lib/concurrent/exchanger.rb +31 -0
- data/lib/concurrent/fixed_thread_pool.rb +28 -122
- data/lib/concurrent/future.rb +10 -5
- data/lib/concurrent/immediate_executor.rb +3 -2
- data/lib/concurrent/ivar.rb +2 -1
- data/lib/concurrent/java_cached_thread_pool.rb +45 -0
- data/lib/concurrent/java_fixed_thread_pool.rb +37 -0
- data/lib/concurrent/java_thread_pool_executor.rb +194 -0
- data/lib/concurrent/per_thread_executor.rb +23 -0
- data/lib/concurrent/postable.rb +2 -0
- data/lib/concurrent/processor_count.rb +125 -0
- data/lib/concurrent/promise.rb +42 -18
- data/lib/concurrent/ruby_cached_thread_pool.rb +37 -0
- data/lib/concurrent/ruby_fixed_thread_pool.rb +31 -0
- data/lib/concurrent/ruby_thread_pool_executor.rb +268 -0
- data/lib/concurrent/ruby_thread_pool_worker.rb +69 -0
- data/lib/concurrent/simple_actor_ref.rb +124 -0
- data/lib/concurrent/thread_local_var.rb +1 -1
- data/lib/concurrent/thread_pool_executor.rb +30 -0
- data/lib/concurrent/timer_task.rb +13 -10
- data/lib/concurrent/tvar.rb +212 -0
- data/lib/concurrent/utilities.rb +1 -0
- data/lib/concurrent/version.rb +1 -1
- data/spec/concurrent/actor_context_spec.rb +37 -0
- data/spec/concurrent/actor_ref_shared.rb +313 -0
- data/spec/concurrent/actor_spec.rb +9 -1
- data/spec/concurrent/agent_spec.rb +97 -96
- data/spec/concurrent/async_spec.rb +320 -0
- data/spec/concurrent/cached_thread_pool_shared.rb +137 -0
- data/spec/concurrent/channel/blocking_ring_buffer_spec.rb +149 -0
- data/spec/concurrent/channel/buffered_channel_spec.rb +151 -0
- data/spec/concurrent/channel/channel_spec.rb +37 -0
- data/spec/concurrent/channel/probe_spec.rb +49 -0
- data/spec/concurrent/channel/ring_buffer_spec.rb +126 -0
- data/spec/concurrent/channel/unbuffered_channel_spec.rb +132 -0
- data/spec/concurrent/configuration_spec.rb +134 -0
- data/spec/concurrent/dataflow_spec.rb +109 -27
- data/spec/concurrent/delay_spec.rb +77 -0
- data/spec/concurrent/exchanger_spec.rb +66 -0
- data/spec/concurrent/fixed_thread_pool_shared.rb +136 -0
- data/spec/concurrent/future_spec.rb +60 -51
- data/spec/concurrent/global_thread_pool_shared.rb +33 -0
- data/spec/concurrent/immediate_executor_spec.rb +4 -25
- data/spec/concurrent/ivar_spec.rb +36 -23
- data/spec/concurrent/java_cached_thread_pool_spec.rb +64 -0
- data/spec/concurrent/java_fixed_thread_pool_spec.rb +64 -0
- data/spec/concurrent/java_thread_pool_executor_spec.rb +71 -0
- data/spec/concurrent/obligation_shared.rb +32 -20
- data/spec/concurrent/{global_thread_pool_spec.rb → per_thread_executor_spec.rb} +9 -13
- data/spec/concurrent/processor_count_spec.rb +20 -0
- data/spec/concurrent/promise_spec.rb +29 -41
- data/spec/concurrent/ruby_cached_thread_pool_spec.rb +69 -0
- data/spec/concurrent/ruby_fixed_thread_pool_spec.rb +39 -0
- data/spec/concurrent/ruby_thread_pool_executor_spec.rb +183 -0
- data/spec/concurrent/simple_actor_ref_spec.rb +219 -0
- data/spec/concurrent/thread_pool_class_cast_spec.rb +40 -0
- data/spec/concurrent/thread_pool_executor_shared.rb +155 -0
- data/spec/concurrent/thread_pool_shared.rb +98 -36
- data/spec/concurrent/tvar_spec.rb +137 -0
- data/spec/spec_helper.rb +4 -0
- data/spec/support/functions.rb +4 -0
- metadata +85 -20
- data/lib/concurrent/cached_thread_pool/worker.rb +0 -91
- data/lib/concurrent/channel.rb +0 -63
- data/lib/concurrent/fixed_thread_pool/worker.rb +0 -54
- data/lib/concurrent/global_thread_pool.rb +0 -42
- data/spec/concurrent/cached_thread_pool_spec.rb +0 -101
- data/spec/concurrent/channel_spec.rb +0 -86
- data/spec/concurrent/fixed_thread_pool_spec.rb +0 -92
- data/spec/concurrent/uses_global_thread_pool_shared.rb +0 -64
data/lib/concurrent/async.rb
ADDED
@@ -0,0 +1,290 @@
+require 'thread'
+require 'concurrent/configuration'
+require 'concurrent/ivar'
+require 'concurrent/future'
+require 'concurrent/thread_pool_executor'
+
+module Concurrent
+
+  # A mixin module that provides simple asynchronous behavior to any standard
+  # class/object or object.
+  #
+  #   Scenario:
+  #     As a stateful, plain old Ruby class/object
+  #     I want safe, asynchronous behavior
+  #     So my long-running methods don't block the main thread
+  #
+  # Stateful, mutable objects must be managed carefully when used asynchronously.
+  # But Ruby is an object-oriented language so designing with objects and classes
+  # plays to Ruby's strengths and is often more natural to many Ruby programmers.
+  # The +Async+ module is a way to mix simple yet powerful asynchronous capabilities
+  # into any plain old Ruby object or class. These capabilities provide a reasonable
+  # level of thread safe guarantees when used correctly.
+  #
+  # When this module is mixed into a class or object it provides to new methods:
+  # +async+ and +await+. These methods are thread safe with respect to the enclosing
+  # object. The former method allows methods to be called asynchronously by posting
+  # to the global thread pool. The latter allows a method to be called synchronously
+  # on the current thread but does so safely with respect to any pending asynchronous
+  # method calls. Both methods return an +Obligation+ which can be inspected for
+  # the result of the method call. Calling a method with +async+ will return a
+  # +:pending+ +Obligation+ whereas +await+ will return a +:complete+ +Obligation+.
+  #
+  # Very loosely based on the +async+ and +await+ keywords in C#.
+  #
+  # @example Defining an asynchronous class
+  #   class Echo
+  #     include Concurrent::Async
+  #
+  #     def echo(msg)
+  #       sleep(rand)
+  #       print "#{msg}\n"
+  #       nil
+  #     end
+  #   end
+  #
+  #   horn = Echo.new
+  #   horn.echo('zero') # synchronous, not thread-safe
+  #
+  #   horn.async.echo('one') # asynchronous, non-blocking, thread-safe
+  #   horn.await.echo('two') # synchronous, blocking, thread-safe
+  #
+  # @example Monkey-patching an existing object
+  #   numbers = 1_000_000.times.collect{ rand }
+  #   numbers.extend(Concurrent::Async)
+  #
+  #   future = numbers.async.max
+  #   future.state #=> :pending
+  #
+  #   sleep(2)
+  #
+  #   future.state #=> :fulfilled
+  #   future.value #=> 0.999999138918843
+  #
+  # @note Thread safe guarantees can only be made when asynchronous method calls
+  #   are not mixed with synchronous method calls. Use only synchronous calls
+  #   when the object is used exclusively on a single thread. Use only
+  #   +async+ and +await+ when the object is shared between threads. Once you
+  #   call a method using +async+, you should no longer call any methods
+  #   directly on the object. Use +async+ and +await+ exclusively from then on.
+  #   With careful programming it is possible to switch back and forth but it's
+  #   also very easy to create race conditions and break your application.
+  #   Basically, it's "async all the way down."
+  #
+  # @since 0.6.0
+  #
+  # @see Concurrent::Obligation
+  module Async
+
+    # Check for the presence of a method on an object and determine if a given
+    # set of arguments matches the required arity.
+    #
+    # @param [Object] obj the object to check against
+    # @param [Symbol] method the method to check the object for
+    # @param [Array] args zero or more arguments for the arity check
+    #
+    # @raise [NameError] the object does not respond to +method+ method
+    # @raise [ArgumentError] the given +args+ do not match the arity of +method+
+    #
+    # @note This check is imperfect because of the way Ruby reports the arity of
+    #   methods with a variable number of arguments. It is possible to determine
+    #   if too few arguments are given but impossible to determine if too many
+    #   arguments are given. This check may also fail to recognize dynamic behavior
+    #   of the object, such as methods simulated with +method_missing+.
+    #
+    # @see http://www.ruby-doc.org/core-2.1.1/Method.html#method-i-arity Method#arity
+    # @see http://ruby-doc.org/core-2.1.0/Object.html#method-i-respond_to-3F Object#respond_to?
+    # @see http://www.ruby-doc.org/core-2.1.0/BasicObject.html#method-i-method_missing BasicObject#method_missing
+    def validate_argc(obj, method, *args)
+      argc = args.length
+      arity = obj.method(method).arity
+
+      if arity >= 0 && argc != arity
+        raise ArgumentError.new("wrong number of arguments (#{argc} for #{arity})")
+      elsif arity < 0 && (arity = (arity + 1).abs) > argc
+        raise ArgumentError.new("wrong number of arguments (#{argc} for #{arity}..*)")
+      end
+    end
+    module_function :validate_argc
+
+    # Delegates synchronous, thread-safe method calls to the wrapped object.
+    #
+    # @!visibility private
+    class AwaitDelegator # :nodoc:
+
+      # Create a new delegator object wrapping the given +delegate+ and
+      # protecting it with the given +mutex+.
+      #
+      # @param [Object] delegate the object to wrap and delegate method calls to
+      # @param [Mutex] mutex the mutex lock to use when delegating method calls
+      def initialize(delegate, mutex)
+        @delegate = delegate
+        @mutex = mutex
+      end
+
+      # Delegates method calls to the wrapped object. For performance,
+      # dynamically defines the given method on the delegator so that
+      # all future calls to +method+ will not be directed here.
+      #
+      # @param [Symbol] method the method being called
+      # @param [Array] args zero or more arguments to the method
+      #
+      # @return [IVar] the result of the method call
+      #
+      # @raise [NameError] the object does not respond to +method+ method
+      # @raise [ArgumentError] the given +args+ do not match the arity of +method+
+      def method_missing(method, *args, &block)
+        super unless @delegate.respond_to?(method)
+        Async::validate_argc(@delegate, method, *args)
+
+        self.define_singleton_method(method) do |*args|
+          Async::validate_argc(@delegate, method, *args)
+          ivar = Concurrent::IVar.new
+          value, reason = nil, nil
+          begin
+            mutex.synchronize do
+              value = @delegate.send(method, *args, &block)
+            end
+          rescue => reason
+            # caught
+          ensure
+            return ivar.complete(reason.nil?, value, reason)
+          end
+        end
+
+        self.send(method, *args)
+      end
+
+      # The lock used when delegating methods to the wrapped object.
+      #
+      # @!visibility private
+      attr_reader :mutex # :nodoc:
+    end
+
+    # Delegates asynchronous, thread-safe method calls to the wrapped object.
+    #
+    # @!visibility private
+    class AsyncDelegator # :nodoc:
+
+      # Create a new delegator object wrapping the given +delegate+ and
+      # protecting it with the given +mutex+.
+      #
+      # @param [Object] delegate the object to wrap and delegate method calls to
+      # @param [Mutex] mutex the mutex lock to use when delegating method calls
+      def initialize(delegate, executor, mutex)
+        @delegate = delegate
+        @executor = executor
+        @mutex = mutex
+      end
+
+      # Delegates method calls to the wrapped object. For performance,
+      # dynamically defines the given method on the delegator so that
+      # all future calls to +method+ will not be directed here.
+      #
+      # @param [Symbol] method the method being called
+      # @param [Array] args zero or more arguments to the method
+      #
+      # @return [IVar] the result of the method call
+      #
+      # @raise [NameError] the object does not respond to +method+ method
+      # @raise [ArgumentError] the given +args+ do not match the arity of +method+
+      def method_missing(method, *args, &block)
+        super unless @delegate.respond_to?(method)
+        Async::validate_argc(@delegate, method, *args)
+
+        self.define_singleton_method(method) do |*args|
+          Async::validate_argc(@delegate, method, *args)
+          Concurrent::Future.execute(executor: @executor) do
+            mutex.synchronize do
+              @delegate.send(method, *args, &block)
+            end
+          end
+        end
+
+        self.send(method, *args)
+      end
+
+      private
+
+      # The lock used when delegating methods to the wrapped object.
+      #
+      # @!visibility private
+      attr_reader :mutex # :nodoc:
+    end
+
+    # Causes the chained method call to be performed asynchronously on the
+    # global thread pool. The method called by this method will return a
+    # +Future+ object in the +:pending+ state and the method call will have
+    # been scheduled on the global thread pool. The final disposition of the
+    # method call can be obtained by inspecting the returned +Future+.
+    #
+    # Before scheduling the method on the global thread pool a best-effort
+    # attempt will be made to validate that the method exists on the object
+    # and that the given arguments match the arity of the requested function.
+    # Due to the dynamic nature of Ruby and limitations of its reflection
+    # library, some edge cases will be missed. For more information see
+    # the documentation for the +validate_argc+ method.
+    #
+    # @note The method call is guaranteed to be thread safe with respect to
+    #   all other method calls against the same object that are called with
+    #   either +async+ or +await+. The mutable nature of Ruby references
+    #   (and object orientation in general) prevent any other thread safety
+    #   guarantees. Do NOT mix non-protected method calls with protected
+    #   method call. Use ONLY protected method calls when sharing the object
+    #   between threads.
+    #
+    # @return [Concurrent::Future] the pending result of the asynchronous operation
+    #
+    # @raise [NameError] the object does not respond to +method+ method
+    # @raise [ArgumentError] the given +args+ do not match the arity of +method+
+    #
+    # @see Concurrent::Future
+    def async
+      @__async_delegator__ ||= AsyncDelegator.new(self, executor, await.mutex)
+    end
+    alias_method :future, :async
+
+    # Causes the chained method call to be performed synchronously on the
+    # current thread. The method called by this method will return an
+    # +IVar+ object in either the +:fulfilled+ or +rejected+ state and the
+    # method call will have completed. The final disposition of the
+    # method call can be obtained by inspecting the returned +IVar+.
+    #
+    # Before scheduling the method on the global thread pool a best-effort
+    # attempt will be made to validate that the method exists on the object
+    # and that the given arguments match the arity of the requested function.
+    # Due to the dynamic nature of Ruby and limitations of its reflection
+    # library, some edge cases will be missed. For more information see
+    # the documentation for the +validate_argc+ method.
+    #
+    # @note The method call is guaranteed to be thread safe with respect to
+    #   all other method calls against the same object that are called with
+    #   either +async+ or +await+. The mutable nature of Ruby references
+    #   (and object orientation in general) prevent any other thread safety
+    #   guarantees. Do NOT mix non-protected method calls with protected
+    #   method call. Use ONLY protected method calls when sharing the object
+    #   between threads.
+    #
+    # @return [Concurrent::IVar] the completed result of the synchronous operation
+    #
+    # @raise [NameError] the object does not respond to +method+ method
+    # @raise [ArgumentError] the given +args+ do not match the arity of +method+
+    #
+    # @see Concurrent::IVar
+    def await
+      @__await_delegator__ ||= AwaitDelegator.new(self, Mutex.new)
+    end
+    alias_method :delay, :await
+
+    def executor=(executor)
+      raise ArgumentError.new('executor has already been set') unless @__async__executor__.nil?
+      @__async__executor__ = executor
+    end
+
+    private
+
+    def executor
+      @__async__executor__ ||= Concurrent.configuration.global_task_pool
+    end
+  end
+end
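The examples embedded in the new file's own documentation (lines 35-62 above) describe the intended usage of the mixin. A minimal sketch along the same lines, using a hypothetical class and method name and assuming only what that documentation states:

    require 'concurrent'

    # Hypothetical class for illustration; mirrors the Echo example documented above.
    class SlowGreeter
      include Concurrent::Async

      def greet(name)
        sleep(0.1)          # simulate slow work
        "hello, #{name}"
      end
    end

    greeter = SlowGreeter.new

    future = greeter.async.greet('world') # pending Concurrent::Future, runs on the global task pool
    ivar   = greeter.await.greet('world') # completed Concurrent::IVar, runs on the calling thread

    ivar.value    #=> "hello, world"
    future.value  #=> "hello, world" once the background call has finished

Note that `async` lazily builds an `AsyncDelegator` bound to `executor`, which defaults to `Concurrent.configuration.global_task_pool` unless `executor=` has been called first.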
data/lib/concurrent/atomic.rb
CHANGED
@@ -3,8 +3,7 @@ module Concurrent
   # @!visibility private
   module MutexAtomicFixnum # :nodoc:
 
-
-    def allocate_storage(init) # :nodoc:
+    def allocate_storage(init)
       @value = init
       @mutex = Mutex.new
     end
@@ -34,8 +33,7 @@ module Concurrent
       end
     end
 
-
-    def compare_and_set(expect, update) # :nodoc:
+    def compare_and_set(expect, update)
       @mutex.synchronize do
         if @value == expect
           @value = update
@@ -50,8 +48,7 @@ module Concurrent
   # @!visibility private
   module JavaAtomicFixnum # :nodoc:
 
-
-    def allocate_storage(init) # :nodoc:
+    def allocate_storage(init)
       @atomic = java.util.concurrent.atomic.AtomicLong.new(init)
     end
 
@@ -72,8 +69,7 @@ module Concurrent
       @atomic.decrement_and_get
     end
 
-
-    def compare_and_set(expect, update) # :nodoc:
+    def compare_and_set(expect, update)
       @atomic.compare_and_set(expect, update)
     end
   end
@@ -113,7 +109,7 @@ module Concurrent
       allocate_storage(init)
     end
 
-    if
+    if RUBY_PLATFORM == 'java'
       include JavaAtomicFixnum
     else
      include MutexAtomicFixnum
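These hunks only drop the inline :nodoc: markers from a few method signatures and make the Java/Mutex implementation switch explicit on RUBY_PLATFORM; the public AtomicFixnum behaviour is unchanged. For reference, a small usage sketch of the compare_and_set semantics touched above, assuming the AtomicFixnum API as it exists in later concurrent-ruby releases:

    require 'concurrent'

    counter = Concurrent::AtomicFixnum.new(0)

    counter.increment               # atomic increment; value becomes 1
    counter.compare_and_set(1, 10)  # value was 1, so it is swapped to 10; returns true
    counter.compare_and_set(1, 99)  # value is no longer 1; nothing changes, returns false
    counter.value                   #=> 10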
data/lib/concurrent/cached_thread_pool.rb
CHANGED
@@ -1,143 +1,45 @@
-require '
-
-require 'concurrent/event'
-require 'concurrent/cached_thread_pool/worker'
+require 'concurrent/ruby_cached_thread_pool'
 
 module Concurrent
 
  [old lines 8-45, also removed, are not captured in this extract]
-          if @idle.length + @busy.length < @max_threads
-            worker = create_worker_thread
-          else
-            worker = @busy.shift
-          end
-        else
-          worker = @idle.pop
-        end
-
-        @busy.push(worker)
-        worker.signal(*args, &block)
-
-        prune_stale_workers
-        true
-      end
-    end
-
-    def running?
-      return @state == :running
-    end
-
-    def wait_for_termination(timeout = nil)
-      return @terminator.wait(timeout)
-    end
-
-    def shutdown
-      @mutex.synchronize do
-        break unless @state == :running
-        if @idle.empty? && @busy.empty?
-          @state = :shutdown
-          @terminator.set
-        else
-          @state = :shuttingdown
-          @idle.each{|worker| worker.stop }
-          @busy.each{|worker| worker.stop }
-        end
-      end
-    end
-
-    def kill
-      @mutex.synchronize do
-        break if @state == :shutdown
-        @state = :shutdown
-        @idle.each{|worker| worker.kill }
-        @busy.each{|worker| worker.kill }
-        @terminator.set
-      end
-    end
-
-    def length
-      @mutex.synchronize do
-        @state == :running ? @busy.length + @idle.length : 0
-      end
-    end
-
-    def on_worker_exit(worker)
-      @mutex.synchronize do
-        @idle.delete(worker)
-        @busy.delete(worker)
-        if @idle.empty? && @busy.empty? && @state != :running
-          @state = :shutdown
-          @terminator.set
-        end
-      end
-    end
-
-    def on_end_task(worker)
-      @mutex.synchronize do
-        break unless @state == :running
-        @busy.delete(worker)
-        @idle.push(worker)
-      end
-    end
-
-    protected
-
-    def create_worker_thread
-      wrkr = Worker.new(self)
-      Thread.new(wrkr, self) do |worker, parent|
-        Thread.current.abort_on_exception = false
-        worker.run
-        parent.on_worker_exit(worker)
-      end
-      return wrkr
-    end
-
-    def prune_stale_workers
-      @idle.reject! do |worker|
-        if worker.idletime > @idletime
-          worker.stop
-          true
-        else
-          worker.dead?
-        end
-      end
+  if RUBY_PLATFORM == 'java'
+    require 'concurrent/java_cached_thread_pool'
+    # @!macro [attach] cached_thread_pool
+    #   A thread pool that dynamically grows and shrinks to fit the current workload.
+    #   New threads are created as needed, existing threads are reused, and threads
+    #   that remain idle for too long are killed and removed from the pool. These
+    #   pools are particularly suited to applications that perform a high volume of
+    #   short-lived tasks.
+    #
+    #   On creation a +CachedThreadPool+ has zero running threads. New threads are
+    #   created on the pool as new operations are +#post+. The size of the pool
+    #   will grow until +#max_length+ threads are in the pool or until the number
+    #   of threads exceeds the number of running and pending operations. When a new
+    #   operation is post to the pool the first available idle thread will be tasked
+    #   with the new operation.
+    #
+    #   Should a thread crash for any reason the thread will immediately be removed
+    #   from the pool. Similarly, threads which remain idle for an extended period
+    #   of time will be killed and reclaimed. Thus these thread pools are very
+    #   efficient at reclaiming unused resources.
+    #
+    #   The API and behavior of this class are based on Java's +CachedThreadPool+
+    #
+    #   @note When running on the JVM (JRuby) this class will inherit from +JavaCachedThreadPool+.
+    #     On all other platforms it will inherit from +RubyCachedThreadPool+.
+    #
+    #   @see Concurrent::RubyCachedThreadPool
+    #   @see Concurrent::JavaCachedThreadPool
+    #
+    #   @see http://docs.oracle.com/javase/tutorial/essential/concurrency/pools.html
+    #   @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Executors.html
+    #   @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html
+    #   @see http://stackoverflow.com/questions/17957382/fixedthreadpool-vs-cachedthreadpool-the-lesser-of-two-evils
+    class CachedThreadPool < JavaCachedThreadPool
+    end
+  else
+    # @!macro cached_thread_pool
+    class CachedThreadPool < RubyCachedThreadPool
     end
   end
 end