concurrent-ruby 0.9.0 → 0.9.1
- checksums.yaml +4 -4
- data/CHANGELOG.md +15 -1
- data/README.md +23 -19
- data/lib/concurrent/atomic/atomic_fixnum.rb +29 -8
- data/lib/concurrent/atomic/read_write_lock.rb +11 -11
- data/lib/concurrent/atomic/reentrant_read_write_lock.rb +375 -0
- data/lib/concurrent/atomic/thread_local_var.rb +181 -51
- data/lib/concurrent/collection/copy_on_notify_observer_set.rb +1 -1
- data/lib/concurrent/configuration.rb +8 -4
- data/lib/concurrent/delay.rb +0 -1
- data/lib/concurrent/errors.rb +4 -0
- data/lib/concurrent/executor/executor.rb +2 -1
- data/lib/concurrent/executor/ruby_thread_pool_executor.rb +9 -10
- data/lib/concurrent/scheduled_task.rb +0 -2
- data/lib/concurrent/synchronization/monitor_object.rb +1 -0
- data/lib/concurrent/synchronization/object.rb +5 -0
- data/lib/concurrent/synchronization/rbx_object.rb +21 -8
- data/lib/concurrent/utility/at_exit.rb +1 -1
- data/lib/concurrent/utility/native_extension_loader.rb +1 -4
- data/lib/concurrent/version.rb +2 -2
- metadata +3 -3
- data/lib/concurrent/atomic/thread_local_var/weak_key_map.rb +0 -236
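
The headline addition in the file list above is the new `ReentrantReadWriteLock`. Below is a minimal usage sketch; it assumes the class mirrors the block-based `with_read_lock` / `with_write_lock` API of the existing non-reentrant `ReadWriteLock`, so treat the method names as an assumption rather than documentation.

    require 'concurrent'

    # Assumed API: with_read_lock / with_write_lock, mirroring ReadWriteLock.
    lock  = Concurrent::ReentrantReadWriteLock.new
    cache = {}

    reader = Thread.new do
      lock.with_read_lock do
        # Re-acquiring the read lock from the same thread must not deadlock,
        # which is the point of the "reentrant" variant.
        lock.with_read_lock { cache[:answer] }
      end
    end

    writer = Thread.new do
      lock.with_write_lock { cache[:answer] = 42 }
    end

    [reader, writer].each(&:join)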
data/lib/concurrent/atomic/thread_local_var.rb CHANGED

@@ -1,4 +1,4 @@
-require '
+require 'thread'
 
 module Concurrent
 
@@ -56,45 +56,29 @@ module Concurrent
     #
     # @return [Object] the current value
     def value
-
-
-      if value.nil?
-        @default
-      elsif value == NIL_SENTINEL
-        nil
-      else
-        value
-      end
+      raise NotImplementedError
     end
 
     # @!macro [attach] thread_local_var_method_set
     #
     # Sets the current thread's copy of this thread-local variable to the specified value.
-    #
+    #
     # @param [Object] value the value to set
     # @return [Object] the new value
     def value=(value)
-
+      raise NotImplementedError
     end
 
     # @!macro [attach] thread_local_var_method_bind
     #
     # Bind the given value to thread local storage during
     # execution of the given block.
-    #
+    #
     # @param [Object] value the value to bind
     # @yield the operation to be performed with the bound variable
     # @return [Object] the value
     def bind(value, &block)
-
-        stored_value = NIL_SENTINEL
-      else
-        stored_value = value
-      end
-
-      set(stored_value, &block)
-
-      value
+      raise NotImplementedError
     end
 
     protected
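
The abstract interface above (`value`, `value=`, `bind`) is what the concrete implementations in the following hunks provide. For orientation, a small sketch of how the public `Concurrent::ThreadLocalVar` is used, based on the documented methods in this file: a per-instance default, per-thread assignment, and block-scoped binding.

    require 'concurrent'

    var = Concurrent::ThreadLocalVar.new(14)   # 14 is the default for every thread

    var.value = 2                              # affects only the current thread

    t = Thread.new do
      var.value                 # => 14, the default; other threads' writes are invisible
      var.bind(3) { var.value } # => 3 inside the block, restored afterwards
    end
    t.join

    var.value                   # => 2, unchanged in this thread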
@@ -103,46 +87,171 @@ module Concurrent
     def allocate_storage
       raise NotImplementedError
     end
+  end
+
+  # @!visibility private
+  # @!macro internal_implementation_note
+  class RubyThreadLocalVar < AbstractThreadLocalVar
+
+    # Each thread has a (lazily initialized) array of thread-local variable values
+    # Each time a new thread-local var is created, we allocate an "index" for it
+    # For example, if the allocated index is 1, that means slot #1 in EVERY
+    # thread's thread-local array will be used for the value of that TLV
+    #
+    # The good thing about using a per-THREAD structure to hold values, rather
+    # than a per-TLV structure, is that no synchronization is needed when
+    # reading and writing those values (since the structure is only ever
+    # accessed by a single thread)
+    #
+    # Of course, when a TLV is GC'd, 1) we need to recover its index for use
+    # by other new TLVs (otherwise the thread-local arrays could get bigger
+    # and bigger with time), and 2) we need to null out all the references
+    # held in the now-unused slots (both to avoid blocking GC of those objects,
+    # and also to prevent "stale" values from being passed on to a new TLV
+    # when the index is reused)
+    # Because we need to null out freed slots, we need to keep references to
+    # ALL the thread-local arrays -- ARRAYS is for that
+    # But when a Thread is GC'd, we need to drop the reference to its thread-local
+    # array, so we don't leak memory
 
     # @!visibility private
-
-
+    FREE = []
+    LOCK = Mutex.new
+    ARRAYS = {} # used as a hash set
+    @@next = 0
+    private_constant :FREE, :LOCK, :ARRAYS
+
+    # @!macro [attach] thread_local_var_method_initialize
+    #
+    # Creates a thread local variable.
+    #
+    # @param [Object] default the default value when otherwise unset
+    def initialize(default = nil)
+      @default = default
+      allocate_storage
     end
 
-    # @!
-    def
-
+    # @!macro thread_local_var_method_get
+    def value
+      if array = get_threadlocal_array
+        value = array[@index]
+        if value.nil?
+          @default
+        elsif value.equal?(NIL_SENTINEL)
+          nil
+        else
+          value
+        end
+      else
+        @default
+      end
     end
-    end
 
-
-
-
+    # @!macro thread_local_var_method_set
+    def value=(value)
+      me = Thread.current
+      # We could keep the thread-local arrays in a hash, keyed by Thread
+      # But why? That would require locking
+      # Using Ruby's built-in thread-local storage is faster
+      unless array = get_threadlocal_array(me)
+        array = set_threadlocal_array([], me)
+        LOCK.synchronize { ARRAYS[array.object_id] = array }
+        ObjectSpace.define_finalizer(me, self.class.thread_finalizer(array))
+      end
+      array[@index] = (value.nil? ? NIL_SENTINEL : value)
+      value
+    end
+
+    # @!macro thread_local_var_method_bind
+    def bind(value, &block)
+      if block_given?
+        old_value = self.value
+        begin
+          self.value = value
+          yield
+        ensure
+          self.value = old_value
+        end
+      end
+    end
 
     protected
 
     # @!visibility private
     def allocate_storage
-      @
+      @index = LOCK.synchronize do
+        FREE.pop || begin
+          result = @@next
+          @@next += 1
+          result
+        end
+      end
+      ObjectSpace.define_finalizer(self, self.class.threadlocal_finalizer(@index))
     end
 
     # @!visibility private
-    def
-
+    def self.threadlocal_finalizer(index)
+      proc do
+        LOCK.synchronize do
+          FREE.push(index)
+          # The cost of GC'ing a TLV is linear in the number of threads using TLVs
+          # But that is natural! More threads means more storage is used per TLV
+          # So naturally more CPU time is required to free more storage
+          ARRAYS.each_value do |array|
+            array[index] = nil
+          end
+        end
+      end
    end
 
     # @!visibility private
-    def
-
+    def self.thread_finalizer(array)
+      proc do
+        LOCK.synchronize do
+          # The thread which used this thread-local array is now gone
+          # So don't hold onto a reference to the array (thus blocking GC)
+          ARRAYS.delete(array.object_id)
+        end
+      end
+    end
 
-
+    private
 
-
-
-
-
-
+    if Thread.instance_methods.include?(:thread_variable_get)
+
+      def get_threadlocal_array(thread = Thread.current)
+        thread.thread_variable_get(:__threadlocal_array__)
+      end
+
+      def set_threadlocal_array(array, thread = Thread.current)
+        thread.thread_variable_set(:__threadlocal_array__, array)
+      end
+
+    else
+
+      def get_threadlocal_array(thread = Thread.current)
+        thread[:__threadlocal_array__]
+      end
+
+      def set_threadlocal_array(array, thread = Thread.current)
+        thread[:__threadlocal_array__] = array
+      end
+    end
+
+    # This exists only for use in testing
+    # @!visibility private
+    def value_for(thread)
+      if array = get_threadlocal_array(thread)
+        value = array[@index]
+        if value.nil?
+          @default
+        elsif value.equal?(NIL_SENTINEL)
+          nil
+        else
+          value
         end
+      else
+        @default
       end
     end
   end
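
The comment block in the hunk above describes the storage scheme: one lazily created array per thread, one recycled integer index per variable, so reads and writes never need a lock. The following stripped-down, standalone illustration shows that idea only; it is not the library code, omits the finalizer bookkeeping and the NIL_SENTINEL handling, and every name in it is hypothetical.

    # Simplified sketch of the per-thread-array design described above.
    class MiniThreadLocal
      @lock = Mutex.new
      @free = []      # indexes recovered from discarded variables (mirrors FREE)
      @next = 0

      class << self
        def allocate_index
          @lock.synchronize { @free.pop || (@next += 1) - 1 }
        end

        def release_index(index)
          @lock.synchronize { @free.push(index) }
        end
      end

      def initialize(default = nil)
        @default = default
        @index   = self.class.allocate_index
      end

      def value
        # Ruby's built-in per-thread storage holds the array; only the
        # current thread ever touches it, so no lock is needed here.
        array = Thread.current[:__mini_tlv_array__]
        (array && array[@index]).nil? ? @default : array[@index]
      end

      def value=(val)
        array = (Thread.current[:__mini_tlv_array__] ||= [])
        array[@index] = val
      end
    end

    counter = MiniThreadLocal.new(0)
    counter.value = 1
    Thread.new { counter.value }.value  # => 0, each thread sees its own slot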
@@ -153,21 +262,42 @@ module Concurrent
   # @!macro internal_implementation_note
   class JavaThreadLocalVar < AbstractThreadLocalVar
 
-
+    # @!macro thread_local_var_method_get
+    def value
+      value = @var.get
 
-
-
-
+      if value.nil?
+        @default
+      elsif value == NIL_SENTINEL
+        nil
+      else
+        value
+      end
     end
 
-    # @!
-    def
-      @var.
+    # @!macro thread_local_var_method_set
+    def value=(value)
+      @var.set(value)
+    end
+
+    # @!macro thread_local_var_method_bind
+    def bind(value, &block)
+      if block_given?
+        old_value = @var.get
+        begin
+          @var.set(value)
+          yield
+        ensure
+          @var.set(old_value)
+        end
+      end
    end
 
+    protected
+
     # @!visibility private
-    def
-      @var.
+    def allocate_storage
+      @var = java.lang.ThreadLocal.new
     end
   end
 end
data/lib/concurrent/configuration.rb CHANGED

@@ -1,9 +1,13 @@
 require 'thread'
-require 'concurrent/
+require 'concurrent/delay'
 require 'concurrent/errors'
-require 'concurrent/
+require 'concurrent/atomic/atomic_reference'
 require 'concurrent/concern/deprecation'
 require 'concurrent/concern/logging'
+require 'concurrent/executor/timer_set'
+require 'concurrent/executor/immediate_executor'
+require 'concurrent/executor/fixed_thread_pool'
+require 'concurrent/executor/thread_pool_executor'
 require 'concurrent/utility/at_exit'
 require 'concurrent/utility/processor_counter'
 
@@ -32,8 +36,8 @@ module Concurrent
       formatted_message
     end
 
-    lambda do |
-      logger.add
+    lambda do |loglevel, progname, message = nil, &block|
+      logger.add loglevel, message, progname, &block
     end
   end
 
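
The lambda fix above matters because the logging callback receives `(loglevel, progname, message)`, while the stdlib's `Logger#add` takes `(severity, message = nil, progname = nil)`; the arguments must be reordered when forwarding. For reference, with the standard library logger:

    require 'logger'

    logger = Logger.new($stdout)

    # Logger#add(severity, message = nil, progname = nil, &block)
    logger.add(Logger::WARN, 'queue is full', 'my-pool')
    # => W, [...]  WARN -- my-pool: queue is full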
data/lib/concurrent/delay.rb CHANGED

data/lib/concurrent/errors.rb CHANGED
@@ -15,6 +15,10 @@ module Concurrent
   # Raised when an attempt is made to violate an immutability guarantee.
   ImmutabilityError = Class.new(Error)
 
+  # Raised when an operation is attempted which is not legal given the
+  # receiver's current state
+  IllegalOperationError = Class.new(Error)
+
   # Raised when an object's methods are called when it has not been
   # properly initialized.
   InitializationError = Class.new(Error)
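
A small sketch of how the new error class is meant to be used, as a "wrong state for this operation" signal; only the `Concurrent::IllegalOperationError` constant comes from the gem, the guard method here is hypothetical.

    require 'concurrent'

    # Hypothetical guard: reject an operation that is illegal in the current state.
    def release_lock(held)
      raise Concurrent::IllegalOperationError, 'lock not held' unless held
      :released
    end

    begin
      release_lock(false)
    rescue Concurrent::IllegalOperationError => e
      warn e.message   # => "lock not held"
    end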
data/lib/concurrent/executor/ruby_thread_pool_executor.rb CHANGED

@@ -166,7 +166,7 @@ module Concurrent
     def ns_kill_execution
       # TODO log out unprocessed tasks in queue
       # TODO try to shutdown first?
-      @pool.each
+      @pool.each(&:kill)
       @pool.clear
       @ready.clear
     end
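
The `&:kill` form above is the Symbol-to-proc shorthand; it simply kills every worker thread in the pool:

    # Equivalent spellings; &:kill converts the symbol into a block.
    pool = [Thread.new { sleep }, Thread.new { sleep }]
    pool.each(&:kill)
    pool.each { |thread| thread.kill }   # same effect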
@@ -297,32 +297,31 @@ module Concurrent
     private
 
     def create_worker(queue, pool, idletime)
-      Thread.new(queue, pool, idletime) do |
+      Thread.new(queue, pool, idletime) do |my_queue, my_pool, my_idletime|
         last_message = Concurrent.monotonic_time
         catch(:stop) do
           loop do
 
-            case message =
+            case message = my_queue.pop
             when :idle_test
-              if (Concurrent.monotonic_time - last_message) >
-
+              if (Concurrent.monotonic_time - last_message) > my_idletime
+                my_pool.remove_busy_worker(self)
                 throw :stop
               else
-
+                my_pool.worker_not_old_enough(self)
               end
 
             when :stop
-
+              my_pool.remove_busy_worker(self)
               throw :stop
 
             else
              task, args = message
-              run_task
+              run_task my_pool, task, args
              last_message = Concurrent.monotonic_time
 
-
+              my_pool.ready_worker(self)
            end
-
          end
        end
      end
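
The worker rewrite above passes `queue`, `pool`, and `idletime` into `Thread.new` and names them as block parameters (`my_queue`, `my_pool`, `my_idletime`) instead of closing over outer locals. Arguments given to `Thread.new` are handed to the block, so each worker holds its own stable references. A minimal illustration of the idiom:

    require 'thread'

    queue = Queue.new

    # Arguments passed to Thread.new become block parameters, so the worker
    # owns its reference rather than sharing a closed-over local that the
    # spawning code might later reassign.
    worker = Thread.new(queue) do |my_queue|
      my_queue.pop   # blocks until something is pushed
    end

    queue << :job
    worker.value     # => :job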
data/lib/concurrent/scheduled_task.rb CHANGED

@@ -1,9 +1,7 @@
 require 'concurrent/errors'
 require 'concurrent/ivar'
-require 'concurrent/configuration'
 require 'concurrent/collection/copy_on_notify_observer_set'
 require 'concurrent/executor/executor'
-require 'concurrent/executor/timer_set'
 require 'concurrent/utility/monotonic_time'
 require 'concurrent/concern/deprecation'
 
data/lib/concurrent/synchronization/rbx_object.rb CHANGED

@@ -2,35 +2,48 @@ module Concurrent
   module Synchronization
 
     if Concurrent.on_rbx?
-
+
       # @!visibility private
       # @!macro internal_implementation_note
       class RbxObject < AbstractObject
         def initialize
-          @
+          @__Waiters__ = []
+          @__owner__ = nil
           ensure_ivar_visibility!
         end
 
         protected
 
         def synchronize(&block)
-
+          if @__owner__ == Thread.current
+            yield
+          else
+            Rubinius.lock(self)
+            begin
+              @__owner__ = Thread.current
+              result = yield
+            ensure
+              @__owner__ = nil
+              Rubinius.unlock(self)
+              result
+            end
+          end
         end
 
         def ns_wait(timeout = nil)
          wchan = Rubinius::Channel.new
 
          begin
-            @
+            @__Waiters__.push wchan
            Rubinius.unlock(self)
            signaled = wchan.receive_timeout timeout
          ensure
            Rubinius.lock(self)
 
-            if !signaled && !@
+            if !signaled && !@__Waiters__.delete(wchan)
              # we timed out, but got signaled afterwards,
              # so pass that signal on to the next waiter
-              @
+              @__Waiters__.shift << true unless @__Waiters__.empty?
            end
          end
 
@@ -38,12 +51,12 @@ module Concurrent
         end
 
         def ns_signal
-          @
+          @__Waiters__.shift << true unless @__Waiters__.empty?
           self
         end
 
         def ns_broadcast
-          @
+          @__Waiters__.shift << true until @__Waiters__.empty?
           self
         end
 
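
The `synchronize` change above makes the Rubinius lock reentrant by recording the owning thread and yielding immediately on nested calls from that owner. The same idea, sketched with a plain `Mutex` in place of the `Rubinius.lock` / `Rubinius.unlock` primitives (an illustration only, not the library code):

    # Owner-tracking reentrancy, analogous to the @__owner__ check above.
    class ReentrantSection
      def initialize
        @mutex = Mutex.new
        @owner = nil
      end

      def synchronize
        if @owner == Thread.current
          yield                       # nested call from the owner: just run
        else
          @mutex.synchronize do
            @owner = Thread.current
            begin
              yield
            ensure
              @owner = nil
            end
          end
        end
      end
    end

    section = ReentrantSection.new
    section.synchronize { section.synchronize { :no_deadlock } }  # => :no_deadlock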