concurrent-ruby 1.1.5 → 1.1.10

Files changed (135)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +65 -1
  3. data/Gemfile +6 -10
  4. data/{LICENSE.md → LICENSE.txt} +18 -20
  5. data/README.md +46 -23
  6. data/Rakefile +46 -44
  7. data/ext/concurrent-ruby/com/concurrent_ruby/ext/JavaAtomicFixnumLibrary.java +0 -0
  8. data/ext/concurrent-ruby/com/concurrent_ruby/ext/JavaSemaphoreLibrary.java +52 -22
  9. data/lib/{concurrent → concurrent-ruby/concurrent}/agent.rb +0 -0
  10. data/lib/{concurrent → concurrent-ruby/concurrent}/array.rb +6 -6
  11. data/lib/{concurrent → concurrent-ruby/concurrent}/async.rb +10 -20
  12. data/lib/{concurrent → concurrent-ruby/concurrent}/atom.rb +1 -1
  13. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/abstract_thread_local_var.rb +0 -0
  14. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/atomic_boolean.rb +2 -2
  15. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/atomic_fixnum.rb +0 -0
  16. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/atomic_markable_reference.rb +0 -0
  17. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/atomic_reference.rb +1 -0
  18. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/count_down_latch.rb +1 -1
  19. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/cyclic_barrier.rb +0 -0
  20. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/event.rb +2 -2
  21. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/java_count_down_latch.rb +0 -0
  22. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/java_thread_local_var.rb +0 -0
  23. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/mutex_atomic_boolean.rb +0 -0
  24. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/mutex_atomic_fixnum.rb +0 -0
  25. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/mutex_count_down_latch.rb +0 -0
  26. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/mutex_semaphore.rb +18 -2
  27. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/read_write_lock.rb +0 -0
  28. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/reentrant_read_write_lock.rb +4 -6
  29. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/ruby_thread_local_var.rb +60 -40
  30. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/semaphore.rb +26 -5
  31. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic/thread_local_var.rb +1 -1
  32. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic_reference/mutex_atomic.rb +0 -0
  33. data/lib/{concurrent → concurrent-ruby/concurrent}/atomic_reference/numeric_cas_wrapper.rb +0 -0
  34. data/lib/{concurrent → concurrent-ruby/concurrent}/atomics.rb +0 -0
  35. data/lib/{concurrent → concurrent-ruby/concurrent}/collection/copy_on_notify_observer_set.rb +0 -0
  36. data/lib/{concurrent → concurrent-ruby/concurrent}/collection/copy_on_write_observer_set.rb +0 -0
  37. data/lib/{concurrent → concurrent-ruby/concurrent}/collection/java_non_concurrent_priority_queue.rb +0 -0
  38. data/lib/{concurrent → concurrent-ruby/concurrent}/collection/lock_free_stack.rb +0 -0
  39. data/lib/{concurrent → concurrent-ruby/concurrent}/collection/map/atomic_reference_map_backend.rb +0 -0
  40. data/lib/{concurrent → concurrent-ruby/concurrent}/collection/map/mri_map_backend.rb +1 -1
  41. data/lib/{concurrent → concurrent-ruby/concurrent}/collection/map/non_concurrent_map_backend.rb +0 -0
  42. data/lib/{concurrent → concurrent-ruby/concurrent}/collection/map/synchronized_map_backend.rb +0 -0
  43. data/lib/concurrent-ruby/concurrent/collection/map/truffleruby_map_backend.rb +14 -0
  44. data/lib/{concurrent → concurrent-ruby/concurrent}/collection/non_concurrent_priority_queue.rb +1 -1
  45. data/lib/{concurrent → concurrent-ruby/concurrent}/collection/ruby_non_concurrent_priority_queue.rb +11 -1
  46. data/lib/{concurrent → concurrent-ruby/concurrent}/concern/deprecation.rb +0 -0
  47. data/lib/{concurrent → concurrent-ruby/concurrent}/concern/dereferenceable.rb +2 -2
  48. data/lib/{concurrent → concurrent-ruby/concurrent}/concern/logging.rb +0 -0
  49. data/lib/{concurrent → concurrent-ruby/concurrent}/concern/obligation.rb +0 -0
  50. data/lib/{concurrent → concurrent-ruby/concurrent}/concern/observable.rb +0 -0
  51. data/lib/concurrent-ruby/concurrent/concurrent_ruby.jar +0 -0
  52. data/lib/{concurrent → concurrent-ruby/concurrent}/configuration.rb +13 -9
  53. data/lib/{concurrent → concurrent-ruby/concurrent}/constants.rb +0 -0
  54. data/lib/{concurrent → concurrent-ruby/concurrent}/dataflow.rb +0 -0
  55. data/lib/{concurrent → concurrent-ruby/concurrent}/delay.rb +0 -0
  56. data/lib/{concurrent → concurrent-ruby/concurrent}/errors.rb +0 -0
  57. data/lib/{concurrent → concurrent-ruby/concurrent}/exchanger.rb +0 -0
  58. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/abstract_executor_service.rb +33 -36
  59. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/cached_thread_pool.rb +4 -4
  60. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/executor_service.rb +2 -2
  61. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/fixed_thread_pool.rb +29 -15
  62. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/immediate_executor.rb +0 -0
  63. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/indirect_immediate_executor.rb +0 -0
  64. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/java_executor_service.rb +19 -7
  65. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/java_single_thread_executor.rb +4 -3
  66. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/java_thread_pool_executor.rb +19 -2
  67. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/ruby_executor_service.rb +10 -6
  68. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/ruby_single_thread_executor.rb +0 -1
  69. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/ruby_thread_pool_executor.rb +46 -42
  70. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/safe_task_executor.rb +5 -5
  71. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/serial_executor_service.rb +0 -0
  72. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/serialized_execution.rb +0 -0
  73. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/serialized_execution_delegator.rb +0 -0
  74. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/simple_executor_service.rb +1 -1
  75. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/single_thread_executor.rb +1 -0
  76. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/thread_pool_executor.rb +2 -1
  77. data/lib/{concurrent → concurrent-ruby/concurrent}/executor/timer_set.rb +0 -1
  78. data/lib/{concurrent → concurrent-ruby/concurrent}/executors.rb +0 -0
  79. data/lib/{concurrent → concurrent-ruby/concurrent}/future.rb +0 -0
  80. data/lib/{concurrent → concurrent-ruby/concurrent}/hash.rb +1 -1
  81. data/lib/{concurrent → concurrent-ruby/concurrent}/immutable_struct.rb +9 -1
  82. data/lib/{concurrent → concurrent-ruby/concurrent}/ivar.rb +0 -0
  83. data/lib/{concurrent → concurrent-ruby/concurrent}/map.rb +14 -5
  84. data/lib/{concurrent → concurrent-ruby/concurrent}/maybe.rb +0 -0
  85. data/lib/{concurrent → concurrent-ruby/concurrent}/mutable_struct.rb +12 -2
  86. data/lib/{concurrent → concurrent-ruby/concurrent}/mvar.rb +0 -0
  87. data/lib/{concurrent → concurrent-ruby/concurrent}/options.rb +0 -0
  88. data/lib/{concurrent → concurrent-ruby/concurrent}/promise.rb +1 -0
  89. data/lib/{concurrent → concurrent-ruby/concurrent}/promises.rb +0 -0
  90. data/lib/{concurrent → concurrent-ruby/concurrent}/re_include.rb +0 -0
  91. data/lib/{concurrent → concurrent-ruby/concurrent}/scheduled_task.rb +29 -16
  92. data/lib/{concurrent → concurrent-ruby/concurrent}/set.rb +19 -11
  93. data/lib/{concurrent → concurrent-ruby/concurrent}/settable_struct.rb +11 -1
  94. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/abstract_lockable_object.rb +0 -0
  95. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/abstract_object.rb +0 -0
  96. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/abstract_struct.rb +11 -0
  97. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/condition.rb +0 -0
  98. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/jruby_lockable_object.rb +0 -0
  99. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/jruby_object.rb +0 -0
  100. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/lock.rb +0 -0
  101. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/lockable_object.rb +3 -5
  102. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/mri_object.rb +0 -0
  103. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/mutex_lockable_object.rb +12 -0
  104. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/object.rb +0 -0
  105. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/rbx_lockable_object.rb +6 -0
  106. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/rbx_object.rb +0 -0
  107. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/truffleruby_object.rb +0 -0
  108. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization/volatile.rb +0 -0
  109. data/lib/{concurrent → concurrent-ruby/concurrent}/synchronization.rb +0 -0
  110. data/lib/{concurrent → concurrent-ruby/concurrent}/thread_safe/synchronized_delegator.rb +0 -0
  111. data/lib/{concurrent → concurrent-ruby/concurrent}/thread_safe/util/adder.rb +0 -0
  112. data/lib/{concurrent → concurrent-ruby/concurrent}/thread_safe/util/cheap_lockable.rb +0 -0
  113. data/lib/{concurrent → concurrent-ruby/concurrent}/thread_safe/util/data_structures.rb +26 -1
  114. data/lib/{concurrent → concurrent-ruby/concurrent}/thread_safe/util/power_of_two_tuple.rb +0 -0
  115. data/lib/{concurrent → concurrent-ruby/concurrent}/thread_safe/util/striped64.rb +1 -1
  116. data/lib/{concurrent → concurrent-ruby/concurrent}/thread_safe/util/volatile.rb +0 -0
  117. data/lib/{concurrent → concurrent-ruby/concurrent}/thread_safe/util/xor_shift_random.rb +0 -0
  118. data/lib/{concurrent → concurrent-ruby/concurrent}/thread_safe/util.rb +0 -0
  119. data/lib/{concurrent → concurrent-ruby/concurrent}/timer_task.rb +11 -34
  120. data/lib/{concurrent → concurrent-ruby/concurrent}/tuple.rb +0 -0
  121. data/lib/{concurrent → concurrent-ruby/concurrent}/tvar.rb +19 -56
  122. data/lib/{concurrent → concurrent-ruby/concurrent}/utility/engine.rb +0 -0
  123. data/lib/concurrent-ruby/concurrent/utility/monotonic_time.rb +90 -0
  124. data/lib/{concurrent → concurrent-ruby/concurrent}/utility/native_extension_loader.rb +0 -0
  125. data/lib/{concurrent → concurrent-ruby/concurrent}/utility/native_integer.rb +0 -0
  126. data/lib/{concurrent → concurrent-ruby/concurrent}/utility/processor_counter.rb +5 -33
  127. data/lib/concurrent-ruby/concurrent/version.rb +3 -0
  128. data/lib/concurrent-ruby/concurrent-ruby.rb +5 -0
  129. data/lib/{concurrent.rb → concurrent-ruby/concurrent.rb} +0 -0
  130. metadata +132 -132
  131. data/lib/concurrent/concurrent_ruby.jar +0 -0
  132. data/lib/concurrent/utility/at_exit.rb +0 -97
  133. data/lib/concurrent/utility/monotonic_time.rb +0 -58
  134. data/lib/concurrent/version.rb +0 -3
  135. data/lib/concurrent-ruby.rb +0 -1
@@ -58,26 +58,6 @@ module Concurrent
  # end
  # ```
  #
- # When defining a constructor it is critical that the first line be a call to
- # `super` with no arguments. The `super` method initializes the background
- # thread and other asynchronous components.
- #
- # ```
- # class BackgroundLogger
- # include Concurrent::Async
- #
- # def initialize(level)
- # super()
- # @logger = Logger.new(STDOUT)
- # @logger.level = level
- # end
- #
- # def info(msg)
- # @logger.info(msg)
- # end
- # end
- # ```
- #
  # Mixing this module into a class provides each object two proxy methods:
  # `async` and `await`. These methods are thread safe with respect to the
  # enclosing object. The former proxy allows methods to be called
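The paragraph removed above described the old requirement to call `super` from the constructor; with the `ClassMethods#new` override shown in the next hunk, synchronization is initialized automatically. A minimal usage sketch of the `async`/`await` proxies described here (the class and logger are illustrative, not part of the gem):

    require 'concurrent'
    require 'logger'

    class BackgroundLogger
      include Concurrent::Async

      def initialize(level = Logger::INFO)
        @logger       = Logger.new($stdout)
        @logger.level = level
      end

      def info(msg)
        @logger.info(msg)
      end
    end

    logger = BackgroundLogger.new
    logger.async.info('returns an IVar immediately, runs on a background thread')
    logger.await.info('blocks the caller until the call has completed')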
@@ -292,6 +272,7 @@ module Concurrent
  obj.send(:init_synchronization)
  obj
  end
+ ruby2_keywords :new if respond_to?(:ruby2_keywords, true)
  end
  private_constant :ClassMethods

@@ -309,6 +290,7 @@ module Concurrent
  @delegate = delegate
  @queue = []
  @executor = Concurrent.global_io_executor
+ @ruby_pid = $$
  end

  # Delegates method calls to the wrapped object.
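Two small additions above: `ruby2_keywords :new` keeps keyword arguments intact when the overridden constructor forwards them through a bare `*args` splat on Ruby 2.7 and later, and `@ruby_pid` records the process id so the delegator can later detect a fork. A hedged sketch of the forwarding problem `ruby2_keywords` addresses (class and method names are illustrative only):

    class Emitter
      def fire(name, level: :info)
        puts "#{name} (#{level})"
      end
    end

    class EventProxy
      def initialize(target)
        @target = target
      end

      # Forwards every call through a bare *args splat.
      def fire(*args, &block)
        @target.fire(*args, &block)
      end
      # Without this marker, `level:` arrives at Emitter#fire as a positional
      # Hash on Ruby 3 and raises ArgumentError; with it, keyword semantics
      # survive the forwarding hop on Ruby 2.7 and later.
      ruby2_keywords :fire if respond_to?(:ruby2_keywords, true)
    end

    EventProxy.new(Emitter.new).fire('deploy', level: :warn)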
@@ -326,6 +308,7 @@ module Concurrent

  ivar = Concurrent::IVar.new
  synchronize do
+ reset_if_forked
  @queue.push [ivar, method, args, block]
  @executor.post { perform } if @queue.length == 1
  end
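Calling the `reset_if_forked` helper (defined in the next hunk) before enqueueing makes `Concurrent::Async` safer across `Process.fork`: jobs queued in the parent are discarded in the child, whose copy of the queue would otherwise never be drained because the parent's executor threads do not survive the fork. A sketch of the scenario, assuming a platform that supports `fork`:

    require 'concurrent'

    class Mailer
      include Concurrent::Async

      def deliver(to)
        puts "delivering to #{to} from pid #{$$}"
      end
    end

    mailer = Mailer.new
    mailer.async.deliver('a@example.com')    # queued in the parent process

    # The child inherits the delegator's state but not the parent's threads;
    # detecting the changed pid and clearing the queue lets new jobs trigger
    # a fresh #perform instead of waiting behind jobs that can never run.
    pid = Process.fork do
      mailer.async.deliver('b@example.com')  # fresh queue in the child
      sleep 0.1                              # give the background job time to run
    end
    Process.wait(pid)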
@@ -361,6 +344,13 @@ module Concurrent
  end
  end
  end
+
+ def reset_if_forked
+ if $$ != @ruby_pid
+ @queue.clear
+ @ruby_pid = $$
+ end
+ end
  end
  private_constant :AsyncDelegator

@@ -18,7 +18,7 @@ require 'concurrent/synchronization'
  # uncoordinated, *synchronous* change of individual values. Best used when
  # the value will undergo frequent reads but only occasional, though complex,
  # updates. Suitable when the result of an update must be known immediately.
- # * *{Concurrent::AtomicReference}:* A simple object reference that can be
+ # * *{Concurrent::AtomicReference}:* A simple object reference that can be updated
  # atomically. Updates are synchronous but fast. Best used when updates a
  # simple set operations. Not suitable when updates are complex.
  # {Concurrent::AtomicBoolean} and {Concurrent::AtomicFixnum} are similar
@@ -41,13 +41,13 @@ module Concurrent
  #
  # Explicitly sets the value to true.
  #
- # @return [Boolean] true is value has changed, otherwise false
+ # @return [Boolean] true if value has changed, otherwise false

  # @!macro atomic_boolean_method_make_false
  #
  # Explicitly sets the value to false.
  #
- # @return [Boolean] true is value has changed, otherwise false
+ # @return [Boolean] true if value has changed, otherwise false

  ###################################################################

@@ -170,6 +170,7 @@ module Concurrent
  alias_method :compare_and_swap, :compare_and_set
  alias_method :swap, :get_and_set
  end
+ TruffleRubyAtomicReference
  when Concurrent.on_rbx?
  # @note Extends `Rubinius::AtomicReference` version adding aliases
  # and numeric logic.
@@ -1,6 +1,6 @@
+ require 'concurrent/utility/engine'
  require 'concurrent/atomic/mutex_count_down_latch'
  require 'concurrent/atomic/java_count_down_latch'
- require 'concurrent/utility/engine'

  module Concurrent

@@ -19,7 +19,7 @@ module Concurrent
  # t1 = Thread.new do
  # puts "t1 is waiting"
  # event.wait(1)
- # puts "event ocurred"
+ # puts "event occurred"
  # end
  #
  # t2 = Thread.new do
@@ -30,8 +30,8 @@ module Concurrent
  # [t1, t2].each(&:join)
  #
  # # prints:
- # # t2 calling set
  # # t1 is waiting
+ # # t2 calling set
  # # event occurred
  class Event < Synchronization::LockableObject

@@ -23,7 +23,14 @@ module Concurrent

  synchronize do
  try_acquire_timed(permits, nil)
- nil
+ end
+
+ return unless block_given?
+
+ begin
+ yield
+ ensure
+ release(permits)
  end
  end

@@ -48,13 +55,22 @@ module Concurrent
  Utility::NativeInteger.ensure_integer_and_bounds permits
  Utility::NativeInteger.ensure_positive permits

- synchronize do
+ acquired = synchronize do
  if timeout.nil?
  try_acquire_now(permits)
  else
  try_acquire_timed(permits, timeout)
  end
  end
+
+ return acquired unless block_given?
+ return unless acquired
+
+ begin
+ yield
+ ensure
+ release(permits)
+ end
  end

  # @!macro semaphore_method_release
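With the two hunks above, `acquire` and `try_acquire` accept an optional block; permits are released in an `ensure` when the block returns, and the block's value becomes the return value. A short sketch of the new calling styles:

    require 'concurrent'

    semaphore = Concurrent::Semaphore.new(2)

    # Block form of #acquire: the permits are always released afterwards.
    semaphore.acquire(2) do
      puts semaphore.available_permits  # => 0
    end

    # Block form of #try_acquire with a timeout: the block only runs (and its
    # value is returned) when the permits were actually obtained; otherwise
    # nil is returned and nothing is released.
    result = semaphore.try_acquire(1, 0.5) do
      :did_some_work
    end
    puts result.inspect                 # => :did_some_work
    puts semaphore.available_permits    # => 2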
@@ -267,12 +267,10 @@ module Concurrent
  # running right now, AND no writers who came before us still waiting to
  # acquire the lock
  # Additionally, if any read locks have been taken, we must hold all of them
- if c == held
- # If we successfully swap the RUNNING_WRITER bit on, then we can go ahead
- if @Counter.compare_and_set(c, c+RUNNING_WRITER)
- @HeldCount.value = held + WRITE_LOCK_HELD
- return true
- end
+ if held > 0 && @Counter.compare_and_set(1, c+RUNNING_WRITER)
+ # If we are the only one reader and successfully swap the RUNNING_WRITER bit on, then we can go ahead
+ @HeldCount.value = held + WRITE_LOCK_HELD
+ return true
  elsif @Counter.compare_and_set(c, c+WAITING_WRITER)
  while true
  # Now we have successfully incremented, so no more readers will be able to increment
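The rewritten branch tightens write-lock escalation: a thread that already holds read locks may only swap the `RUNNING_WRITER` bit on when it is the sole reader, and the swap is attempted directly against that state. Everyday use of the lock is unchanged; a brief sketch (assuming the usual block helpers on `ReentrantReadWriteLock`):

    require 'concurrent'

    lock  = Concurrent::ReentrantReadWriteLock.new
    cache = {}

    # Reads may proceed concurrently; the writer below is exclusive.
    lock.with_read_lock { cache[:answer] }

    lock.with_write_lock do
      cache[:answer] = 42
      # Re-entrancy: the same thread may take a read lock while writing.
      lock.with_read_lock { cache[:answer] }
    end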
@@ -28,16 +28,31 @@ module Concurrent
  # But when a Thread is GC'd, we need to drop the reference to its thread-local
  # array, so we don't leak memory

- # @!visibility private
- FREE = []
- LOCK = Mutex.new
- ARRAYS = {} # used as a hash set
- @@next = 0
- private_constant :FREE, :LOCK, :ARRAYS
+ FREE = []
+ LOCK = Mutex.new
+ THREAD_LOCAL_ARRAYS = {} # used as a hash set
+
+ # synchronize when not on MRI
+ # on MRI using lock in finalizer leads to "can't be called from trap context" error
+ # so the code is carefully written to be tread-safe on MRI relying on GIL
+
+ if Concurrent.on_cruby?
+ # @!visibility private
+ def self.semi_sync(&block)
+ block.call
+ end
+ else
+ # @!visibility private
+ def self.semi_sync(&block)
+ LOCK.synchronize(&block)
+ end
+ end
+
+ private_constant :FREE, :LOCK, :THREAD_LOCAL_ARRAYS

  # @!macro thread_local_var_method_get
  def value
- if array = get_threadlocal_array
+ if (array = get_threadlocal_array)
  value = array[@index]
  if value.nil?
  default
@@ -57,10 +72,10 @@ module Concurrent
  # We could keep the thread-local arrays in a hash, keyed by Thread
  # But why? That would require locking
  # Using Ruby's built-in thread-local storage is faster
- unless array = get_threadlocal_array(me)
+ unless (array = get_threadlocal_array(me))
  array = set_threadlocal_array([], me)
- LOCK.synchronize { ARRAYS[array.object_id] = array }
- ObjectSpace.define_finalizer(me, self.class.thread_finalizer(array))
+ self.class.semi_sync { THREAD_LOCAL_ARRAYS[array.object_id] = array }
+ ObjectSpace.define_finalizer(me, self.class.thread_finalizer(array.object_id))
  end
  array[@index] = (value.nil? ? NULL : value)
  value
@@ -70,48 +85,52 @@ module Concurrent

  # @!visibility private
  def allocate_storage
- @index = LOCK.synchronize do
- FREE.pop || begin
- result = @@next
- @@next += 1
- result
- end
- end
- ObjectSpace.define_finalizer(self, self.class.threadlocal_finalizer(@index))
+ @index = FREE.pop || next_index
+
+ ObjectSpace.define_finalizer(self, self.class.thread_local_finalizer(@index))
  end

  # @!visibility private
- def self.threadlocal_finalizer(index)
+ def self.thread_local_finalizer(index)
  proc do
- Thread.new do # avoid error: can't be called from trap context
- LOCK.synchronize do
- FREE.push(index)
- # The cost of GC'ing a TLV is linear in the number of threads using TLVs
- # But that is natural! More threads means more storage is used per TLV
- # So naturally more CPU time is required to free more storage
- ARRAYS.each_value do |array|
- array[index] = nil
- end
- end
+ semi_sync do
+ # The cost of GC'ing a TLV is linear in the number of threads using TLVs
+ # But that is natural! More threads means more storage is used per TLV
+ # So naturally more CPU time is required to free more storage
+ #
+ # DO NOT use each_value which might conflict with new pair assignment
+ # into the hash in #value= method
+ THREAD_LOCAL_ARRAYS.values.each { |array| array[index] = nil }
+ # free index has to be published after the arrays are cleared
+ FREE.push(index)
  end
  end
  end

  # @!visibility private
- def self.thread_finalizer(array)
+ def self.thread_finalizer(id)
  proc do
- Thread.new do # avoid error: can't be called from trap context
- LOCK.synchronize do
- # The thread which used this thread-local array is now gone
- # So don't hold onto a reference to the array (thus blocking GC)
- ARRAYS.delete(array.object_id)
- end
+ semi_sync do
+ # The thread which used this thread-local array is now gone
+ # So don't hold onto a reference to the array (thus blocking GC)
+ THREAD_LOCAL_ARRAYS.delete(id)
  end
  end
  end

  private

+ # noinspection RubyClassVariableUsageInspection
+ @@next = 0
+ # noinspection RubyClassVariableUsageInspection
+ def next_index
+ LOCK.synchronize do
+ result = @@next
+ @@next += 1
+ result
+ end
+ end
+
  if Thread.instance_methods.include?(:thread_variable_get)

  def get_threadlocal_array(thread = Thread.current)
@@ -136,21 +155,22 @@ module Concurrent
  # This exists only for use in testing
  # @!visibility private
  def value_for(thread)
- if array = get_threadlocal_array(thread)
+ if (array = get_threadlocal_array(thread))
  value = array[@index]
  if value.nil?
- default_for(thread)
+ get_default
  elsif value.equal?(NULL)
  nil
  else
  value
  end
  else
- default_for(thread)
+ get_default
  end
  end

- def default_for(thread)
+ # @!visibility private
+ def get_default
  if @default_block
  raise "Cannot use default_for with default block"
  else
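The storage rework above (per-thread arrays registered in `THREAD_LOCAL_ARRAYS`, finalizers running through `semi_sync`) does not change the public behaviour of `Concurrent::ThreadLocalVar`: each thread still sees its own value, starting from the default. For reference, a minimal sketch:

    require 'concurrent'

    # Each thread starts from the default (here a block, evaluated per thread).
    counter = Concurrent::ThreadLocalVar.new { 0 }

    threads = 4.times.map do |i|
      Thread.new do
        counter.value += i   # mutates only this thread's copy
        counter.value
      end
    end
    puts threads.map(&:value).inspect  # => [0, 1, 2, 3]
    puts counter.value                 # => 0 in the main thread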
@@ -16,14 +16,16 @@ module Concurrent
  # @!macro semaphore_method_acquire
  #
  # Acquires the given number of permits from this semaphore,
- # blocking until all are available.
+ # blocking until all are available. If a block is given,
+ # yields to it and releases the permits afterwards.
  #
  # @param [Fixnum] permits Number of permits to acquire
  #
  # @raise [ArgumentError] if `permits` is not an integer or is less than
  # one
  #
- # @return [nil]
+ # @return [nil, BasicObject] Without a block, `nil` is returned. If a block
+ # is given, its return value is returned.

  # @!macro semaphore_method_available_permits
  #
@@ -41,7 +43,9 @@ module Concurrent
  #
  # Acquires the given number of permits from this semaphore,
  # only if all are available at the time of invocation or within
- # `timeout` interval
+ # `timeout` interval. If a block is given, yields to it if the permits
+ # were successfully acquired, and releases them afterward, returning the
+ # block's return value.
  #
  # @param [Fixnum] permits the number of permits to acquire
  #
@@ -51,8 +55,10 @@ module Concurrent
  # @raise [ArgumentError] if `permits` is not an integer or is less than
  # one
  #
- # @return [Boolean] `false` if no permits are available, `true` when
- # acquired a permit
+ # @return [true, false, nil, BasicObject] `false` if no permits are
+ # available, `true` when acquired a permit. If a block is given, the
+ # block's return value is returned if the permits were acquired; if not,
+ # `nil` is returned.

  # @!macro semaphore_method_release
  #
@@ -106,6 +112,8 @@ module Concurrent
  # releasing a blocking acquirer.
  # However, no actual permit objects are used; the Semaphore just keeps a
  # count of the number available and acts accordingly.
+ # Alternatively, permits may be acquired within a block, and automatically
+ # released after the block finishes executing.
  #
  # @!macro semaphore_public_api
  # @example
@@ -140,6 +148,19 @@ module Concurrent
  # # Thread 4 releasing semaphore
  # # Thread 1 acquired semaphore
  #
+ # @example
+ # semaphore = Concurrent::Semaphore.new(1)
+ #
+ # puts semaphore.available_permits
+ # semaphore.acquire do
+ # puts semaphore.available_permits
+ # end
+ # puts semaphore.available_permits
+ #
+ # # prints:
+ # # 1
+ # # 0
+ # # 1
  class Semaphore < SemaphoreImplementation
  end
  end
@@ -1,6 +1,6 @@
+ require 'concurrent/utility/engine'
  require 'concurrent/atomic/ruby_thread_local_var'
  require 'concurrent/atomic/java_thread_local_var'
- require 'concurrent/utility/engine'

  module Concurrent

@@ -19,7 +19,7 @@ module Concurrent
  end

  def compute_if_absent(key)
- if stored_value = _get(key) # fast non-blocking path for the most likely case
+ if NULL != (stored_value = @backend.fetch(key, NULL)) # fast non-blocking path for the most likely case
  stored_value
  else
  @write_lock.synchronize { super }
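The new fast path fetches with a `NULL` sentinel, so a stored `nil` or `false` is recognised as present without falling through to the write lock. The observable contract of `Concurrent::Map#compute_if_absent` is unchanged, sketched briefly:

    require 'concurrent'

    map = Concurrent::Map.new

    map.compute_if_absent(:flag) { false }   # stores and returns false
    # The block must not run again: the key is present even though the stored
    # value is falsy, which is exactly what the NULL-sentinel fetch preserves.
    map.compute_if_absent(:flag) { raise 'should not be called' }
    puts map[:flag].inspect                  # => false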
@@ -0,0 +1,14 @@
+ module Concurrent
+
+ # @!visibility private
+ module Collection
+
+ # @!visibility private
+ class TruffleRubyMapBackend < TruffleRuby::ConcurrentMap
+ def initialize(options = nil)
+ options ||= {}
+ super(initial_capacity: options[:initial_capacity], load_factor: options[:load_factor])
+ end
+ end
+ end
+ end
@@ -1,6 +1,6 @@
+ require 'concurrent/utility/engine'
  require 'concurrent/collection/java_non_concurrent_priority_queue'
  require 'concurrent/collection/ruby_non_concurrent_priority_queue'
- require 'concurrent/utility/engine'

  module Concurrent
  module Collection
@@ -30,7 +30,7 @@ module Concurrent
  if @queue[k] == item
  swap(k, @length)
  @length -= 1
- sink(k)
+ sink(k) || swim(k)
  @queue.pop
  else
  k += 1
@@ -126,12 +126,17 @@ module Concurrent
  #
  # @!visibility private
  def sink(k)
+ success = false
+
  while (j = (2 * k)) <= @length do
  j += 1 if j < @length && ! ordered?(j, j+1)
  break if ordered?(k, j)
  swap(k, j)
+ success = true
  k = j
  end
+
+ success
  end

  # Percolate up to maintain heap invariant.
@@ -140,10 +145,15 @@ module Concurrent
  #
  # @!visibility private
  def swim(k)
+ success = false
+
  while k > 1 && ! ordered?(k/2, k) do
  swap(k, k/2)
  k = k/2
+ success = true
  end
+
+ success
  end
  end
  end
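Why `sink(k) || swim(k)`: deleting an arbitrary element swaps the last element into the vacated slot, and that replacement can break the heap property in either direction. When `sink` moves nothing (now reported via its return value), the element still has to be given a chance to move up. A small illustration with a plain array, 1-based as in the queue code above:

    # Max-heap stored 1-based in an array: parent(i) = i / 2.
    heap = [nil, 9, 2, 8, 1, 0, 7]   # valid max-heap

    # Delete the 0 at index 5 by moving the last element (7) into its place.
    heap[5] = heap.pop               # heap is now [nil, 9, 2, 8, 1, 7]

    # 7 has no children, so sinking does nothing, yet it is larger than its
    # parent 2 at index 2 -- the heap is only repaired by swimming 7 upward,
    # the case the old sink-only delete missed.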
@@ -37,8 +37,8 @@ module Concurrent
  # returning data to the caller (dereferencing).
  #
  # @note Most classes that include this module will call `#set_deref_options`
- # from within the constructor, thus allowing these options to be set at
- # object creation.
+ # from within the constructor, thus allowing these options to be set at
+ # object creation.
  #
  # @param [Hash] opts the options defining dereference behavior.
  # @option opts [String] :dup_on_deref (false) call `#dup` before returning the data
@@ -3,13 +3,14 @@ require 'concurrent/delay'
  require 'concurrent/errors'
  require 'concurrent/atomic/atomic_reference'
  require 'concurrent/concern/logging'
+ require 'concurrent/concern/deprecation'
  require 'concurrent/executor/immediate_executor'
  require 'concurrent/executor/cached_thread_pool'
- require 'concurrent/utility/at_exit'
  require 'concurrent/utility/processor_counter'

  module Concurrent
  extend Concern::Logging
+ extend Concern::Deprecation

  autoload :Options, 'concurrent/options'
  autoload :TimerSet, 'concurrent/executor/timer_set'
@@ -97,15 +98,15 @@ module Concurrent
  end

  # @!visibility private
- GLOBAL_FAST_EXECUTOR = Delay.new { Concurrent.new_fast_executor(auto_terminate: true) }
+ GLOBAL_FAST_EXECUTOR = Delay.new { Concurrent.new_fast_executor }
  private_constant :GLOBAL_FAST_EXECUTOR

  # @!visibility private
- GLOBAL_IO_EXECUTOR = Delay.new { Concurrent.new_io_executor(auto_terminate: true) }
+ GLOBAL_IO_EXECUTOR = Delay.new { Concurrent.new_io_executor }
  private_constant :GLOBAL_IO_EXECUTOR

  # @!visibility private
- GLOBAL_TIMER_SET = Delay.new { TimerSet.new(auto_terminate: true) }
+ GLOBAL_TIMER_SET = Delay.new { TimerSet.new }
  private_constant :GLOBAL_TIMER_SET

  # @!visibility private
@@ -115,7 +116,7 @@ module Concurrent
  # Disables AtExit handlers including pool auto-termination handlers.
  # When disabled it will be the application programmer's responsibility
  # to ensure that the handlers are shutdown properly prior to application
- # exit by calling {AtExit.run} method.
+ # exit by calling `AtExit.run` method.
  #
  # @note this option should be needed only because of `at_exit` ordering
  # issues which may arise when running some of the testing frameworks.
@@ -125,9 +126,10 @@ module Concurrent
  # @note This method should *never* be called
  # from within a gem. It should *only* be used from within the main
  # application and even then it should be used only when necessary.
- # @see AtExit
+ # @deprecated Has no effect since it is no longer needed, see https://github.com/ruby-concurrency/concurrent-ruby/pull/841.
+ #
  def self.disable_at_exit_handlers!
- AtExit.enabled = false
+ deprecated "Method #disable_at_exit_handlers! has no effect since it is no longer needed, see https://github.com/ruby-concurrency/concurrent-ruby/pull/841."
  end

  # Global thread pool optimized for short, fast *operations*.
@@ -171,14 +173,16 @@ module Concurrent
  auto_terminate: opts.fetch(:auto_terminate, true),
  idletime: 60, # 1 minute
  max_queue: 0, # unlimited
- fallback_policy: :abort # shouldn't matter -- 0 max queue
+ fallback_policy: :abort, # shouldn't matter -- 0 max queue
+ name: "fast"
  )
  end

  def self.new_io_executor(opts = {})
  CachedThreadPool.new(
  auto_terminate: opts.fetch(:auto_terminate, true),
- fallback_policy: :abort # shouldn't matter -- 0 max queue
+ fallback_policy: :abort, # shouldn't matter -- 0 max queue
+ name: "io"
  )
  end
  end
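The global executors now rely on `auto_terminate` defaulting to `true` and receive `name:` hints ("fast", "io") for their worker threads; everyday use is unchanged. A short sketch:

    require 'concurrent'

    # Post blocking / IO-bound work to the global IO pool ...
    Concurrent.global_io_executor.post { sleep 0.1; puts 'io done' }

    # ... and CPU-bound work to the global fast pool.
    Concurrent.global_fast_executor.post { puts 2**20 }

    # Explicit pools still accept auto_terminate (true by default).
    pool = Concurrent::FixedThreadPool.new(2)
    pool.post { puts 'pooled task' }
    pool.shutdown
    pool.wait_for_termination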