concurrent-ruby 1.0.5 → 1.1.1

This diff shows the changes between the publicly released contents of these two package versions, as they appear in their public registry, and is provided for informational purposes only.
Files changed (109)
  1. checksums.yaml +5 -5
  2. data/CHANGELOG.md +65 -0
  3. data/Gemfile +39 -0
  4. data/{LICENSE.txt → LICENSE.md} +2 -0
  5. data/README.md +207 -105
  6. data/Rakefile +314 -0
  7. data/ext/concurrent-ruby/ConcurrentRubyService.java +17 -0
  8. data/ext/concurrent-ruby/com/concurrent_ruby/ext/AtomicReferenceLibrary.java +175 -0
  9. data/ext/concurrent-ruby/com/concurrent_ruby/ext/JRubyMapBackendLibrary.java +248 -0
  10. data/ext/concurrent-ruby/com/concurrent_ruby/ext/JavaAtomicBooleanLibrary.java +93 -0
  11. data/ext/concurrent-ruby/com/concurrent_ruby/ext/JavaAtomicFixnumLibrary.java +113 -0
  12. data/ext/concurrent-ruby/com/concurrent_ruby/ext/JavaSemaphoreLibrary.java +159 -0
  13. data/ext/concurrent-ruby/com/concurrent_ruby/ext/SynchronizationLibrary.java +306 -0
  14. data/ext/concurrent-ruby/com/concurrent_ruby/ext/jsr166e/ConcurrentHashMap.java +31 -0
  15. data/ext/concurrent-ruby/com/concurrent_ruby/ext/jsr166e/ConcurrentHashMapV8.java +3863 -0
  16. data/ext/concurrent-ruby/com/concurrent_ruby/ext/jsr166e/LongAdder.java +203 -0
  17. data/ext/concurrent-ruby/com/concurrent_ruby/ext/jsr166e/Striped64.java +342 -0
  18. data/ext/concurrent-ruby/com/concurrent_ruby/ext/jsr166e/nounsafe/ConcurrentHashMapV8.java +3800 -0
  19. data/ext/concurrent-ruby/com/concurrent_ruby/ext/jsr166e/nounsafe/LongAdder.java +204 -0
  20. data/ext/concurrent-ruby/com/concurrent_ruby/ext/jsr166e/nounsafe/Striped64.java +291 -0
  21. data/ext/concurrent-ruby/com/concurrent_ruby/ext/jsr166y/ThreadLocalRandom.java +199 -0
  22. data/lib/concurrent/agent.rb +7 -7
  23. data/lib/concurrent/array.rb +59 -32
  24. data/lib/concurrent/async.rb +4 -4
  25. data/lib/concurrent/atom.rb +9 -9
  26. data/lib/concurrent/atomic/atomic_boolean.rb +24 -20
  27. data/lib/concurrent/atomic/atomic_fixnum.rb +27 -23
  28. data/lib/concurrent/atomic/atomic_markable_reference.rb +164 -0
  29. data/lib/concurrent/atomic/atomic_reference.rb +185 -32
  30. data/lib/concurrent/atomic/count_down_latch.rb +6 -6
  31. data/lib/concurrent/atomic/cyclic_barrier.rb +1 -1
  32. data/lib/concurrent/atomic/event.rb +1 -1
  33. data/lib/concurrent/atomic/java_count_down_latch.rb +9 -6
  34. data/lib/concurrent/atomic/mutex_atomic_boolean.rb +2 -0
  35. data/lib/concurrent/atomic/mutex_count_down_latch.rb +1 -0
  36. data/lib/concurrent/atomic/read_write_lock.rb +2 -1
  37. data/lib/concurrent/atomic/reentrant_read_write_lock.rb +3 -1
  38. data/lib/concurrent/atomic/semaphore.rb +8 -8
  39. data/lib/concurrent/atomic/thread_local_var.rb +7 -7
  40. data/lib/concurrent/atomic_reference/mutex_atomic.rb +3 -8
  41. data/lib/concurrent/atomic_reference/numeric_cas_wrapper.rb +1 -1
  42. data/lib/concurrent/atomics.rb +0 -43
  43. data/lib/concurrent/collection/lock_free_stack.rb +158 -0
  44. data/lib/concurrent/collection/map/atomic_reference_map_backend.rb +3 -3
  45. data/lib/concurrent/collection/map/non_concurrent_map_backend.rb +1 -2
  46. data/lib/concurrent/collection/non_concurrent_priority_queue.rb +29 -29
  47. data/lib/concurrent/concern/dereferenceable.rb +1 -1
  48. data/lib/concurrent/concern/logging.rb +6 -1
  49. data/lib/concurrent/concern/observable.rb +7 -7
  50. data/lib/concurrent/concurrent_ruby.jar +0 -0
  51. data/lib/concurrent/configuration.rb +1 -6
  52. data/lib/concurrent/constants.rb +1 -1
  53. data/lib/concurrent/dataflow.rb +2 -1
  54. data/lib/concurrent/delay.rb +9 -7
  55. data/lib/concurrent/exchanger.rb +21 -25
  56. data/lib/concurrent/executor/abstract_executor_service.rb +2 -2
  57. data/lib/concurrent/executor/cached_thread_pool.rb +1 -1
  58. data/lib/concurrent/executor/executor_service.rb +15 -15
  59. data/lib/concurrent/executor/fixed_thread_pool.rb +18 -18
  60. data/lib/concurrent/executor/java_thread_pool_executor.rb +10 -7
  61. data/lib/concurrent/executor/single_thread_executor.rb +2 -2
  62. data/lib/concurrent/executor/thread_pool_executor.rb +6 -6
  63. data/lib/concurrent/executor/timer_set.rb +1 -1
  64. data/lib/concurrent/future.rb +4 -1
  65. data/lib/concurrent/hash.rb +53 -30
  66. data/lib/concurrent/ivar.rb +5 -6
  67. data/lib/concurrent/map.rb +178 -81
  68. data/lib/concurrent/maybe.rb +1 -1
  69. data/lib/concurrent/mutable_struct.rb +15 -14
  70. data/lib/concurrent/mvar.rb +2 -2
  71. data/lib/concurrent/promise.rb +53 -21
  72. data/lib/concurrent/promises.rb +1936 -0
  73. data/lib/concurrent/re_include.rb +58 -0
  74. data/lib/concurrent/set.rb +66 -0
  75. data/lib/concurrent/settable_struct.rb +1 -0
  76. data/lib/concurrent/synchronization/abstract_lockable_object.rb +5 -5
  77. data/lib/concurrent/synchronization/abstract_struct.rb +6 -4
  78. data/lib/concurrent/synchronization/lockable_object.rb +6 -6
  79. data/lib/concurrent/synchronization/{mri_lockable_object.rb → mutex_lockable_object.rb} +19 -14
  80. data/lib/concurrent/synchronization/object.rb +8 -4
  81. data/lib/concurrent/synchronization/truffleruby_object.rb +46 -0
  82. data/lib/concurrent/synchronization/volatile.rb +11 -9
  83. data/lib/concurrent/synchronization.rb +4 -5
  84. data/lib/concurrent/thread_safe/util/data_structures.rb +63 -0
  85. data/lib/concurrent/thread_safe/util/striped64.rb +9 -4
  86. data/lib/concurrent/timer_task.rb +5 -2
  87. data/lib/concurrent/tuple.rb +1 -1
  88. data/lib/concurrent/tvar.rb +2 -2
  89. data/lib/concurrent/utility/193.rb +17 -0
  90. data/lib/concurrent/utility/at_exit.rb +1 -1
  91. data/lib/concurrent/utility/engine.rb +4 -4
  92. data/lib/concurrent/utility/monotonic_time.rb +3 -3
  93. data/lib/concurrent/utility/native_extension_loader.rb +31 -33
  94. data/lib/concurrent/utility/processor_counter.rb +0 -2
  95. data/lib/concurrent/version.rb +2 -2
  96. data/lib/concurrent-ruby.rb +1 -0
  97. data/lib/concurrent.rb +26 -20
  98. metadata +33 -18
  99. data/lib/concurrent/atomic_reference/concurrent_update_error.rb +0 -8
  100. data/lib/concurrent/atomic_reference/direct_update.rb +0 -81
  101. data/lib/concurrent/atomic_reference/jruby+truffle.rb +0 -2
  102. data/lib/concurrent/atomic_reference/jruby.rb +0 -16
  103. data/lib/concurrent/atomic_reference/rbx.rb +0 -22
  104. data/lib/concurrent/atomic_reference/ruby.rb +0 -32
  105. data/lib/concurrent/edge.rb +0 -26
  106. data/lib/concurrent/lazy_register.rb +0 -81
  107. data/lib/concurrent/synchronization/truffle_lockable_object.rb +0 -9
  108. data/lib/concurrent/synchronization/truffle_object.rb +0 -31
  109. data/lib/concurrent/thread_safe/util/array_hash_rbx.rb +0 -30
data/lib/concurrent/collection/lock_free_stack.rb (new file)
@@ -0,0 +1,158 @@
+ module Concurrent
+
+   # @!macro warn.edge
+   class LockFreeStack < Synchronization::Object
+
+     safe_initialization!
+
+     class Node
+       # TODO (pitr-ch 20-Dec-2016): Could be unified with Stack class?
+
+       # @return [Node]
+       attr_reader :next_node
+
+       # @return [Object]
+       attr_reader :value
+
+       # @!visibility private
+       # allow to nil-ify to free GC when the entry is no longer relevant, not synchronised
+       attr_writer :value
+
+       def initialize(value, next_node)
+         @value = value
+         @next_node = next_node
+       end
+
+       singleton_class.send :alias_method, :[], :new
+     end
+
+     # The singleton for empty node
+     EMPTY = Node[nil, nil]
+     def EMPTY.next_node
+       self
+     end
+
+     attr_atomic(:head)
+     private :head, :head=, :swap_head, :compare_and_set_head, :update_head
+
+     # @!visibility private
+     def self.of1(value)
+       new Node[value, EMPTY]
+     end
+
+     # @!visibility private
+     def self.of2(value1, value2)
+       new Node[value1, Node[value2, EMPTY]]
+     end
+
+     # @param [Node] head
+     def initialize(head = EMPTY)
+       super()
+       self.head = head
+     end
+
+     # @param [Node] head
+     # @return [true, false]
+     def empty?(head = self.head)
+       head.equal? EMPTY
+     end
+
+     # @param [Node] head
+     # @param [Object] value
+     # @return [true, false]
+     def compare_and_push(head, value)
+       compare_and_set_head head, Node[value, head]
+     end
+
+     # @param [Object] value
+     # @return [self]
+     def push(value)
+       while true
+         current_head = head
+         return self if compare_and_set_head current_head, Node[value, current_head]
+       end
+     end
+
+     # @return [Node]
+     def peek
+       head
+     end
+
+     # @param [Node] head
+     # @return [true, false]
+     def compare_and_pop(head)
+       compare_and_set_head head, head.next_node
+     end
+
+     # @return [Object]
+     def pop
+       while true
+         current_head = head
+         return current_head.value if compare_and_set_head current_head, current_head.next_node
+       end
+     end
+
+     # @param [Node] head
+     # @return [true, false]
+     def compare_and_clear(head)
+       compare_and_set_head head, EMPTY
+     end
+
+     include Enumerable
+
+     # @param [Node] head
+     # @return [self]
+     def each(head = nil)
+       return to_enum(:each, head) unless block_given?
+       it = head || peek
+       until it.equal?(EMPTY)
+         yield it.value
+         it = it.next_node
+       end
+       self
+     end
+
+     # @return [true, false]
+     def clear
+       while true
+         current_head = head
+         return false if current_head == EMPTY
+         return true if compare_and_set_head current_head, EMPTY
+       end
+     end
+
+     # @param [Node] head
+     # @return [true, false]
+     def clear_if(head)
+       compare_and_set_head head, EMPTY
+     end
+
+     # @param [Node] head
+     # @param [Node] new_head
+     # @return [true, false]
+     def replace_if(head, new_head)
+       compare_and_set_head head, new_head
+     end
+
+     # @return [self]
+     # @yield over the cleared stack
+     # @yieldparam [Object] value
+     def clear_each(&block)
+       while true
+         current_head = head
+         return self if current_head == EMPTY
+         if compare_and_set_head current_head, EMPTY
+           each current_head, &block
+           return self
+         end
+       end
+     end
+
+     # @return [String] Short string representation.
+     def to_s
+       format '%s %s>', super[0..-2], to_a.to_s
+     end
+
+     alias_method :inspect, :to_s
+   end
+ end
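The new Concurrent::LockFreeStack above is a Treiber-style stack: every mutation is a single compare-and-set of the atomic head reference retried in a loop, so no locks are taken. A minimal usage sketch, not taken from the diff; note the class carries the edge-warning macro, and that peek returns the head Node rather than a bare value:

```ruby
require 'concurrent'

stack = Concurrent::LockFreeStack.new
stack.push(1).push(2).push(3)   # push returns self, so calls chain

stack.pop                       #=> 3 (then 2, then 1 on later calls)
stack.peek                      #=> the head Node object, not a value
stack.to_a                      #=> remaining values, via Enumerable#each

# Atomically detach everything currently on the stack, then iterate it:
stack.clear_each { |value| puts "drained #{value}" }
```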
data/lib/concurrent/collection/map/atomic_reference_map_backend.rb
@@ -289,7 +289,7 @@ module Concurrent
  end
  end
  elsif cas_hash(my_hash, my_hash | WAITING)
- force_aquire_lock(table, i)
+ force_acquire_lock(table, i)
  break
  end
  end
@@ -330,7 +330,7 @@ module Concurrent
  end

  private
- def force_aquire_lock(table, i)
+ def force_acquire_lock(table, i)
  cheap_synchronize do
  if equal?(table.volatile_get(i)) && (hash & WAITING) == WAITING
  cheap_wait
@@ -831,7 +831,7 @@ module Concurrent
  # no lock needed (or available) if bin >= 0, because we're not popping values from locked_indexes until we've run through the whole table
  redo unless (bin >= 0 ? table.cas(i, nil, forwarder) : lock_and_clean_up_reverse_forwarders(table, old_table_size, new_table, i, forwarder))
  elsif Node.locked_hash?(node_hash = node.hash)
- locked_indexes ||= Array.new
+ locked_indexes ||= ::Array.new
  if bin < 0 && locked_arr_idx > 0
  locked_arr_idx -= 1
  i, locked_indexes[locked_arr_idx] = locked_indexes[locked_arr_idx], i # swap with another bin

data/lib/concurrent/collection/map/non_concurrent_map_backend.rb
@@ -10,7 +10,7 @@ module Concurrent

  # WARNING: all public methods of the class must operate on the @backend
  # directly without calling each other. This is important because of the
- # SynchronizedMapBackend which uses a non-reentrant mutex for perfomance
+ # SynchronizedMapBackend which uses a non-reentrant mutex for performance
  # reasons.
  def initialize(options = nil)
  @backend = {}
@@ -95,7 +95,6 @@ module Concurrent
  end

  def each_pair
- return enum_for :each_pair unless block_given?
  dupped_backend.each_pair do |k, v|
  yield k, v
  end

data/lib/concurrent/collection/non_concurrent_priority_queue.rb
@@ -15,7 +15,7 @@ module Concurrent
  end
  private_constant :NonConcurrentPriorityQueueImplementation

- # @!macro [attach] priority_queue
+ # @!macro priority_queue
  #
  # A queue collection in which the elements are sorted based on their
  # comparison (spaceship) operator `<=>`. Items are added to the queue
@@ -45,7 +45,7 @@ module Concurrent
  # @see http://algs4.cs.princeton.edu/24pq/MaxPQ.java.html
  #
  # @see http://docs.oracle.com/javase/7/docs/api/java/util/PriorityQueue.html
- #
+ #
  # @!visibility private
  class NonConcurrentPriorityQueue < NonConcurrentPriorityQueueImplementation

@@ -60,83 +60,83 @@ module Concurrent
  alias_method :enq, :push

  # @!method initialize(opts = {})
- # @!macro [new] priority_queue_method_initialize
+ # @!macro priority_queue_method_initialize
  #
  # Create a new priority queue with no items.
- #
+ #
  # @param [Hash] opts the options for creating the queue
  # @option opts [Symbol] :order (:max) dictates the order in which items are
  # stored: from highest to lowest when `:max` or `:high`; from lowest to
  # highest when `:min` or `:low`

  # @!method clear
- # @!macro [new] priority_queue_method_clear
+ # @!macro priority_queue_method_clear
  #
  # Removes all of the elements from this priority queue.

  # @!method delete(item)
- # @!macro [new] priority_queue_method_delete
+ # @!macro priority_queue_method_delete
  #
  # Deletes all items from `self` that are equal to `item`.
- #
+ #
  # @param [Object] item the item to be removed from the queue
  # @return [Object] true if the item is found else false

  # @!method empty?
- # @!macro [new] priority_queue_method_empty
- #
+ # @!macro priority_queue_method_empty
+ #
  # Returns `true` if `self` contains no elements.
- #
+ #
  # @return [Boolean] true if there are no items in the queue else false

  # @!method include?(item)
- # @!macro [new] priority_queue_method_include
+ # @!macro priority_queue_method_include
  #
  # Returns `true` if the given item is present in `self` (that is, if any
  # element == `item`), otherwise returns false.
- #
+ #
  # @param [Object] item the item to search for
- #
+ #
  # @return [Boolean] true if the item is found else false

  # @!method length
- # @!macro [new] priority_queue_method_length
- #
+ # @!macro priority_queue_method_length
+ #
  # The current length of the queue.
- #
+ #
  # @return [Fixnum] the number of items in the queue

  # @!method peek
- # @!macro [new] priority_queue_method_peek
- #
+ # @!macro priority_queue_method_peek
+ #
  # Retrieves, but does not remove, the head of this queue, or returns `nil`
  # if this queue is empty.
- #
+ #
  # @return [Object] the head of the queue or `nil` when empty

  # @!method pop
- # @!macro [new] priority_queue_method_pop
- #
+ # @!macro priority_queue_method_pop
+ #
  # Retrieves and removes the head of this queue, or returns `nil` if this
  # queue is empty.
- #
+ #
  # @return [Object] the head of the queue or `nil` when empty

  # @!method push(item)
- # @!macro [new] priority_queue_method_push
- #
+ # @!macro priority_queue_method_push
+ #
  # Inserts the specified element into this priority queue.
- #
+ #
  # @param [Object] item the item to insert onto the queue

  # @!method self.from_list(list, opts = {})
- # @!macro [new] priority_queue_method_from_list
- #
+ # @!macro priority_queue_method_from_list
+ #
  # Create a new priority queue from the given list.
- #
+ #
  # @param [Enumerable] list the list to build the queue from
  # @param [Hash] opts the options for creating the queue
- #
+ #
  # @return [NonConcurrentPriorityQueue] the newly created and populated queue
  end
  end

data/lib/concurrent/concern/dereferenceable.rb
@@ -32,7 +32,7 @@ module Concurrent
  synchronize{ @value = value }
  end

- # @!macro [attach] dereferenceable_set_deref_options
+ # @!macro dereferenceable_set_deref_options
  # Set the options which define the operations #value performs before
  # returning data to the caller (dereferencing).
  #

data/lib/concurrent/concern/logging.rb
@@ -17,7 +17,12 @@ module Concurrent
  def log(level, progname, message = nil, &block)
  #NOTE: Cannot require 'concurrent/configuration' above due to circular references.
  # Assume that the gem has been initialized if we've gotten this far.
- (@logger || Concurrent.global_logger).call level, progname, message, &block
+ logger = if defined?(@logger) && @logger
+ @logger
+ else
+ Concurrent.global_logger
+ end
+ logger.call level, progname, message, &block
  rescue => error
  $stderr.puts "`Concurrent.configuration.logger` failed to log #{[level, progname, message, block]}\n" +
  "#{error.message} (#{error.class})\n#{error.backtrace.join "\n"}"
data/lib/concurrent/concern/observable.rb
@@ -49,7 +49,7 @@ module Concurrent
  # or an AtomicFixum)
  module Observable

- # @!macro [attach] observable_add_observer
+ # @!macro observable_add_observer
  #
  # Adds an observer to this set. If a block is passed, the observer will be
  # created by this method and no other params should be passed.
@@ -72,8 +72,8 @@ module Concurrent
  self
  end

- # @!macro [attach] observable_delete_observer
- #
+ # @!macro observable_delete_observer
+ #
  # Remove `observer` as an observer on this object so that it will no
  # longer receive notifications.
  #
@@ -83,17 +83,17 @@ module Concurrent
  observers.delete_observer(observer)
  end

- # @!macro [attach] observable_delete_observers
- #
+ # @!macro observable_delete_observers
+ #
  # Remove all observers associated with this object.
- #
+ #
  # @return [Observable] self
  def delete_observers
  observers.delete_observers
  self
  end

- # @!macro [attach] observable_count_observers
+ # @!macro observable_count_observers
  #
  # Return the number of observers associated with this object.
  #
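These @!macro edits only change how the Observable documentation is generated; the observer API itself is unchanged. As a reminder of what that API looks like in practice, a small sketch using Concurrent::TimerTask, which mixes in Concern::Observable (the Watcher class is made up for the example):

```ruby
require 'concurrent'

class Watcher
  # TimerTask notifies observers with (time, result, exception) after each run.
  def update(time, result, exception)
    if exception
      warn "(#{time}) task failed: #{exception}"
    else
      puts "(#{time}) task returned #{result}"
    end
  end
end

task = Concurrent::TimerTask.new(execution_interval: 1) { 2 + 2 }
task.add_observer(Watcher.new)  # Observable#add_observer
task.execute
sleep 3
task.delete_observers           # Observable#delete_observers
task.shutdown
```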
data/lib/concurrent/concurrent_ruby.jar
Binary file (no diff shown)

data/lib/concurrent/configuration.rb
@@ -175,13 +175,8 @@ module Concurrent
  end

  def self.new_io_executor(opts = {})
- ThreadPoolExecutor.new(
- min_threads: [2, Concurrent.processor_count].max,
- max_threads: ThreadPoolExecutor::DEFAULT_MAX_POOL_SIZE,
- # max_threads: 1000,
+ CachedThreadPool.new(
  auto_terminate: opts.fetch(:auto_terminate, true),
- idletime: 60, # 1 minute
- max_queue: 0, # unlimited
  fallback_policy: :abort # shouldn't matter -- 0 max queue
  )
  end
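The configuration change swaps the global IO executor from a bounded ThreadPoolExecutor to a CachedThreadPool, which spawns threads on demand for blocking work instead of queueing it behind a fixed pool size. A hedged sketch of code that exercises that executor (the lambda stands in for real blocking IO):

```ruby
require 'concurrent'

blocking_io = -> { sleep 0.1; :done }  # stand-in for real blocking IO

# Post directly to the global IO executor (now a CachedThreadPool)...
Concurrent.global_io_executor.post { blocking_io.call }

# ...or route a Future onto it by name:
future = Concurrent::Future.execute(executor: :io) { blocking_io.call }
future.value  #=> :done
```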
data/lib/concurrent/constants.rb
@@ -3,6 +3,6 @@ module Concurrent
  # Various classes within allows for +nil+ values to be stored,
  # so a special +NULL+ token is required to indicate the "nil-ness".
  # @!visibility private
- NULL = Object.new
+ NULL = ::Object.new

  end

data/lib/concurrent/dataflow.rb
@@ -18,7 +18,8 @@ module Concurrent
  end
  end

- # {include:file:doc/dataflow.md}
+ # Dataflow allows you to create a task that will be scheduled when all of its data dependencies are available.
+ # {include:file:docs-source/dataflow.md}
  #
  # @param [Future] inputs zero or more `Future` operations that this dataflow depends upon
  #

data/lib/concurrent/delay.rb
@@ -32,7 +32,7 @@ module Concurrent
  #
  # @!macro copy_options
  #
- # @!macro [attach] delay_note_regarding_blocking
+ # @!macro delay_note_regarding_blocking
  # @note The default behavior of `Delay` is to block indefinitely when
  # calling either `value` or `wait`, executing the delayed operation on
  # the current thread. This makes the `timeout` value completely
@@ -81,13 +81,15 @@ module Concurrent
  # this function has been optimized for performance and
  # should not be modified without running new benchmarks
  synchronize do
- execute = @computing = true unless @computing
+ execute = @evaluation_started = true unless @evaluation_started
  if execute
  begin
  set_state(true, @task.call, nil)
  rescue => ex
  set_state(false, nil, ex)
  end
+ elsif incomplete?
+ raise IllegalOperationError, 'Recursive call to #value during evaluation of the Delay'
  end
  end
  if @do_nothing_on_deref
@@ -144,7 +146,7 @@ module Concurrent
  def reconfigure(&block)
  synchronize do
  raise ArgumentError.new('no block given') unless block_given?
- unless @computing
+ unless @evaluation_started
  @task = block
  true
  else
@@ -160,9 +162,9 @@ module Concurrent
  set_deref_options(opts)
  @executor = opts[:executor]

- @task = block
- @state = :pending
- @computing = false
+ @task = block
+ @state = :pending
+ @evaluation_started = false
  end

  private
@@ -173,7 +175,7 @@ module Concurrent
  # should not be modified without running new benchmarks
  execute = task = nil
  synchronize do
- execute = @computing = true unless @computing
+ execute = @evaluation_started = true unless @evaluation_started
  task = @task
  end

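The Delay changes rename @computing to @evaluation_started and add an explicit guard: calling #value again from inside the delayed block, while evaluation is still in progress, now raises IllegalOperationError rather than proceeding with an unevaluated result. A short sketch of the (unchanged) basic behaviour:

```ruby
require 'concurrent'

delay = Concurrent::Delay.new { puts 'computing...'; 42 }

delay.value  #=> 42, prints "computing..." on this first call only
delay.value  #=> 42, cached; the block is not run again
```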
data/lib/concurrent/exchanger.rb
@@ -8,7 +8,7 @@ require 'concurrent/utility/monotonic_time'

  module Concurrent

- # @!macro [attach] exchanger
+ # @!macro exchanger
  #
  # A synchronization point at which threads can pair and swap elements within
  # pairs. Each thread presents some object on entry to the exchange method,
@@ -24,9 +24,6 @@ module Concurrent
  # will remain correct.
  #
  # @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Exchanger.html java.util.concurrent.Exchanger
- #
- # @!macro edge_warning
- #
  # @example
  #
  # exchanger = Concurrent::Exchanger.new
@@ -36,20 +33,19 @@ module Concurrent
  # Thread.new { puts "second: " << exchanger.exchange('bar', 1) } #=> "second: foo"
  # ]
  # threads.each {|t| t.join(2) }
- #
+
  # @!visibility private
  class AbstractExchanger < Synchronization::Object

  # @!visibility private
- CANCEL = Object.new
+ CANCEL = ::Object.new
  private_constant :CANCEL

- # @!macro [attach] exchanger_method_initialize
  def initialize
  super
  end

- # @!macro [attach] exchanger_method_do_exchange
+ # @!macro exchanger_method_do_exchange
  #
  # Waits for another thread to arrive at this exchange point (unless the
  # current thread is interrupted), and then transfers the given object to
@@ -61,7 +57,7 @@ module Concurrent
  # @param [Object] value the value to exchange with another thread
  # @param [Numeric, nil] timeout in seconds, `nil` blocks indefinitely
  #
- # @!macro [attach] exchanger_method_exchange
+ # @!macro exchanger_method_exchange
  #
  # In some edge cases when a `timeout` is given a return value of `nil` may be
  # ambiguous. Specifically, if `nil` is a valid value in the exchange it will
@@ -75,8 +71,7 @@ module Concurrent
  end

  # @!macro exchanger_method_do_exchange
- #
- # @!macro [attach] exchanger_method_exchange_bang
+ # @!macro exchanger_method_exchange_bang
  #
  # On timeout a {Concurrent::TimeoutError} exception will be raised.
  #
@@ -91,8 +86,7 @@ module Concurrent
  end

  # @!macro exchanger_method_do_exchange
- #
- # @!macro [attach] exchanger_method_try_exchange
+ # @!macro exchanger_method_try_exchange
  #
  # The return value will be a {Concurrent::Maybe} set to `Just` on success or
  # `Nothing` on timeout.
@@ -130,7 +124,6 @@ module Concurrent
  end
  end

- # @!macro exchanger
  # @!macro internal_implementation_note
  # @!visibility private
  class RubyExchanger < AbstractExchanger
@@ -148,8 +141,8 @@ module Concurrent

  def initialize(item)
  super()
- @Item = item
- @Latch = Concurrent::CountDownLatch.new
+ @Item = item
+ @Latch = Concurrent::CountDownLatch.new
  self.value = nil
  end

@@ -163,7 +156,6 @@ module Concurrent
  end
  private_constant :Node

- # @!macro exchanger_method_initialize
  def initialize
  super
  end
@@ -218,7 +210,7 @@ module Concurrent
  # node's initial value. It never changes. It's what the fulfiller returns on
  # success. The occupier's hole is where the fulfiller put its item. It's the
  # item that the occupier returns on success. The latch is used for synchronization.
- # Becuase a thread may act as either an occupier or fulfiller (or possibly
+ # Because a thread may act as either an occupier or fulfiller (or possibly
  # both in periods of high contention) every thread creates a node when
  # the exchange method is first called.
  #
@@ -260,8 +252,8 @@ module Concurrent
  # - Wake the sleeping occupier
  # - Return the occupier's item

- value = NULL if value.nil? # The sentinel allows nil to be a valid value
- me = Node.new(value) # create my node in case I need to occupy
+ value = NULL if value.nil? # The sentinel allows nil to be a valid value
+ me = Node.new(value) # create my node in case I need to occupy
  end_at = Concurrent.monotonic_time + timeout.to_f # The time to give up

  result = loop do
@@ -298,12 +290,10 @@ module Concurrent

  if Concurrent.on_jruby?

- # @!macro exchanger
  # @!macro internal_implementation_note
  # @!visibility private
  class JavaExchanger < AbstractExchanger

- # @!macro exchanger_method_initialize
  def initialize
  @exchanger = java.util.concurrent.Exchanger.new
  end
@@ -314,11 +304,17 @@ module Concurrent
  #
  # @return [Object, CANCEL] the value exchanged by the other thread; {CANCEL} on timeout
  def do_exchange(value, timeout)
+ result = nil
  if timeout.nil?
- @exchanger.exchange(value)
+ Synchronization::JRuby.sleep_interruptibly do
+ result = @exchanger.exchange(value)
+ end
  else
- @exchanger.exchange(value, 1000 * timeout, java.util.concurrent.TimeUnit::MILLISECONDS)
+ Synchronization::JRuby.sleep_interruptibly do
+ result = @exchanger.exchange(value, 1000 * timeout, java.util.concurrent.TimeUnit::MILLISECONDS)
+ end
  end
+ result
  rescue java.util.concurrent.TimeoutException
  CANCEL
  end
@@ -339,7 +335,7 @@ module Concurrent
  class Exchanger < ExchangerImplementation

  # @!method initialize
- # @!macro exchanger_method_initialize
+ # Creates exchanger instance
  # @!method exchange(value, timeout = nil)
  # @!macro exchanger_method_do_exchange
  # @!macro exchanger_method_exchange
data/lib/concurrent/executor/abstract_executor_service.rb
@@ -93,7 +93,7 @@ module Concurrent
  raise NotImplementedError
  end

- # @!macro [attach] executor_service_method_ns_shutdown_execution
+ # @!macro executor_service_method_ns_shutdown_execution
  #
  # Callback method called when an orderly shutdown has completed.
  # The default behavior is to signal all waiting threads.
@@ -101,7 +101,7 @@ module Concurrent
  # do nothing
  end

- # @!macro [attach] executor_service_method_ns_kill_execution
+ # @!macro executor_service_method_ns_kill_execution
  #
  # Callback method called when the executor has been killed.
  # The default behavior is to do nothing.

data/lib/concurrent/executor/cached_thread_pool.rb
@@ -26,7 +26,7 @@ module Concurrent
  # @!macro thread_pool_options
  class CachedThreadPool < ThreadPoolExecutor

- # @!macro [attach] cached_thread_pool_method_initialize
+ # @!macro cached_thread_pool_method_initialize
  #
  # Create a new thread pool.
  #