concurrent-ruby-edge 0.3.0 → 0.3.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: 051e8d02906d153d112b5f2e2c86948d3cf0fd26
-   data.tar.gz: f4a53c06cfbf931dfc11245be8dabdf579d0e361
+   metadata.gz: da7e2be8cf30bba4051e8af6b0040d4f4892fae5
+   data.tar.gz: 70067e829e45a4365fa67fd9306be7658cc65d95
  SHA512:
-   metadata.gz: 9b8490d330375764f70d70f212de4697dd3fbf83b0b4a7da1ffba7c86599fc64df3c4f25801ea0531970a04d2b62e04bad7701df9ce25afd24779dbd6b54d0fd
-   data.tar.gz: 6fc402ca7a4434e2b8e97a71fd351c81bbf8ff9467ccceb8159c78993fc8219e032704644cfbb965a34879aa5922bddc8d96f5f78343d0b0834602340d122a52
+   metadata.gz: ae1e26dc4dd7dcd7df7185aa68d43eb010e6982bc7ea0da112c66c2c65ecf53456e71399145b0dc0c7c1f60a59382f1142224be367ff285d39807df585b85539
+   data.tar.gz: 0207d055c1c55f1ffe7ff7914acaebc0876b801391c7e0068c12f43beab2f2d6f97ad0c02cce9d7f8a92a2dabe5fe790008f995229d058c875c46b97c2c95b0e
data/README.md CHANGED
@@ -84,7 +84,7 @@ Structure classes derived from Ruby's [Struct](http://ruby-doc.org/core-2.2.0/St
 
  Thread-safe variables:
 
- * [Agent](http://ruby-concurrency.github.io/concurrent-ruby/Concurrent/Agent.html): A way to manage shared, mutable, *asynchronous*, independent, state. Based on Clojure's [Agent](http://clojure.org/agents).
+ * [Agent](http://ruby-concurrency.github.io/concurrent-ruby/Concurrent/Agent.html): A way to manage shared, mutable, *asynchronous*, independent state. Based on Clojure's [Agent](http://clojure.org/agents).
  * [Atom](http://ruby-concurrency.github.io/concurrent-ruby/Concurrent/Atom.html): A way to manage shared, mutable, *synchronous*, independent state. Based on Clojure's [Atom](http://clojure.org/atoms).
  * [AtomicBoolean](http://ruby-concurrency.github.io/concurrent-ruby/Concurrent/AtomicBoolean.html) A boolean value that can be updated atomically.
  * [AtomicFixnum](http://ruby-concurrency.github.io/concurrent-ruby/Concurrent/AtomicFixnum.html) A numeric value that can be updated atomically.
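
To make the Agent/Atom distinction above concrete, here is a minimal sketch (not part of the diff; it assumes the documented `Agent#send`/`#await` and `Atom#swap` APIs):

```ruby
require 'concurrent'

agent = Concurrent::Agent.new(0)
agent.send { |value| value + 1 } # asynchronous: returns immediately, applied later on a pool
agent.await                      # wait until pending updates have been applied
agent.value                      # => 1

atom = Concurrent::Atom.new(0)
atom.swap { |value| value + 1 }  # synchronous: compare-and-swap loop, returns the new value
atom.value                       # => 1
```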
@@ -14,3 +14,5 @@ require 'concurrent/edge/lock_free_stack'
  require 'concurrent/edge/promises'
  require 'concurrent/edge/cancellation'
  require 'concurrent/edge/throttle'
+
+ require 'concurrent/edge/processing_actor'
@@ -70,7 +70,9 @@ module Concurrent
      end
 
      all_terminations.chain_resolvable(@terminated)
-     all_terminations.chain_resolvable(envelope.future) if envelope && envelope.future
+     if envelope && envelope.future
+       all_terminations.chain { |fulfilled, _, reason| envelope.future.resolve fulfilled, true, reason }
+     end
 
      broadcast(true, [:terminated, reason]) # TODO do not end up in Dead Letter Router
      parent << :remove_child if parent
@@ -43,7 +43,7 @@ module Concurrent
    envelope_to_redirect = if envelope.future
                             envelope
                           else
-                            Envelope.new(envelope.message, Concurrent::Promises.future, envelope.sender, envelope.address)
+                            Envelope.new(envelope.message, Promises.resolvable_future, envelope.sender, envelope.address)
                           end
    envelope_to_redirect.future.on_fulfillment! { @balancer << :subscribe } # TODO check safety of @balancer reading
    redirect @balancer, envelope_to_redirect
@@ -6,9 +6,8 @@ module Concurrent
  # An atomic reference which maintains an object reference along with a mark bit
  # that can be updated atomically.
  #
- # @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicMarkableReference.html java.util.concurrent.atomic.AtomicMarkableReference
- #
- # @api Edge
+ # @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicMarkableReference.html
+ #   java.util.concurrent.atomic.AtomicMarkableReference
  class AtomicMarkableReference < ::Concurrent::Synchronization::Object
 
    private(*attr_atomic(:reference))
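
A brief usage sketch of the mark bit (not from the diff; it assumes the Java-style API the class mirrors: `new(value, mark)`, `#get` returning `[value, mark]`, and `#compare_and_set(old_val, new_val, old_mark, new_mark)`):

```ruby
ref = Concurrent::AtomicMarkableReference.new 0, false

value, marked = ref.get               # => [0, false]
ref.compare_and_set 0, 1, false, true # update value and mark in one atomic step
ref.get                               # => [1, true]
```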
@@ -1,14 +1,15 @@
  module Concurrent
 
    # Provides tools for cooperative cancellation.
-   # Inspired by https://msdn.microsoft.com/en-us/library/dd537607(v=vs.110).aspx
+   # Inspired by <https://msdn.microsoft.com/en-us/library/dd537607(v=vs.110).aspx>
+   #
    # @example
    #   # Create new cancellation. `cancellation` is used for cancelling, `token` is passed down to
    #   # tasks for cooperative cancellation
    #   cancellation, token = Concurrent::Cancellation.create
    #   Thread.new(token) do |token|
-   #     # Count 1+1 (simulating some other meaningful work) repeatedly until the token is cancelled through
-   #     # cancellation.
+   #     # Count 1+1 (simulating some other meaningful work) repeatedly
+   #     # until the token is cancelled through cancellation.
    #     token.loop_until_canceled { 1+1 }
    #   end
    #   sleep 0.1
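
The example above is cut off at the hunk boundary; a hedged sketch of how it might continue (assuming the edge API's `Cancellation#cancel` and `Token#canceled?`):

```ruby
cancellation, token = Concurrent::Cancellation.create
worker = Thread.new(token) { |t| t.loop_until_canceled { 1 + 1 } }

sleep 0.1
cancellation.cancel # resolve the token; the loop in the worker exits cooperatively
token.canceled?     # => true
worker.join
```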
@@ -1,5 +1,6 @@
  module Concurrent
 
+   # @!visibility private
    class LockFreeQueue < Synchronization::Object
 
      class Node < Synchronization::Object
@@ -1,4 +1,6 @@
  module Concurrent
+
+   # @!visibility private
    class LockFreeStack < Synchronization::Object
 
      safe_initialization!
@@ -118,5 +120,7 @@ module Concurrent
      def to_s
        format '<#%s:0x%x %s>', self.class, object_id << 1, to_a.to_s
      end
+
+     alias_method :inspect, :to_s
    end
  end
@@ -0,0 +1,161 @@
+ module Concurrent
+
+   # A new implementation of actor which also simulates the process, so it can be used
+   # in the same way as Erlang's actors but **without** occupying a thread. Tens of thousands
+   # of ProcessingActors can run at the same time sharing a thread pool.
+   # @example
+   #   # Runs on a pool, does not consume 50_000 threads
+   #   actors = 50_000.times.map do |i|
+   #     Concurrent::ProcessingActor.act(i) { |a, i| a.receive.then_on(:fast, i) { |m, i| m + i } }
+   #   end
+   #
+   #   actors.each { |a| a.tell 1 }
+   #   values = actors.map(&:termination).map(&:value)
+   #   values[0, 5]  # => [1, 2, 3, 4, 5]
+   #   values[-5, 5] # => [49996, 49997, 49998, 49999, 50000]
+   # @!macro edge_warning
+   class ProcessingActor < Synchronization::Object
+     # TODO (pitr-ch 18-Dec-2016): (un)linking, bidirectional, sends special message, multiple link calls has no effect,
+     # TODO (pitr-ch 21-Dec-2016): Make terminated a cancellation token?
+     #   link_spawn atomic, Can it be fixed by sending exit when linked dead actor?
+
+     safe_initialization!
+
+     # @return [Promises::Channel] actor's mailbox.
+     def mailbox
+       @Mailbox
+     end
+
+     # @return [Promises::Future(Object)] a future which is resolved when the actor ends its processing.
+     #   It can either be fulfilled with a value when the actor ends normally, or rejected with
+     #   a reason (exception) when the actor fails.
+     def termination
+       @Terminated.with_hidden_resolvable
+     end
+
+     # Creates an actor.
+     # @see #act_listening Behaves the same way, but does not take a mailbox as its first argument.
+     # @return [ProcessingActor]
+     # @example
+     #   actor = Concurrent::ProcessingActor.act do |actor|
+     #     actor.receive.then do |message|
+     #       # the actor ends normally with message
+     #       message
+     #     end
+     #   end
+     #
+     #   actor.tell :a_message
+     #   # => <#Concurrent::ProcessingActor:0x7fff11280560 termination:pending>
+     #   actor.termination.value! # => :a_message
+     def self.act(*args, &process)
+       act_listening Promises::Channel.new, *args, &process
+     end
+
+     # Creates an actor listening to a specified channel (mailbox).
+     # @param [Object] args Arguments passed to the process.
+     # @param [Promises::Channel] channel which serves as the mailbox. The channel can have limited
+     #   size to achieve backpressure.
+     # @yield args to the process to get back a future which represents the actor's execution.
+     # @yieldparam [Object] *args
+     # @yieldreturn [Promises::Future(Object)] a future representing the next step of execution
+     # @return [ProcessingActor]
+     # @example
+     #   # TODO (pitr-ch 19-Jan-2017): actor with limited mailbox
+     def self.act_listening(channel, *args, &process)
+       actor = ProcessingActor.new channel
+       Promises.
+           future(actor, *args, &process).
+           run.
+           chain_resolvable(actor.instance_variable_get(:@Terminated))
+       actor
+     end
+
+     # Receives a message when available, used in the actor's process.
+     # @return [Promises::Future(Object)] a future which will be fulfilled with a message from
+     #   the mailbox when it is available.
+     def receive(probe = Promises.resolvable_future)
+       # TODO (pitr-ch 27-Dec-2016): patterns
+       @Mailbox.pop probe
+     end
+
+     # Tells a message to the actor. May block the current thread if the mailbox is full.
+     # {#tell} is a better option since it does not block. It's usually used to integrate with
+     # threading code.
+     # @example
+     #   Thread.new(actor) do |actor|
+     #     # ...
+     #     actor.tell! :a_message # blocks until the message is told
+     #     #   (there is a space for it in the channel)
+     #     # ...
+     #   end
+     # @param [Object] message
+     # @return [self]
+     def tell!(message)
+       @Mailbox.push(message).wait!
+       self
+     end
+
+     # Tells a message to the actor.
+     # @param [Object] message
+     # @return [Promises::Future(ProcessingActor)] a future which will be fulfilled with the actor
+     #   when the message is pushed to the mailbox.
+     def tell(message)
+       @Mailbox.push(message).then(self) { |_, actor| actor }
+     end
+
+     # Simplifies the common pattern where a message sender also requires an answer to the message
+     # from the actor. It appends a resolvable_future for the answer after the message.
+     # @todo has to be nice also on the receive side, cannot make structure like this [message = [...], answer]
+     #   all receives should receive something friendly
+     # @param [Object] message
+     # @param [Promises::ResolvableFuture] answer
+     # @return [Promises::Future] a future which will be fulfilled with the answer to the message
+     # @example
+     #   add_once_actor = Concurrent::ProcessingActor.act do |actor|
+     #     actor.receive.then do |(a, b), answer|
+     #       result = a + b
+     #       answer.fulfill result
+     #       # terminate with result value
+     #       result
+     #     end
+     #   end
+     #   # => <#Concurrent::ProcessingActor:0x7fcd1315f6e8 termination:pending>
+     #
+     #   add_once_actor.ask([1, 2]).value! # => 3
+     #   # fails, the actor already added once and has terminated
+     #   add_once_actor.ask(%w(ab cd)).reason
+     #   # => #<RuntimeError: actor terminated normally before answering with a value: 3>
+     #   add_once_actor.termination.value! # => 3
+     def ask(message, answer = Promises.resolvable_future)
+       tell [message, answer]
+       # do not leave answers unanswered when the actor terminates.
+       Promises.any(
+           Promises.fulfilled_future(:answer).zip(answer),
+           Promises.fulfilled_future(:termination).zip(@Terminated)
+       ).chain do |fulfilled, (which, value), (_, reason)|
+         # TODO (pitr-ch 20-Jan-2017): we have to know which future was resolved
+         # TODO (pitr-ch 20-Jan-2017): make the combinator programmable, so anyone can create what is needed
+         # TODO (pitr-ch 19-Jan-2017): ensure no callbacks are accumulated on @Terminated
+         if which == :termination
+           raise reason.nil? ? format('actor terminated normally before answering with a value: %s', value) : reason
+         else
+           fulfilled ? value : raise(reason)
+         end
+       end
+     end
+
+     # @return [String] string representation.
+     def inspect
+       format '<#%s:0x%x termination:%s>', self.class, object_id << 1, termination.state
+     end
+
+     private
+
+     def initialize(channel = Promises::Channel.new)
+       @Mailbox    = channel
+       @Terminated = Promises.resolvable_future
+       super()
+     end
+
+   end
+ end
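
A hedged sketch of a stateful receive loop built on the API above (not part of the diff; it relies on `act_listening` wrapping the process in `Promises...run`, so each step may return another future to keep processing or a plain value to terminate):

```ruby
# Each step either chains another receive (returns a future) or returns the
# final sum, which becomes the actor's termination value.
def count_messages(actor, sum)
  actor.receive.then(sum) do |message, sum|
    message == :done ? sum : count_messages(actor, sum + message)
  end
end

actor = Concurrent::ProcessingActor.act { |actor| count_messages(actor, 0) }
actor.tell 1
actor.tell 2
actor.tell :done
actor.termination.value! # => 3
```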
@@ -73,10 +73,21 @@ module Concurrent
    module FactoryMethods
      extend ReInclude
 
+     module Configuration
+       # @return [Executor, :io, :fast] the executor which is used when none is supplied
+       #   to a factory method. The method can be overridden in the receivers of
+       #   `include FactoryMethod`
+       def default_executor
+         :io
+       end
+     end
+
+     include Configuration
+
      # @!macro promises.shortcut.on
      # @return [ResolvableEvent]
      def resolvable_event
-       resolvable_event_on :io
+       resolvable_event_on default_executor
      end
 
      # Created resolvable event, user is responsible for resolving the event once by
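
The new `Configuration#default_executor` hook lets an including class pick its own default pool. A hedged sketch (the class name is hypothetical; `:fast` is one of the built-in executor symbols):

```ruby
class FastFactories
  include Concurrent::Promises::FactoryMethods

  # override the :io default supplied by FactoryMethods::Configuration
  def default_executor
    :fast
  end
end

FastFactories.new.future { 1 + 1 }.value! # => 2, task runs on the :fast pool
```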
@@ -84,14 +95,14 @@ module Concurrent
    #
    # @!macro promises.param.default_executor
    # @return [ResolvableEvent]
-   def resolvable_event_on(default_executor = :io)
+   def resolvable_event_on(default_executor = self.default_executor)
      ResolvableEventPromise.new(default_executor).future
    end
 
    # @!macro promises.shortcut.on
    # @return [ResolvableFuture]
    def resolvable_future
-     resolvable_future_on :io
+     resolvable_future_on default_executor
    end
 
    # Creates resolvable future, user is responsible for resolving the future once by
@@ -100,14 +111,14 @@ module Concurrent
    #
    # @!macro promises.param.default_executor
    # @return [ResolvableFuture]
-   def resolvable_future_on(default_executor = :io)
+   def resolvable_future_on(default_executor = self.default_executor)
      ResolvableFuturePromise.new(default_executor).future
    end
 
    # @!macro promises.shortcut.on
    # @return [Future]
    def future(*args, &task)
-     future_on(:io, *args, &task)
+     future_on(default_executor, *args, &task)
    end
 
    # @!macro [new] promises.future-on1
@@ -129,7 +140,7 @@ module Concurrent
    #
    # @!macro promises.param.default_executor
    # @return [Future]
-   def resolved_future(fulfilled, value, reason, default_executor = :io)
+   def resolved_future(fulfilled, value, reason, default_executor = self.default_executor)
      ImmediateFuturePromise.new(default_executor, fulfilled, value, reason).future
    end
 
@@ -137,7 +148,7 @@ module Concurrent
    #
    # @!macro promises.param.default_executor
    # @return [Future]
-   def fulfilled_future(value, default_executor = :io)
+   def fulfilled_future(value, default_executor = self.default_executor)
      resolved_future true, value, nil, default_executor
    end
 
@@ -145,7 +156,7 @@ module Concurrent
    #
    # @!macro promises.param.default_executor
    # @return [Future]
-   def rejected_future(reason, default_executor = :io)
+   def rejected_future(reason, default_executor = self.default_executor)
      resolved_future false, nil, reason, default_executor
    end
 
@@ -153,7 +164,7 @@ module Concurrent
    #
    # @!macro promises.param.default_executor
    # @return [Event]
-   def resolved_event(default_executor = :io)
+   def resolved_event(default_executor = self.default_executor)
      ImmediateEventPromise.new(default_executor).event
    end
 
@@ -164,26 +175,26 @@ module Concurrent
    # @!macro promises.param.default_executor
    # @return [Event, Future]
    #
-   # @overload create(nil, default_executor = :io)
+   # @overload create(nil, default_executor = self.default_executor)
    #   @param [nil] nil
    #   @return [Event] resolved event.
    #
-   # @overload create(a_future, default_executor = :io)
+   # @overload create(a_future, default_executor = self.default_executor)
    #   @param [Future] a_future
    #   @return [Future] a future which will be resolved when a_future is.
    #
-   # @overload create(an_event, default_executor = :io)
+   # @overload create(an_event, default_executor = self.default_executor)
    #   @param [Event] an_event
    #   @return [Event] an event which will be resolved when an_event is.
    #
-   # @overload create(exception, default_executor = :io)
+   # @overload create(exception, default_executor = self.default_executor)
    #   @param [Exception] exception
    #   @return [Future] a rejected future with the exception as its reason.
    #
-   # @overload create(value, default_executor = :io)
+   # @overload create(value, default_executor = self.default_executor)
    #   @param [Object] value when none of the above overloads fits
    #   @return [Future] a fulfilled future with the value.
-   def create(argument = nil, default_executor = :io)
+   def create(argument = nil, default_executor = self.default_executor)
      case argument
      when AbstractEventFuture
        # returning wrapper would change nothing
@@ -200,7 +211,7 @@ module Concurrent
    # @!macro promises.shortcut.on
    # @return [Future]
    def delay(*args, &task)
-     delay_on :io, *args, &task
+     delay_on default_executor, *args, &task
    end
 
    # @!macro promises.future-on1
@@ -214,7 +225,7 @@ module Concurrent
    # @!macro promises.shortcut.on
    # @return [Future]
    def schedule(intended_time, *args, &task)
-     schedule_on :io, intended_time, *args, &task
+     schedule_on default_executor, intended_time, *args, &task
    end
 
    # @!macro promises.future-on1
@@ -231,7 +242,7 @@ module Concurrent
    # @!macro promises.shortcut.on
    # @return [Future]
    def zip_futures(*futures_and_or_events)
-     zip_futures_on :io, *futures_and_or_events
+     zip_futures_on default_executor, *futures_and_or_events
    end
 
    # Creates new future which is resolved after all futures_and_or_events are resolved.
@@ -253,7 +264,7 @@ module Concurrent
    # @!macro promises.shortcut.on
    # @return [Event]
    def zip_events(*futures_and_or_events)
-     zip_events_on :io, *futures_and_or_events
+     zip_events_on default_executor, *futures_and_or_events
    end
 
    # Creates new event which is resolved after all futures_and_or_events are resolved.
@@ -269,7 +280,7 @@ module Concurrent
    # @!macro promises.shortcut.on
    # @return [Future]
    def any_resolved_future(*futures_and_or_events)
-     any_resolved_future_on :io, *futures_and_or_events
+     any_resolved_future_on default_executor, *futures_and_or_events
    end
 
    alias_method :any, :any_resolved_future
@@ -291,7 +302,7 @@ module Concurrent
    # @!macro promises.shortcut.on
    # @return [Future]
    def any_fulfilled_future(*futures_and_or_events)
-     any_fulfilled_future_on :io, *futures_and_or_events
+     any_fulfilled_future_on default_executor, *futures_and_or_events
    end
 
    # Creates new future which is resolved after first of futures_and_or_events is fulfilled.
@@ -310,7 +321,7 @@ module Concurrent
    # @!macro promises.shortcut.on
    # @return [Future]
    def any_event(*futures_and_or_events)
-     any_event_on :io, *futures_and_or_events
+     any_event_on default_executor, *futures_and_or_events
    end
 
    # Creates new event which becomes resolved after first of the futures_and_or_events resolves.
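
A short usage sketch of the shortcuts touched above, via the module-level factory methods (not part of the diff):

```ruby
a = Concurrent::Promises.future { 1 }
b = Concurrent::Promises.future { 2 }

Concurrent::Promises.zip_futures(a, b).value!         # => [1, 2]
Concurrent::Promises.any_resolved_future(a, b).value! # => 1 or 2, whichever resolves first
```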
@@ -727,7 +738,18 @@ module Concurrent
      @Waiters.each.to_a
    end
 
+   # # @!visibility private
+   def add_callback_notify_blocked(promise, index)
+     add_callback :callback_notify_blocked, promise, index
+   end
+
    # @!visibility private
+   def add_callback_clear_delayed_node(node)
+     add_callback(:callback_clear_delayed_node, node)
+   end
+
+   private
+
    def add_callback(method, *args)
      state = internal_state
      if resolved?(state)
@@ -741,7 +763,9 @@ module Concurrent
      self
    end
 
-   private
+   def callback_clear_delayed_node(state, node)
+     node.value = nil
+   end
 
    # @return [Boolean]
    def wait_until_resolved(timeout)
@@ -962,7 +986,9 @@ module Concurrent
      if reason.size > 1
        Concurrent::MultipleErrors.new reason
      else
-       reason[0].exception(*args)
+       ex = reason[0].exception(*args)
+       ex.set_backtrace ex.backtrace + caller
+       ex
      end
    end
 
@@ -1200,7 +1226,7 @@ module Concurrent
    end
 
    def callback_on_resolution(state, args, callback)
-     callback.call state.result, *args
+     callback.call *state.result, *args
    end
 
  end
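
A hedged note on what the splat change above implies: resolution callbacks now receive `fulfilled, value, reason` as three separate arguments rather than a single `[fulfilled, value, reason]` triplet.

```ruby
Concurrent::Promises.
    future { 21 * 2 }.
    on_resolution! { |fulfilled, value, reason| puts "fulfilled=#{fulfilled} value=#{value} reason=#{reason.inspect}" }
# prints: fulfilled=true value=42 reason=nil
```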
@@ -1324,7 +1350,7 @@ module Concurrent
 
    alias_method :inspect, :to_s
 
-   def delayed
+   def delayed_because
      nil
    end
 
@@ -1379,55 +1405,45 @@ module Concurrent
    private_class_method :new
 
    def self.new_blocked_by1(blocker, *args, &block)
-     blocker_delayed = blocker.promise.delayed
-     delayed = blocker_delayed ? LockFreeStack.new.push(blocker_delayed) : nil
-     promise = new(delayed, 1, *args, &block)
-   ensure
-     blocker.add_callback :callback_notify_blocked, promise, 0
+     blocker_delayed = blocker.promise.delayed_because
+     promise = new(blocker_delayed, 1, *args, &block)
+     blocker.add_callback_notify_blocked promise, 0
+     promise
    end
 
    def self.new_blocked_by2(blocker1, blocker2, *args, &block)
-     blocker_delayed1 = blocker1.promise.delayed
-     blocker_delayed2 = blocker2.promise.delayed
-     # TODO (pitr-ch 23-Dec-2016): use arrays when we know it will not grow (only flat adds delay)
-     delayed = if blocker_delayed1
-                 if blocker_delayed2
-                   LockFreeStack.of2(blocker_delayed1, blocker_delayed2)
-                 else
-                   LockFreeStack.of1(blocker_delayed1)
-                 end
+     blocker_delayed1 = blocker1.promise.delayed_because
+     blocker_delayed2 = blocker2.promise.delayed_because
+     delayed = if blocker_delayed1 && blocker_delayed2
+                 # TODO (pitr-ch 23-Dec-2016): use arrays when we know it will not grow (only flat adds delay)
+                 LockFreeStack.of2(blocker_delayed1, blocker_delayed2)
                else
-                 blocker_delayed2 ? LockFreeStack.of1(blocker_delayed2) : nil
+                 blocker_delayed1 || blocker_delayed2
                end
      promise = new(delayed, 2, *args, &block)
-   ensure
-     blocker1.add_callback :callback_notify_blocked, promise, 0
-     blocker2.add_callback :callback_notify_blocked, promise, 1
+     blocker1.add_callback_notify_blocked promise, 0
+     blocker2.add_callback_notify_blocked promise, 1
+     promise
    end
 
    def self.new_blocked_by(blockers, *args, &block)
-     delayed = blockers.reduce(nil, &method(:add_delayed))
+     delayed = blockers.reduce(nil) { |d, f| add_delayed d, f.promise.delayed_because }
      promise = new(delayed, blockers.size, *args, &block)
-   ensure
-     blockers.each_with_index { |f, i| f.add_callback :callback_notify_blocked, promise, i }
+     blockers.each_with_index { |f, i| f.add_callback_notify_blocked promise, i }
+     promise
    end
 
-   def self.add_delayed(delayed, blocker)
-     blocker_delayed = blocker.promise.delayed
-     if blocker_delayed
-       delayed = unless delayed
-                   LockFreeStack.of1(blocker_delayed)
-                 else
-                   delayed.push(blocker_delayed)
-                 end
+   def self.add_delayed(delayed1, delayed2)
+     if delayed1 && delayed2
+       delayed1.push delayed2
+       delayed1
+     else
+       delayed1 || delayed2
      end
-     delayed
    end
 
    def initialize(delayed, blockers_count, future)
      super(future)
-     # noinspection RubyArgCount
-     @Touched = AtomicBoolean.new false
      @Delayed = delayed
      # noinspection RubyArgCount
      @Countdown = AtomicFixnum.new blockers_count
@@ -1440,16 +1456,12 @@ module Concurrent
      on_resolvable(future, index) if resolvable
    end
 
-   def delayed
+   def delayed_because
      @Delayed
    end
 
    def touch
-     clear_propagate_touch if @Touched.make_true
-   end
-
-   def touched?
-     @Touched.value
+     clear_and_propagate_touch
    end
 
    # for inspection only
@@ -1461,13 +1473,11 @@ module Concurrent
 
    private
 
-   def clear_propagate_touch
-     @Delayed.clear_each { |o| propagate_touch o } if @Delayed
-   end
+   def clear_and_propagate_touch(stack_or_element = @Delayed)
+     return if stack_or_element.nil?
 
-   def propagate_touch(stack_or_element = @Delayed)
      if stack_or_element.is_a? LockFreeStack
-       stack_or_element.each { |element| propagate_touch element }
+       stack_or_element.clear_each { |element| clear_and_propagate_touch element }
      else
        stack_or_element.touch unless stack_or_element.nil? # if still present
      end
@@ -1570,8 +1580,28 @@ module Concurrent
 
    class AbstractFlatPromise < BlockedPromise
 
+     def initialize(delayed_because, blockers_count, event_or_future)
+       delayed = LockFreeStack.of1(self)
+       super(delayed, blockers_count, event_or_future)
+       # noinspection RubyArgCount
+       @Touched = AtomicBoolean.new false
+       @DelayedBecause = delayed_because || LockFreeStack.new
+
+       event_or_future.add_callback_clear_delayed_node delayed.peek
+     end
+
+     def touch
+       if @Touched.make_true
+         clear_and_propagate_touch @DelayedBecause
+       end
+     end
+
      private
 
+     def touched?
+       @Touched.value
+     end
+
      def on_resolvable(resolved_future, index)
        resolve_with resolved_future.internal_state
      end
@@ -1581,11 +1611,12 @@ module Concurrent
    end
 
    def add_delayed_of(future)
+     delayed = future.promise.delayed_because
      if touched?
-       propagate_touch future.promise.delayed
+       clear_and_propagate_touch delayed
      else
-       BlockedPromise.add_delayed @Delayed, future
-       clear_propagate_touch if touched?
+       BlockedPromise.add_delayed @DelayedBecause, delayed
+       clear_and_propagate_touch @DelayedBecause if touched?
      end
    end
 
@@ -1613,7 +1644,7 @@ module Concurrent
      case value
      when Future, Event
        add_delayed_of value
-       value.add_callback :callback_notify_blocked, self, nil
+       value.add_callback_notify_blocked self, nil
        countdown
      else
        resolve_with RESOLVED
@@ -1649,7 +1680,7 @@ module Concurrent
      case value
      when Future
        add_delayed_of value
-       value.add_callback :callback_notify_blocked, self, nil
+       value.add_callback_notify_blocked self, nil
        countdown
      when Event
        evaluate_to(lambda { raise TypeError, 'cannot flatten to Event' })
@@ -1682,7 +1713,7 @@ module Concurrent
      case value
      when Future
        add_delayed_of value
-       value.add_callback :callback_notify_blocked, self, nil
+       value.add_callback_notify_blocked self, nil
      else
        resolve_with internal_state
      end
@@ -1849,17 +1880,17 @@ module Concurrent
    class DelayPromise < InnerPromise
 
      def initialize(default_executor)
-       super event = Event.new(self, default_executor)
-       @Delayed = LockFreeStack.new.push self
-       # TODO (pitr-ch 20-Dec-2016): implement directly without callback?
-       event.on_resolution!(@Delayed.peek) { |stack_node| stack_node.value = nil }
+       event = Event.new(self, default_executor)
+       @Delayed = LockFreeStack.of1(self)
+       super event
+       event.add_callback_clear_delayed_node @Delayed.peek
      end
 
      def touch
        @Future.resolve_with RESOLVED
      end
 
-     def delayed
+     def delayed_because
        @Delayed
      end
 
@@ -10,6 +10,11 @@ module Concurrent
    #   end
    #   end
    # end
+   # @!macro [new] throttle.example.throttled_future
+   #   @example
+   #     throttle.throttled_future(1) do |arg|
+   #       arg.succ
+   #     end
    # @!macro [new] throttle.example.throttled_future_chain
    #   @example
    #     throttle.throttled_future_chain do |trigger|
@@ -40,6 +45,7 @@ module Concurrent
    # A tool to manage the concurrency level of future tasks.
    #
    # @!macro throttle.example.then_throttled_by
+   # @!macro throttle.example.throttled_future
    # @!macro throttle.example.throttled_future_chain
    # @!macro throttle.example.throttled_block
    class Throttle < Synchronization::Object
@@ -132,6 +138,7 @@ module Concurrent
    # Behaves as {Promises::FactoryMethods#future} but the future is throttled.
    # @return [Promises::Future]
    # @see Promises::FactoryMethods#future
+   # @!macro throttle.example.throttled_future
    def throttled_future(*args, &task)
      trigger.chain(*args, &task).on_resolution! { release }
    end
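
A hedged sketch of `throttled_future` as documented by the new macro (assuming the constructor argument is the maximum concurrency level):

```ruby
require 'concurrent-edge'

throttle = Concurrent::Throttle.new 2
futures  = 10.times.map do |i|
  throttle.throttled_future(i) { |i| i * 2 } # at most 2 blocks run at the same time
end
futures.map(&:value!) # => [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]
```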
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: concurrent-ruby-edge
  version: !ruby/object:Gem::Version
-   version: 0.3.0
+   version: 0.3.1
  platform: ruby
  authors:
  - Jerry D'Antonio
@@ -10,7 +10,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-12-27 00:00:00.000000000 Z
+ date: 2017-02-26 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: concurrent-ruby
@@ -18,22 +18,20 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-       version: 1.0.4
+       version: 1.0.5
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 1.0.4
+         version: 1.0.5
  description: |
    These features are under active development and may change frequently. They are expected not to
    keep backward compatibility (there may also lack tests and documentation). Semantic versions will
    be obeyed though. Features developed in `concurrent-ruby-edge` are expected to move to `concurrent-ruby` when final.
    Please see http://concurrent-ruby.com for more information.
- email:
- - jerry.dantonio@gmail.com
- - concurrent-ruby@googlegroups.com
+ email: concurrent-ruby@googlegroups.com
  executables: []
  extensions: []
  extra_rdoc_files:
@@ -95,6 +93,7 @@ files:
  - lib/concurrent/edge/lock_free_queue.rb
  - lib/concurrent/edge/lock_free_stack.rb
  - lib/concurrent/edge/old_channel_integration.rb
+ - lib/concurrent/edge/processing_actor.rb
  - lib/concurrent/edge/promises.rb
  - lib/concurrent/edge/throttle.rb
  homepage: http://www.concurrent-ruby.com
@@ -117,7 +116,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
      version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.5.1
+ rubygems_version: 2.6.8
  signing_key:
  specification_version: 4
  summary: Edge features and additions to the concurrent-ruby gem.