concurrent-ruby-edge 0.3.1 → 0.4.0.pre1
This diff compares the contents of two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +5 -5
- data/CHANGELOG.md +429 -0
- data/{LICENSE.txt → LICENSE.md} +2 -0
- data/README.md +203 -105
- data/{lib → lib-edge}/concurrent/actor/behaviour/abstract.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/behaviour/awaits.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/behaviour/buffer.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/behaviour/errors_on_unknown_message.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/behaviour/executes_context.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/behaviour/linking.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/behaviour/pausing.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/behaviour/removes_child.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/behaviour/sets_results.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/behaviour/supervising.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/behaviour/termination.rb +1 -1
- data/{lib → lib-edge}/concurrent/actor/behaviour.rb +1 -1
- data/{lib → lib-edge}/concurrent/actor/context.rb +1 -1
- data/{lib → lib-edge}/concurrent/actor/core.rb +2 -1
- data/{lib → lib-edge}/concurrent/actor/default_dead_letter_handler.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/envelope.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/errors.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/internal_delegations.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/public_delegations.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/reference.rb +1 -1
- data/{lib → lib-edge}/concurrent/actor/root.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/type_check.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/utils/ad_hoc.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/utils/balancer.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/utils/broadcast.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/utils/pool.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor/utils.rb +0 -0
- data/{lib → lib-edge}/concurrent/actor.rb +9 -5
- data/{lib → lib-edge}/concurrent/channel/buffer/base.rb +14 -14
- data/{lib → lib-edge}/concurrent/channel/buffer/buffered.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/buffer/dropping.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/buffer/sliding.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/buffer/ticker.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/buffer/timer.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/buffer/unbuffered.rb +1 -1
- data/{lib → lib-edge}/concurrent/channel/buffer.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/selector/after_clause.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/selector/default_clause.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/selector/error_clause.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/selector/put_clause.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/selector/take_clause.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/selector.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel/tick.rb +0 -0
- data/{lib → lib-edge}/concurrent/channel.rb +2 -1
- data/{lib → lib-edge}/concurrent/edge/cancellation.rb +5 -4
- data/{lib → lib-edge}/concurrent/edge/lock_free_linked_set/node.rb +2 -2
- data/{lib → lib-edge}/concurrent/edge/lock_free_linked_set/window.rb +0 -0
- data/{lib → lib-edge}/concurrent/edge/lock_free_linked_set.rb +8 -7
- data/{lib → lib-edge}/concurrent/edge/lock_free_queue.rb +0 -0
- data/{lib → lib-edge}/concurrent/edge/old_channel_integration.rb +0 -0
- data/{lib → lib-edge}/concurrent/edge/processing_actor.rb +3 -3
- data/lib-edge/concurrent/edge/promises.rb +178 -0
- data/{lib → lib-edge}/concurrent/edge/throttle.rb +24 -15
- data/lib-edge/concurrent/edge.rb +21 -0
- data/lib-edge/concurrent/lazy_register.rb +83 -0
- data/{lib → lib-edge}/concurrent-edge.rb +0 -4
- metadata +71 -70
- data/lib/concurrent/edge/atomic_markable_reference.rb +0 -184
- data/lib/concurrent/edge/lock_free_stack.rb +0 -126
- data/lib/concurrent/edge/promises.rb +0 -2111
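Most of the listing is a restructuring: the edge sources move from `lib/` to `lib-edge/`, while `promises.rb`, `lock_free_stack.rb`, and `atomic_markable_reference.rb` leave the edge gem (Promises and the atomic moved into core `concurrent-ruby`), and `lazy_register.rb` moves the other way, into the edge gem. The user-facing entry point should be unchanged; a minimal smoke test, assuming core `concurrent-ruby` >= 1.1 supplies `Concurrent::Promises`:

```ruby
require 'concurrent-edge' # loads core concurrent-ruby plus the lib-edge sources

# Promises now come from the core gem; the edge gem only layers extras
# (Promises::Channel, actor/throttle integrations, ...) on top of them.
Concurrent::Promises.future { 1 + 1 }.value! # => 2
Concurrent::ProcessingActor                  # constant provided by lib-edge
```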
data/{lib → lib-edge}/concurrent/channel/buffer/base.rb

@@ -15,13 +15,13 @@ module Concurrent
       # used as a channel buffer should extend this class.
       class Base < Synchronization::LockableObject

-        # @!macro
+        # @!macro channel_buffer_capacity_reader
         #
         # The maximum number of values which can be {#put} onto the buffer
         # it becomes full.
         attr_reader :capacity

-        # @!macro
+        # @!macro channel_buffer_initialize
         #
         # Creates a new buffer.
         def initialize(*args)

@@ -35,7 +35,7 @@ module Concurrent
           end
         end

-        # @!macro
+        # @!macro channel_buffer_blocking_question
         #
         # Predicate indicating if this buffer will block {#put} operations
         # once it reaches its maximum capacity.

@@ -45,14 +45,14 @@ module Concurrent
           true
         end

-        # @!macro
+        # @!macro channel_buffer_size_reader
         #
         # The number of items currently in the buffer.
         def size
           synchronize { ns_size }
         end

-        # @!macro
+        # @!macro channel_buffer_empty_question
         #
         # Predicate indicating if the buffer is empty.
         #

@@ -63,7 +63,7 @@ module Concurrent
           synchronize { ns_empty? }
         end

-        # @!macro
+        # @!macro channel_buffer_full_question
         #
         # Predicate indicating if the buffer is full.
         #

@@ -74,7 +74,7 @@ module Concurrent
           synchronize { ns_full? }
         end

-        # @!macro
+        # @!macro channel_buffer_put
         #
         # Put an item onto the buffer if possible. If the buffer is open
         # but not able to accept the item the calling thread will block

@@ -89,9 +89,9 @@ module Concurrent
           raise NotImplementedError
         end

-        # @!macro
+        # @!macro channel_buffer_offer
         #
-        # Put an item onto the buffer
+        # Put an item onto the buffer if possible. If the buffer is open but
         # unable to add an item, probably due to being full, the method will
         # return immediately. Similarly, the method will return immediately
         # when the buffer is closed. A return value of `false` does not

@@ -107,7 +107,7 @@ module Concurrent
           raise NotImplementedError
         end

-        # @!macro
+        # @!macro channel_buffer_take
         #
         # Take an item from the buffer if one is available. If the buffer
         # is open and no item is available the calling thread will block

@@ -123,7 +123,7 @@ module Concurrent
           raise NotImplementedError
         end

-        # @!macro
+        # @!macro channel_buffer_next
         #
         # Take the next "item" from the buffer and also return a boolean
         # indicating if "more" items can be taken. Used for iterating

@@ -152,7 +152,7 @@ module Concurrent
           raise NotImplementedError
         end

-        # @!macro
+        # @!macro channel_buffer_poll
         #
         # Take the next item from the buffer if one is available else return
         # immediately. Failing to return a value does not necessarily

@@ -166,7 +166,7 @@ module Concurrent
           raise NotImplementedError
         end

-        # @!macro
+        # @!macro channel_buffer_close
         #
         # Close the buffer, preventing new items from being added. Once a
         # buffer is closed it cannot be opened again.

@@ -179,7 +179,7 @@ module Concurrent
           end
         end

-        # @!macro
+        # @!macro channel_buffer_closed_question
         #
         # Predicate indicating is this buffer closed.
         #
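All 14 changes in this file give previously anonymous `@!macro` directives explicit names; the documented contract is unchanged: `put`/`take` may block on a full/empty buffer, while `offer`/`poll` return immediately. A quick illustration of that contract through the public `Concurrent::Channel` front-end (constructor keywords hedged to the edge API as I understand it):

```ruby
require 'concurrent-edge'

ch = Concurrent::Channel.new(capacity: 2) # buffered channel on top of Buffer::Buffered

ch.put 1    # accepted, buffer has room
ch.put 2    # accepted, buffer is now full
ch.offer 3  # non-blocking put: returns immediately instead of blocking

ch.take     # => 1, blocking take (returns at once here, a value is buffered)
ch.poll     # => 2, non-blocking take
ch.poll     # non-blocking take on an empty buffer also returns immediately
```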
13 files without changes
data/{lib → lib-edge}/concurrent/edge/cancellation.rb

@@ -14,6 +14,7 @@ module Concurrent
   # end
   # sleep 0.1
   # cancellation.cancel # Stop the thread by cancelling
+  # @!macro warn.edge
   class Cancellation < Synchronization::Object
     safe_initialization!

@@ -54,7 +55,7 @@ module Concurrent
     # Short string representation.
     # @return [String]
     def to_s
-      format '
+      format '%s canceled:%s>', super[0..-2], canceled?
     end

     alias_method :inspect, :to_s

@@ -113,14 +114,14 @@ module Concurrent
       # @param [Token] tokens to combine
       # @return [Token] new token
       def join(*tokens, &block)
-        block ||= ->
+        block ||= -> token_list { Promises.any_event(*token_list.map(&:to_event)) }
         self.class.new block.call([@Cancel, *tokens])
       end

       # Short string representation.
       # @return [String]
       def to_s
-        format '
+        format '%s canceled:%s>', super[0..-2], canceled?
       end

       alias_method :inspect, :to_s

@@ -132,7 +133,7 @@ module Concurrent
       end
     end

-    #
+    # TODO (pitr-ch 27-Mar-2016): cooperation with mutex, condition, select etc?
     # TODO (pitr-ch 27-Mar-2016): examples (scheduled to be cancelled in 10 sec)
   end
 end
data/{lib → lib-edge}/concurrent/edge/lock_free_linked_set/node.rb

@@ -1,4 +1,4 @@
-require 'concurrent/
+require 'concurrent/atomic/atomic_markable_reference'

 module Concurrent
   module Edge

@@ -10,7 +10,7 @@ module Concurrent

         def initialize(data = nil, successor = nil)
           super()
-          @SuccessorReference
+          @SuccessorReference = AtomicMarkableReference.new(successor || Tail.new)
           @Data = data
           @Key = key_for data
         end
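The require change reflects `AtomicMarkableReference` graduating out of `Concurrent::Edge` into the core gem; the node now builds its successor reference from the core class. A small sketch of that class, with the method names hedged to the core API as I recall it:

```ruby
require 'concurrent/atomic/atomic_markable_reference'

ref = Concurrent::AtomicMarkableReference.new(:tail, false)

# Atomically flip the mark while keeping the value, the same primitive the
# linked-set node uses to flag a successor as logically deleted.
# compare_and_set/get are assumed from the core class's documented interface.
ref.compare_and_set(:tail, :tail, false, true) # => true
ref.get                                        # => [:tail, true]  (value, mark)
```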
1 file without changes
data/{lib → lib-edge}/concurrent/edge/lock_free_linked_set.rb

@@ -18,12 +18,13 @@ module Concurrent
     #
     # This algorithm is a variation of the Nonblocking Linked Set found in
     # 'The Art of Multiprocessor Programming' by Herlihy and Shavit.
+    # @!macro warn.edge
     class LockFreeLinkedSet
       include Enumerable

-      # @!macro
+      # @!macro lock_free_linked_list_method_initialize
       #
-      #
+      # @param [Fixnum] initial_size the size of the linked_list to initialize
       def initialize(initial_size = 0, val = nil)
         @head = Head.new

@@ -33,7 +34,7 @@ module Concurrent
         end
       end

-      # @!macro
+      # @!macro lock_free_linked_list_method_add
       #
       # Atomically adds the item to the set if it does not yet exist. Note:
       # internally the set uses `Object#hash` to compare equality of items,

@@ -61,7 +62,7 @@ module Concurrent
         end
       end

-      # @!macro
+      # @!macro lock_free_linked_list_method_<<
       #
       # Atomically adds the item to the set if it does not yet exist.
       #

@@ -73,7 +74,7 @@ module Concurrent
         self
       end

-      # @!macro
+      # @!macro lock_free_linked_list_method_contains
       #
       # Atomically checks to see if the set contains an item. This method
       # compares equality based on the `Object#hash` method, meaning that the

@@ -94,7 +95,7 @@ module Concurrent
         curr == item && !marked
       end

-      # @!macro
+      # @!macro lock_free_linked_list_method_remove
       #
       # Atomically attempts to remove an item, comparing using `Object#hash`.
       #

@@ -120,7 +121,7 @@ module Concurrent
         end
       end

-      # @!macro
+      # @!macro lock_free_linked_list_method_each
       #
       # An iterator to loop through the set.
       #
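As with the buffer file, the edits here only name previously anonymous macros; the operations they document are `initialize`, `add`, `<<`, `contains`, `remove`, and `each`. A brief usage sketch assuming those method names (the predicate spelling `contains?` is my assumption from the macro name):

```ruby
require 'concurrent-edge'

set = Concurrent::Edge::LockFreeLinkedSet.new

set.add 1        # atomic insert; equality is based on Object#hash, as the docs note
set << 2 << 3    # << returns self, so insertions chain
set.contains? 2  # assumed predicate name (macro: lock_free_linked_list_method_contains)
set.remove 3     # atomic removal

set.each { |item| puts item }
```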
2 files without changes
data/{lib → lib-edge}/concurrent/edge/processing_actor.rb

@@ -13,7 +13,7 @@ module Concurrent
   # values = actors.map(&:termination).map(&:value)
   # values[0,5] # => [1, 2, 3, 4, 5]
   # values[-5, 5] # => [49996, 49997, 49998, 49999, 50000]
-  # @!macro
+  # @!macro warn.edge
   class ProcessingActor < Synchronization::Object
     # TODO (pitr-ch 18-Dec-2016): (un)linking, bidirectional, sends special message, multiple link calls has no effect,
     # TODO (pitr-ch 21-Dec-2016): Make terminated a cancellation token?

@@ -135,7 +135,7 @@ module Concurrent
       ).chain do |fulfilled, (which, value), (_, reason)|
         # TODO (pitr-ch 20-Jan-2017): we have to know which future was resolved
         # TODO (pitr-ch 20-Jan-2017): make the combinator programmable, so anyone can create what is needed
-        #
+        # FIXME (pitr-ch 19-Jan-2017): ensure no callbacks are accumulated on @Terminated
         if which == :termination
           raise reason.nil? ? format('actor terminated normally before answering with a value: %s', value) : reason
         else

@@ -146,7 +146,7 @@ module Concurrent

     # @return [String] string representation.
     def inspect
-      format '
+      format '%s termination:%s>', super[0..-2], termination.state
     end

     private
data/lib-edge/concurrent/edge/promises.rb (new file)

@@ -0,0 +1,178 @@
+# TODO try stealing pool, each thread has it's own queue
+
+require 'concurrent/promises'
+
+module Concurrent
+  module Promises
+
+    class Future < AbstractEventFuture
+
+      # @!macro warn.edge
+      module ActorIntegration
+        # Asks the actor with its value.
+        # @return [Future] new future with the response form the actor
+        def then_ask(actor)
+          self.then { |v| actor.ask(v) }.flat
+        end
+      end
+
+      include ActorIntegration
+
+      # @!macro warn.edge
+      module FlatShortcuts
+
+        # @return [Future]
+        def then_flat_future(*args, &block)
+          self.then(*args, &block).flat_future
+        end
+
+        alias_method :then_flat, :then_flat_future
+
+        # @return [Future]
+        def then_flat_future_on(executor, *args, &block)
+          self.then_on(executor, *args, &block).flat_future
+        end
+
+        alias_method :then_flat_on, :then_flat_future_on
+
+        # @return [Event]
+        def then_flat_event(*args, &block)
+          self.then(*args, &block).flat_event
+        end
+
+        # @return [Event]
+        def then_flat_event_on(executor, *args, &block)
+          self.then_on(executor, *args, &block).flat_event
+        end
+      end
+
+      include FlatShortcuts
+    end
+
+    # @!macro warn.edge
+    class Channel < Concurrent::Synchronization::Object
+      safe_initialization!
+
+      # Default size of the Channel, makes it accept unlimited number of messages.
+      UNLIMITED = ::Object.new
+      UNLIMITED.singleton_class.class_eval do
+        include Comparable
+
+        def <=>(other)
+          1
+        end
+
+        def to_s
+          'unlimited'
+        end
+      end
+
+      # A channel to pass messages between promises. The size is limited to support back pressure.
+      # @param [Integer, UNLIMITED] size the maximum number of messages stored in the channel.
+      def initialize(size = UNLIMITED)
+        super()
+        @Size = size
+        # TODO (pitr-ch 26-Dec-2016): replace with lock-free implementation
+        @Mutex = Mutex.new
+        @Probes = []
+        @Messages = []
+        @PendingPush = []
+      end
+
+
+      # Returns future which will fulfill when the message is added to the channel. Its value is the message.
+      # @param [Object] message
+      # @return [Future]
+      def push(message)
+        @Mutex.synchronize do
+          while true
+            if @Probes.empty?
+              if @Size > @Messages.size
+                @Messages.push message
+                return Promises.fulfilled_future message
+              else
+                pushed = Promises.resolvable_future
+                @PendingPush.push [message, pushed]
+                return pushed.with_hidden_resolvable
+              end
+            else
+              probe = @Probes.shift
+              if probe.fulfill [self, message], false
+                return Promises.fulfilled_future(message)
+              end
+            end
+          end
+        end
+      end
+
+      # Returns a future witch will become fulfilled with a value from the channel when one is available.
+      # @param [ResolvableFuture] probe the future which will be fulfilled with a channel value
+      # @return [Future] the probe, its value will be the message when available.
+      def pop(probe = Concurrent::Promises.resolvable_future)
+        # TODO (pitr-ch 26-Dec-2016): improve performance
+        pop_for_select(probe).then(&:last)
+      end
+
+      # @!visibility private
+      def pop_for_select(probe = Concurrent::Promises.resolvable_future)
+        @Mutex.synchronize do
+          if @Messages.empty?
+            @Probes.push probe
+          else
+            message = @Messages.shift
+            probe.fulfill [self, message]
+
+            unless @PendingPush.empty?
+              message, pushed = @PendingPush.shift
+              @Messages.push message
+              pushed.fulfill message
+            end
+          end
+        end
+        probe
+      end
+
+      # @return [String] Short string representation.
+      def to_s
+        format '%s size:%s>', super[0..-2], @Size
+      end
+
+      alias_method :inspect, :to_s
+    end
+
+    class Future < AbstractEventFuture
+      # @!macro warn.edge
+      module NewChannelIntegration
+
+        # @param [Channel] channel to push to.
+        # @return [Future] a future which is fulfilled after the message is pushed to the channel.
+        # May take a moment if the channel is full.
+        def then_push_channel(channel)
+          self.then { |value| channel.push value }.flat_future
+        end
+
+      end
+
+      include NewChannelIntegration
+    end
+
+    module FactoryMethods
+      # @!macro warn.edge
+      module NewChannelIntegration
+
+        # Selects a channel which is ready to be read from.
+        # @param [Channel] channels
+        # @return [Future] a future which is fulfilled with pair [channel, message] when one of the channels is
+        # available for reading
+        def select_channel(*channels)
+          probe = Promises.resolvable_future
+          channels.each { |ch| ch.pop_for_select probe }
+          probe
+        end
+      end
+
+      include NewChannelIntegration
+    end
+
+  end
+end
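Since the whole file is shown above, its API can be exercised directly: `Channel#push` returns a future that fulfills once the message is accepted (which is how back pressure works when a size is given), `#pop` returns a future of the next message, and `Promises.select_channel` resolves with a `[channel, message]` pair. A small sketch built only from the methods defined above:

```ruby
require 'concurrent-edge'

channel = Concurrent::Promises::Channel.new(2) # at most 2 buffered messages

# push returns an already-fulfilled future while the channel has room...
channel.push(:a).value! # => :a
channel.push(:b).value! # => :b

# ...and a pending future once it is full, until a pop frees a slot.
blocked = channel.push(:c)
blocked.resolved? # => false

channel.pop.value! # => :a, which also lets the pending push go through
blocked.value!     # => :c

# select_channel resolves with whichever channel has a message ready.
a = Concurrent::Promises::Channel.new(1)
b = Concurrent::Promises::Channel.new(1)
b.push :hello
ch, msg = Concurrent::Promises.select_channel(a, b).value!
# ch.equal?(b) => true, msg => :hello
```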
data/{lib → lib-edge}/concurrent/edge/throttle.rb

@@ -1,5 +1,5 @@
 module Concurrent
-  # @!macro
+  # @!macro throttle.example.throttled_block
   # @example
   # max_two = Throttle.new 2
   # 10.times.map do

@@ -10,12 +10,12 @@ module Concurrent
   # end
   # end
   # end
-  # @!macro
+  # @!macro throttle.example.throttled_future
   # @example
   # throttle.throttled_future(1) do |arg|
   # arg.succ
   # end
-  # @!macro
+  # @!macro throttle.example.throttled_future_chain
   # @example
   # throttle.throttled_future_chain do |trigger|
   # trigger.

@@ -23,7 +23,7 @@ module Concurrent
   # chain { 1 }.
   # then(&:succ)
   # end
-  # @!macro
+  # @!macro throttle.example.then_throttled_by
   # @example
   # data = (1..5).to_a
   # db = data.reduce({}) { |h, v| h.update v => v.to_s }

@@ -48,11 +48,13 @@ module Concurrent
   # @!macro throttle.example.throttled_future
   # @!macro throttle.example.throttled_future_chain
   # @!macro throttle.example.throttled_block
+  # @!macro warn.edge
   class Throttle < Synchronization::Object
     # TODO (pitr-ch 21-Dec-2016): consider using sized channel for implementation instead when available

     safe_initialization!
-
+    attr_atomic(:can_run)
+    private :can_run, :can_run=, :swap_can_run, :compare_and_set_can_run, :update_can_run

     # New throttle.
     # @param [Integer] limit

@@ -95,6 +97,7 @@ module Concurrent
       current_can_run = can_run
       if compare_and_set_can_run current_can_run, current_can_run + 1
         if current_can_run < 0
+          # release called after trigger which pushed a trigger, busy wait is ok
           Thread.pass until (trigger = @Queue.pop)
           trigger.resolve
         end

@@ -117,7 +120,7 @@ module Concurrent

     # @return [String] Short string representation.
     def to_s
-      format '
+      format '%s limit:%s can_run:%d>', super[0..-2], @Limit, can_run
     end

     alias_method :inspect, :to_s

@@ -151,17 +154,23 @@ module Concurrent

     class AbstractEventFuture < Synchronization::Object
       module ThrottleIntegration
+
+        # @yieldparam [Future] a trigger
+        # @yieldreturn [Future, Event]
+        # @return [Future, Event]
         def throttled_by(throttle, &throttled_futures)
           a_trigger = self & self.chain { throttle.trigger }.flat_event
           throttled_futures.call(a_trigger).on_resolution! { throttle.release }
         end

-        # Behaves as {
-        # @return [
-        # @see
+        # Behaves as {AbstractEventFuture#chain} but the it is throttled.
+        # @return [Future, Event]
+        # @see AbstractEventFuture#chain
        def chain_throttled_by(throttle, *args, &block)
          throttled_by(throttle) { |trigger| trigger.chain(*args, &block) }
        end
+
+        # TODO (pitr-ch 11-Jul-2018): add other then/rescue methods
       end

       include ThrottleIntegration

@@ -170,17 +179,17 @@ module Concurrent
     class Future < AbstractEventFuture
       module ThrottleIntegration

-        # Behaves as {
-        # @return [
-        # @see
+        # Behaves as {Future#then} but the it is throttled.
+        # @return [Future]
+        # @see Future#then
         # @!macro throttle.example.then_throttled_by
         def then_throttled_by(throttle, *args, &block)
           throttled_by(throttle) { |trigger| trigger.then(*args, &block) }
         end

-        # Behaves as {
-        # @return [
-        # @see
+        # Behaves as {Future#rescue} but the it is throttled.
+        # @return [Future]
+        # @see Future#rescue
         def rescue_throttled_by(throttle, *args, &block)
           throttled_by(throttle) { |trigger| trigger.rescue(*args, &block) }
         end
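The renamed example macros above already carry the gist (`max_two = Throttle.new 2`, `throttle.throttled_future(1) { |arg| arg.succ }`, `then_throttled_by`); combined with the newly documented `throttled_by` signature, usage looks roughly like the following sketch, which reuses only calls shown in the diff:

```ruby
require 'concurrent-edge'

throttle = Concurrent::Throttle.new 2 # at most two throttled blocks in flight

# Future variant, as in the throttle.example.throttled_future macro:
throttle.throttled_future(1) { |arg| arg.succ }.value! # => 2

# Chaining variant added to Promises futures by ThrottleIntegration:
futures = (1..10).map do |i|
  Concurrent::Promises.fulfilled_future(i).then_throttled_by(throttle) { |v| v * 10 }
end
futures.map(&:value!) # at most 2 of the throttled blocks run concurrently
```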
data/lib-edge/concurrent/edge.rb (new file)

@@ -0,0 +1,21 @@
+module Concurrent
+
+  # A submodule for unstable, highly experimental features that are likely to
+  # change often and which may never become part of the core gem. Also for
+  # new, experimental version of abstractions already in the core gem.
+  #
+  # Most new features should start in this module, clearly indicating the
+  # experimental and unstable nature of the feature. Once a feature becomes
+  # more stable and is a candidate for inclusion in the core gem it should
+  # be moved up to the `Concurrent` module, where it would reside once merged
+  # into the core gem.
+  #
+  # The only exception to this is for features which *replace* features from
+  # the core gem in ways that are breaking and not backward compatible. These
+  # features should remain in this module until merged into the core gem. This
+  # will prevent namespace collisions.
+  #
+  # @!macro warn.edge
+  module Edge
+  end
+end