concurrently 1.0.1
- checksums.yaml +7 -0
- data/.gitignore +5 -0
- data/.rspec +4 -0
- data/.travis.yml +16 -0
- data/.yardopts +7 -0
- data/Gemfile +17 -0
- data/LICENSE +176 -0
- data/README.md +129 -0
- data/RELEASE_NOTES.md +49 -0
- data/Rakefile +28 -0
- data/concurrently.gemspec +33 -0
- data/ext/Ruby/thread.rb +28 -0
- data/ext/all/array.rb +24 -0
- data/ext/mruby/array.rb +19 -0
- data/ext/mruby/fiber.rb +5 -0
- data/ext/mruby/io.rb +54 -0
- data/guides/Installation.md +46 -0
- data/guides/Overview.md +335 -0
- data/guides/Performance.md +140 -0
- data/guides/Troubleshooting.md +262 -0
- data/lib/Ruby/concurrently.rb +12 -0
- data/lib/Ruby/concurrently/error.rb +4 -0
- data/lib/Ruby/concurrently/event_loop.rb +24 -0
- data/lib/Ruby/concurrently/event_loop/io_selector.rb +38 -0
- data/lib/all/concurrently/error.rb +10 -0
- data/lib/all/concurrently/evaluation.rb +109 -0
- data/lib/all/concurrently/evaluation/error.rb +18 -0
- data/lib/all/concurrently/event_loop.rb +101 -0
- data/lib/all/concurrently/event_loop/fiber.rb +37 -0
- data/lib/all/concurrently/event_loop/io_selector.rb +42 -0
- data/lib/all/concurrently/event_loop/proc_fiber_pool.rb +18 -0
- data/lib/all/concurrently/event_loop/run_queue.rb +111 -0
- data/lib/all/concurrently/proc.rb +233 -0
- data/lib/all/concurrently/proc/evaluation.rb +246 -0
- data/lib/all/concurrently/proc/fiber.rb +67 -0
- data/lib/all/concurrently/version.rb +8 -0
- data/lib/all/io.rb +248 -0
- data/lib/all/kernel.rb +201 -0
- data/lib/mruby/concurrently/proc.rb +21 -0
- data/lib/mruby/kernel.rb +15 -0
- data/mrbgem.rake +42 -0
- data/perf/_shared/stage.rb +33 -0
- data/perf/concurrent_proc_call.rb +13 -0
- data/perf/concurrent_proc_call_and_forget.rb +15 -0
- data/perf/concurrent_proc_call_detached.rb +15 -0
- data/perf/concurrent_proc_call_nonblock.rb +13 -0
- data/perf/concurrent_proc_calls.rb +49 -0
- data/perf/concurrent_proc_calls_awaiting.rb +48 -0
- metadata +144 -0
@@ -0,0 +1,67 @@
module Concurrently
  # @private
  class Proc::Fiber < ::Fiber
    class Cancelled < Exception
      # should not be rescued accidentally and therefore is an exception
    end

    EMPTY_EVALUATION_BUCKET = [].freeze

    def initialize(fiber_pool)
      # Creation of fibers is quite expensive. To reduce the cost we make
      # them reusable:
      # - Each concurrent proc is executed during one iteration of the loop
      #   inside a fiber.
      # - At the end of each iteration we put the fiber back into the fiber
      #   pool of the event loop.
      # - Taking a fiber out of the pool and resuming it will enter the
      #   next iteration.
      super() do |proc, args, evaluation_bucket|
        # The fiber's proc, arguments to call the proc with and evaluation
        # are passed when scheduled right after creation or taking it out of
        # the pool.

        while true
          evaluation_bucket ||= EMPTY_EVALUATION_BUCKET

          result = if proc == self
            # If we are given this very fiber when starting itself it means it
            # has been evaluated right before its start. In this case just
            # yield back to the evaluating fiber.
            Fiber.yield

            # When this fiber is started because it is next on schedule it will
            # just finish without running the proc.

            :cancelled
          elsif not Proc === proc
            # This should never happen. If it does it means something with code
            # of this library is not right.
            raise Concurrently::Error, "concurrent proc not started properly."
          else
            begin
              result = proc.__proc_call__ *args
              (evaluation = evaluation_bucket[0]) and evaluation.conclude_to result
              result
            rescue Cancelled
              # raised in Kernel#await_resume!
              :cancelled
            rescue *RESCUABLE_ERRORS => error
              # Rescue all errors not critical for other concurrent evaluations
              # and don't let them leak to the loop to keep it up and running.
              proc.trigger :error, error
              (evaluation = evaluation_bucket[0]) and evaluation.conclude_to error
              error
            end
          end

          fiber_pool.return self

          # Yield back to the event loop fiber or the fiber evaluating this one
          # and wait for the next proc to evaluate.
          proc, args, evaluation_bucket = Fiber.yield result
        end
      end
    end
  end
end
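The pooling strategy described in the comments of `Proc::Fiber#initialize` can be illustrated in isolation. Below is a minimal sketch built only on Ruby's core `Fiber` API; `SimpleFiberPool` and its methods are invented for illustration and are not part of Concurrently's code:

```ruby
require 'fiber' # Fiber.current on older Rubies

# Toy pool of reusable fibers: each fiber loops forever, runs one job per
# resume and checks itself back into the pool before yielding its result.
class SimpleFiberPool
  def initialize
    @fibers = []
  end

  def take
    @fibers.pop || Fiber.new do |job|
      while true
        result = job.call
        @fibers.push Fiber.current  # return to the pool, like fiber_pool.return self
        job = Fiber.yield result    # wait to be resumed with the next job
      end
    end
  end
end

pool = SimpleFiberPool.new
fiber = pool.take
p fiber.resume(-> { :first_job })   # => :first_job
p fiber.equal?(pool.take)           # => true, the same fiber is handed out again
p fiber.resume(-> { :second_job })  # => :second_job
```

Resuming an existing fiber is much cheaper than creating a new one, which is the cost the comment in the constructor refers to.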
data/lib/all/io.rb
ADDED
@@ -0,0 +1,248 @@
# @api public
# @since 1.0.0
#
# Concurrently adds a few methods to `IO` which make them available
# for every IO instance.
class IO
  # Suspends the current evaluation until IO is readable. It can be used inside
  # and outside of concurrent procs.
  #
  # While waiting, the code jumps to the event loop and executes other
  # concurrent procs that are ready to run in the meantime.
  #
  # @param [Hash] opts
  # @option opts [Numeric] :within maximum time to wait *(defaults to: Float::INFINITY)*
  # @option opts [Object] :timeout_result result to return in case of an exceeded
  #   waiting time *(defaults to raising {Concurrently::Evaluation::TimeoutError})*
  #
  # @return [true]
  # @raise [Concurrently::Evaluation::TimeoutError] if a given maximum waiting time
  #   is exceeded and no custom timeout result is given.
  #
  # @example Waiting inside a concurrent proc
  #   # Control flow is indicated by (N)
  #
  #   r,w = IO.pipe
  #
  #   # (1)
  #   wait_proc = concurrent_proc do
  #     # (4)
  #     r.await_readable
  #     # (6)
  #     r.read
  #   end
  #
  #   # (2)
  #   concurrently do
  #     # (5)
  #     w.write 'Hey from the other proc!'
  #     w.close
  #   end
  #
  #   # (3)
  #   wait_proc.call # => 'Hey from the other proc!'
  #   # (7)
  #
  #   r.close
  #
  # @example Waiting outside a concurrent proc
  #   # Control flow is indicated by (N)
  #
  #   r,w = IO.pipe
  #
  #   # (1)
  #   concurrently do
  #     # (3)
  #     puts "I'm running while the outside is waiting!"
  #     w.write "Continue!"
  #     w.close
  #   end
  #
  #   # (2)
  #   r.await_readable
  #   # (4)
  #   r.read # => "Continue!"
  #
  #   r.close
  #
  # @example Waiting with a timeout
  #   r,w = IO.pipe
  #   r.await_readable(within: 1)
  #   # => raises a TimeoutError after 1 second
  #
  # @example Waiting with a timeout and a timeout result
  #   r,w = IO.pipe
  #   r.await_readable(within: 0.1, timeout_result: false)
  #   # => returns false after 0.1 second
  def await_readable(opts = {})
    io_selector = Concurrently::EventLoop.current.io_selector
    io_selector.await_reader(self, Concurrently::Evaluation.current)
    await_resume! opts
  ensure
    io_selector.cancel_reader(self)
  end

  # Reads from IO concurrently.
  #
  # If IO is not readable right now it blocks the current concurrent evaluation
  # and tries again after it became readable.
  #
  # This method is a shortcut for:
  #
  # ```
  # begin
  #   read_nonblock(maxlen, buf)
  # rescue IO::WaitReadable
  #   await_readable
  #   retry
  # end
  # ```
  #
  # @see https://ruby-doc.org/core/IO.html#method-i-read_nonblock
  #   Ruby documentation for `IO#read_nonblock` for details about parameters and return values.
  #
  # @example
  #   r,w = IO.pipe
  #   w.concurrently_write "Hello!"
  #   r.concurrently_read 1024 # => "Hello!"
  #
  # @overload concurrently_read(maxlen)
  #   Reads maxlen bytes from IO and returns it as new string
  #
  #   @param [Integer] maxlen
  #   @return [String] read string
  #
  # @overload concurrently_read(maxlen, outbuf)
  #   Reads maxlen bytes from IO and fills the given buffer with them.
  #
  #   @param [Integer] maxlen
  #   @param [String] outbuf
  #   @return [outbuf] outbuf filled with read string
  def concurrently_read(maxlen, outbuf = nil)
    read_nonblock(maxlen, outbuf)
  rescue IO::WaitReadable
    await_readable
    retry
  end

  # Suspends the current evaluation until IO is writable. It can be used inside
  # and outside of concurrent procs.
  #
  # While waiting, the code jumps to the event loop and executes other
  # concurrent procs that are ready to run in the meantime.
  #
  # @param [Hash] opts
  # @option opts [Numeric] :within maximum time to wait *(defaults to: Float::INFINITY)*
  # @option opts [Object] :timeout_result result to return in case of an exceeded
  #   waiting time *(defaults to raising {Concurrently::Evaluation::TimeoutError})*
  #
  # @return [true]
  # @raise [Concurrently::Evaluation::TimeoutError] if a given maximum waiting time
  #   is exceeded and no custom timeout result is given.
  #
  # @example Waiting inside a concurrent proc
  #   # Control flow is indicated by (N)
  #
  #   r,w = IO.pipe
  #
  #   # jam the pipe with x's, assuming the pipe's max capacity is 2^16 bytes
  #   w.write 'x'*65536
  #
  #   # (1)
  #   wait_proc = concurrent_proc do
  #     # (4)
  #     w.await_writable
  #     # (6)
  #     w.write 'I can write again!'
  #     :written
  #   end
  #
  #   # (2)
  #   concurrently do
  #     # (5)
  #     r.read 65536 # clear the pipe
  #   end
  #
  #   # (3)
  #   wait_proc.call # => :written
  #   # (7)
  #
  #   r.close; w.close
  #
  # @example Waiting outside a concurrent proc
  #   # Control flow is indicated by (N)
  #
  #   r,w = IO.pipe
  #
  #   # jam the pipe with x's, assuming the pipe's max capacity is 2^16 bytes
  #   w.write 'x'*65536
  #
  #   # (1)
  #   concurrently do
  #     # (3)
  #     puts "I'm running while the outside is waiting!"
  #     r.read 65536 # clear the pipe
  #   end
  #
  #   # (2)
  #   w.await_writable
  #   # (4)
  #
  #   r.close; w.close
  #
  # @example Waiting with a timeout
  #   r,w = IO.pipe
  #   # jam the pipe with x's, assuming the pipe's max capacity is 2^16 bytes
  #   w.write 'x'*65536
  #
  #   w.await_writable(within: 1)
  #   # => raises a TimeoutError after 1 second
  #
  # @example Waiting with a timeout and a timeout result
  #   r,w = IO.pipe
  #   # jam the pipe with x's, assuming the pipe's max capacity is 2^16 bytes
  #   w.write 'x'*65536
  #
  #   w.await_writable(within: 0.1, timeout_result: false)
  #   # => returns false after 0.1 second
  def await_writable(opts = {})
    io_selector = Concurrently::EventLoop.current.io_selector
    io_selector.await_writer(self, Concurrently::Evaluation.current)
    await_resume! opts
  ensure
    io_selector.cancel_writer(self)
  end

  # Writes to IO concurrently.
  #
  # If IO is not writable right now it blocks the current concurrent proc
  # and tries again after it became writable.
  #
  # This method is a shortcut for:
  #
  # ```
  # begin
  #   write_nonblock(string)
  # rescue IO::WaitWritable
  #   await_writable
  #   retry
  # end
  # ```
  #
  # @param [String] string to write
  # @return [Integer] bytes written
  #
  # @see https://ruby-doc.org/core/IO.html#method-i-write_nonblock
  #   Ruby documentation for `IO#write_nonblock` for details about parameters and return values.
  #
  # @example
  #   r,w = IO.pipe
  #   w.concurrently_write "Hello!"
  #   r.concurrently_read 1024 # => "Hello!"
  def concurrently_write(string)
    write_nonblock(string)
  rescue IO::WaitWritable
    await_writable
    retry
  end
end
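Taken together, these methods let plain `IO` objects be driven from concurrent procs without blocking the thread. A small end-to-end sketch (it assumes the gem is loaded with `require 'concurrently'` and uses `call_nonblock`/`await_result` from the Kernel and Evaluation APIs shipped in this release):

```ruby
require 'concurrently'

r, w = IO.pipe

# Start a reader in the background. It suspends in concurrently_read until
# data arrives instead of blocking the whole thread.
reader = concurrent_proc do
  r.concurrently_read 1024
end.call_nonblock

# Produce the data from another concurrent proc.
concurrently do
  w.concurrently_write "ping"
  w.close
end

puts reader.await_result # => "ping"
r.close
```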
data/lib/all/kernel.rb
ADDED
@@ -0,0 +1,201 @@
# @api public
# @since 1.0.0
#
# Concurrently adds a few methods to `Kernel` which make them available
# for every object.
module Kernel
  # @!method concurrently(*args, &block)
  #
  # Executes code concurrently in the background.
  #
  # This is a shortcut for {Concurrently::Proc#call_and_forget}.
  #
  # @return [nil]
  #
  # @example
  #   concurrently(a,b,c) do |a,b,c|
  #     # ...
  #   end
  private def concurrently(*args)
    # Concurrently::Proc.new claims the method's block just like Proc.new does
    Concurrently::Proc.new.call_and_forget *args
  end

  # @!method concurrent_proc(&block)
  #
  # Creates a concurrent proc to execute code concurrently.
  #
  # This is a shortcut for {Concurrently::Proc}.new(&block) like `proc(&block)`
  # is a shortcut for `Proc.new(&block)`.
  #
  # @return [Concurrently::Proc]
  #
  # @example
  #   wait_proc = concurrent_proc do |seconds|
  #     wait seconds
  #   end
  #
  #   wait_proc.call 2 # waits 2 seconds and then resumes
  private def concurrent_proc(evaluation_class = Concurrently::Proc::Evaluation)
    # Concurrently::Proc.new claims the method's block just like Proc.new does
    Concurrently::Proc.new(evaluation_class)
  end

  # @note The exclamation mark in its name stands for: Watch out!
  #   This method needs to be complemented with a later call to
  #   {Concurrently::Evaluation#resume!}.
  #
  # Suspends the current evaluation until it is resumed manually. It can be
  # used inside and outside of concurrent procs.
  #
  # It needs to be complemented with a later call of {Concurrently::Evaluation#resume!}.
  #
  # @param [Hash] opts
  # @option opts [Numeric] :within maximum time to wait *(defaults to: Float::INFINITY)*
  # @option opts [Object] :timeout_result result to return in case of an exceeded
  #   waiting time *(defaults to raising {Concurrently::Evaluation::TimeoutError})*
  #
  # @return [Object] the result {Concurrently::Evaluation#resume!} is called
  #   with.
  # @raise [Concurrently::Evaluation::TimeoutError] if a given maximum waiting time
  #   is exceeded and no custom timeout result is given.
  #
  # @example Waiting inside a concurrent proc
  #   # Control flow is indicated by (N)
  #
  #   # (1)
  #   evaluation = concurrent_proc do
  #     # (4)
  #     await_resume!
  #     # (7)
  #   end.call_nonblock
  #
  #   # (2)
  #   concurrently do
  #     # (5)
  #     puts "I'm running while the outside is waiting!"
  #     evaluation.resume! :result
  #     # (6)
  #   end
  #
  #   # (3)
  #   evaluation.await_result # => :result
  #   # (8)
  #
  # @example Waiting outside a concurrent proc
  #   # Control flow is indicated by (N)
  #
  #   evaluation = Concurrently::Evaluation.current
  #
  #   # (1)
  #   concurrently do
  #     # (3)
  #     puts "I'm running while the outside is waiting!"
  #     evaluation.resume! :result
  #     # (4)
  #   end
  #
  #   # (2)
  #   await_resume! # => :result
  #   # (5)
  #
  # @example Waiting with a timeout
  #   await_resume! within: 1
  #   # => raises a TimeoutError after 1 second
  #
  # @example Waiting with a timeout and a timeout result
  #   await_resume! within: 0.1, timeout_result: false
  #   # => returns false after 0.1 second
  private def await_resume!(opts = {})
    event_loop = Concurrently::EventLoop.current
    run_queue = event_loop.run_queue
    evaluation = run_queue.current_evaluation

    if seconds = opts[:within]
      timeout_result = opts.fetch(:timeout_result, Concurrently::Evaluation::TimeoutError)
      run_queue.schedule_deferred(evaluation, seconds, timeout_result)
    end

    evaluation.instance_variable_set :@waiting, true
    result = case evaluation
    when Concurrently::Proc::Evaluation
      # Yield back to the event loop fiber or the evaluation evaluating this one.
      # Pass along itself to indicate it is not yet fully evaluated.
      Fiber.yield evaluation
    else
      event_loop.fiber.resume
    end
    evaluation.instance_variable_set :@waiting, false

    # If result is this very evaluation it means this evaluation has been evaluated
    # prematurely.
    if evaluation.fiber == result
      run_queue.cancel evaluation # in case the evaluation has already been scheduled to resume

      # Generally, throw-catch is faster than raise-rescue if the code needs to
      # play back the call stack, i.e. the throw resp. raise is invoked. If not
      # playing back the call stack, a begin block is faster than a catch
      # block. Since we won't jump out of the proc above most of the time, we
      # go with raise. It is rescued in the proc fiber.
      raise Concurrently::Proc::Fiber::Cancelled, '', []
    elsif Concurrently::Evaluation::TimeoutError == result
      raise result, "evaluation timed out after #{seconds} second(s)"
    else
      result
    end
  ensure
    if seconds
      run_queue.cancel evaluation
    end
  end

  # Suspends the current evaluation for the given number of seconds. It can be
  # used inside and outside of concurrent procs.
  #
  # While waiting, the code jumps to the event loop and executes other
  # concurrent procs that are ready to run in the meantime.
  #
  # @return [true]
  #
  # @example Waiting inside a concurrent proc
  #   # Control flow is indicated by (N)
  #
  #   # (1)
  #   wait_proc = concurrent_proc do |seconds|
  #     # (4)
  #     wait seconds
  #     # (6)
  #     :waited
  #   end
  #
  #   # (2)
  #   concurrently do
  #     # (5)
  #     puts "I'm running while the other proc is waiting!"
  #   end
  #
  #   # (3)
  #   wait_proc.call 1 # => :waited
  #   # (7)
  #
  # @example Waiting outside a concurrent proc
  #   # Control flow is indicated by (N)
  #
  #   # (1)
  #   concurrently do
  #     # (3)
  #     puts "I'm running while the outside is waiting!"
  #   end
  #
  #   # (2)
  #   wait 1
  #   # (4)
  private def wait(seconds)
    run_queue = Concurrently::EventLoop.current.run_queue
    evaluation = run_queue.current_evaluation
    run_queue.schedule_deferred(evaluation, seconds, true)
    await_resume!
  ensure
    run_queue.cancel evaluation
  end
end
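As a rough sketch of `concurrent_proc`, `wait` and the evaluation API used together (it assumes `call_nonblock` forwards its arguments to the block the way `call` does, and that the gem is loaded with `require 'concurrently'`), several waits can run on a single thread at once:

```ruby
require 'concurrently'

# Each call suspends in #wait, so all three evaluations wait concurrently on
# the same event loop and the total elapsed time is roughly the longest wait.
waiter = concurrent_proc do |seconds|
  wait seconds
  seconds
end

start = Time.now
evaluations = [0.1, 0.2, 0.3].map { |seconds| waiter.call_nonblock seconds }
results = evaluations.map(&:await_result)
elapsed = Time.now - start

puts results.inspect   # => [0.1, 0.2, 0.3]
puts elapsed.round(1)  # => ~0.3, not 0.6
```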