zeevex_concurrency 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +17 -0
- data/Gemfile +16 -0
- data/LICENSE.txt +22 -0
- data/README.md +29 -0
- data/Rakefile +1 -0
- data/lib/zeevex_concurrency/delay.rb +50 -0
- data/lib/zeevex_concurrency/delayed.rb +233 -0
- data/lib/zeevex_concurrency/event_loop.rb +154 -0
- data/lib/zeevex_concurrency/future.rb +60 -0
- data/lib/zeevex_concurrency/logging.rb +7 -0
- data/lib/zeevex_concurrency/nil_logger.rb +7 -0
- data/lib/zeevex_concurrency/promise.rb +32 -0
- data/lib/zeevex_concurrency/synchronized.rb +46 -0
- data/lib/zeevex_concurrency/thread_pool.rb +346 -0
- data/lib/zeevex_concurrency/version.rb +3 -0
- data/lib/zeevex_concurrency.rb +29 -0
- data/script/repl +10 -0
- data/script/testall +2 -0
- data/spec/delay_spec.rb +172 -0
- data/spec/delayed_spec.rb +104 -0
- data/spec/event_loop_spec.rb +161 -0
- data/spec/future_spec.rb +316 -0
- data/spec/promise_spec.rb +172 -0
- data/spec/spec_helper.rb +8 -0
- data/spec/thread_pool_spec.rb +281 -0
- data/zeevex_concurrency.gemspec +30 -0
- metadata +187 -0
data/.gitignore
ADDED
data/Gemfile
ADDED
@@ -0,0 +1,16 @@
+source 'https://rubygems.org'
+
+group :development, :test do
+  gem 'pry'
+  gem 'pry-remote'
+  gem 'pry-doc'
+  gem 'pry-nav'
+  gem 'pry-buffers'
+  gem 'pry-syntax-hacks'
+  gem 'pry-git', :platform => :mri
+  gem 'jist'
+  gem 'ruby18_source_location', :platform => :mri_18
+end
+
+# Specify your gem's dependencies in zeevex_concurrency.gemspec
+gemspec
data/LICENSE.txt
ADDED
@@ -0,0 +1,22 @@
+Copyright (c) 2013 Robert Sanders
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,29 @@
+# ZeevexConcurrency
+
+TODO: Write a gem description
+
+## Installation
+
+Add this line to your application's Gemfile:
+
+    gem 'zeevex_concurrency'
+
+And then execute:
+
+    $ bundle
+
+Or install it yourself as:
+
+    $ gem install zeevex_concurrency
+
+## Usage
+
+TODO: Write usage instructions here
+
+## Contributing
+
+1. Fork it
+2. Create your feature branch (`git checkout -b my-new-feature`)
+3. Commit your changes (`git commit -am 'Add some feature'`)
+4. Push to the branch (`git push origin my-new-feature`)
+5. Create new Pull Request
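The README's Usage section is still a TODO. Going by the convenience methods defined in `lib/zeevex_concurrency/delayed.rb` later in this diff, a minimal usage sketch might look like the following; this is not taken from the gem's documentation, and it assumes that `require 'zeevex_concurrency'` loads the Future, Promise, and Delay classes:

```ruby
require 'zeevex_concurrency'

# Future: evaluated on a background worker pool; #value blocks until done
answer = ZeevexConcurrency.future { 21 * 2 }
answer.value                 # => 42

# Delay: lazy evaluation; the block runs only when #value is first requested
stamp = ZeevexConcurrency.delay { Time.now }
stamp.value

# Promise: bound now, but only executed when something calls it
greeting = ZeevexConcurrency.promise { "hello" }
Thread.new { greeting.call }
greeting.value               # => "hello"
```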
data/Rakefile
ADDED
@@ -0,0 +1 @@
+require "bundler/gem_tasks"
data/lib/zeevex_concurrency/delay.rb
ADDED
@@ -0,0 +1,50 @@
+require 'observer'
+require 'thread'
+require 'zeevex_concurrency/delayed'
+
+class ZeevexConcurrency::Delay < ZeevexConcurrency::Delayed
+  include Observable
+  include ZeevexConcurrency::Delayed::Bindable
+
+  def initialize(computation = nil, options = {}, &block)
+    raise ArgumentError, "Must provide computation or block for a future" unless (computation || block)
+
+    @mutex = Mutex.new
+    @exec_mutex = Mutex.new
+    @exception = nil
+    @done = false
+    @result = false
+    @executed = false
+
+    # has to happen after exec_mutex initialized
+    bind(computation, &block)
+
+    Array(options.delete(:observer) || options.delete(:observers)).each do |observer|
+      add_observer observer
+    end
+  end
+
+  def self.create(callable = nil, options = {}, &block)
+    return callable if callable && callable.is_a?(ZeevexConcurrency::Delayed)
+    new(callable, options, &block)
+  end
+
+  def wait(timeout = nil)
+    true
+  end
+
+  def ready?
+    true
+  end
+
+  protected
+
+  def _fulfill(value, success = true)
+    @fulfilled_value = value
+  end
+
+  def _wait_for_value
+    _execute(binding)
+    @fulfilled_value
+  end
+end
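A `Delay` computes its value at most once, lazily, on whichever thread first asks for it; `wait` and `ready?` always report readiness because no background execution is involved. A minimal sketch of that behaviour (my example, assuming the gem is loaded as above):

```ruby
require 'zeevex_concurrency'

calls = 0
d = ZeevexConcurrency::Delay.new { calls += 1; :computed }

d.ready?   # => true  (a Delay is always "ready"; it evaluates on demand)
d.value    # => :computed  (the block runs here, on the caller's thread)
d.value    # => :computed  (cached by Delayed#value; the block is not re-run)
calls      # => 1
```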
data/lib/zeevex_concurrency/delayed.rb
ADDED
@@ -0,0 +1,233 @@
+require 'thread'
+require 'countdownlatch'
+
+#
+# base class for Promise, Future, etc.
+#
+class ZeevexConcurrency::Delayed
+
+  module ConvenienceMethods
+    def future(*args, &block)
+      ZeevexConcurrency::Future.__send__(:create, *args, &block)
+    end
+
+    def promise(*args, &block)
+      ZeevexConcurrency::Promise.__send__(:create, *args, &block)
+    end
+
+    def delay(*args, &block)
+      ZeevexConcurrency::Delay.__send__(:create, *args, &block)
+    end
+
+    def delayed?(obj)
+      obj.is_a?(ZeevexConcurrency::Delayed)
+    end
+
+    def delay?(obj)
+      obj.is_a?(ZeevexConcurrency::Delay)
+    end
+
+    def promise?(obj)
+      obj.is_a?(ZeevexConcurrency::Promise)
+    end
+
+    def future?(obj)
+      obj.is_a?(ZeevexConcurrency::Future)
+    end
+  end
+
+  def exception
+    @exception
+  end
+
+  def exception?
+    !! @exception
+  end
+
+  def executed?
+    @executed
+  end
+
+  def value(reraise = true)
+    @mutex.synchronize do
+      unless @done
+        @result = _wait_for_value
+        @done = true
+      end
+    end
+    if @exception && reraise
+      raise @exception
+    else
+      @result
+    end
+  end
+
+  def wait(timeout = nil)
+    Timeout::timeout(timeout) do
+      value(false)
+      true
+    end
+  rescue Timeout::Error
+    false
+  end
+
+  def set_result(&block)
+    @exec_mutex.synchronize do
+      raise ArgumentError, "Must supply block" unless block_given?
+      raise ArgumentError, "Already supplied block" if bound?
+      raise ArgumentError, "Promise already executed" if executed?
+
+      _execute(block)
+    end
+  end
+
+  protected
+
+  #
+  # not MT-safe; only to be called from executor thread
+  #
+  def _execute(computation)
+    raise "Already executed" if executed?
+    raise ArgumentError, "Cannot execute without computation" unless computation
+    success = false
+    begin
+      result = computation.call
+      success = true
+    rescue Exception
+      _smash($!)
+    end
+    @executed = true
+    # run this separately so we can report exceptions in _fulfill rather than capture them
+    _fulfill_and_notify(result) if (success)
+  rescue Exception
+    puts "*** exception in _fulfill: #{$!.inspect} ***"
+  ensure
+    @executed = true
+  end
+
+  def _fulfill_and_notify(value, success = true)
+    _fulfill(value, success)
+    if respond_to?(:notify_observers)
+      changed
+      begin
+        notify_observers(self, value, success)
+      rescue Exception
+        puts "Exception in notifying observers: #{$!.inspect}"
+      end
+    end
+  end
+  #
+  # not MT-safe; only to be called from executor thread
+  #
+  def _smash(ex)
+    @exception = ex
+    _fulfill_and_notify ex, false
+  end
+
+  ###
+
+  module LatchBased
+    def wait(timeout = nil)
+      @_latch.wait(timeout)
+    end
+
+    def ready?
+      @_latch.count == 0
+    end
+
+    protected
+
+    def _initialize_latch
+      @_latch = CountDownLatch.new(1)
+    end
+
+    def _fulfill(value, success = true)
+      @result = value
+      @_latch.countdown!
+    end
+
+    def _wait_for_value
+      @_latch.wait
+      @result
+    end
+  end
+
+  module QueueBased
+    def ready?
+      @exec_mutex.synchronize do
+        @queue.size > 0 || @executed
+      end
+    end
+
+    protected
+
+    def _initialize_queue
+      @queue = Queue.new
+    end
+
+    def _fulfill(value, success = true)
+      @queue << value
+    end
+
+    def _wait_for_value
+      @queue.pop
+    end
+  end
+
+  module Bindable
+    def bound?
+      !! @binding
+    end
+
+    def binding
+      @binding
+    end
+
+    def bind(proccy = nil, &block)
+      raise "Already bound" if bound?
+      if proccy && block
+        raise ArgumentError, "must supply a callable OR a block or neither, but not both"
+      end
+      raise ArgumentError, "Must provide computation as proc or block" unless (proccy || block)
+      @binding = proccy || block
+    end
+
+    def execute
+      @exec_mutex.synchronize do
+        return if executed?
+        return if respond_to?(:cancelled?) && cancelled?
+        _execute(binding)
+      end
+    end
+
+    def call
+      execute
+    end
+  end
+
+  module Cancellable
+    def cancelled?
+      @cancelled
+    end
+
+    def cancel
+      @exec_mutex.synchronize do
+        return false if executed?
+        return true if cancelled?
+        @cancelled = true
+        _smash CancelledException.new
+        true
+      end
+    end
+
+    def ready?
+      cancelled? || super
+    end
+  end
+
+  class CancelledException < StandardError; end
+end
+
+module ZeevexConcurrency
+  extend(ZeevexConcurrency::Delayed::ConvenienceMethods)
+end
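Because the concrete subclasses mix in `Observable`, `_fulfill_and_notify` pushes completion callbacks to any observers registered with `add_observer` or via the `:observer` option handled in the Promise/Future constructors later in this diff. A hedged sketch of what an observer sees (the `CompletionLogger` class below is mine, not part of the gem):

```ruby
require 'zeevex_concurrency'

# Observers receive (delayed, value, success) once the computation finishes,
# mirroring the notify_observers(self, value, success) call above.
class CompletionLogger
  def update(delayed, value, success)
    puts(success ? "completed with #{value.inspect}" : "failed with #{value.inspect}")
  end
end

f = ZeevexConcurrency.future(nil, :observer => CompletionLogger.new) { 6 * 7 }
f.wait   # blocks until the computation has run; the observer is notified with 42
```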
data/lib/zeevex_concurrency/event_loop.rb
ADDED
@@ -0,0 +1,154 @@
+require 'thread'
+require 'zeevex_concurrency/promise'
+
+module ZeevexConcurrency
+  class EventLoop
+    def initialize(options = {})
+      @options = options
+      @mutex = options.delete(:mutex) || Mutex.new
+      @queue = options.delete(:queue) || Queue.new
+      @state = :stopped
+    end
+
+    def running?
+      @state == :started
+    end
+
+    def start
+      return unless @state == :stopped
+      @stop_requested = false
+      @thread = Thread.new do
+        process
+      end
+
+      @state = :started
+    end
+
+    def stop
+      return unless @state == :started
+      enqueue { @stop_requested = true }
+      unless @thread.join(1)
+        @thread.kill
+        @thread.join(0)
+      end
+
+      @thread = nil
+      @state = :stopped
+    end
+
+    #
+    # Enqueue any callable object (including a Promise or Future or other Delayed class) to the event loop
+    # and return a Delayed object which can be used to fetch the return value.
+    #
+    # Strictly obeys ordering.
+    #
+    def enqueue(callable = nil, &block)
+      to_run = callable || block
+      raise ArgumentError, "Must provide proc or block arg" unless to_run
+
+      to_run = ZeevexConcurrency::Promise.new(to_run) unless to_run.is_a?(ZeevexConcurrency::Delayed)
+      @queue << to_run
+      to_run
+    end
+
+    def <<(callable)
+      enqueue(callable)
+    end
+
+    def flush
+      @queue.clear
+    end
+
+    def backlog
+      @queue.size
+    end
+
+    def reset
+      stop
+      flush
+      start
+    end
+
+    #
+    # Returns true if the method was called from code executing on the event loop's thread
+    #
+    def in_event_loop?
+      Thread.current.object_id == @thread.object_id
+    end
+
+    #
+    # Runs a computation on the event loop. Does not deadlock if currently on the event loop, but
+    # will not preserve ordering either - it runs the computation immediately despite other events
+    # in the queue
+    #
+    def on_event_loop(runnable = nil, &block)
+      return unless runnable || block_given?
+      promise = (runnable && runnable.is_a?(ZeevexConcurrency::Delayed)) ?
+          runnable :
+          ZeevexConcurrency::Promise.create(runnable, &block)
+      if in_event_loop?
+        promise.call
+        promise
+      else
+        enqueue promise, &block
+      end
+    end
+
+    #
+    # Returns the value from the computation rather than a Promise. Has similar semantics to
+    # `on_event_loop` - if this is called from the event loop, it just executes the
+    # computation synchronously ahead of any other queued computations
+    #
+    def run_and_wait(runnable = nil, &block)
+      promise = on_event_loop(runnable, &block)
+      promise.value
+    end
+
+    protected
+
+    def process
+      while !@stop_requested
+        begin
+          process_one
+        rescue
+          ZeevexConcurrency.logger.error %{Exception caught in event loop: #{$!.inspect}: #{$!.backtrace.join("\n")}}
+        end
+      end
+    end
+
+    def process_one
+      @queue.pop.call
+    end
+
+    public
+
+    # event loop which throws away all events without running, returning nil from all promises
+    class Null
+      def initialize(options = {}); end
+      def start; end
+      def stop; end
+      def enqueue(callable = nil, &block)
+        to_run = ZeevexConcurrency::Promise.new unless to_run.is_a?(ZeevexConcurrency::Delayed)
+        to_run.set_result { nil }
+        to_run
+      end
+      def in_event_loop?; false; end
+      def on_event_loop(runnable = nil, &block)
+        enqueue(runnable, &block)
+      end
+    end
+
+    # event loop which runs all events synchronously when enqueued
+    class Inline < ZeevexConcurrency::EventLoop
+      def start; end
+      def stop; end
+      def enqueue(callable = nil, &block)
+        res = super
+        process_one
+        res
+      end
+      def in_event_loop?; true; end
+    end
+
+  end
+end
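The event loop serializes queued work on a single background thread: `enqueue` wraps a block in a Promise and returns it, `run_and_wait` blocks for the computed value, and the `Inline` variant runs work synchronously at enqueue time. A minimal sketch (my example, assuming the gem is loaded):

```ruby
require 'zeevex_concurrency'

ev = ZeevexConcurrency::EventLoop.new
ev.start

# enqueue returns a Promise; #value blocks until the loop thread has run the block
p1 = ev.enqueue { 1 + 1 }
p2 = ev.enqueue { 2 + 2 }
[p1.value, p2.value]                 # => [2, 4], executed in FIFO order

# run_and_wait returns the value itself rather than a Promise
ev.run_and_wait { Thread.current }   # => the loop's own worker thread

ev.stop

# Inline variant: handy in tests, runs everything on the calling thread
inline = ZeevexConcurrency::EventLoop::Inline.new
inline.enqueue { :now }.value        # => :now
```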
data/lib/zeevex_concurrency/future.rb
ADDED
@@ -0,0 +1,60 @@
+require 'observer'
+require 'timeout'
+require 'zeevex_concurrency/delayed'
+require 'zeevex_concurrency/event_loop'
+require 'zeevex_concurrency/thread_pool'
+
+class ZeevexConcurrency::Future < ZeevexConcurrency::Delayed
+  include Observable
+  include ZeevexConcurrency::Delayed::Bindable
+  include ZeevexConcurrency::Delayed::LatchBased
+  include ZeevexConcurrency::Delayed::Cancellable
+
+  # @@worker_pool = ZeevexConcurrency::EventLoop.new
+  @@worker_pool = ZeevexConcurrency::ThreadPool::FixedPool.new
+  @@worker_pool.start
+
+  def initialize(computation = nil, options = {}, &block)
+    raise ArgumentError, "Must provide computation or block for a future" unless (computation || block)
+
+    @mutex = Mutex.new
+    @exec_mutex = Mutex.new
+    @exception = nil
+    @done = false
+    @result = false
+    @executed = false
+
+    _initialize_latch
+
+    # has to happen after exec_mutex initialized
+    bind(computation, &block) if (computation || block)
+
+    Array(options.delete(:observer) || options.delete(:observers)).each do |observer|
+      add_observer observer
+    end
+  end
+
+  def self.shutdown
+    self.worker_pool.stop
+  end
+
+  def self.create(callable=nil, options = {}, &block)
+    nfuture = ZeevexConcurrency::Future.new(callable, options, &block)
+    (options.delete(:event_loop) || worker_pool).enqueue nfuture
+
+    nfuture
+  end
+
+  def self.worker_pool
+    @@worker_pool
+  end
+
+  def self.worker_pool=(pool)
+    @@worker_pool = pool
+  end
+
+  class << self
+    alias_method :future, :create
+  end
+end
+
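A `Future` is bound at construction and scheduled immediately on the class-wide worker pool (a `FixedPool` from `thread_pool.rb`, not shown above), so `Future.create` (aliased as `future`) is the intended entry point; exceptions raised in the computation are captured and re-raised from `value`. A hedged sketch (mine, assuming the gem is loaded):

```ruby
require 'zeevex_concurrency'

f = ZeevexConcurrency::Future.create { sleep 0.1; :done }
f.ready?    # likely false immediately after creation
f.wait(1)   # wait up to one second for the pool to finish it
f.value     # => :done

# Exceptions are captured via _smash and re-raised from #value
boom = ZeevexConcurrency::Future.create { raise "boom" }
boom.wait
boom.exception?                 # => true
boom.value rescue $!.message    # => "boom"
```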
data/lib/zeevex_concurrency/promise.rb
ADDED
@@ -0,0 +1,32 @@
+require 'observer'
+require 'timeout'
+require 'zeevex_concurrency/delayed'
+
+class ZeevexConcurrency::Promise < ZeevexConcurrency::Delayed
+  include Observable
+  include ZeevexConcurrency::Delayed::Bindable
+  include ZeevexConcurrency::Delayed::LatchBased
+
+  def initialize(computation = nil, options = {}, &block)
+    @mutex = Mutex.new
+    @exec_mutex = Mutex.new
+    @exception = nil
+    @done = false
+    @result = false
+    @executed = false
+
+    _initialize_latch
+
+    # has to happen after exec_mutex initialized
+    bind(computation, &block) if (computation || block)
+
+    Array(options.delete(:observer) || options.delete(:observers)).each do |observer|
+      self.add_observer observer
+    end
+  end
+
+  def self.create(callable = nil, options = {}, &block)
+    return callable if callable && callable.is_a?(ZeevexConcurrency::Delayed)
+    new(callable, options, &block)
+  end
+end
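Unlike a Future, a Promise may be constructed unbound and fulfilled later with `set_result` (defined in `delayed.rb`), which makes it a simple one-shot channel between threads. A minimal sketch (mine, assuming the gem is loaded):

```ruby
require 'zeevex_concurrency'

promise = ZeevexConcurrency::Promise.new   # no computation yet

producer = Thread.new do
  promise.set_result { 40 + 2 }            # runs the block and fulfills the latch
end

promise.value   # => 42; blocks until the producer has supplied the result
producer.join
```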
data/lib/zeevex_concurrency/synchronized.rb
ADDED
@@ -0,0 +1,46 @@
+# Alex's Ruby threading utilities - taken from https://github.com/alexdowad/showcase
+
+require 'thread'
+require 'zeevex_proxy'
+
+# Wraps an object, synchronizes all method calls
+# The wrapped object can also be set and read out
+# which means this can also be used as a thread-safe reference
+# (like a 'volatile' variable in Java)
+class ZeevexConcurrency::Synchronized < ZeevexProxy::Base
+  def initialize(obj)
+    super
+    @mutex = ::Mutex.new
+  end
+
+  def _set_synchronized_object(val)
+    @mutex.synchronize { @obj = val }
+  end
+  def _get_synchronized_object
+    @mutex.synchronize { @obj }
+  end
+
+  def respond_to?(method)
+    if [:_set_synchronized_object, :_get_synchronized_object].include?(method.to_sym)
+      true
+    else
+      @obj.respond_to?(method)
+    end
+  end
+
+  def method_missing(method, *args, &block)
+    result = @mutex.synchronize { @obj.__send__(method, *args, &block) }
+    # result.__id__ == @obj.__id__ ? self : result
+  end
+end
+
+#
+# make object synchronized unless already synchronized
+#
+def ZeevexConcurrency.Synchronized(obj)
+  if obj.respond_to?(:_get_synchronized_object)
+    obj
+  else
+    ZeevexConcurrency::Synchronized.new(obj)
+  end
+end
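`Synchronized` proxies every method call through one mutex (via the external `zeevex_proxy` gem), turning an ordinary object into a coarse-grained thread-safe reference. A hedged sketch (mine; the require path is inferred from the file layout, and compound read-modify-write sequences still span multiple locked calls):

```ruby
require 'zeevex_concurrency'
require 'zeevex_concurrency/synchronized'

log = ZeevexConcurrency.Synchronized([])

threads = 4.times.map do |i|
  Thread.new { 100.times { log << i } }   # each << is dispatched under the proxy's mutex
end
threads.each(&:join)

log.size                        # => 400; calls are serialized one at a time
log._get_synchronized_object    # returns the underlying Array itself
```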