zeevex_concurrency 0.0.1

spec/thread_pool_spec.rb ADDED
@@ -0,0 +1,281 @@
+ require File.join(File.dirname(__FILE__), 'spec_helper')
+ require 'zeevex_concurrency/thread_pool.rb'
+ require 'zeevex_concurrency/event_loop.rb'
+ require 'timeout'
+ require 'thread'
+ require 'atomic'
+ require 'countdownlatch'
+
+ describe ZeevexConcurrency::ThreadPool do
+   let :mutex do
+     Mutex.new
+   end
+
+   let :latch do
+     CountDownLatch.new(1)
+   end
+
+   let :latch_wait_task do
+     Proc.new { latch.wait }
+   end
+
+   let :queue do
+     Queue.new
+   end
+
+   let :pop_task do
+     Proc.new { queue.pop }
+   end
+
+   let :atom do
+     Atomic.new(0)
+   end
+
+   around :each do |ex|
+     Timeout::timeout(30) do
+       ex.run
+     end
+   end
+
+   before do
+     queue
+     pop_task
+     atom
+     latch_wait_task
+     latch
+   end
+
+   def wait_until(timeout = 5, sleep_sec = 0.1)
+     t_start = Time.now
+
+     # go ahead and give up our timeslice as we might as well
+     # let somebody else make the condition true
+     Thread.pass unless yield
+     until yield || (Time.now-t_start) >= timeout
+       sleep sleep_sec
+     end
+     yield
+   end
+
+   shared_examples_for 'thread pool initialization' do
+     context 'basic usage' do
+       it 'should allow enqueue of a proc' do
+         expect { pool.enqueue(Proc.new { true }) }.
+           not_to raise_error
+       end
+
+       it 'should allow enqueue of a block' do
+         expect {
+           pool.enqueue do
+             true
+           end
+         }.not_to raise_error
+       end
+
+       it 'should allow enqueue of a Promise, and return same promise' do
+         promise = ZeevexConcurrency::Promise.new(Proc.new {true})
+         expect { pool.enqueue(promise) }.not_to raise_error
+       end
+
+       it 'should NOT allow both a callable and a block' do
+         expect {
+           pool.enqueue(Proc.new{}) do
+             true
+           end
+         }.to raise_error(ArgumentError)
+       end
+     end
+   end
+
+   shared_examples_for 'thread pool running tasks' do
+     it 'should execute the task on a different thread' do
+       pool.enqueue { queue << Thread.current.__id__ }
+       queue.pop.should_not == Thread.current.__id__
+     end
+
+     it 'should allow enqueueing from an executed task, and execute both' do
+       pool.enqueue do
+         pool.enqueue { queue << "val2" }
+         queue << "val1"
+       end
+       [queue.pop, queue.pop].sort.should == ["val1", "val2"]
+     end
+
+     it 'should execute a large number of tasks' do
+       atom = Atomic.new(0)
+       300.times do
+         pool.enqueue do
+           atom.update { |x| x+1 }
+         end
+       end
+       Timeout::timeout(20) do
+         while atom.value != 300
+           sleep 0.5
+         end
+       end
+       atom.value.should == 300
+     end
+   end
+
+   shared_examples_for 'thread pool with parallel execution' do
+     after do
+       latch.countdown!
+     end
+
+     # must be an even number
+     let :count do
+       parallelism == -1 ? 32 : parallelism
+     end
+
+     it 'should increase busy_count when tasks start' do
+       count.times { pool.enqueue { queue.pop } }
+       wait_until { pool.busy_count == count }
+       pool.busy_count.should == count
+     end
+
+     it 'should decrease busy_count when tasks finish' do
+       count.times { pool.enqueue { queue.pop } }
+       (count / 2).times { queue << "foo" }
+       pool.enqueue { latch.countdown!; queue.pop }
+       latch.wait
+       # should we need the following?
+       wait_until { pool.busy_count == (count / 2) + 1}
+       pool.busy_count.should == (count / 2) + 1
+     end
+
+     #
+     # TODO: this is pretty iffy - it doesn't really prove the assertion
+     #
+     it 'should return from join only when currently executing tasks finish' do
+       (count / 2).times { pool.enqueue { sleep 1; atom.update {|x| x + 1} } }
+       pool.join
+       atom.value.should == count/2
+     end
+   end
+
+   shared_examples_for 'thread pool with task queue' do
+     it 'should give a total count of backlog in queue' do
+       (parallelism + 1).times { pool.enqueue { queue.pop } }
+       wait_until { pool.backlog == 1 }
+       pool.backlog.should == 1
+     end
+
+     it 'should allow flushing jobs from the queue' do
+       (parallelism + 1).times { pool.enqueue { queue.pop } }
+       wait_until { pool.backlog == 1 }
+       pool.flush
+       pool.backlog.should == 0
+     end
+
+     it 'should not return from join if backlogged tasks have not run' do
+       count = parallelism + 2
+       count.times { pool.enqueue { queue.pop } }
+       expect {
+         Timeout::timeout(2) { pool.join }
+       }.to raise_error(TimeoutError)
+     end
+
+     # TODO: this is another iffy one - how do we accurately measure
+     # when join returns and how many tasks are waiting?
+     it 'should return from join when backlogged tasks have run' do
+       count = parallelism * 2
+       t_start = Time.now
+       count.times { pool.enqueue { sleep 1; atom.update {|x| x + 1} } }
+       pool.join
+       t_end = Time.now
+       atom.value.should == count
+       # we expect roughly 2 seconds of wall clock time - each thread doing 2 tasks
+       # which sleep for 1 second each
+       (t_end - t_start).round.should == 2
+     end
+   end
+
+   shared_examples_for 'thread pool control' do
+     it 'should allow enqueueing after a stop/start' do
+       pending 'broken on jruby, and really in general'
+       pool.stop
+       pool.start
+       pool.enqueue do
+         queue << "ran"
+       end
+       Timeout::timeout(5) do
+         queue.pop.should == "ran"
+       end
+     end
+   end
+
+   context 'FixedPool' do
+     let :parallelism do
+       32
+     end
+     let :pool do
+       ZeevexConcurrency::ThreadPool::FixedPool.new(parallelism)
+     end
+
+     it_should_behave_like 'thread pool initialization'
+     it_should_behave_like 'thread pool running tasks'
+     it_should_behave_like 'thread pool control'
+     it_should_behave_like 'thread pool with parallel execution'
+     it_should_behave_like 'thread pool with task queue'
+
+     it 'should indicate that the pool is busy when there are tasks in the queue' do
+       (parallelism + 1).times { pool.enqueue { sleep 30 } }
+       wait_until { pool.backlog == 1 }
+       pool.should be_busy
+     end
+
+     it 'should indicate that there are no free workers when there are tasks in the queue' do
+       (parallelism + 1).times { pool.enqueue { sleep 30 } }
+       wait_until { pool.free_count == 0 }
+       pool.free_count.should == 0
+       pool.busy_count.should == parallelism
+     end
+
+   end
+
+   context 'InlineThreadPool' do
+     let :pool do
+       ZeevexConcurrency::ThreadPool::InlineThreadPool.new
+     end
+     let :parallelism do
+       1
+     end
+
+     it_should_behave_like 'thread pool initialization'
+     it_should_behave_like 'thread pool running tasks'
+     it_should_behave_like 'thread pool control'
+   end
+
+   context 'ThreadPerJobPool' do
+     let :pool do
+       ZeevexConcurrency::ThreadPool::ThreadPerJobPool.new
+     end
+     let :parallelism do
+       -1
+     end
+
+     it_should_behave_like 'thread pool initialization'
+     it_should_behave_like 'thread pool running tasks'
+     it_should_behave_like 'thread pool control'
+     it_should_behave_like 'thread pool with parallel execution'
+   end
+
+   context 'EventLoopAdapter' do
+     let :loop do
+       ZeevexConcurrency::EventLoop.new
+     end
+     let :pool do
+       ZeevexConcurrency::ThreadPool::EventLoopAdapter.new loop
+     end
+     let :parallelism do
+       1
+     end
+
+     it_should_behave_like 'thread pool initialization'
+     it_should_behave_like 'thread pool running tasks'
+     it_should_behave_like 'thread pool control'
+     it_should_behave_like 'thread pool with task queue'
+   end
+
+ end
+
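Read together, the spec above also sketches the pool API: enqueue accepts a block, a Proc, or a Promise; busy_count, free_count, and backlog report pool state; and join waits for work to drain. The snippet below is a minimal usage sketch inferred only from the calls those examples make, not from the gem's documentation, so the exact semantics (especially of join and backlog) should be treated as assumptions.

    require 'thread'
    require 'zeevex_concurrency/thread_pool.rb'

    # A fixed pool of 4 worker threads, mirroring the FixedPool context above.
    pool    = ZeevexConcurrency::ThreadPool::FixedPool.new(4)
    results = Queue.new

    # Work is enqueued as a block; per the spec, a Proc or Promise also works.
    10.times { |i| pool.enqueue { results << i * i } }

    # join is expected to return once the enqueued tasks have finished,
    # after which the backlog should be empty.
    pool.join
    puts pool.backlog    # => 0 (expected)
    puts results.size    # => 10 (expected)
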
zeevex_concurrency.gemspec ADDED
@@ -0,0 +1,30 @@
+ # -*- encoding: utf-8 -*-
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'zeevex_concurrency/version'
+
+ Gem::Specification.new do |gem|
+   gem.name = "zeevex_concurrency"
+   gem.version = ZeevexConcurrency::VERSION
+   gem.authors = ["Robert Sanders"]
+   gem.email = ["robert@zeevex.com"]
+   gem.description = %q{Concurrency utilities including Delays, Promises, Futures, Event Loops, Thread Pools, and Synchronizing wrappers}
+   gem.summary = %q{Some concurrency utilities for Ruby}
+   gem.homepage = ""
+
+   gem.files = `git ls-files`.split($/)
+   gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
+   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
+   gem.require_paths = ["lib"]
+
+   gem.add_dependency 'zeevex_proxy'
+   gem.add_dependency 'countdownlatch', '~> 1.0.0'
+   gem.add_dependency 'atomic', '~> 1.0.0'
+
+   ## other headius utils
+   # s.add_dependency 'thread_safe'
+
+   gem.add_development_dependency 'rspec', '~> 2.9.0'
+   gem.add_development_dependency 'rake'
+   gem.add_development_dependency 'pry'
+ end
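To try the gem as packaged here, a Gemfile entry is enough; Bundler resolves the runtime dependencies declared above (zeevex_proxy, countdownlatch ~> 1.0.0, atomic ~> 1.0.0) on its own. The pin below simply matches the release shown on this page.

    # Gemfile
    source 'https://rubygems.org'

    gem 'zeevex_concurrency', '0.0.1'
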
metadata ADDED
@@ -0,0 +1,187 @@
+ --- !ruby/object:Gem::Specification
+ name: zeevex_concurrency
+ version: !ruby/object:Gem::Version
+   hash: 29
+   prerelease:
+   segments:
+   - 0
+   - 0
+   - 1
+   version: 0.0.1
+ platform: ruby
+ authors:
+ - Robert Sanders
+ autorequire:
+ bindir: bin
+ cert_chain: []
+
+ date: 2013-01-06 00:00:00 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   version_requirements: &id001 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         hash: 3
+         segments:
+         - 0
+         version: "0"
+   prerelease: false
+   type: :runtime
+   name: zeevex_proxy
+   requirement: *id001
+ - !ruby/object:Gem::Dependency
+   version_requirements: &id002 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         hash: 23
+         segments:
+         - 1
+         - 0
+         - 0
+         version: 1.0.0
+   prerelease: false
+   type: :runtime
+   name: countdownlatch
+   requirement: *id002
+ - !ruby/object:Gem::Dependency
+   version_requirements: &id003 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         hash: 23
+         segments:
+         - 1
+         - 0
+         - 0
+         version: 1.0.0
+   prerelease: false
+   type: :runtime
+   name: atomic
+   requirement: *id003
+ - !ruby/object:Gem::Dependency
+   version_requirements: &id004 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         hash: 43
+         segments:
+         - 2
+         - 9
+         - 0
+         version: 2.9.0
+   prerelease: false
+   type: :development
+   name: rspec
+   requirement: *id004
+ - !ruby/object:Gem::Dependency
+   version_requirements: &id005 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         hash: 3
+         segments:
+         - 0
+         version: "0"
+   prerelease: false
+   type: :development
+   name: rake
+   requirement: *id005
+ - !ruby/object:Gem::Dependency
+   version_requirements: &id006 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         hash: 3
+         segments:
+         - 0
+         version: "0"
+   prerelease: false
+   type: :development
+   name: pry
+   requirement: *id006
+ description: Concurrency utilities including Delays, Promises, Futures, Event Loops, Thread Pools, and Synchronizing wrappers
+ email:
+ - robert@zeevex.com
+ executables: []
+
+ extensions: []
+
+ extra_rdoc_files: []
+
+ files:
+ - .gitignore
+ - Gemfile
+ - LICENSE.txt
+ - README.md
+ - Rakefile
+ - lib/zeevex_concurrency.rb
+ - lib/zeevex_concurrency/delay.rb
+ - lib/zeevex_concurrency/delayed.rb
+ - lib/zeevex_concurrency/event_loop.rb
+ - lib/zeevex_concurrency/future.rb
+ - lib/zeevex_concurrency/logging.rb
+ - lib/zeevex_concurrency/nil_logger.rb
+ - lib/zeevex_concurrency/promise.rb
+ - lib/zeevex_concurrency/synchronized.rb
+ - lib/zeevex_concurrency/thread_pool.rb
+ - lib/zeevex_concurrency/version.rb
+ - script/repl
+ - script/testall
+ - spec/delay_spec.rb
+ - spec/delayed_spec.rb
+ - spec/event_loop_spec.rb
+ - spec/future_spec.rb
+ - spec/promise_spec.rb
+ - spec/spec_helper.rb
+ - spec/thread_pool_spec.rb
+ - zeevex_concurrency.gemspec
+ homepage: ""
+ licenses: []
+
+ post_install_message:
+ rdoc_options: []
+
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       hash: 3
+       segments:
+       - 0
+       version: "0"
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       hash: 3
+       segments:
+       - 0
+       version: "0"
+ requirements: []
+
+ rubyforge_project:
+ rubygems_version: 1.8.24
+ signing_key:
+ specification_version: 3
+ summary: Some concurrency utilities for Ruby
+ test_files:
+ - spec/delay_spec.rb
+ - spec/delayed_spec.rb
+ - spec/event_loop_spec.rb
+ - spec/future_spec.rb
+ - spec/promise_spec.rb
+ - spec/spec_helper.rb
+ - spec/thread_pool_spec.rb
+ has_rdoc: