concurrent-ruby 0.7.0.rc0-x64-mingw32
- checksums.yaml +15 -0
- data/LICENSE.txt +21 -0
- data/README.md +166 -0
- data/ext/concurrent_ruby_ext/atomic_reference.c +78 -0
- data/ext/concurrent_ruby_ext/atomic_reference.h +12 -0
- data/ext/concurrent_ruby_ext/extconf.rb +59 -0
- data/ext/concurrent_ruby_ext/rb_concurrent.c +28 -0
- data/lib/2.0/concurrent_ruby_ext.so +0 -0
- data/lib/concurrent.rb +45 -0
- data/lib/concurrent/actress.rb +221 -0
- data/lib/concurrent/actress/ad_hoc.rb +20 -0
- data/lib/concurrent/actress/context.rb +98 -0
- data/lib/concurrent/actress/core.rb +228 -0
- data/lib/concurrent/actress/core_delegations.rb +42 -0
- data/lib/concurrent/actress/envelope.rb +41 -0
- data/lib/concurrent/actress/errors.rb +14 -0
- data/lib/concurrent/actress/reference.rb +64 -0
- data/lib/concurrent/actress/type_check.rb +48 -0
- data/lib/concurrent/agent.rb +232 -0
- data/lib/concurrent/async.rb +319 -0
- data/lib/concurrent/atomic.rb +46 -0
- data/lib/concurrent/atomic/atomic_boolean.rb +157 -0
- data/lib/concurrent/atomic/atomic_fixnum.rb +162 -0
- data/lib/concurrent/atomic/condition.rb +67 -0
- data/lib/concurrent/atomic/copy_on_notify_observer_set.rb +118 -0
- data/lib/concurrent/atomic/copy_on_write_observer_set.rb +117 -0
- data/lib/concurrent/atomic/count_down_latch.rb +116 -0
- data/lib/concurrent/atomic/cyclic_barrier.rb +106 -0
- data/lib/concurrent/atomic/event.rb +98 -0
- data/lib/concurrent/atomic/thread_local_var.rb +117 -0
- data/lib/concurrent/atomic_reference/concurrent_update_error.rb +7 -0
- data/lib/concurrent/atomic_reference/delegated_update.rb +28 -0
- data/lib/concurrent/atomic_reference/direct_update.rb +28 -0
- data/lib/concurrent/atomic_reference/jruby.rb +8 -0
- data/lib/concurrent/atomic_reference/mutex_atomic.rb +47 -0
- data/lib/concurrent/atomic_reference/numeric_cas_wrapper.rb +24 -0
- data/lib/concurrent/atomic_reference/rbx.rb +16 -0
- data/lib/concurrent/atomic_reference/ruby.rb +16 -0
- data/lib/concurrent/atomics.rb +10 -0
- data/lib/concurrent/channel/buffered_channel.rb +85 -0
- data/lib/concurrent/channel/channel.rb +41 -0
- data/lib/concurrent/channel/unbuffered_channel.rb +34 -0
- data/lib/concurrent/channel/waitable_list.rb +40 -0
- data/lib/concurrent/channels.rb +5 -0
- data/lib/concurrent/collection/blocking_ring_buffer.rb +71 -0
- data/lib/concurrent/collection/priority_queue.rb +305 -0
- data/lib/concurrent/collection/ring_buffer.rb +59 -0
- data/lib/concurrent/collections.rb +3 -0
- data/lib/concurrent/configuration.rb +158 -0
- data/lib/concurrent/dataflow.rb +91 -0
- data/lib/concurrent/delay.rb +112 -0
- data/lib/concurrent/dereferenceable.rb +101 -0
- data/lib/concurrent/errors.rb +30 -0
- data/lib/concurrent/exchanger.rb +34 -0
- data/lib/concurrent/executor/cached_thread_pool.rb +44 -0
- data/lib/concurrent/executor/executor.rb +229 -0
- data/lib/concurrent/executor/fixed_thread_pool.rb +33 -0
- data/lib/concurrent/executor/immediate_executor.rb +16 -0
- data/lib/concurrent/executor/java_cached_thread_pool.rb +31 -0
- data/lib/concurrent/executor/java_fixed_thread_pool.rb +33 -0
- data/lib/concurrent/executor/java_single_thread_executor.rb +21 -0
- data/lib/concurrent/executor/java_thread_pool_executor.rb +187 -0
- data/lib/concurrent/executor/per_thread_executor.rb +24 -0
- data/lib/concurrent/executor/ruby_cached_thread_pool.rb +29 -0
- data/lib/concurrent/executor/ruby_fixed_thread_pool.rb +32 -0
- data/lib/concurrent/executor/ruby_single_thread_executor.rb +73 -0
- data/lib/concurrent/executor/ruby_thread_pool_executor.rb +286 -0
- data/lib/concurrent/executor/ruby_thread_pool_worker.rb +72 -0
- data/lib/concurrent/executor/safe_task_executor.rb +35 -0
- data/lib/concurrent/executor/serialized_execution.rb +90 -0
- data/lib/concurrent/executor/single_thread_executor.rb +35 -0
- data/lib/concurrent/executor/thread_pool_executor.rb +68 -0
- data/lib/concurrent/executor/timer_set.rb +143 -0
- data/lib/concurrent/executors.rb +9 -0
- data/lib/concurrent/future.rb +124 -0
- data/lib/concurrent/ivar.rb +111 -0
- data/lib/concurrent/logging.rb +17 -0
- data/lib/concurrent/mvar.rb +200 -0
- data/lib/concurrent/obligation.rb +171 -0
- data/lib/concurrent/observable.rb +40 -0
- data/lib/concurrent/options_parser.rb +46 -0
- data/lib/concurrent/promise.rb +169 -0
- data/lib/concurrent/scheduled_task.rb +78 -0
- data/lib/concurrent/supervisor.rb +343 -0
- data/lib/concurrent/timer_task.rb +341 -0
- data/lib/concurrent/tvar.rb +252 -0
- data/lib/concurrent/utilities.rb +3 -0
- data/lib/concurrent/utility/processor_count.rb +150 -0
- data/lib/concurrent/utility/timeout.rb +35 -0
- data/lib/concurrent/utility/timer.rb +21 -0
- data/lib/concurrent/version.rb +3 -0
- data/lib/concurrent_ruby.rb +1 -0
- data/lib/concurrent_ruby_ext.so +0 -0
- data/lib/extension_helper.rb +9 -0
- metadata +141 -0
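
A minimal, hedged smoke test (not part of the gem) for the files listed above, assuming the gem is installed and its `lib` directory is on the load path:

    require 'concurrent'        # entry point, data/lib/concurrent.rb above
    puts Concurrent::VERSION    # presumably defined in data/lib/concurrent/version.rb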
data/lib/concurrent/collection/priority_queue.rb
@@ -0,0 +1,305 @@
module Concurrent

  # @!macro [attach] priority_queue
  #
  #   A queue collection in which the elements are sorted based on their
  #   comparison (spaceship) operator `<=>`. Items are added to the queue
  #   at a position relative to their priority. On removal the element
  #   with the "highest" priority is removed. By default the sort order is
  #   from highest to lowest, but a lowest-to-highest sort order can be
  #   set on construction.
  #
  #   The API is based on the `Queue` class from the Ruby standard library.
  #
  #   The pure Ruby implementation, `MutexPriorityQueue` uses a heap algorithm
  #   stored in an array. The algorithm is based on the work of Robert Sedgewick
  #   and Kevin Wayne.
  #
  #   The JRuby native implementation is a thin wrapper around the standard
  #   library `java.util.PriorityQueue`.
  #
  #   When running under JRuby the class `PriorityQueue` extends `JavaPriorityQueue`.
  #   When running under all other interpreters it extends `MutexPriorityQueue`.
  #
  #   @note This implementation is *not* thread safe and performs no blocking.
  #
  #   @see http://en.wikipedia.org/wiki/Priority_queue
  #   @see http://ruby-doc.org/stdlib-2.0.0/libdoc/thread/rdoc/Queue.html
  #
  #   @see http://algs4.cs.princeton.edu/24pq/index.php#2.6
  #   @see http://algs4.cs.princeton.edu/24pq/MaxPQ.java.html
  #
  #   @see http://docs.oracle.com/javase/7/docs/api/java/util/PriorityQueue.html
  class MutexPriorityQueue

    # @!macro [attach] priority_queue_method_initialize
    #
    #   Create a new priority queue with no items.
    #
    #   @param [Hash] opts the options for creating the queue
    #   @option opts [Symbol] :order (:max) dictates the order in which items are
    #     stored: from highest to lowest when `:max` or `:high`; from lowest to
    #     highest when `:min` or `:low`
    def initialize(opts = {})
      order = opts.fetch(:order, :max)
      @comparator = [:min, :low].include?(order) ? -1 : 1
      clear
    end

    # @!macro [attach] priority_queue_method_clear
    #
    #   Removes all of the elements from this priority queue.
    def clear
      @queue = [nil]
      @length = 0
      true
    end

    # @!macro [attach] priority_queue_method_delete
    #
    #   Deletes all items from `self` that are equal to `item`.
    #
    #   @param [Object] item the item to be removed from the queue
    #   @return [Object] true if the item is found else false
    def delete(item)
      original_length = @length
      k = 1
      while k <= @length
        if @queue[k] == item
          swap(k, @length)
          @length -= 1
          sink(k)
          @queue.pop
        else
          k += 1
        end
      end
      @length != original_length
    end

    # @!macro [attach] priority_queue_method_empty
    #
    #   Returns `true` if `self` contains no elements.
    #
    #   @return [Boolean] true if there are no items in the queue else false
    def empty?
      size == 0
    end

    # @!macro [attach] priority_queue_method_include
    #
    #   Returns `true` if the given item is present in `self` (that is, if any
    #   element == `item`), otherwise returns false.
    #
    #   @param [Object] item the item to search for
    #
    #   @return [Boolean] true if the item is found else false
    def include?(item)
      @queue.include?(item)
    end
    alias_method :has_priority?, :include?

    # @!macro [attach] priority_queue_method_length
    #
    #   The current length of the queue.
    #
    #   @return [Fixnum] the number of items in the queue
    def length
      @length
    end
    alias_method :size, :length

    # @!macro [attach] priority_queue_method_peek
    #
    #   Retrieves, but does not remove, the head of this queue, or returns `nil`
    #   if this queue is empty.
    #
    #   @return [Object] the head of the queue or `nil` when empty
    def peek
      @queue[1]
    end

    # @!macro [attach] priority_queue_method_pop
    #
    #   Retrieves and removes the head of this queue, or returns `nil` if this
    #   queue is empty.
    #
    #   @return [Object] the head of the queue or `nil` when empty
    def pop
      max = @queue[1]
      swap(1, @length)
      @length -= 1
      sink(1)
      @queue.pop
      max
    end
    alias_method :deq, :pop
    alias_method :shift, :pop

    # @!macro [attach] priority_queue_method_push
    #
    #   Inserts the specified element into this priority queue.
    #
    #   @param [Object] item the item to insert onto the queue
    def push(item)
      @length += 1
      @queue << item
      swim(@length)
      true
    end
    alias_method :<<, :push
    alias_method :enq, :push

    # @!macro [attach] priority_queue_method_from_list
    #
    #   Create a new priority queue from the given list.
    #
    #   @param [Enumerable] list the list to build the queue from
    #   @param [Hash] opts the options for creating the queue
    #
    #   @return [PriorityQueue] the newly created and populated queue
    def self.from_list(list, opts = {})
      queue = new(opts)
      list.each{|item| queue << item }
      queue
    end

    protected

    # Exchange the values at the given indexes within the internal array.
    #
    # @param [Integer] x the first index to swap
    # @param [Integer] y the second index to swap
    #
    # @!visibility private
    def swap(x, y)
      temp = @queue[x]
      @queue[x] = @queue[y]
      @queue[y] = temp
    end

    # Are the items at the given indexes ordered based on the priority
    # order specified at construction?
    #
    # @param [Integer] x the first index from which to retrieve a comparable value
    # @param [Integer] y the second index from which to retrieve a comparable value
    #
    # @return [Boolean] true if the two elements are in the correct priority order
    #   else false
    #
    # @!visibility private
    def ordered?(x, y)
      (@queue[x] <=> @queue[y]) == @comparator
    end

    # Percolate down to maintain heap invariant.
    #
    # @param [Integer] k the index at which to start the percolation
    #
    # @!visibility private
    def sink(k)
      while (j = (2 * k)) <= @length do
        j += 1 if j < @length && ! ordered?(j, j+1)
        break if ordered?(k, j)
        swap(k, j)
        k = j
      end
    end

    # Percolate up to maintain heap invariant.
    #
    # @param [Integer] k the index at which to start the percolation
    #
    # @!visibility private
    def swim(k)
      while k > 1 && ! ordered?(k/2, k) do
        swap(k, k/2)
        k = k/2
      end
    end
  end

  if RUBY_PLATFORM == 'java'

    # @!macro priority_queue
    class JavaPriorityQueue

      # @!macro priority_queue_method_initialize
      def initialize(opts = {})
        order = opts.fetch(:order, :max)
        if [:min, :low].include?(order)
          @queue = java.util.PriorityQueue.new(11) # 11 is the default initial capacity
        else
          @queue = java.util.PriorityQueue.new(11, java.util.Collections.reverseOrder())
        end
      end

      # @!macro priority_queue_method_clear
      def clear
        @queue.clear
        true
      end

      # @!macro priority_queue_method_delete
      def delete(item)
        found = false
        while @queue.remove(item) do
          found = true
        end
        found
      end

      # @!macro priority_queue_method_empty
      def empty?
        @queue.size == 0
      end

      # @!macro priority_queue_method_include
      def include?(item)
        @queue.contains(item)
      end
      alias_method :has_priority?, :include?

      # @!macro priority_queue_method_length
      def length
        @queue.size
      end
      alias_method :size, :length

      # @!macro priority_queue_method_peek
      def peek
        @queue.peek
      end

      # @!macro priority_queue_method_pop
      def pop
        @queue.poll
      end
      alias_method :deq, :pop
      alias_method :shift, :pop

      # @!macro priority_queue_method_push
      def push(item)
        @queue.add(item)
      end
      alias_method :<<, :push
      alias_method :enq, :push

      # @!macro priority_queue_method_from_list
      def self.from_list(list, opts = {})
        queue = new(opts)
        list.each{|item| queue << item }
        queue
      end
    end

    # @!macro priority_queue
    class PriorityQueue < JavaPriorityQueue
    end
  else

    # @!macro priority_queue
    class PriorityQueue < MutexPriorityQueue
    end
  end
end
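
A short usage sketch (not part of the diff) based on the `PriorityQueue` API defined above; it assumes the file is loadable, for example via `require 'concurrent'`:

    queue = Concurrent::PriorityQueue.new(order: :min)  # lowest-to-highest order
    queue << 3
    queue << 1
    queue.push(2)

    queue.peek          #=> 1    (head of the queue, not removed)
    queue.pop           #=> 1    (head of the queue, removed)
    queue.length        #=> 2
    queue.include?(3)   #=> true

    # Default order is :max, so the highest-priority item is popped first.
    max_queue = Concurrent::PriorityQueue.from_list([5, 9, 2])
    max_queue.pop       #=> 9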
data/lib/concurrent/collection/ring_buffer.rb
@@ -0,0 +1,59 @@
module Concurrent

  # non-thread safe buffer
  class RingBuffer

    def initialize(capacity)
      @buffer = Array.new(capacity)
      @first = @last = 0
      @count = 0
    end

    # @return [Integer] the capacity of the buffer
    def capacity
      @buffer.size
    end

    # @return [Integer] the number of elements currently in the buffer
    def count
      @count
    end

    # @return [Boolean] true if buffer is empty, false otherwise
    def empty?
      @count == 0
    end

    # @return [Boolean] true if buffer is full, false otherwise
    def full?
      @count == capacity
    end

    # @param [Object] value
    # @return [Boolean] true if value has been inserted, false otherwise
    def offer(value)
      return false if full?

      @buffer[@last] = value
      @last = (@last + 1) % @buffer.size
      @count += 1
      true
    end

    # @return [Object] the first available value and removes it from the buffer. If buffer is empty returns nil
    def poll
      result = @buffer[@first]
      @buffer[@first] = nil
      @first = (@first + 1) % @buffer.size
      @count -= 1
      result
    end

    # @return [Object] the first available value and without removing it from the buffer. If buffer is empty returns nil
    def peek
      @buffer[@first]
    end

  end
end
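
A quick illustration of the `RingBuffer` semantics above (not part of the diff). As the class comment notes, the buffer itself is not thread safe; the `BlockingRingBuffer` listed in the manifest is presumably the synchronized counterpart.

    buffer = Concurrent::RingBuffer.new(3)

    buffer.offer(:a)   #=> true
    buffer.offer(:b)   #=> true
    buffer.offer(:c)   #=> true
    buffer.offer(:d)   #=> false  (buffer is full, value rejected)

    buffer.peek        #=> :a     (oldest value, left in place)
    buffer.poll        #=> :a     (oldest value, removed)
    buffer.count       #=> 2
    buffer.capacity    #=> 3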
data/lib/concurrent/configuration.rb
@@ -0,0 +1,158 @@
require 'thread'
require 'concurrent/delay'
require 'concurrent/errors'
require 'concurrent/atomic'
require 'concurrent/executor/thread_pool_executor'
require 'concurrent/executor/timer_set'
require 'concurrent/utility/processor_count'

module Concurrent
  extend Logging

  # A gem-level configuration object.
  class Configuration

    # a proc defining how to log messages, its interface has to be:
    #   lambda { |level, progname, message = nil, &block| _ }
    attr_accessor :logger

    # Create a new configuration object.
    def initialize
      @global_task_pool = Delay.new { new_task_pool }
      @global_operation_pool = Delay.new { new_operation_pool }
      @global_timer_set = Delay.new { Concurrent::TimerSet.new }
      @logger = no_logger
    end

    # if assigned to {#logger}, it will log nothing.
    def no_logger
      lambda { |level, progname, message = nil, &block| }
    end

    # Global thread pool optimized for short *tasks*.
    #
    # @return [ThreadPoolExecutor] the thread pool
    def global_task_pool
      @global_task_pool.value
    end

    # Global thread pool optimized for long *operations*.
    #
    # @return [ThreadPoolExecutor] the thread pool
    def global_operation_pool
      @global_operation_pool.value
    end

    # Global thread pool optimized for *timers*
    #
    # @return [ThreadPoolExecutor] the thread pool
    #
    # @see Concurrent::timer
    def global_timer_set
      @global_timer_set.value
    end

    # Global thread pool optimized for short *tasks*.
    #
    # A global thread pool must be set as soon as the gem is loaded. Setting a new
    # thread pool once tasks and operations have been post can lead to unpredictable
    # results. The first time a task/operation is post a new thread pool will be
    # created using the default configuration. Once set the thread pool cannot be
    # changed. Thus, explicitly setting the thread pool must occur *before* any
    # tasks/operations are post else an exception will be raised.
    #
    # @param [Executor] executor the executor to be used for this thread pool
    #
    # @return [ThreadPoolExecutor] the new thread pool
    #
    # @raise [Concurrent::ConfigurationError] if this thread pool has already been set
    def global_task_pool=(executor)
      @global_task_pool.reconfigure { executor } or
        raise ConfigurationError.new('global task pool was already set')
    end

    # Global thread pool optimized for long *operations*.
    #
    # A global thread pool must be set as soon as the gem is loaded. Setting a new
    # thread pool once tasks and operations have been post can lead to unpredictable
    # results. The first time a task/operation is post a new thread pool will be
    # created using the default configuration. Once set the thread pool cannot be
    # changed. Thus, explicitly setting the thread pool must occur *before* any
    # tasks/operations are post else an exception will be raised.
    #
    # @param [Executor] executor the executor to be used for this thread pool
    #
    # @return [ThreadPoolExecutor] the new thread pool
    #
    # @raise [Concurrent::ConfigurationError] if this thread pool has already been set
    def global_operation_pool=(executor)
      @global_operation_pool.reconfigure { executor } or
        raise ConfigurationError.new('global operation pool was already set')
    end

    def new_task_pool
      Concurrent::ThreadPoolExecutor.new(
        min_threads: [2, Concurrent.processor_count].max,
        max_threads: [20, Concurrent.processor_count * 15].max,
        idletime: 2 * 60, # 2 minutes
        max_queue: 0, # unlimited
        overflow_policy: :abort # raise an exception
      )
    end

    def new_operation_pool
      Concurrent::ThreadPoolExecutor.new(
        min_threads: [2, Concurrent.processor_count].max,
        max_threads: [2, Concurrent.processor_count].max,
        idletime: 10 * 60, # 10 minutes
        max_queue: [20, Concurrent.processor_count * 15].max,
        overflow_policy: :abort # raise an exception
      )
    end
  end

  # create the default configuration on load
  @configuration = Atomic.new Configuration.new

  # @return [Configuration]
  def self.configuration
    @configuration.value
  end

  # Perform gem-level configuration.
  #
  # @yield the configuration commands
  # @yieldparam [Configuration] the current configuration object
  def self.configure
    yield(configuration)
  end

  private

  # Attempt to properly shutdown the given executor using the `shutdown` or
  # `kill` method when available.
  #
  # @param [Executor] executor the executor to shutdown
  #
  # @return [Boolean] `true` if the executor is successfully shut down or `nil`, else `false`
  def self.finalize_executor(executor)
    return true if executor.nil?
    if executor.respond_to?(:shutdown)
      executor.shutdown
    elsif executor.respond_to?(:kill)
      executor.kill
    end
    true
  rescue => ex
    log DEBUG, ex
    false
  end

  # set exit hook to shutdown global thread pools
  at_exit do
    self.finalize_executor(self.configuration.global_timer_set)
    self.finalize_executor(self.configuration.global_task_pool)
    self.finalize_executor(self.configuration.global_operation_pool)
  end
end
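
A hedged sketch of gem-level configuration using the `Concurrent.configure` hook above (not part of the diff). The `Logger` adapter and the pool options are illustrative; the option keys mirror those used by `new_task_pool`.

    require 'logger'
    require 'concurrent'

    Concurrent.configure do |config|
      # Adapt a standard Logger to the lambda interface documented on #logger.
      logger = Logger.new($stderr)
      config.logger = lambda do |level, progname, message = nil, &block|
        logger.add(level, message, progname, &block)
      end

      # Per the docs above, this must happen before any task is posted,
      # otherwise a ConfigurationError is raised.
      config.global_task_pool = Concurrent::ThreadPoolExecutor.new(
        min_threads: 2,
        max_threads: 10,
        idletime: 60,
        max_queue: 0,
        overflow_policy: :abort
      )
    end

    Concurrent.configuration.global_task_pool  #=> the executor assigned above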