concurrent-ruby 0.6.0.pre.1 → 0.6.0.pre.2
This diff reflects the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/README.md +16 -0
- data/lib/concurrent.rb +9 -29
- data/lib/concurrent/{actor.rb → actor/actor.rb} +3 -3
- data/lib/concurrent/actor/actor_context.rb +77 -0
- data/lib/concurrent/actor/actor_ref.rb +67 -0
- data/lib/concurrent/{postable.rb → actor/postable.rb} +1 -1
- data/lib/concurrent/actor/simple_actor_ref.rb +94 -0
- data/lib/concurrent/actors.rb +5 -0
- data/lib/concurrent/agent.rb +81 -47
- data/lib/concurrent/async.rb +35 -35
- data/lib/concurrent/atomic/atomic_boolean.rb +157 -0
- data/lib/concurrent/atomic/atomic_fixnum.rb +170 -0
- data/lib/concurrent/{condition.rb → atomic/condition.rb} +0 -0
- data/lib/concurrent/{copy_on_notify_observer_set.rb → atomic/copy_on_notify_observer_set.rb} +48 -13
- data/lib/concurrent/{copy_on_write_observer_set.rb → atomic/copy_on_write_observer_set.rb} +41 -20
- data/lib/concurrent/atomic/count_down_latch.rb +116 -0
- data/lib/concurrent/atomic/cyclic_barrier.rb +106 -0
- data/lib/concurrent/atomic/event.rb +103 -0
- data/lib/concurrent/{thread_local_var.rb → atomic/thread_local_var.rb} +0 -0
- data/lib/concurrent/atomics.rb +9 -0
- data/lib/concurrent/channel/buffered_channel.rb +6 -4
- data/lib/concurrent/channel/channel.rb +30 -2
- data/lib/concurrent/channel/unbuffered_channel.rb +2 -2
- data/lib/concurrent/channel/waitable_list.rb +3 -1
- data/lib/concurrent/channels.rb +5 -0
- data/lib/concurrent/{channel → collection}/blocking_ring_buffer.rb +16 -5
- data/lib/concurrent/collection/priority_queue.rb +305 -0
- data/lib/concurrent/{channel → collection}/ring_buffer.rb +6 -1
- data/lib/concurrent/collections.rb +3 -0
- data/lib/concurrent/configuration.rb +68 -19
- data/lib/concurrent/dataflow.rb +9 -9
- data/lib/concurrent/delay.rb +21 -13
- data/lib/concurrent/dereferenceable.rb +40 -33
- data/lib/concurrent/exchanger.rb +3 -0
- data/lib/concurrent/{cached_thread_pool.rb → executor/cached_thread_pool.rb} +8 -9
- data/lib/concurrent/executor/executor.rb +222 -0
- data/lib/concurrent/{fixed_thread_pool.rb → executor/fixed_thread_pool.rb} +6 -7
- data/lib/concurrent/{immediate_executor.rb → executor/immediate_executor.rb} +5 -5
- data/lib/concurrent/executor/java_cached_thread_pool.rb +31 -0
- data/lib/concurrent/{java_fixed_thread_pool.rb → executor/java_fixed_thread_pool.rb} +7 -11
- data/lib/concurrent/executor/java_single_thread_executor.rb +21 -0
- data/lib/concurrent/{java_thread_pool_executor.rb → executor/java_thread_pool_executor.rb} +66 -77
- data/lib/concurrent/executor/one_by_one.rb +65 -0
- data/lib/concurrent/{per_thread_executor.rb → executor/per_thread_executor.rb} +4 -4
- data/lib/concurrent/executor/ruby_cached_thread_pool.rb +29 -0
- data/lib/concurrent/{ruby_fixed_thread_pool.rb → executor/ruby_fixed_thread_pool.rb} +5 -4
- data/lib/concurrent/executor/ruby_single_thread_executor.rb +72 -0
- data/lib/concurrent/executor/ruby_thread_pool_executor.rb +282 -0
- data/lib/concurrent/{ruby_thread_pool_worker.rb → executor/ruby_thread_pool_worker.rb} +6 -6
- data/lib/concurrent/{safe_task_executor.rb → executor/safe_task_executor.rb} +20 -13
- data/lib/concurrent/executor/single_thread_executor.rb +35 -0
- data/lib/concurrent/executor/thread_pool_executor.rb +68 -0
- data/lib/concurrent/executor/timer_set.rb +138 -0
- data/lib/concurrent/executors.rb +9 -0
- data/lib/concurrent/future.rb +39 -40
- data/lib/concurrent/ivar.rb +22 -15
- data/lib/concurrent/mvar.rb +2 -1
- data/lib/concurrent/obligation.rb +9 -3
- data/lib/concurrent/observable.rb +33 -0
- data/lib/concurrent/options_parser.rb +46 -0
- data/lib/concurrent/promise.rb +23 -24
- data/lib/concurrent/scheduled_task.rb +21 -45
- data/lib/concurrent/timer_task.rb +204 -126
- data/lib/concurrent/tvar.rb +1 -1
- data/lib/concurrent/utilities.rb +3 -36
- data/lib/concurrent/{processor_count.rb → utility/processor_count.rb} +1 -1
- data/lib/concurrent/utility/timeout.rb +36 -0
- data/lib/concurrent/utility/timer.rb +21 -0
- data/lib/concurrent/version.rb +1 -1
- data/lib/concurrent_ruby_ext.bundle +0 -0
- data/spec/concurrent/{actor_context_spec.rb → actor/actor_context_spec.rb} +0 -8
- data/spec/concurrent/{actor_ref_shared.rb → actor/actor_ref_shared.rb} +9 -59
- data/spec/concurrent/{actor_spec.rb → actor/actor_spec.rb} +43 -41
- data/spec/concurrent/{postable_shared.rb → actor/postable_shared.rb} +0 -0
- data/spec/concurrent/actor/simple_actor_ref_spec.rb +135 -0
- data/spec/concurrent/agent_spec.rb +160 -71
- data/spec/concurrent/atomic/atomic_boolean_spec.rb +172 -0
- data/spec/concurrent/atomic/atomic_fixnum_spec.rb +186 -0
- data/spec/concurrent/{condition_spec.rb → atomic/condition_spec.rb} +2 -2
- data/spec/concurrent/{copy_on_notify_observer_set_spec.rb → atomic/copy_on_notify_observer_set_spec.rb} +0 -0
- data/spec/concurrent/{copy_on_write_observer_set_spec.rb → atomic/copy_on_write_observer_set_spec.rb} +0 -0
- data/spec/concurrent/atomic/count_down_latch_spec.rb +151 -0
- data/spec/concurrent/atomic/cyclic_barrier_spec.rb +248 -0
- data/spec/concurrent/{event_spec.rb → atomic/event_spec.rb} +18 -3
- data/spec/concurrent/{observer_set_shared.rb → atomic/observer_set_shared.rb} +15 -6
- data/spec/concurrent/{thread_local_var_spec.rb → atomic/thread_local_var_spec.rb} +0 -0
- data/spec/concurrent/channel/buffered_channel_spec.rb +1 -1
- data/spec/concurrent/channel/channel_spec.rb +6 -4
- data/spec/concurrent/channel/probe_spec.rb +37 -9
- data/spec/concurrent/channel/unbuffered_channel_spec.rb +2 -2
- data/spec/concurrent/{channel → collection}/blocking_ring_buffer_spec.rb +0 -0
- data/spec/concurrent/collection/priority_queue_spec.rb +317 -0
- data/spec/concurrent/{channel → collection}/ring_buffer_spec.rb +0 -0
- data/spec/concurrent/configuration_spec.rb +4 -70
- data/spec/concurrent/dereferenceable_shared.rb +5 -4
- data/spec/concurrent/exchanger_spec.rb +10 -5
- data/spec/concurrent/{cached_thread_pool_shared.rb → executor/cached_thread_pool_shared.rb} +15 -37
- data/spec/concurrent/{fixed_thread_pool_shared.rb → executor/fixed_thread_pool_shared.rb} +0 -0
- data/spec/concurrent/{global_thread_pool_shared.rb → executor/global_thread_pool_shared.rb} +10 -8
- data/spec/concurrent/{immediate_executor_spec.rb → executor/immediate_executor_spec.rb} +0 -0
- data/spec/concurrent/{java_cached_thread_pool_spec.rb → executor/java_cached_thread_pool_spec.rb} +1 -21
- data/spec/concurrent/{java_fixed_thread_pool_spec.rb → executor/java_fixed_thread_pool_spec.rb} +0 -0
- data/spec/concurrent/executor/java_single_thread_executor_spec.rb +21 -0
- data/spec/concurrent/{java_thread_pool_executor_spec.rb → executor/java_thread_pool_executor_spec.rb} +0 -0
- data/spec/concurrent/{per_thread_executor_spec.rb → executor/per_thread_executor_spec.rb} +0 -4
- data/spec/concurrent/{ruby_cached_thread_pool_spec.rb → executor/ruby_cached_thread_pool_spec.rb} +1 -1
- data/spec/concurrent/{ruby_fixed_thread_pool_spec.rb → executor/ruby_fixed_thread_pool_spec.rb} +0 -0
- data/spec/concurrent/executor/ruby_single_thread_executor_spec.rb +18 -0
- data/spec/concurrent/{ruby_thread_pool_executor_spec.rb → executor/ruby_thread_pool_executor_spec.rb} +12 -24
- data/spec/concurrent/executor/safe_task_executor_spec.rb +103 -0
- data/spec/concurrent/{thread_pool_class_cast_spec.rb → executor/thread_pool_class_cast_spec.rb} +12 -0
- data/spec/concurrent/{thread_pool_executor_shared.rb → executor/thread_pool_executor_shared.rb} +0 -0
- data/spec/concurrent/{thread_pool_shared.rb → executor/thread_pool_shared.rb} +84 -119
- data/spec/concurrent/executor/timer_set_spec.rb +183 -0
- data/spec/concurrent/future_spec.rb +12 -0
- data/spec/concurrent/ivar_spec.rb +11 -1
- data/spec/concurrent/observable_shared.rb +173 -0
- data/spec/concurrent/observable_spec.rb +51 -0
- data/spec/concurrent/options_parser_spec.rb +71 -0
- data/spec/concurrent/runnable_shared.rb +6 -0
- data/spec/concurrent/scheduled_task_spec.rb +60 -40
- data/spec/concurrent/timer_task_spec.rb +130 -144
- data/spec/concurrent/{processor_count_spec.rb → utility/processor_count_spec.rb} +0 -0
- data/spec/concurrent/{utilities_spec.rb → utility/timeout_spec.rb} +0 -0
- data/spec/concurrent/utility/timer_spec.rb +52 -0
- metadata +147 -108
- data/lib/concurrent/actor_context.rb +0 -31
- data/lib/concurrent/actor_ref.rb +0 -39
- data/lib/concurrent/atomic.rb +0 -121
- data/lib/concurrent/channel/probe.rb +0 -19
- data/lib/concurrent/count_down_latch.rb +0 -60
- data/lib/concurrent/event.rb +0 -80
- data/lib/concurrent/java_cached_thread_pool.rb +0 -45
- data/lib/concurrent/ruby_cached_thread_pool.rb +0 -37
- data/lib/concurrent/ruby_thread_pool_executor.rb +0 -268
- data/lib/concurrent/simple_actor_ref.rb +0 -124
- data/lib/concurrent/thread_pool_executor.rb +0 -30
- data/spec/concurrent/atomic_spec.rb +0 -201
- data/spec/concurrent/count_down_latch_spec.rb +0 -125
- data/spec/concurrent/safe_task_executor_spec.rb +0 -58
- data/spec/concurrent/simple_actor_ref_spec.rb +0 -219
data/lib/concurrent/{channel → collection}/ring_buffer.rb
CHANGED

@@ -1,6 +1,6 @@
 module Concurrent
 
-  #
+  # non-thread safe buffer
   class RingBuffer
 
     def initialize(capacity)
@@ -9,18 +9,23 @@ module Concurrent
       @count = 0
     end
 
+
+    # @return [Integer] the capacity of the buffer
     def capacity
       @buffer.size
     end
 
+    # @return [Integer] the number of elements currently in the buffer
     def count
       @count
     end
 
+    # @return [Boolean] true if buffer is empty, false otherwise
     def empty?
       @count == 0
     end
 
+    # @return [Boolean] true if buffer is full, false otherwise
     def full?
       @count == capacity
     end
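For orientation, a minimal sketch of the accessors documented above, assuming the buffer is required from its new `concurrent/collection` path:

    require 'concurrent/collection/ring_buffer'

    # capacity is fixed at construction; count tracks how many elements are held
    buffer = Concurrent::RingBuffer.new(3)
    buffer.capacity #=> 3
    buffer.count    #=> 0
    buffer.empty?   #=> true
    buffer.full?    #=> false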
data/lib/concurrent/configuration.rb
CHANGED

@@ -1,29 +1,43 @@
 require 'thread'
-require 'concurrent/thread_pool_executor'
-require 'concurrent/
+require 'concurrent/executor/thread_pool_executor'
+require 'concurrent/executor/timer_set'
+require 'concurrent/utility/processor_count'
 
 module Concurrent
 
+  # An error class to be raised when errors occur during configuration.
   ConfigurationError = Class.new(StandardError)
 
   class << self
     attr_accessor :configuration
   end
 
+  # Perform gem-level configuration.
+  #
+  # @yield the configuration commands
+  # @yieldparam [Configuration] the current configuration object
   def self.configure
     (@mutex ||= Mutex.new).synchronize do
       yield(configuration)
+
+      # initialize the global thread pools if necessary
+      configuration.global_task_pool
+      configuration.global_operation_pool
+      configuration.global_timer_set
     end
   end
 
+  # A gem-level configuration object.
   class Configuration
-    attr_accessor :global_task_pool
-    attr_accessor :global_operation_pool
 
+    # Create a new configuration object.
     def initialize
       @cores ||= Concurrent::processor_count
     end
 
+    # Global thread pool optimized for short *tasks*.
+    #
+    # @return [ThreadPoolExecutor] the thread pool
     def global_task_pool
       @global_task_pool ||= Concurrent::ThreadPoolExecutor.new(
         min_threads: [2, @cores].max,
@@ -34,6 +48,9 @@ module Concurrent
       )
     end
 
+    # Global thread pool optimized for long *operations*.
+    #
+    # @return [ThreadPoolExecutor] the thread pool
     def global_operation_pool
       @global_operation_pool ||= Concurrent::ThreadPoolExecutor.new(
         min_threads: [2, @cores].max,
@@ -44,41 +61,72 @@ module Concurrent
       )
     end
 
+    # Global thread pool optimized for *timers*
+    #
+    # @return [ThreadPoolExecutor] the thread pool
+    #
+    # @see Concurrent::timer
+    def global_timer_set
+      @global_timer_set ||= Concurrent::TimerSet.new
+    end
+
+    # Global thread pool optimized for short *tasks*.
+    #
+    # A global thread pool must be set as soon as the gem is loaded. Setting a new
+    # thread pool once tasks and operations have been post can lead to unpredictable
+    # results. The first time a task/operation is post a new thread pool will be
+    # created using the default configuration. Once set the thread pool cannot be
+    # changed. Thus, explicitly setting the thread pool must occur *before* any
+    # tasks/operations are post else an exception will be raised.
+    #
+    # @param [Executor] executor the executor to be used for this thread pool
+    #
+    # @return [ThreadPoolExecutor] the new thread pool
+    #
+    # @raise [ConfigurationError] if this thread pool has already been set
     def global_task_pool=(executor)
      raise ConfigurationError.new('global task pool was already set') unless @global_task_pool.nil?
       @global_task_pool = executor
     end
 
+    # Global thread pool optimized for long *operations*.
+    #
+    # A global thread pool must be set as soon as the gem is loaded. Setting a new
+    # thread pool once tasks and operations have been post can lead to unpredictable
+    # results. The first time a task/operation is post a new thread pool will be
+    # created using the default configuration. Once set the thread pool cannot be
+    # changed. Thus, explicitly setting the thread pool must occur *before* any
+    # tasks/operations are post else an exception will be raised.
+    #
+    # @param [Executor] executor the executor to be used for this thread pool
+    #
+    # @return [ThreadPoolExecutor] the new thread pool
+    #
+    # @raise [ConfigurationError] if this thread pool has already been set
     def global_operation_pool=(executor)
      raise ConfigurationError.new('global operation pool was already set') unless @global_operation_pool.nil?
       @global_operation_pool = executor
     end
   end
 
-  module OptionsParser
-
-    def get_executor_from(opts = {})
-      if opts[:executor]
-        opts[:executor]
-      elsif opts[:operation] == true || opts[:task] == false
-        Concurrent.configuration.global_operation_pool
-      else
-        Concurrent.configuration.global_task_pool
-      end
-    end
-  end
-
   private
 
+  # Attempt to properly shutdown the given executor using the `shutdown` or
+  # `kill` method when available.
+  #
+  # @param [Executor] executor the executor to shutdown
+  #
+  # @return [Boolean] `true` if the executor is successfully shut down or `nil`, else `false`
   def self.finalize_executor(executor)
-    return if executor.nil?
+    return true if executor.nil?
     if executor.respond_to?(:shutdown)
       executor.shutdown
     elsif executor.respond_to?(:kill)
       executor.kill
     end
+    true
   rescue
-
+    false
   end
 
   # create the default configuration on load
@@ -86,6 +134,7 @@ module Concurrent
 
   # set exit hook to shutdown global thread pools
   at_exit do
+    self.finalize_executor(self.configuration.global_timer_set)
     self.finalize_executor(self.configuration.global_task_pool)
     self.finalize_executor(self.configuration.global_operation_pool)
   end
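The comments added above spell out the configuration contract: `Concurrent.configure` yields the gem-level `Configuration` object inside a mutex, touches the global pools so they are initialized up front, and each pool setter raises `ConfigurationError` once its pool already exists. A hedged sketch of that flow (the `FixedThreadPool` here is only an illustrative choice):

    require 'concurrent'

    Concurrent.configure do |config|
      # must run before any task or operation is posted; once the default
      # pool has been created the setter below raises Concurrent::ConfigurationError
      config.global_task_pool = Concurrent::FixedThreadPool.new(4)
    end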
data/lib/concurrent/dataflow.rb
CHANGED

@@ -1,6 +1,6 @@
-require 'concurrent/atomic'
 require 'concurrent/future'
-require 'concurrent/
+require 'concurrent/atomic/atomic_fixnum'
+require 'concurrent/executor/per_thread_executor'
 
 module Concurrent
 
@@ -20,13 +20,13 @@ module Concurrent
   end
 
   # Dataflow allows you to create a task that will be scheduled then all of its
-  # data dependencies are available. Data dependencies are
-  # dataflow task itself is also a
+  # data dependencies are available. Data dependencies are `Future` values. The
+  # dataflow task itself is also a `Future` value, so you can build up a graph of
   # these tasks, each of which is run when all the data and other tasks it depends
   # on are available or completed.
   #
-  # Our syntax is somewhat related to that of Akka's
-  #
+  # Our syntax is somewhat related to that of Akka's `flow` and Habanero Java's
+  # `DataDrivenFuture`. However unlike Akka we don't schedule a task at all until
   # it is ready to run, and unlike Habanero Java we pass the data values into the
   # task instead of dereferencing them again in the task.
   #
@@ -50,16 +50,16 @@ module Concurrent
   #   # wait up to 1 second for the answer...
   #   f.value(1) #=> 377
   #
-  # @param [Future] inputs zero or more
+  # @param [Future] inputs zero or more `Future` operations that this dataflow depends upon
   #
   # @yield The operation to perform once all the dependencies are met
-  # @yieldparam [Future] inputs each of the
+  # @yieldparam [Future] inputs each of the `Future` inputs to the dataflow
   # @yieldreturn [Object] the result of the block operation
   #
   # @return [Object] the result of all the operations
   #
   # @raise [ArgumentError] if no block is given
-  # @raise [ArgumentError] if any of the inputs are not
+  # @raise [ArgumentError] if any of the inputs are not `IVar`s
   def dataflow(*inputs, &block)
     dataflow_with(Concurrent.configuration.global_task_pool, *inputs, &block)
   end
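As the updated docs describe, each `dataflow` call returns a `Future`, and a dataflow with inputs is only scheduled once all of its input futures are fulfilled. A brief sketch, modeled on the Fibonacci example referenced above:

    require 'concurrent'

    a = Concurrent::dataflow { 1 }                   # no dependencies, runs immediately
    b = Concurrent::dataflow { 2 }
    c = Concurrent::dataflow(a, b) { |x, y| x + y }  # waits for a and b

    c.value(1) #=> 3 (waits up to 1 second, as in the `f.value(1)` example above)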
data/lib/concurrent/delay.rb
CHANGED

@@ -32,14 +32,14 @@ module Concurrent
   class Delay
     include Obligation
 
-    # Create a new
+    # Create a new `Delay` in the `:pending` state.
     #
     # @yield the delayed operation to perform
     #
     # @param [Hash] opts the options to create a message with
-    # @option opts [String] :dup_on_deref (false) call
-    # @option opts [String] :freeze_on_deref (false) call
-    # @option opts [String] :copy_on_deref (nil) call the given
+    # @option opts [String] :dup_on_deref (false) call `#dup` before returning the data
+    # @option opts [String] :freeze_on_deref (false) call `#freeze` before returning the data
+    # @option opts [String] :copy_on_deref (nil) call the given `Proc` passing the internal value and
     #   returning the value returned from the proc
     #
     # @raise [ArgumentError] if no block is given
@@ -54,24 +54,34 @@ module Concurrent
 
     # Return the (possibly memoized) value of the delayed operation.
     #
-    # If the state is
-    # operation is performed. All other threads simultaneously calling
-    # will block as well. Once the operation is complete (either
-    #
+    # If the state is `:pending` then the calling thread will block while the
+    # operation is performed. All other threads simultaneously calling `#value`
+    # will block as well. Once the operation is complete (either `:fulfilled` or
+    # `:rejected`) all waiting threads will unblock and the new value will be
     # returned.
     #
-    # If the state is not
+    # If the state is not `:pending` when `#value` is called the (possibly memoized)
     # value will be returned without blocking and without performing the operation
     # again.
     #
-    # Regardless of the final disposition all
+    # Regardless of the final disposition all `Dereferenceable` options set during
     # object construction will be honored.
     #
     # @return [Object] the (possibly memoized) result of the block operation
     #
     # @see Concurrent::Dereferenceable
     def value
-      mutex.
+      mutex.lock
+      execute_task_once
+      result = apply_deref_options(@value)
+      mutex.unlock
+
+      result
+    end
+
+    private
+
+    def execute_task_once
       if @state == :pending
         begin
           @value = @task.call
@@ -81,8 +91,6 @@ module Concurrent
           @state = :rejected
         end
       end
-      return apply_deref_options(@value)
     end
-    end
   end
 end
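A short usage sketch of the `Delay` semantics documented above: the block is not run until the first `#value` call, the result is memoized, and the `Dereferenceable` options passed to the constructor still apply:

    require 'concurrent'

    delay = Concurrent::Delay.new(freeze_on_deref: true) do
      [1, 2, 3].map { |n| n * n }   # not executed yet; state is :pending
    end

    delay.value          #=> [1, 4, 9], computed on the first call and frozen before return
    delay.value.frozen?  #=> true
    delay.value          # memoized; the block does not run again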
data/lib/concurrent/dereferenceable.rb
CHANGED

@@ -1,58 +1,66 @@
 module Concurrent
 
   # Object references in Ruby are mutable. This can lead to serious problems when
-  # the
-  # case unless the value is a
-  # Most classes in this library that expose a
+  # the `#value` of a concurrent object is a mutable reference. Which is always the
+  # case unless the value is a `Fixnum`, `Symbol`, or similar "primitive" data type.
+  # Most classes in this library that expose a `#value` getter method do so using
   # this mixin module.
   module Dereferenceable
 
     # Return the value this object represents after applying the options specified
-    # by the
+    # by the `#set_deref_options` method.
     #
     # When multiple deref options are set the order of operations is strictly defined.
     # The order of deref operations is:
-    # *
-    # *
-    # *
+    # * `:copy_on_deref`
+    # * `:dup_on_deref`
+    # * `:freeze_on_deref`
     #
-    # Because of this ordering there is no need to
-    # provided
-    # Setting both
+    # Because of this ordering there is no need to `#freeze` an object created by a
+    # provided `:copy_on_deref` block. Simply set `:freeze_on_deref` to `true`.
+    # Setting both `:dup_on_deref` to `true` and `:freeze_on_deref` to `true` is
     # as close to the behavior of a "pure" functional language (like Erlang, Clojure,
     # or Haskell) as we are likely to get in Ruby.
     #
-    # This method is thread-safe and synchronized with the internal
+    # This method is thread-safe and synchronized with the internal `#mutex`.
     #
     # @return [Object] the current value of the object
     def value
-      mutex.
-
-
+      mutex.lock
+      result = apply_deref_options(@value)
+      mutex.unlock
+      result
     end
+
     alias_method :deref, :value
 
     protected

+    # Set the internal value of this object
+    #
+    # @param [Object] val the new value
+    def value=(val)
+      mutex.lock
+      result = @value = val
+      mutex.unlock
+      result
+    end
+
     # A mutex lock used for synchronizing thread-safe operations. Methods defined
-    # by
+    # by `Dereferenceable` are synchronized using the `Mutex` returned from this
     # method. Operations performed by the including class that operate on the
-    #
+    # `@value` instance variable should be locked with this `Mutex`.
     #
     # @return [Mutex] the synchronization object
-    #
-    # @!visibility public
     def mutex
       @mutex
     end
 
-    # Initializes the internal
+    # Initializes the internal `Mutex`.
     #
     # @note This method *must* be called from within the constructor of the including class.
     #
     # @see #mutex
-    #
-    # @!visibility public
     def init_mutex
       @mutex = Mutex.new
     end
@@ -60,24 +68,23 @@ module Concurrent
     # Set the options which define the operations #value performs before
     # returning data to the caller (dereferencing).
     #
-    # @note Most classes that include this module will call
+    # @note Most classes that include this module will call `#set_deref_options`
     # from within the constructor, thus allowing these options to be set at
     # object creation.
     #
     # @param [Hash] opts the options defining dereference behavior.
-    # @option opts [String] :dup_on_deref (false) call
-    # @option opts [String] :freeze_on_deref (false) call
-    # @option opts [String] :copy_on_deref (nil) call the given
+    # @option opts [String] :dup_on_deref (false) call `#dup` before returning the data
+    # @option opts [String] :freeze_on_deref (false) call `#freeze` before returning the data
+    # @option opts [String] :copy_on_deref (nil) call the given `Proc` passing the internal value and
     #   returning the value returned from the proc
-    #
-    # @!visibility public
     def set_deref_options(opts = {})
-      mutex.
-
-
-
-
-
+      mutex.lock
+      @dup_on_deref = opts[:dup_on_deref] || opts[:dup]
+      @freeze_on_deref = opts[:freeze_on_deref] || opts[:freeze]
+      @copy_on_deref = opts[:copy_on_deref] || opts[:copy]
+      @do_nothing_on_deref = !(@dup_on_deref || @freeze_on_deref || @copy_on_deref)
+      mutex.unlock
+      nil
     end
 
     # @!visibility private
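To illustrate the mixin contract spelled out above (call `init_mutex` from the constructor, configure behavior with `set_deref_options`, and use the new protected `value=` writer), here is a hedged sketch with a hypothetical including class:

    require 'concurrent'

    class Holder
      include Concurrent::Dereferenceable

      def initialize(initial, opts = {})
        init_mutex                # must be called from the constructor
        set_deref_options(opts)   # :dup_on_deref, :freeze_on_deref, :copy_on_deref
        self.value = initial      # protected writer introduced in this release
      end
    end

    holder = Holder.new([1, 2, 3], dup_on_deref: true)
    holder.value                      #=> a defensive copy of the array
    holder.value.equal?(holder.value) #=> false; every deref returns a fresh dup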
data/lib/concurrent/exchanger.rb
CHANGED

@@ -8,6 +8,9 @@ module Concurrent
       @second = MVar.new(MVar::EMPTY, opts)
     end
 
+    # @param [Object] value the value to exchange with an other thread
+    # @param [Numeric] timeout the maximum time in second to wait for one other thread. nil (default value) means no timeout
+    # @return [Object] the value exchanged by the other thread; nil if timed out
     def exchange(value, timeout = nil)
       first = @first.take(timeout)
       if first == MVar::TIMEOUT
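The newly documented `#exchange` signature pairs two threads and swaps their values; the second argument is an optional timeout in seconds, with `nil` returned on timeout. A minimal sketch:

    require 'concurrent'

    exchanger = Concurrent::Exchanger.new

    t1 = Thread.new { exchanger.exchange(:ping) }     # blocks until a partner arrives
    t2 = Thread.new { exchanger.exchange(:pong, 2) }  # waits at most 2 seconds

    t1.value #=> :pong
    t2.value #=> :ping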
data/lib/concurrent/{cached_thread_pool.rb → executor/cached_thread_pool.rb}
CHANGED

@@ -1,9 +1,9 @@
-require 'concurrent/ruby_cached_thread_pool'
+require 'concurrent/executor/ruby_cached_thread_pool'
 
 module Concurrent
 
   if RUBY_PLATFORM == 'java'
-    require 'concurrent/java_cached_thread_pool'
+    require 'concurrent/executor/java_cached_thread_pool'
     # @!macro [attach] cached_thread_pool
     #   A thread pool that dynamically grows and shrinks to fit the current workload.
     #   New threads are created as needed, existing threads are reused, and threads
@@ -11,9 +11,9 @@ module Concurrent
     #   pools are particularly suited to applications that perform a high volume of
     #   short-lived tasks.
     #
-    #   On creation a
-    #   created on the pool as new operations are
-    #   will grow until
+    #   On creation a `CachedThreadPool` has zero running threads. New threads are
+    #   created on the pool as new operations are `#post`. The size of the pool
+    #   will grow until `#max_length` threads are in the pool or until the number
     #   of threads exceeds the number of running and pending operations. When a new
     #   operation is post to the pool the first available idle thread will be tasked
     #   with the new operation.
@@ -23,10 +23,10 @@ module Concurrent
     #   of time will be killed and reclaimed. Thus these thread pools are very
     #   efficient at reclaiming unused resources.
     #
-    #   The API and behavior of this class are based on Java's
+    #   The API and behavior of this class are based on Java's `CachedThreadPool`
     #
-    #   @note When running on the JVM (JRuby) this class will inherit from
-    #   On all other platforms it will inherit from
+    #   @note When running on the JVM (JRuby) this class will inherit from `JavaCachedThreadPool`.
+    #   On all other platforms it will inherit from `RubyCachedThreadPool`.
     #
     #   @see Concurrent::RubyCachedThreadPool
     #   @see Concurrent::JavaCachedThreadPool
@@ -34,7 +34,6 @@ module Concurrent
     #   @see http://docs.oracle.com/javase/tutorial/essential/concurrency/pools.html
     #   @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Executors.html
     #   @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html
-    #   @see http://stackoverflow.com/questions/17957382/fixedthreadpool-vs-cachedthreadpool-the-lesser-of-two-evils
     class CachedThreadPool < JavaCachedThreadPool
     end
   else