concurrent-ruby 0.6.0.pre.1 → 0.6.0.pre.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142)
  1. checksums.yaml +4 -4
  2. data/README.md +16 -0
  3. data/lib/concurrent.rb +9 -29
  4. data/lib/concurrent/{actor.rb → actor/actor.rb} +3 -3
  5. data/lib/concurrent/actor/actor_context.rb +77 -0
  6. data/lib/concurrent/actor/actor_ref.rb +67 -0
  7. data/lib/concurrent/{postable.rb → actor/postable.rb} +1 -1
  8. data/lib/concurrent/actor/simple_actor_ref.rb +94 -0
  9. data/lib/concurrent/actors.rb +5 -0
  10. data/lib/concurrent/agent.rb +81 -47
  11. data/lib/concurrent/async.rb +35 -35
  12. data/lib/concurrent/atomic/atomic_boolean.rb +157 -0
  13. data/lib/concurrent/atomic/atomic_fixnum.rb +170 -0
  14. data/lib/concurrent/{condition.rb → atomic/condition.rb} +0 -0
  15. data/lib/concurrent/{copy_on_notify_observer_set.rb → atomic/copy_on_notify_observer_set.rb} +48 -13
  16. data/lib/concurrent/{copy_on_write_observer_set.rb → atomic/copy_on_write_observer_set.rb} +41 -20
  17. data/lib/concurrent/atomic/count_down_latch.rb +116 -0
  18. data/lib/concurrent/atomic/cyclic_barrier.rb +106 -0
  19. data/lib/concurrent/atomic/event.rb +103 -0
  20. data/lib/concurrent/{thread_local_var.rb → atomic/thread_local_var.rb} +0 -0
  21. data/lib/concurrent/atomics.rb +9 -0
  22. data/lib/concurrent/channel/buffered_channel.rb +6 -4
  23. data/lib/concurrent/channel/channel.rb +30 -2
  24. data/lib/concurrent/channel/unbuffered_channel.rb +2 -2
  25. data/lib/concurrent/channel/waitable_list.rb +3 -1
  26. data/lib/concurrent/channels.rb +5 -0
  27. data/lib/concurrent/{channel → collection}/blocking_ring_buffer.rb +16 -5
  28. data/lib/concurrent/collection/priority_queue.rb +305 -0
  29. data/lib/concurrent/{channel → collection}/ring_buffer.rb +6 -1
  30. data/lib/concurrent/collections.rb +3 -0
  31. data/lib/concurrent/configuration.rb +68 -19
  32. data/lib/concurrent/dataflow.rb +9 -9
  33. data/lib/concurrent/delay.rb +21 -13
  34. data/lib/concurrent/dereferenceable.rb +40 -33
  35. data/lib/concurrent/exchanger.rb +3 -0
  36. data/lib/concurrent/{cached_thread_pool.rb → executor/cached_thread_pool.rb} +8 -9
  37. data/lib/concurrent/executor/executor.rb +222 -0
  38. data/lib/concurrent/{fixed_thread_pool.rb → executor/fixed_thread_pool.rb} +6 -7
  39. data/lib/concurrent/{immediate_executor.rb → executor/immediate_executor.rb} +5 -5
  40. data/lib/concurrent/executor/java_cached_thread_pool.rb +31 -0
  41. data/lib/concurrent/{java_fixed_thread_pool.rb → executor/java_fixed_thread_pool.rb} +7 -11
  42. data/lib/concurrent/executor/java_single_thread_executor.rb +21 -0
  43. data/lib/concurrent/{java_thread_pool_executor.rb → executor/java_thread_pool_executor.rb} +66 -77
  44. data/lib/concurrent/executor/one_by_one.rb +65 -0
  45. data/lib/concurrent/{per_thread_executor.rb → executor/per_thread_executor.rb} +4 -4
  46. data/lib/concurrent/executor/ruby_cached_thread_pool.rb +29 -0
  47. data/lib/concurrent/{ruby_fixed_thread_pool.rb → executor/ruby_fixed_thread_pool.rb} +5 -4
  48. data/lib/concurrent/executor/ruby_single_thread_executor.rb +72 -0
  49. data/lib/concurrent/executor/ruby_thread_pool_executor.rb +282 -0
  50. data/lib/concurrent/{ruby_thread_pool_worker.rb → executor/ruby_thread_pool_worker.rb} +6 -6
  51. data/lib/concurrent/{safe_task_executor.rb → executor/safe_task_executor.rb} +20 -13
  52. data/lib/concurrent/executor/single_thread_executor.rb +35 -0
  53. data/lib/concurrent/executor/thread_pool_executor.rb +68 -0
  54. data/lib/concurrent/executor/timer_set.rb +138 -0
  55. data/lib/concurrent/executors.rb +9 -0
  56. data/lib/concurrent/future.rb +39 -40
  57. data/lib/concurrent/ivar.rb +22 -15
  58. data/lib/concurrent/mvar.rb +2 -1
  59. data/lib/concurrent/obligation.rb +9 -3
  60. data/lib/concurrent/observable.rb +33 -0
  61. data/lib/concurrent/options_parser.rb +46 -0
  62. data/lib/concurrent/promise.rb +23 -24
  63. data/lib/concurrent/scheduled_task.rb +21 -45
  64. data/lib/concurrent/timer_task.rb +204 -126
  65. data/lib/concurrent/tvar.rb +1 -1
  66. data/lib/concurrent/utilities.rb +3 -36
  67. data/lib/concurrent/{processor_count.rb → utility/processor_count.rb} +1 -1
  68. data/lib/concurrent/utility/timeout.rb +36 -0
  69. data/lib/concurrent/utility/timer.rb +21 -0
  70. data/lib/concurrent/version.rb +1 -1
  71. data/lib/concurrent_ruby_ext.bundle +0 -0
  72. data/spec/concurrent/{actor_context_spec.rb → actor/actor_context_spec.rb} +0 -8
  73. data/spec/concurrent/{actor_ref_shared.rb → actor/actor_ref_shared.rb} +9 -59
  74. data/spec/concurrent/{actor_spec.rb → actor/actor_spec.rb} +43 -41
  75. data/spec/concurrent/{postable_shared.rb → actor/postable_shared.rb} +0 -0
  76. data/spec/concurrent/actor/simple_actor_ref_spec.rb +135 -0
  77. data/spec/concurrent/agent_spec.rb +160 -71
  78. data/spec/concurrent/atomic/atomic_boolean_spec.rb +172 -0
  79. data/spec/concurrent/atomic/atomic_fixnum_spec.rb +186 -0
  80. data/spec/concurrent/{condition_spec.rb → atomic/condition_spec.rb} +2 -2
  81. data/spec/concurrent/{copy_on_notify_observer_set_spec.rb → atomic/copy_on_notify_observer_set_spec.rb} +0 -0
  82. data/spec/concurrent/{copy_on_write_observer_set_spec.rb → atomic/copy_on_write_observer_set_spec.rb} +0 -0
  83. data/spec/concurrent/atomic/count_down_latch_spec.rb +151 -0
  84. data/spec/concurrent/atomic/cyclic_barrier_spec.rb +248 -0
  85. data/spec/concurrent/{event_spec.rb → atomic/event_spec.rb} +18 -3
  86. data/spec/concurrent/{observer_set_shared.rb → atomic/observer_set_shared.rb} +15 -6
  87. data/spec/concurrent/{thread_local_var_spec.rb → atomic/thread_local_var_spec.rb} +0 -0
  88. data/spec/concurrent/channel/buffered_channel_spec.rb +1 -1
  89. data/spec/concurrent/channel/channel_spec.rb +6 -4
  90. data/spec/concurrent/channel/probe_spec.rb +37 -9
  91. data/spec/concurrent/channel/unbuffered_channel_spec.rb +2 -2
  92. data/spec/concurrent/{channel → collection}/blocking_ring_buffer_spec.rb +0 -0
  93. data/spec/concurrent/collection/priority_queue_spec.rb +317 -0
  94. data/spec/concurrent/{channel → collection}/ring_buffer_spec.rb +0 -0
  95. data/spec/concurrent/configuration_spec.rb +4 -70
  96. data/spec/concurrent/dereferenceable_shared.rb +5 -4
  97. data/spec/concurrent/exchanger_spec.rb +10 -5
  98. data/spec/concurrent/{cached_thread_pool_shared.rb → executor/cached_thread_pool_shared.rb} +15 -37
  99. data/spec/concurrent/{fixed_thread_pool_shared.rb → executor/fixed_thread_pool_shared.rb} +0 -0
  100. data/spec/concurrent/{global_thread_pool_shared.rb → executor/global_thread_pool_shared.rb} +10 -8
  101. data/spec/concurrent/{immediate_executor_spec.rb → executor/immediate_executor_spec.rb} +0 -0
  102. data/spec/concurrent/{java_cached_thread_pool_spec.rb → executor/java_cached_thread_pool_spec.rb} +1 -21
  103. data/spec/concurrent/{java_fixed_thread_pool_spec.rb → executor/java_fixed_thread_pool_spec.rb} +0 -0
  104. data/spec/concurrent/executor/java_single_thread_executor_spec.rb +21 -0
  105. data/spec/concurrent/{java_thread_pool_executor_spec.rb → executor/java_thread_pool_executor_spec.rb} +0 -0
  106. data/spec/concurrent/{per_thread_executor_spec.rb → executor/per_thread_executor_spec.rb} +0 -4
  107. data/spec/concurrent/{ruby_cached_thread_pool_spec.rb → executor/ruby_cached_thread_pool_spec.rb} +1 -1
  108. data/spec/concurrent/{ruby_fixed_thread_pool_spec.rb → executor/ruby_fixed_thread_pool_spec.rb} +0 -0
  109. data/spec/concurrent/executor/ruby_single_thread_executor_spec.rb +18 -0
  110. data/spec/concurrent/{ruby_thread_pool_executor_spec.rb → executor/ruby_thread_pool_executor_spec.rb} +12 -24
  111. data/spec/concurrent/executor/safe_task_executor_spec.rb +103 -0
  112. data/spec/concurrent/{thread_pool_class_cast_spec.rb → executor/thread_pool_class_cast_spec.rb} +12 -0
  113. data/spec/concurrent/{thread_pool_executor_shared.rb → executor/thread_pool_executor_shared.rb} +0 -0
  114. data/spec/concurrent/{thread_pool_shared.rb → executor/thread_pool_shared.rb} +84 -119
  115. data/spec/concurrent/executor/timer_set_spec.rb +183 -0
  116. data/spec/concurrent/future_spec.rb +12 -0
  117. data/spec/concurrent/ivar_spec.rb +11 -1
  118. data/spec/concurrent/observable_shared.rb +173 -0
  119. data/spec/concurrent/observable_spec.rb +51 -0
  120. data/spec/concurrent/options_parser_spec.rb +71 -0
  121. data/spec/concurrent/runnable_shared.rb +6 -0
  122. data/spec/concurrent/scheduled_task_spec.rb +60 -40
  123. data/spec/concurrent/timer_task_spec.rb +130 -144
  124. data/spec/concurrent/{processor_count_spec.rb → utility/processor_count_spec.rb} +0 -0
  125. data/spec/concurrent/{utilities_spec.rb → utility/timeout_spec.rb} +0 -0
  126. data/spec/concurrent/utility/timer_spec.rb +52 -0
  127. metadata +147 -108
  128. data/lib/concurrent/actor_context.rb +0 -31
  129. data/lib/concurrent/actor_ref.rb +0 -39
  130. data/lib/concurrent/atomic.rb +0 -121
  131. data/lib/concurrent/channel/probe.rb +0 -19
  132. data/lib/concurrent/count_down_latch.rb +0 -60
  133. data/lib/concurrent/event.rb +0 -80
  134. data/lib/concurrent/java_cached_thread_pool.rb +0 -45
  135. data/lib/concurrent/ruby_cached_thread_pool.rb +0 -37
  136. data/lib/concurrent/ruby_thread_pool_executor.rb +0 -268
  137. data/lib/concurrent/simple_actor_ref.rb +0 -124
  138. data/lib/concurrent/thread_pool_executor.rb +0 -30
  139. data/spec/concurrent/atomic_spec.rb +0 -201
  140. data/spec/concurrent/count_down_latch_spec.rb +0 -125
  141. data/spec/concurrent/safe_task_executor_spec.rb +0 -58
  142. data/spec/concurrent/simple_actor_ref_spec.rb +0 -219
@@ -3,10 +3,10 @@ require 'thread'
  module Concurrent

  # @!visibility private
- class RubyThreadPoolWorker # :nodoc:
+ class RubyThreadPoolWorker

  # @!visibility private
- def initialize(queue, parent) # :nodoc:
+ def initialize(queue, parent)
  @queue = queue
  @parent = parent
  @mutex = Mutex.new
@@ -14,14 +14,14 @@ module Concurrent
  end

  # @!visibility private
- def dead? # :nodoc:
+ def dead?
  return @mutex.synchronize do
  @thread.nil? ? false : ! @thread.alive?
  end
  end

  # @!visibility private
- def last_activity # :nodoc:
+ def last_activity
  @mutex.synchronize { @last_activity }
  end

@@ -33,7 +33,7 @@ module Concurrent
  end

  # @!visibility private
- def kill # :nodoc:
+ def kill
  @mutex.synchronize do
  Thread.kill(@thread) unless @thread.nil?
  @thread = nil
@@ -41,7 +41,7 @@ module Concurrent
  end

  # @!visibility private
- def run(thread = Thread.current) # :nodoc:
+ def run(thread = Thread.current)
  @mutex.synchronize do
  raise StandardError.new('already running') unless @thread.nil?
  @thread = thread
@@ -1,3 +1,5 @@
+ require 'thread'
+
  module Concurrent

  # A simple utility class that executes a callable and returns and array of three elements:
@@ -5,24 +7,29 @@ module Concurrent
  # value - filled by the callable result if it has been executed without errors, nil otherwise
  # reason - the error risen by the callable if it has been executed with errors, nil otherwise
  class SafeTaskExecutor
- def initialize(task)
+
+ def initialize(task, opts = {})
  @task = task
+ @mutex = Mutex.new
+ @exception_class = opts.fetch(:rescue_exception, false) ? Exception : StandardError
  end

  # @return [Array]
- def execute
- success = false
- value = reason = nil
-
- begin
- value = @task.call
- success = true
- rescue => ex
- reason = ex
+ def execute(*args)
+ @mutex.synchronize do
  success = false
- end
+ value = reason = nil
+
+ begin
+ value = @task.call(*args)
+ success = true
+ rescue @exception_class => ex
+ reason = ex
+ success = false
+ end

- [success, value, reason]
+ [success, value, reason]
+ end
  end
  end
- end
+ end
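The hunk above reworks `SafeTaskExecutor` (now under `lib/concurrent/executor/` per the file list): `#execute` accepts arguments, runs under a mutex, and the new `:rescue_exception` option widens the rescue from `StandardError` to `Exception`. A minimal usage sketch based only on that hunk; the `require 'concurrent'` entry point is assumed and the example is illustrative, not part of the diff:

```ruby
require 'concurrent'

# Success case: execute now forwards its arguments to the wrapped callable.
doubler = Concurrent::SafeTaskExecutor.new(lambda { |x| x * 2 })
success, value, reason = doubler.execute(21)
# success => true, value => 42, reason => nil

# Failure case: the error is captured and returned, not raised.
failing = Concurrent::SafeTaskExecutor.new(lambda { raise StandardError, 'boom' })
success, value, reason = failing.execute
# success => false, value => nil, reason => #<StandardError: boom>

# With :rescue_exception => true even non-StandardError exceptions are caught
# (NotImplementedError descends from ScriptError, not StandardError).
guarded = Concurrent::SafeTaskExecutor.new(lambda { raise NotImplementedError },
                                           rescue_exception: true)
success, _value, reason = guarded.execute
# success => false, reason => #<NotImplementedError>
```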
@@ -0,0 +1,35 @@
+ require 'concurrent/executor/ruby_single_thread_executor'
+
+ module Concurrent
+
+ if RUBY_PLATFORM == 'java'
+
+ require 'concurrent/executor/java_single_thread_executor'
+
+ # @!macro [attach] single_thread_executor
+ #
+ # A thread pool with a set number of threads. The number of threads in the pool
+ # is set on construction and remains constant. When all threads are busy new
+ # tasks `#post` to the thread pool are enqueued until a thread becomes available.
+ # Should a thread crash for any reason the thread will immediately be removed
+ # from the pool and replaced.
+ #
+ # The API and behavior of this class are based on Java's `SingleThreadExecutor`
+ #
+ # @note When running on the JVM (JRuby) this class will inherit from `JavaSingleThreadExecutor`.
+ # On all other platforms it will inherit from `RubySingleThreadExecutor`.
+ #
+ # @see Concurrent::RubySingleThreadExecutor
+ # @see Concurrent::JavaSingleThreadExecutor
+ #
+ # @see http://docs.oracle.com/javase/tutorial/essential/concurrency/pools.html
+ # @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Executors.html
+ # @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html
+ class SingleThreadExecutor < JavaSingleThreadExecutor
+ end
+ else
+ # @!macro single_thread_executor
+ class SingleThreadExecutor < RubySingleThreadExecutor
+ end
+ end
+ end
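This new file defines the platform-switching `SingleThreadExecutor` (JRuby gets the Java-backed class, all other platforms the pure-Ruby one). A brief sketch of how it might be used, assuming it exposes the common `#post`, `#shutdown`, and `#wait_for_termination` executor interface introduced elsewhere in this release; none of these calls appear in the hunk above:

```ruby
require 'concurrent'

# Tasks posted here run serially on one background thread, in submission order.
executor = Concurrent::SingleThreadExecutor.new

executor.post { puts 'first' }
executor.post { puts 'second' }
executor.post(1, 2) { |a, b| puts a + b }  # arguments are forwarded to the block

executor.shutdown                 # stop accepting new tasks
executor.wait_for_termination(1)  # assumed helper; wait up to 1s for queued tasks
```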
@@ -0,0 +1,68 @@
+ require 'concurrent/executor/ruby_thread_pool_executor'
+
+ module Concurrent
+
+ if RUBY_PLATFORM == 'java'
+ require 'concurrent/executor/java_thread_pool_executor'
+ # @!macro [attach] thread_pool_executor
+ #
+ # An abstraction composed of one or more threads and a task queue. Tasks
+ # (blocks or `proc` objects) are submit to the pool and added to the queue.
+ # The threads in the pool remove the tasks and execute them in the order
+ # they were received. When there are more tasks queued than there are
+ # threads to execute them the pool will create new threads, up to the
+ # configured maximum. Similarly, threads that are idle for too long will
+ # be garbage collected, down to the configured minimum options. Should a
+ # thread crash it, too, will be garbage collected.
+ #
+ # `ThreadPoolExecutor` is based on the Java class of the same name. From
+ # the official Java documentationa;
+ #
+ # > Thread pools address two different problems: they usually provide
+ # > improved performance when executing large numbers of asynchronous tasks,
+ # > due to reduced per-task invocation overhead, and they provide a means
+ # > of bounding and managing the resources, including threads, consumed
+ # > when executing a collection of tasks. Each ThreadPoolExecutor also
+ # > maintains some basic statistics, such as the number of completed tasks.
+ # >
+ # > To be useful across a wide range of contexts, this class provides many
+ # > adjustable parameters and extensibility hooks. However, programmers are
+ # > urged to use the more convenient Executors factory methods
+ # > [CachedThreadPool] (unbounded thread pool, with automatic thread reclamation),
+ # > [FixedThreadPool] (fixed size thread pool) and [SingleThreadExecutor] (single
+ # > background thread), that preconfigure settings for the most common usage
+ # > scenarios.
+ #
+ # Thread pools support several configuration options:
+ #
+ # * `max_threads`: The maximum number of threads that may be created in the pool.
+ # * `min_threads`: The minimum number of threads that may be retained in the pool.
+ # * `idletime`: The number of seconds that a thread may be idle before being reclaimed.
+ # * `max_queue`: The maximum number of tasks that may be waiting in the work queue at
+ # any one time. When the queue size reaches `max_queue` subsequent tasks will be
+ # rejected in accordance with the configured `overflow_policy`.
+ # * `overflow_policy`: The policy defining how rejected tasks are handled. #
+ #
+ # Three overflow policies are supported:
+ #
+ # * `:abort`: Raise a `RejectedExecutionError` exception and discard the task.
+ # * `:discard`: Silently discard the task and return `nil` as the task result.
+ # * `:caller_runs`: Execute the task on the calling thread.
+ #
+ # @note When running on the JVM (JRuby) this class will inherit from `JavaThreadPoolExecutor`.
+ # On all other platforms it will inherit from `RubyThreadPoolExecutor`.
+ #
+ # @see Concurrent::RubyThreadPoolExecutor
+ # @see Concurrent::JavaThreadPoolExecutor
+ #
+ # @see http://docs.oracle.com/javase/tutorial/essential/concurrency/pools.html
+ # @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Executors.html
+ # @see http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html
+ class ThreadPoolExecutor < JavaThreadPoolExecutor
+ end
+ else
+ # @!macro thread_pool_executor
+ class ThreadPoolExecutor < RubyThreadPoolExecutor
+ end
+ end
+ end
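The configuration options and overflow policies documented in the hunk above suggest usage along these lines. The option names come straight from the doc comment; the specific values are arbitrary and the snippet is a sketch, not part of the diff:

```ruby
require 'concurrent'

pool = Concurrent::ThreadPoolExecutor.new(
  min_threads:     2,            # threads retained even when idle
  max_threads:     10,           # ceiling on pool size
  idletime:        60,           # seconds of idleness before a thread is reclaimed
  max_queue:       100,          # queued tasks beyond this trigger the overflow policy
  overflow_policy: :caller_runs  # alternatives: :abort (raises), :discard (drops)
)

# Submit work; blocks are queued and run by the pool's threads in order.
100.times { |i| pool.post(i) { |n| n * n } }

pool.shutdown
```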
@@ -0,0 +1,138 @@
+ require 'thread'
+ require_relative 'executor'
+ require 'concurrent/options_parser'
+ require 'concurrent/atomic/event'
+ require 'concurrent/collection/priority_queue'
+ require 'concurrent/executor/single_thread_executor'
+
+ module Concurrent
+
+ # Executes a collection of tasks at the specified times. A master thread
+ # monitors the set and schedules each task for execution at the appropriate
+ # time. Tasks are run on the global task pool or on the supplied executor.
+ class TimerSet
+ include Executor
+
+ # Create a new set of timed tasks.
+ #
+ # @param [Hash] opts the options controlling how the future will be processed
+ # @option opts [Boolean] :operation (false) when `true` will execute the future on the global
+ # operation pool (for long-running operations), when `false` will execute the future on the
+ # global task pool (for short-running tasks)
+ # @option opts [object] :executor when provided will run all operations on
+ # this executor rather than the global thread pool (overrides :operation)
+ def initialize(opts = {})
+ @queue = PriorityQueue.new(order: :min)
+ @task_executor = OptionsParser::get_executor_from(opts)
+ @timer_executor = SingleThreadExecutor.new
+ @condition = Condition.new
+ init_executor
+ end
+
+ # Post a task to be execute at the specified time. The given time may be either
+ # a `Time` object or the number of seconds to wait. If the intended execution
+ # time is within 1/100th of a second of the current time the task will be
+ # immediately post to the executor.
+ #
+ # @param [Object] intended_time the time to schedule the task for execution
+ #
+ # @yield the task to be performed
+ #
+ # @return [Boolean] true if the message is post, false after shutdown
+ #
+ # @raise [ArgumentError] if the intended execution time is not in the future
+ # @raise [ArgumentError] if no block is given
+ def post(intended_time, *args, &task)
+ time = TimerSet.calculate_schedule_time(intended_time).to_f
+ raise ArgumentError.new('no block given') unless block_given?
+
+ mutex.synchronize do
+ return false unless running?
+
+ if (time - Time.now.to_f) <= 0.01
+ @task_executor.post(*args, &task)
+ else
+ @queue.push(Task.new(time, args, task))
+ @timer_executor.post(&method(:process_tasks))
+ end
+
+ true
+ end
+
+ end
+
+ alias_method :kill, :shutdown
+
+ # Calculate an Epoch time with milliseconds at which to execute a
+ # task. If the given time is a `Time` object it will be converted
+ # accordingly. If the time is an integer value greater than zero
+ # it will be understood as a number of seconds in the future and
+ # will be added to the current time to calculate Epoch.
+ #
+ # @param [Object] intended_time the time (as a `Time` object or an integer)
+ # to schedule the task for execution
+ # @param [Time] now (Time.now) the time from which to calculate an interval
+ #
+ # @return [Fixnum] the intended time as seconds/millis from Epoch
+ #
+ # @raise [ArgumentError] if the intended execution time is not in the future
+ def self.calculate_schedule_time(intended_time, now = Time.now)
+ if intended_time.is_a?(Time)
+ raise ArgumentError.new('schedule time must be in the future') if intended_time <= now
+ intended_time
+ else
+ raise ArgumentError.new('seconds must be greater than zero') if intended_time.to_f < 0.0
+ now + intended_time
+ end
+ end
+
+ private
+
+ # A struct for encapsulating a task and its intended execution time.
+ # It facilitates proper prioritization by overriding the comparison
+ # (spaceship) operator as a comparison of the intended execution
+ # times.
+ #
+ # @!visibility private
+ Task = Struct.new(:time, :args, :op) do
+ include Comparable
+
+ def <=>(other)
+ self.time <=> other.time
+ end
+ end
+
+ private_constant :Task
+
+ # @!visibility private
+ def shutdown_execution
+ @queue.clear
+ @timer_executor.kill
+ stopped_event.set
+ end
+
+ # Run a loop and execute tasks in the scheduled order and at the approximate
+ # scheduled time. If no tasks remain the thread will exit gracefully so that
+ # garbage collection can occur. If there are no ready tasks it will sleep
+ # for up to 60 seconds waiting for the next scheduled task.
+ #
+ # @!visibility private
+ def process_tasks
+ loop do
+ break if @queue.empty?
+
+ task = @queue.peek
+ interval = task.time - Time.now.to_f
+
+ if interval <= 0
+ @task_executor.post(*task.args, &task.op)
+ @queue.pop
+ else
+ mutex.synchronize do
+ @condition.wait(mutex, [interval, 60].min)
+ end
+ end
+ end
+ end
+ end
+ end
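Based on the `#post` and `.calculate_schedule_time` documentation in the new `TimerSet` above, a small usage sketch (illustrative only; assumes the gem is loaded via `require 'concurrent'` and the class is exposed as `Concurrent::TimerSet`):

```ruby
require 'concurrent'

timers = Concurrent::TimerSet.new

# A numeric delay is interpreted as seconds from now.
timers.post(2) { puts 'roughly two seconds later' }

# A Time instance in the future works as well; tasks within 1/100th of a
# second of "now" are handed straight to the task executor.
timers.post(Time.now + 5) { puts 'roughly five seconds later' }

# Times in the past are rejected before the block is even checked.
begin
  timers.post(Time.now - 1) { }
rescue ArgumentError => e
  puts e.message # => "schedule time must be in the future"
end

sleep 6
timers.shutdown
```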
@@ -0,0 +1,9 @@
+ require 'concurrent/executor/cached_thread_pool'
+ require 'concurrent/executor/fixed_thread_pool'
+ require 'concurrent/executor/immediate_executor'
+ require 'concurrent/executor/per_thread_executor'
+ require 'concurrent/executor/safe_task_executor'
+ require 'concurrent/executor/single_thread_executor'
+ require 'concurrent/executor/thread_pool_executor'
+ require 'concurrent/executor/timer_set'
+ require 'concurrent/executor/one_by_one'
@@ -1,61 +1,60 @@
  require 'thread'

- require 'concurrent/configuration'
  require 'concurrent/obligation'
- require 'concurrent/safe_task_executor'
+ require 'concurrent/options_parser'
+ require 'concurrent/executor/safe_task_executor'

  module Concurrent

- # A +Future+ represents a promise to complete an action at some time in the future.
+ # A `Future` represents a promise to complete an action at some time in the future.
  # The action is atomic and permanent. The idea behind a future is to send an operation
  # for asynchronous completion, do other stuff, then return and retrieve the result
  # of the async operation at a later time.
  #
- # A +Future+ has four possible states: *:unscheduled*, *:pending*, *:rejected*, or *:fulfilled*.
- # When a +Future+ is created its state is set to *:unscheduled*. Once the +#execute+ method is
+ # A `Future` has four possible states: *:unscheduled*, *:pending*, *:rejected*, or *:fulfilled*.
+ # When a `Future` is created its state is set to *:unscheduled*. Once the `#execute` method is
  # called the state becomes *:pending* and will remain in that state until processing is
- # complete. A completed +Future+ is either *:rejected*, indicating that an exception was
- # thrown during processing, or *:fulfilled*, indicating success. If a +Future+ is *:fulfilled*
- # its +value+ will be updated to reflect the result of the operation. If *:rejected* the
- # +reason+ will be updated with a reference to the thrown exception. The predicate methods
- # +#unscheduled?+, +#pending?+, +#rejected?+, and +fulfilled?+ can be called at any time to
- # obtain the state of the +Future+, as can the +#state+ method, which returns a symbol.
+ # complete. A completed `Future` is either *:rejected*, indicating that an exception was
+ # thrown during processing, or *:fulfilled*, indicating success. If a `Future` is *:fulfilled*
+ # its `value` will be updated to reflect the result of the operation. If *:rejected* the
+ # `reason` will be updated with a reference to the thrown exception. The predicate methods
+ # `#unscheduled?`, `#pending?`, `#rejected?`, and `fulfilled?` can be called at any time to
+ # obtain the state of the `Future`, as can the `#state` method, which returns a symbol.
  #
- # Retrieving the value of a +Future+ is done through the +#value+ (alias: +#deref+) method.
- # Obtaining the value of a +Future+ is a potentially blocking operation. When a +Future+ is
- # *:rejected* a call to +#value+ will return +nil+ immediately. When a +Future+ is
- # *:fulfilled* a call to +#value+ will immediately return the current value. When a
- # +Future+ is *:pending* a call to +#value+ will block until the +Future+ is either
- # *:rejected* or *:fulfilled*. A *timeout* value can be passed to +#value+ to limit how
- # long the call will block. If +nil+ the call will block indefinitely. If +0+ the call will
+ # Retrieving the value of a `Future` is done through the `#value` (alias: `#deref`) method.
+ # Obtaining the value of a `Future` is a potentially blocking operation. When a `Future` is
+ # *:rejected* a call to `#value` will return `nil` immediately. When a `Future` is
+ # *:fulfilled* a call to `#value` will immediately return the current value. When a
+ # `Future` is *:pending* a call to `#value` will block until the `Future` is either
+ # *:rejected* or *:fulfilled*. A *timeout* value can be passed to `#value` to limit how
+ # long the call will block. If `nil` the call will block indefinitely. If `0` the call will
  # not block. Any other integer or float value will indicate the maximum number of seconds to block.
  #
- # The +Future+ class also includes the behavior of the Ruby standard library +Observable+ module,
+ # The `Future` class also includes the behavior of the Ruby standard library `Observable` module,
  # but does so in a thread-safe way. On fulfillment or rejection all observers will be notified
- # according to the normal +Observable+ behavior. The observer callback function will be called
- # with three parameters: the +Time+ of fulfillment/rejection, the final +value+, and the final
- # +reason+. Observers added after fulfillment/rejection will still be notified as normal.
+ # according to the normal `Observable` behavior. The observer callback function will be called
+ # with three parameters: the `Time` of fulfillment/rejection, the final `value`, and the final
+ # `reason`. Observers added after fulfillment/rejection will still be notified as normal.
  #
  # @see http://ruby-doc.org/stdlib-2.1.1/libdoc/observer/rdoc/Observable.html Ruby Observable module
  # @see http://clojuredocs.org/clojure_core/clojure.core/future Clojure's future function
  # @see http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html java.util.concurrent.Future
  class Future < IVar
  include Obligation
- include OptionsParser

- # Create a new +Future+ in the +:unscheduled+ state.
+ # Create a new `Future` in the `:unscheduled` state.
  #
  # @yield the asynchronous operation to perform
  #
- # @param [Hash] opts the options to create a message with
- # @option opts [Boolean] :operation (false) when +true+ will execute the future on the global
- # operation pool (for long-running operations), when +false+ will execute the future on the
+ # @param [Hash] opts the options controlling how the future will be processed
+ # @option opts [Boolean] :operation (false) when `true` will execute the future on the global
+ # operation pool (for long-running operations), when `false` will execute the future on the
  # global task pool (for short-running tasks)
  # @option opts [object] :executor when provided will run all operations on
  # this executor rather than the global thread pool (overrides :operation)
- # @option opts [String] :dup_on_deref (false) call +#dup+ before returning the data
- # @option opts [String] :freeze_on_deref (false) call +#freeze+ before returning the data
- # @option opts [String] :copy_on_deref (nil) call the given +Proc+ passing the internal value and
+ # @option opts [String] :dup_on_deref (false) call `#dup` before returning the data
+ # @option opts [String] :freeze_on_deref (false) call `#freeze` before returning the data
+ # @option opts [String] :copy_on_deref (nil) call the given `Proc` passing the internal value and
  # returning the value returned from the proc
  #
  # @raise [ArgumentError] if no block is given
@@ -64,14 +63,14 @@ module Concurrent
  super(IVar::NO_VALUE, opts)
  @state = :unscheduled
  @task = block
- @executor = get_executor_from(opts)
+ @executor = OptionsParser::get_executor_from(opts)
  end

- # Execute an +:unscheduled+ +Future+. Immediately sets the state to +:pending+ and
+ # Execute an `:unscheduled` `Future`. Immediately sets the state to `:pending` and
  # passes the block to a new thread/thread pool for eventual execution.
- # Does nothing if the +Future+ is in any state other than +:unscheduled+.
+ # Does nothing if the `Future` is in any state other than `:unscheduled`.
  #
- # @return [Future] a reference to +self+
+ # @return [Future] a reference to `self`
  #
  # @example Instance and execute in separate steps
  # future = Concurrent::Future.new{ sleep(1); 42 }
@@ -91,17 +90,17 @@ module Concurrent
  end
  end

- # Create a new +Future+ object with the given block, execute it, and return the
- # +:pending+ object.
+ # Create a new `Future` object with the given block, execute it, and return the
+ # `:pending` object.
  #
  # @yield the asynchronous operation to perform
  #
- # @option opts [String] :dup_on_deref (false) call +#dup+ before returning the data
- # @option opts [String] :freeze_on_deref (false) call +#freeze+ before returning the data
- # @option opts [String] :copy_on_deref (nil) call the given +Proc+ passing the internal value and
+ # @option opts [String] :dup_on_deref (false) call `#dup` before returning the data
+ # @option opts [String] :freeze_on_deref (false) call `#freeze` before returning the data
+ # @option opts [String] :copy_on_deref (nil) call the given `Proc` passing the internal value and
  # returning the value returned from the proc
  #
- # @return [Future] the newly created +Future+ in the +:pending+ state
+ # @return [Future] the newly created `Future` in the `:pending` state
  #
  # @raise [ArgumentError] if no block is given
  #