concurrent-ruby 0.7.0-x86-mingw32 → 0.7.1-x86-mingw32

Files changed (41)
  1. checksums.yaml +8 -8
  2. data/CHANGELOG.md +138 -0
  3. data/README.md +73 -105
  4. data/lib/1.9/concurrent_ruby_ext.so +0 -0
  5. data/lib/2.0/concurrent_ruby_ext.so +0 -0
  6. data/lib/concurrent/actor.rb +11 -12
  7. data/lib/concurrent/actor/behaviour/errors_on_unknown_message.rb +1 -1
  8. data/lib/concurrent/actor/behaviour/linking.rb +4 -1
  9. data/lib/concurrent/actor/behaviour/pausing.rb +2 -2
  10. data/lib/concurrent/actor/behaviour/supervised.rb +2 -1
  11. data/lib/concurrent/actor/behaviour/termination.rb +1 -1
  12. data/lib/concurrent/actor/context.rb +2 -1
  13. data/lib/concurrent/actor/core.rb +7 -3
  14. data/lib/concurrent/actor/utils/balancer.rb +4 -2
  15. data/lib/concurrent/actor/utils/pool.rb +1 -1
  16. data/lib/concurrent/agent.rb +1 -22
  17. data/lib/concurrent/async.rb +1 -79
  18. data/lib/concurrent/atomic.rb +1 -1
  19. data/lib/concurrent/atomic/thread_local_var.rb +71 -24
  20. data/lib/concurrent/atomics.rb +0 -1
  21. data/lib/concurrent/configuration.rb +11 -5
  22. data/lib/concurrent/dataflow.rb +1 -30
  23. data/lib/concurrent/dereferenceable.rb +9 -2
  24. data/lib/concurrent/executor/indirect_immediate_executor.rb +46 -0
  25. data/lib/concurrent/executor/java_thread_pool_executor.rb +2 -4
  26. data/lib/concurrent/executor/ruby_thread_pool_executor.rb +24 -22
  27. data/lib/concurrent/executor/thread_pool_executor.rb +2 -0
  28. data/lib/concurrent/executor/timer_set.rb +7 -8
  29. data/lib/concurrent/executors.rb +1 -0
  30. data/lib/concurrent/future.rb +7 -29
  31. data/lib/concurrent/ivar.rb +9 -0
  32. data/lib/concurrent/logging.rb +3 -0
  33. data/lib/concurrent/mvar.rb +26 -9
  34. data/lib/concurrent/observable.rb +33 -0
  35. data/lib/concurrent/promise.rb +59 -1
  36. data/lib/concurrent/scheduled_task.rb +1 -0
  37. data/lib/concurrent/timer_task.rb +18 -18
  38. data/lib/concurrent/tvar.rb +2 -0
  39. data/lib/concurrent/version.rb +1 -1
  40. data/lib/concurrent_ruby_ext.so +0 -0
  41. metadata +21 -4
data/lib/concurrent/actor/behaviour/pausing.rb
@@ -5,8 +5,8 @@ module Concurrent
   # Allows to pause actors on errors.
   # When paused all arriving messages are collected and processed after the actor
   # is resumed or reset. Resume will simply continue with next message.
-  # Reset also reinitialized context. `:reset!` and `:resume!` messages are only accepted
-  # form supervisor, see Supervised behaviour.
+  # Reset also reinitialized context.
+  # TODO example
   class Pausing < Abstract
     def initialize(core, subsequent)
       super core, subsequent
data/lib/concurrent/actor/behaviour/supervised.rb
@@ -3,7 +3,8 @@ module Concurrent
   module Behaviour

     # Sets and holds the supervisor of the actor if any. There is at most one supervisor
-    # for each actor. Each supervisor is automatically linked.
+    # for each actor. Each supervisor is automatically linked. Messages:
+    # `:pause!, :resume!, :reset!, :restart!` are accepted only from supervisor.
     class Supervised < Abstract
       attr_reader :supervisor

data/lib/concurrent/actor/behaviour/termination.rb
@@ -44,7 +44,7 @@ module Concurrent
   def terminate!
     return true if terminated?
     terminated.set
-    broadcast(:terminated)
+    broadcast(:terminated) # TODO do not end up in Dead Letter Router
     parent << :remove_child if parent
     true
   end
data/lib/concurrent/actor/context.rb
@@ -115,7 +115,8 @@ module Concurrent
     undef_method :spawn
   end

-  # Basic Context of an Actor.
+  # Basic Context of an Actor. It does not support supervision and pausing.
+  # It simply terminates on error.
   #
   # - linking
   # - terminates on error
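For orientation, a minimal sketch of what a basic Context looks like from the user side; the class name, constructor arguments, and the exact spawn/ask method names are illustrative and should be checked against the 0.7.1 actor docs:

    # Hypothetical counter actor built on the plain Context: it is not
    # supervised, cannot be paused, and simply terminates if on_message raises.
    class Counter < Concurrent::Actor::Context
      def initialize(initial)
        @count = initial
      end

      def on_message(message)
        @count += message
      end
    end

    counter = Counter.spawn(:counter, 0)   # name plus constructor arguments
    counter << 1                           # fire-and-forget send
    counter.ask!(2)                        # blocking send, returns on_message's result (3)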
data/lib/concurrent/actor/core.rb
@@ -46,10 +46,14 @@ module Concurrent
   synchronize do
     @mailbox = Array.new
     @serialized_execution = SerializedExecution.new
-    @executor = Type! opts.fetch(:executor, Concurrent.configuration.global_task_pool), Executor
     @children = Set.new
-    @context_class = Child! opts.fetch(:class), AbstractContext
+
+    @context_class = Child! opts.fetch(:class), AbstractContext
     allocate_context
+
+    @executor = Type! opts.fetch(:executor, Concurrent.configuration.global_task_pool), Executor
+    raise ArgumentError, 'ImmediateExecutor is not supported' if @executor.is_a? ImmediateExecutor
+
     @reference = (Child! opts[:reference_class] || @context.default_reference_class, Reference).new self
     @name = (Type! opts.fetch(:name), String, Symbol).to_s

@@ -82,7 +86,7 @@ module Concurrent
     handle_envelope Envelope.new(message, nil, parent, reference)
   end

-  initialized.set true if initialized
+  initialized.set reference if initialized
 rescue => ex
   log ERROR, ex
   @first_behaviour.terminate!
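Net effect of the two core.rb hunks: the context is allocated before the executor is validated, an ImmediateExecutor is now rejected outright, and the internal `initialized` ivar is fulfilled with the actor's reference instead of `true`. A hedged sketch of the visible change, assuming the AdHoc utility and the `:executor` spawn option pass through to Core as shown above:

    # Spawning an actor on an ImmediateExecutor now fails fast in 0.7.1.
    Concurrent::Actor::Utils::AdHoc.spawn(
      name:     :echo,
      executor: Concurrent::ImmediateExecutor.new    # raises ArgumentError
    ) { -> message { message } }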
data/lib/concurrent/actor/utils/balancer.rb
@@ -4,6 +4,7 @@ module Concurrent

   # Distributes messages between subscribed actors. Each actor'll get only one message then
   # it's unsubscribed. The actor needs to resubscribe when it's ready to receive next message.
+  # It will buffer the messages if there is no worker registered.
   # @see Pool
   class Balancer < RestartingContext

@@ -24,14 +25,15 @@ module Concurrent
       when :subscribed?
         @receivers.include? envelope.sender
       else
-        @buffer << message
+        @buffer << envelope
         distribute
+        Behaviour::MESSAGE_PROCESSED
       end
     end

     def distribute
       while !@receivers.empty? && !@buffer.empty?
-        @receivers.shift << @buffer.shift
+        redirect @receivers.shift, @buffer.shift
       end
     end
   end
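A rough usage sketch for the Balancer through its Pool wrapper (the pool.rb hunk just below shows the pool redirecting messages to its internal Balancer). The `Worker` class and the `Pool.spawn!` signature here are from memory and may differ slightly in 0.7.1, so treat this as illustrative only:

    class Worker < Concurrent::Actor::RestartingContext
      def on_message(message)
        message * 2   # the actual work
      end
    end

    # The pool builds a Balancer internally; each worker gets one message at a
    # time and resubscribes when done. Messages sent before any worker has
    # registered are buffered, per the documentation change above.
    pool = Concurrent::Actor::Utils::Pool.spawn!('pool', 5) do |index|
      Worker.spawn(name: "worker-#{index}")
    end

    pool << 21   # routed to an idle worker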
data/lib/concurrent/actor/utils/pool.rb
@@ -34,7 +34,7 @@ module Concurrent
     end

     def on_message(message)
-      @balancer << message
+      redirect @balancer
     end
   end

data/lib/concurrent/agent.rb
@@ -8,28 +8,7 @@ require 'concurrent/logging'

 module Concurrent

-  # An agent is a single atomic value that represents an identity. The current value
-  # of the agent can be requested at any time (`#deref`). Each agent has a work queue and operates on
-  # the global thread pool. Consumers can `#post` code blocks to the agent. The code block (function)
-  # will receive the current value of the agent as its sole parameter. The return value of the block
-  # will become the new value of the agent. Agents support two error handling modes: fail and continue.
-  # A good example of an agent is a shared incrementing counter, such as the score in a video game.
-  #
-  # @example Basic usage
-  #   score = Concurrent::Agent.new(10)
-  #   score.value #=> 10
-  #
-  #   score << proc{|current| current + 100 }
-  #   sleep(0.1)
-  #   score.value #=> 110
-  #
-  #   score << proc{|current| current * 2 }
-  #   sleep(0.1)
-  #   score.value #=> 220
-  #
-  #   score << proc{|current| current - 50 }
-  #   sleep(0.1)
-  #   score.value #=> 170
+  # {include:file:doc/agent.md}
   #
   # @!attribute [r] timeout
   #   @return [Fixnum] the maximum number of seconds before an update is cancelled
data/lib/concurrent/async.rb
@@ -8,85 +8,7 @@ require 'concurrent/executor/serialized_execution'

 module Concurrent

-  # A mixin module that provides simple asynchronous behavior to any standard
-  # class/object or object.
-  #
-  # Scenario:
-  #   As a stateful, plain old Ruby class/object
-  #   I want safe, asynchronous behavior
-  #   So my long-running methods don't block the main thread
-  #
-  # Stateful, mutable objects must be managed carefully when used asynchronously.
-  # But Ruby is an object-oriented language so designing with objects and classes
-  # plays to Ruby's strengths and is often more natural to many Ruby programmers.
-  # The `Async` module is a way to mix simple yet powerful asynchronous capabilities
-  # into any plain old Ruby object or class. These capabilities provide a reasonable
-  # level of thread safe guarantees when used correctly.
-  #
-  # When this module is mixed into a class or object it provides to new methods:
-  # `async` and `await`. These methods are thread safe with respect to the enclosing
-  # object. The former method allows methods to be called asynchronously by posting
-  # to the global thread pool. The latter allows a method to be called synchronously
-  # on the current thread but does so safely with respect to any pending asynchronous
-  # method calls. Both methods return an `Obligation` which can be inspected for
-  # the result of the method call. Calling a method with `async` will return a
-  # `:pending` `Obligation` whereas `await` will return a `:complete` `Obligation`.
-  #
-  # Very loosely based on the `async` and `await` keywords in C#.
-  #
-  # @example Defining an asynchronous class
-  #   class Echo
-  #     include Concurrent::Async
-  #
-  #     def initialize
-  #       init_mutex # initialize the internal synchronization objects
-  #     end
-  #
-  #     def echo(msg)
-  #       sleep(rand)
-  #       print "#{msg}\n"
-  #       nil
-  #     end
-  #   end
-  #
-  #   horn = Echo.new
-  #   horn.echo('zero') # synchronous, not thread-safe
-  #
-  #   horn.async.echo('one') # asynchronous, non-blocking, thread-safe
-  #   horn.await.echo('two') # synchronous, blocking, thread-safe
-  #
-  # @example Monkey-patching an existing object
-  #   numbers = 1_000_000.times.collect{ rand }
-  #   numbers.extend(Concurrent::Async)
-  #   numbers.init_mutex # initialize the internal synchronization objects
-  #
-  #   future = numbers.async.max
-  #   future.state #=> :pending
-  #
-  #   sleep(2)
-  #
-  #   future.state #=> :fulfilled
-  #   future.value #=> 0.999999138918843
-  #
-  # @note This module depends on several internal synchronization objects that
-  #   must be initialized prior to calling any of the async/await/executor methods.
-  #   The best practice is to call `init_mutex` from within the constructor
-  #   of the including class. A less ideal but acceptable practice is for the
-  #   thread creating the asynchronous object to explicitly call the `init_mutex`
-  #   method prior to calling any of the async/await/executor methods. If
-  #   `init_mutex` is *not* called explicitly the async/await/executor methods
-  #   will raize a `Concurrent::InitializationError`. This is the only way
-  #   thread-safe initialization can be guaranteed.
-  #
-  # @note Thread safe guarantees can only be made when asynchronous method calls
-  #   are not mixed with synchronous method calls. Use only synchronous calls
-  #   when the object is used exclusively on a single thread. Use only
-  #   `async` and `await` when the object is shared between threads. Once you
-  #   call a method using `async`, you should no longer call any methods
-  #   directly on the object. Use `async` and `await` exclusively from then on.
-  #   With careful programming it is possible to switch back and forth but it's
-  #   also very easy to create race conditions and break your application.
-  #   Basically, it's "async all the way down."
+  # {include:file:doc/async.md}
   #
   # @since 0.6.0
   #
data/lib/concurrent/atomic.rb
@@ -25,7 +25,7 @@ begin

   require "concurrent/atomic_reference/#{ruby_engine}"
 rescue LoadError
-  warn 'Compiled extensions not installed, pure Ruby Atomic will be used.'
+  #warn 'Compiled extensions not installed, pure Ruby Atomic will be used.'
 end

 if defined? Concurrent::JavaAtomic
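Only the load-time warning changes here; whichever backend is picked, the Atomic API behaves the same. A small sketch (standard atomic reference API, to the best of my knowledge):

    require 'concurrent'

    # Identical behaviour with the compiled extension or the pure-Ruby fallback;
    # 0.7.1 merely stops warning when the extension is missing.
    counter = Concurrent::Atomic.new(0)
    counter.update { |v| v + 1 }
    counter.value #=> 1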
data/lib/concurrent/atomic/thread_local_var.rb
@@ -2,41 +2,83 @@ require 'concurrent/atomic'

 module Concurrent

-  module ThreadLocalRubyStorage
+  # @!macro [attach] abstract_thread_local_var
+  #   A `ThreadLocalVar` is a variable where the value is different for each thread.
+  #   Each variable may have a default value, but when you modify the variable only
+  #   the current thread will ever see that change.
+  #
+  #   @example
+  #     v = ThreadLocalVar.new(14)
+  #     v.value #=> 14
+  #     v.value = 2
+  #     v.value #=> 2
+  #
+  #   @example
+  #     v = ThreadLocalVar.new(14)
+  #
+  #     t1 = Thread.new do
+  #       v.value #=> 14
+  #       v.value = 1
+  #       v.value #=> 1
+  #     end
+  #
+  #     t2 = Thread.new do
+  #       v.value #=> 14
+  #       v.value = 2
+  #       v.value #=> 2
+  #     end
+  #
+  #     v.value #=> 14
+  class AbstractThreadLocalVar

-    def allocate_storage
-      @storage = Atomic.new Hash.new
-    end
+    module ThreadLocalRubyStorage

-    def get
-      @storage.get[Thread.current]
-    end
+      protected

-    def set(value)
-      @storage.update { |s| s.merge Thread.current => value }
-    end
+      unless RUBY_PLATFORM == 'java'
+        require 'ref'
+      end

-  end
+      def allocate_storage
+        @storage = Ref::WeakKeyMap.new
+      end

-  module ThreadLocalJavaStorage
+      def get
+        @storage[Thread.current]
+      end

-    protected
+      def set(value, &block)
+        key = Thread.current

-    def allocate_storage
-      @var = java.lang.ThreadLocal.new
-    end
+        @storage[key] = value

-    def get
-      @var.get
+        if block_given?
+          begin
+            block.call
+          ensure
+            @storage.delete key
+          end
+        end
+      end
     end

-    def set(value)
-      @var.set(value)
-    end
+    module ThreadLocalJavaStorage

-  end
+      protected

-  class AbstractThreadLocalVar
+      def allocate_storage
+        @var = java.lang.ThreadLocal.new
+      end
+
+      def get
+        @var.get
+      end
+
+      def set(value)
+        @var.set(value)
+      end
+
+    end

     NIL_SENTINEL = Object.new

@@ -58,19 +100,24 @@ module Concurrent
     end

     def value=(value)
+      bind value
+    end
+
+    def bind(value, &block)
       if value.nil?
         stored_value = NIL_SENTINEL
       else
         stored_value = value
       end

-      set stored_value
+      set stored_value, &block

       value
     end

   end

+  # @!macro abstract_thread_local_var
   class ThreadLocalVar < AbstractThreadLocalVar
     if RUBY_PLATFORM == 'java'
       include ThreadLocalJavaStorage
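Beyond the relocated documentation, the functional addition is `bind`: per the Ruby storage code above it stores the value for the current thread, runs the block, and removes the thread's entry again in an ensure. A short sketch of the intended use, derived only from the diff (behaviour shown is the Ruby storage path):

    v = Concurrent::ThreadLocalVar.new(14)

    v.bind(2) do
      v.value #=> 2, visible only to this thread while the block runs
    end

    v.value #=> 14, the entry was removed so the default is returned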
data/lib/concurrent/atomics.rb
@@ -7,5 +7,4 @@ require 'concurrent/atomic/copy_on_write_observer_set'
 require 'concurrent/atomic/cyclic_barrier'
 require 'concurrent/atomic/count_down_latch'
 require 'concurrent/atomic/event'
-require 'concurrent/atomic/thread_local_var'
 require 'concurrent/atomic/synchronization'
data/lib/concurrent/configuration.rb
@@ -17,6 +17,9 @@ module Concurrent
   #   lambda { |level, progname, message = nil, &block| _ }
   attr_accessor :logger

+  # defines if executors should be auto-terminated in at_exit callback
+  attr_accessor :auto_terminate
+
   # Create a new configuration object.
   def initialize
     immediate_executor = ImmediateExecutor.new

@@ -24,6 +27,7 @@ module Concurrent
     @global_operation_pool = Delay.new(executor: immediate_executor) { new_operation_pool }
     @global_timer_set = Delay.new(executor: immediate_executor) { Concurrent::TimerSet.new }
     @logger = no_logger
+    @auto_terminate = true
   end

   # if assigned to {#logger}, it will log nothing.

@@ -129,6 +133,12 @@ module Concurrent
     yield(configuration)
   end

+  def self.finalize_global_executors
+    self.finalize_executor(self.configuration.global_timer_set)
+    self.finalize_executor(self.configuration.global_task_pool)
+    self.finalize_executor(self.configuration.global_operation_pool)
+  end
+
   private

   # Attempt to properly shutdown the given executor using the `shutdown` or

@@ -150,12 +160,8 @@ module Concurrent
     false
   end

-
   # set exit hook to shutdown global thread pools
   at_exit do
-    self.finalize_executor(self.configuration.global_timer_set)
-    self.finalize_executor(self.configuration.global_task_pool)
-    self.finalize_executor(self.configuration.global_operation_pool)
-    # TODO may break other test suites using concurrent-ruby, terminates before test is run
+    finalize_global_executors if configuration.auto_terminate
   end
 end
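Taken together, the configuration.rb hunks make the at_exit shutdown of the global executors optional and expose it as `Concurrent.finalize_global_executors`. A sketch of how a test suite that was bitten by the old unconditional at_exit hook might use this, assuming the standard `Concurrent.configure` block:

    require 'concurrent'

    # Keep the global pools alive past other at_exit hooks (e.g. a test
    # framework's), then shut them down explicitly when really done.
    Concurrent.configure do |config|
      config.auto_terminate = false
    end

    # ... run the suite ...

    Concurrent.finalize_global_executors   # added in 0.7.1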
data/lib/concurrent/dataflow.rb
@@ -19,36 +19,7 @@ module Concurrent
     end
   end

-  # Dataflow allows you to create a task that will be scheduled then all of its
-  # data dependencies are available. Data dependencies are `Future` values. The
-  # dataflow task itself is also a `Future` value, so you can build up a graph of
-  # these tasks, each of which is run when all the data and other tasks it depends
-  # on are available or completed.
-  #
-  # Our syntax is somewhat related to that of Akka's `flow` and Habanero Java's
-  # `DataDrivenFuture`. However unlike Akka we don't schedule a task at all until
-  # it is ready to run, and unlike Habanero Java we pass the data values into the
-  # task instead of dereferencing them again in the task.
-  #
-  # The theory of dataflow goes back to the 80s. In the terminology of the literature,
-  # our implementation is coarse-grained, in that each task can be many instructions,
-  # and dynamic in that you can create more tasks within other tasks.
-  #
-  # @example Parallel Fibonacci calculator
-  #   def fib(n)
-  #     if n < 2
-  #       Concurrent::dataflow { n }
-  #     else
-  #       n1 = fib(n - 1)
-  #       n2 = fib(n - 2)
-  #       Concurrent::dataflow(n1, n2) { |v1, v2| v1 + v2 }
-  #     end
-  #   end
-  #
-  #   f = fib(14) #=> #<Concurrent::Future:0x000001019a26d8 ...
-  #
-  #   # wait up to 1 second for the answer...
-  #   f.value(1) #=> 377
+  # {include:file:doc/dataflow.md}
   #
   # @param [Future] inputs zero or more `Future` operations that this dataflow depends upon
   #