concurrently 1.1.1 → 1.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: e10c69de9f9db4d13d7411544b336709be235688
- data.tar.gz: a519b852d6ac6d07689daf39bd893fe1594c76eb
+ metadata.gz: a76cfe92534837645dfc5c4104d0e0954262c726
+ data.tar.gz: 5a6535677b4b12510c3f32305266b10b81436321
  SHA512:
- metadata.gz: e81443aec073e777bb7bdb18a293383716d66028a884d773208205aae52a15f2af993ed8f427f78e4515d83b379b07ee287c3d5f1ac2f10c478267258aff2872
- data.tar.gz: 87102e7ee79831c03975175f52527b78da04100d6ddaaca56e479b208f59d03a362ea5ddbf3df16f08d37bf41d5157978c222f1b3c1afbb3e920f3e3b6d7b3e2
+ metadata.gz: bc43d2ae55910b31451d855607caf2a64654a684e32c382060ebbe48f7a2cfd596f11158f1e8d7c92c15ce6b388e0b550cb9f3bee2eb2aafe2afb97dc5846ede
+ data.tar.gz: 0be9f1e7ac0fa65fde27f4e33999c9eab1000b9d5290e7c8c990042101ce2c754db9328a8f5d52a4115a4cb0c04ea75a3bc2c5ec84d4e6f0bb195f17daa3ba47
data/.rspec CHANGED
@@ -1,4 +1,4 @@
  --format documentation
  --color
  --require spec_helper.rb
- --default-path test/Ruby
+ --default-path test/CRuby
@@ -1,5 +1,13 @@
  # Release Notes

+ ## 1.2.0 (2017-12-12)
+
+ ### Added [Concurrently::Debug](http://www.rubydoc.info/github/christopheraue/m-ruby-concurrently/Concurrently/Debug) interface
+ * [.enable](http://www.rubydoc.info/github/christopheraue/m-ruby-concurrently/Concurrently/Debug#enable-class_method)
+
+ ### Extended [Concurrently::Proc](http://www.rubydoc.info/github/christopheraue/m-ruby-concurrently/Concurrently/Proc) interface
+ * [.error_log_output=](http://www.rubydoc.info/github/christopheraue/m-ruby-concurrently/Concurrently/Proc#error_log_output=-class_method)
+
  ## 1.1.0 (2017-07-10)

  ### Improvements
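The two interfaces added in 1.2.0 are documented further down in this diff. A minimal quick-start sketch combining them, assuming only the class methods shown there (`Concurrently::Debug.enable` and `Concurrently::Proc.error_log_output=`); the `'my_app'` filter string is a hypothetical path fragment used for illustration:

```ruby
require 'concurrently'
require 'logger'

# Log the control flow of concurrent procs (BEGIN/SUSPEND/RESUME/END entries).
# The optional second argument filters stacktrace locations.
Concurrently::Debug.enable Logger.new(STDOUT), ['my_app']

# Route errors raised inside concurrent procs to a Logger instead of the
# default STDERR output.
Concurrently::Proc.error_log_output = Logger.new(STDOUT)
```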
data/Rakefile CHANGED
@@ -5,8 +5,8 @@ perf_dir = File.expand_path "perf"
  # Ruby
  ruby = {
  test: "rspec" ,
- benchmark: "ruby -Iperf/Ruby -rstage",
- profile: "ruby -Iperf/Ruby -rstage" }
+ benchmark: "ruby -Iperf/CRuby -rstage",
+ profile: "ruby -Iperf/CRuby -rstage" }

  mruby_dir = File.expand_path "mruby_builds"
  mruby = {
@@ -73,7 +73,7 @@ end

  namespace :mruby do
  file mruby[:src] do
- sh "git clone --depth=1 git://github.com/mruby/mruby.git #{mruby[:src]}"
+ sh "git clone git://github.com/mruby/mruby.git #{mruby[:src]}"
  end

  desc "Checkout a tag or commit of the mruby source. Executes: git checkout reference"
@@ -27,11 +27,11 @@ DESC
  spec.authors = ["Christopher Aue"]
  spec.email = ["rubygems@christopheraue.net"]
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
- spec.require_paths = ["lib/Ruby"]
+ spec.require_paths = ["lib/CRuby"]

  spec.required_ruby_version = ">= 2.2.7"

  spec.add_dependency "nio4r", "~> 2.1"
  spec.add_dependency "hitimes", "~> 1.2"
- spec.add_dependency "callbacks_attachable", "~> 2.2"
+ spec.add_dependency "callbacks_attachable", "~> 3.0"
  end
@@ -1,11 +1,6 @@
  # @api ruby_patches
  # @since 1.0.0
  class Thread
- # Attach an event loop to every thread in Ruby.
- def __concurrently_event_loop__
- @__concurrently_event_loop__ ||= Concurrently::EventLoop.new
- end
-
  # Disable fiber-local variables and treat variables using the fiber-local
  # interface as thread-local. Most of the code out there is not using
  # fibers explicitly and really intends to attach values to the current
@@ -57,19 +57,19 @@ than that.

  Results for ruby 2.4.1
  ----------------------
- proc.call: 11048400 executions in 1.0000 seconds
- conproc.call: 734000 executions in 1.0000 seconds
- conproc.call_nonblock: 857800 executions in 1.0001 seconds
- conproc.call_detached: 464800 executions in 1.0002 seconds
- conproc.call_and_forget: 721800 executions in 1.0001 seconds
+ proc.call: 11521800 executions in 1.0000 seconds
+ conproc.call: 756000 executions in 1.0001 seconds
+ conproc.call_nonblock: 881800 executions in 1.0001 seconds
+ conproc.call_detached: 443500 executions in 1.0001 seconds
+ conproc.call_and_forget: 755600 executions in 1.0001 seconds

  Results for mruby 1.3.0
  -----------------------
- proc.call: 4771700 executions in 1.0000 seconds
- conproc.call: 362000 executions in 1.0002 seconds
- conproc.call_nonblock: 427400 executions in 1.0000 seconds
- conproc.call_detached: 188900 executions in 1.0005 seconds
- conproc.call_and_forget: 383400 executions in 1.0002 seconds
+ proc.call: 5801400 executions in 1.0000 seconds
+ conproc.call: 449100 executions in 1.0002 seconds
+ conproc.call_nonblock: 523400 executions in 1.0000 seconds
+ conproc.call_detached: 272500 executions in 1.0000 seconds
+ conproc.call_and_forget: 490500 executions in 1.0001 seconds

  *conproc.call_detached* and *conproc.call_and_forget* call `wait 0` after each
  batch so the scheduled evaluations have [a chance to run]
@@ -150,15 +150,15 @@ to expect in these cases.

  Results for ruby 2.4.1
  ----------------------
- wait: 291100 executions in 1.0001 seconds
- await_readable: 147800 executions in 1.0005 seconds
- await_writable: 148300 executions in 1.0003 seconds
+ wait: 331300 executions in 1.0001 seconds
+ await_readable: 152500 executions in 1.0001 seconds
+ await_writable: 150700 executions in 1.0001 seconds

  Results for mruby 1.3.0
  -----------------------
- wait: 104300 executions in 1.0002 seconds
- await_readable: 132600 executions in 1.0006 seconds
- await_writable: 130500 executions in 1.0005 seconds
+ wait: 160700 executions in 1.0004 seconds
+ await_readable: 176700 executions in 1.0005 seconds
+ await_writable: 177700 executions in 1.0003 seconds

  Explanation of the results:

@@ -257,25 +257,25 @@ performance in these scenarios.

  Results for ruby 2.4.1
  ----------------------
- call: 687600 executions in 1.0001 seconds
- call_nonblock: 855600 executions in 1.0001 seconds
- call_detached: 426400 executions in 1.0000 seconds
- call_and_forget: 722200 executions in 1.0000 seconds
- waiting call: 90300 executions in 1.0005 seconds
- waiting call_nonblock: 191800 executions in 1.0001 seconds
- waiting call_detached: 190300 executions in 1.0003 seconds
- waiting call_and_forget: 207100 executions in 1.0001 seconds
+ call: 753800 executions in 1.0000 seconds
+ call_nonblock: 913400 executions in 1.0000 seconds
+ call_detached: 418700 executions in 1.0001 seconds
+ call_and_forget: 748800 executions in 1.0001 seconds
+ waiting call: 89400 executions in 1.0001 seconds
+ waiting call_nonblock: 198800 executions in 1.0001 seconds
+ waiting call_detached: 199600 executions in 1.0004 seconds
+ waiting call_and_forget: 225300 executions in 1.0001 seconds

  Results for mruby 1.3.0
  -----------------------
- call: 319900 executions in 1.0003 seconds
- call_nonblock: 431700 executions in 1.0002 seconds
- call_detached: 158400 executions in 1.0006 seconds
- call_and_forget: 397700 executions in 1.0002 seconds
- waiting call: 49900 executions in 1.0015 seconds
- waiting call_nonblock: 74600 executions in 1.0001 seconds
- waiting call_detached: 73300 executions in 1.0006 seconds
- waiting call_and_forget: 85200 executions in 1.0008 seconds
+ call: 444200 executions in 1.0002 seconds
+ call_nonblock: 525300 executions in 1.0001 seconds
+ call_detached: 232600 executions in 1.0003 seconds
+ call_and_forget: 464500 executions in 1.0000 seconds
+ waiting call: 60100 executions in 1.0004 seconds
+ waiting call_nonblock: 95400 executions in 1.0004 seconds
+ waiting call_detached: 102100 executions in 1.0005 seconds
+ waiting call_and_forget: 118500 executions in 1.0005 seconds

  `wait 0` is used as a stand in for all wait methods. Measurements of concurrent
  procs doing nothing are included for comparision.
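The actual benchmark scripts live under `perf/` and are not part of this diff. The following is only a rough sketch of how figures like "N executions in 1.0000 seconds" can be produced, including the `wait 0` between batches mentioned above; the batch size and timing loop are assumptions for illustration:

```ruby
require 'concurrently'

conproc = concurrent_proc{}   # a concurrent proc doing nothing
batch_size = 100
executions = 0
start = Time.now

while Time.now - start < 1
  batch_size.times{ conproc.call_detached }
  executions += batch_size
  wait 0 # give the scheduled evaluations a chance to run
end

puts "conproc.call_detached: #{executions} executions in #{(Time.now - start).round 4} seconds"
```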
@@ -6,7 +6,7 @@ require "callbacks_attachable"
  root = File.dirname File.dirname File.dirname __FILE__
  files =
  Dir[File.join(root, 'ext', 'all', '**', '*.rb')].sort +
- Dir[File.join(root, 'ext', 'Ruby', '**', '*.rb')].sort +
+ Dir[File.join(root, 'ext', 'CRuby', '**', '*.rb')].sort +
  Dir[File.join(root, 'lib', 'all', '**', '*.rb')].sort +
- Dir[File.join(root, 'lib', 'Ruby', '**', '*.rb')].sort
+ Dir[File.join(root, 'lib', 'CRuby', '**', '*.rb')].sort
  files.each{ |f| require f }
@@ -2,7 +2,7 @@ module Concurrently
  class Proc::Evaluation
  module RescueableError
  # Ruby has additional error classes
- [NoMemoryError, SecurityError].each do |error_class|
+ [SecurityError].each do |error_class|
  append_features error_class
  end
  end
@@ -0,0 +1,8 @@
+ # @private
+ # @since 1.0.0
+ class Thread
+ # Attach an event loop to every thread in Ruby.
+ def __concurrently_event_loop__
+ @__concurrently_event_loop__ ||= Concurrently::EventLoop.new
+ end
+ end
@@ -0,0 +1,7 @@
+ # @api public
+ # @since 1.0.0
+ #
+ # The namespace this library lives in
+ module Concurrently
+
+ end
@@ -0,0 +1,151 @@
+ module Concurrently
+ # @api public
+ # @since 1.2.0
+ #
+ # With `Concurrently::Debug` the locations where concurrent procs are
+ # entered, suspended, resumed and exited at can be logged. The log shows
+ # the subsequent order in which concurrent procs are executed.
+ #
+ # It looks like:
+ #
+ # .---- BEGIN 94khk test/CRuby/event_loop_spec.rb:16
+ # '-> SUSPEND 94khk lib/all/concurrently/proc/evaluation.rb:86:in `__suspend__'
+ # ... [other entries] ...
+ # .--- RESUME 94khk lib/all/concurrently/proc/evaluation.rb:86:in `__suspend__'
+ # '-----> END 94khk test/CRuby/event_loop_spec.rb:16
+ #
+ # This log section indicates that the concurrent proc defined at
+ # `test/CRuby/event_loop_spec.rb:16` has been started to be evaluated. It is
+ # assigned the id `94khk`. The code of the proc is evaluated until it is
+ # suspended at `lib/all/concurrently/proc/evaluation.rb:86`. After other
+ # concurrent procs where scheduled to run, proc `94khk` is resumed again and
+ # from there on is evaluated until its end.
+ #
+ # Next to `END`, there are two other variations how the evaluation of a
+ # concurrent proc can be marked as concluded. These are
+ # * `CANCEL` if the evaluation is prematurely concluded with
+ # {Proc::Evaluation#conclude_to} and
+ # * `ERROR` if the evaluation raises an error.
+ #
+ # The id of an evaluation may (and very likely will) be reused after the
+ # evaluation was concluded.
+ module Debug
+ @overwrites = []
+ @fibers = {}
+
+ @concurrently_path = 'm-ruby-concurrently'.freeze
+
+ class << self
+ # Enables debugging
+ #
+ # @param [Logger] logger
+ # @param [Array<String>] filter An array of strings to filter
+ # stacktrace locations by. The first location in a stacktrace
+ # containing one of the given strings will be used in the log message.
+ # If no filter is given, the first location is logged.
+ # @return [true]
+ #
+ # @example
+ # require 'logger'
+ # Concurrently::Debug.enable Logger.new(STDOUT), ['file2']
+ #
+ # # Assuming the stacktrace when resuming/suspending looks like:
+ # # /path/to/file1.rb
+ # # /path/to/file2.rb
+ # # /path/to/file3.rb
+ # #
+ # # Then, the logged location will be /path/to/file2.rb
+ def enable(logger, filter = false)
+ @logger = logger
+ @filter = filter
+ @log_concurrently_gem = filter && filter.any?{ |f| f.include? @concurrently_path }
+ @overwrites.each{ |overwrite| overwrite.call }
+ true
+ end
+
+ # @private
+ #
+ # Defines blocks of code to evaluate once debugging is enabled. Used
+ # internally only.
+ #
+ # @param [Class] klass The class in whose scope the given block will be evaluated
+ # with `#class_eval`
+ # @param [Proc] block The block of code to evaluate
+ def overwrite(klass, &block)
+ @overwrites << proc{ klass.class_eval &block }
+ end
+
+
+ # @private
+ def notice_for(fiber)
+ <<-MSG
+ Evaluation id: #{fiber.__id__.to_s 36}
+ To debug how this happened enable logging of control flow with:
+
+ require 'logger'
+ Concurrently::Debug.enable Logger.new STDOUT
+ MSG
+ end
+
+ # @private
+ def log_begin(fiber, location)
+ return unless @logger
+ return unless satisfies_filter? location
+ @fibers[fiber.__id__] = location
+ @logger.debug ".---- BEGIN #{fiber.__id__.to_s 36} #{location}"
+ end
+
+ # @private
+ def log_suspend(fiber, locations)
+ return unless @logger
+ return unless @fibers.key? fiber.__id__
+ return unless location = locations.find{ |loc| satisfies_filter? loc }
+ @logger.debug "'-> SUSPEND #{fiber.__id__.to_s 36} #{location}"
+ end
+
+ # @private
+ def log_schedule(fiber, locations)
+ return unless @logger
+ return unless location = locations.find{ |loc| satisfies_filter? loc }
+
+ prefix = (@fibers.key? Fiber.current.__id__) ? '|' : ' '
+ @logger.debug "#{prefix} SCHEDULE #{fiber.__id__.to_s 36} from #{location}"
+ end
+
+ # @private
+ def log_resume(fiber, locations)
+ return unless @logger
+ return unless @fibers.key? fiber.__id__
+ return unless location = locations.find{ |loc| satisfies_filter? loc }
+ @logger.debug ".--- RESUME #{fiber.__id__.to_s 36} #{location}"
+ end
+
+ # @private
+ def log_end(fiber)
+ return unless @logger
+ return unless location = @fibers.delete(fiber.__id__)
+ @logger.debug "'-----> END #{fiber.__id__.to_s 36} #{location}"
+ end
+
+ # @private
+ def log_cancel(fiber)
+ return unless @logger
+ return unless location = @fibers.delete(fiber.__id__)
+ @logger.debug "'--> CANCEL #{fiber.__id__.to_s 36} #{location}"
+ end
+
+ # @private
+ def log_error(fiber)
+ return unless @logger
+ return unless location = @fibers.delete(fiber.__id__)
+ @logger.debug "'---> ERROR #{fiber.__id__.to_s 36} #{location}"
+ end
+
+ # @private
+ private def satisfies_filter?(location)
+ (not location.include? @concurrently_path or @log_concurrently_gem) and
+ (not @filter or @filter.any?{ |filter| location.include? filter })
+ end
+ end
+ end
+ end
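The `satisfies_filter?` predicate above decides which stacktrace location ends up in a log entry: locations inside the gem itself are skipped unless the filter explicitly names them, and with a filter present only matching locations qualify. A standalone sketch of that logic; the paths and the `'my_app'` filter below are made up for illustration:

```ruby
# Mirror of the filter logic in Concurrently::Debug#satisfies_filter?
concurrently_path = 'm-ruby-concurrently'
filter = ['my_app']
log_concurrently_gem = filter.any?{ |f| f.include? concurrently_path }

satisfies_filter = lambda do |location|
  (!location.include?(concurrently_path) || log_concurrently_gem) &&
    (!filter || filter.any?{ |f| location.include? f })
end

locations = [
  '/gems/m-ruby-concurrently/lib/all/concurrently/proc.rb:10',
  '/home/user/my_app/app.rb:42'
]
p locations.find{ |loc| satisfies_filter.call loc }
# => "/home/user/my_app/app.rb:42"
```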
@@ -22,7 +22,7 @@ module Concurrently
  # Concurrently::Evaluation.current # => #<Concurrently::Proc::Evaluation:0x00000000e56910>
  # end
  #
- # Concurrently::Evaluation.current # => #<Concurrently::Evaluation0x00000000e5be10>
+ # Concurrently::Evaluation.current # => #<Concurrently::Evaluation:0x00000000e5be10>
  def self.current
  EventLoop.current.run_queue.current_evaluation
  end
@@ -30,23 +30,43 @@ module Concurrently
  # @private
  def initialize(fiber)
  @fiber = fiber
+ @waiting = false
  end

  # @private
  #
- # The fiber the evaluation runs inside.
+ # Suspends the evaluation. This is a method called internally only.
+ def __suspend__(event_loop_fiber)
+ @waiting = true
+ event_loop_fiber.resume
+ end
+
+ # @private
+ #
+ # Resumes the evaluation. This is a method called internally only.
  def __resume__(result)
  @scheduled = false
- @fiber.resume result
+ Fiber.yield result
  end

  # @!attribute [r] waiting?
  #
- # Checks if the evaluation is waiting
+ # Checks if the evaluation is not running and not resumed.
  #
  # @return [Boolean]
  def waiting?
- @waiting
+ !!@waiting
+ end
+
+ # @private
+ #
+ # Called by {Proc::Evaluation#await_result}. This is a method called
+ # internally only.
+ def __await_result_of__(evaluation, opts)
+ evaluation.__add_waiting_evaluation__ self
+ await_resume! opts
+ ensure
+ evaluation.__remove_waiting_evaluation__ self
  end

  # @note The exclamation mark in its name stands for: Watch out!
@@ -76,7 +96,8 @@ module Concurrently
  # because of a manual call of {Kernel#await_resume!}.
  #
  # @return [:resumed]
- # @raise [Error] if the evaluation is already scheduled to resume
+ # @raise [Error] if the evaluation is not waiting or is already scheduled
+ # to be resumed
  #
  # @example
  # # Control flow is indicated by (N)
@@ -92,8 +113,10 @@ module Concurrently
  # evaluation.resume! :result
  # # (5)
  # evaluation.await_result # => :result
- def resume!(result = nil)
- raise Error, "already scheduled to resume" if @scheduled
+ def __resume__!(result = nil)
+ raise self.class::Error, "already scheduled\n#{Debug.notice_for @fiber}" if @scheduled
+ raise self.class::Error, "not waiting\n#{Debug.notice_for @fiber}" unless @waiting
+ @waiting = false
  @scheduled = true

  run_queue = Concurrently::EventLoop.current.run_queue
@@ -107,5 +130,13 @@ module Concurrently
  run_queue.schedule_immediately(self, result)
  :resumed
  end
+ alias resume! __resume__!
+
+ Debug.overwrite(self) do
+ def resume!(result = nil)
+ Debug.log_schedule @fiber, caller
+ __resume__! result
+ end
+ end
  end
  end
@@ -8,15 +8,29 @@ module Concurrently
  # There are two tracks. The fast track and the regular cart track. The
  # fast track exists for evaluations to be scheduled immediately. Having a
  # dedicated track lets us just push carts to the track in the order they
- # appear. This saves us the rather expensive #bisect_left computation where
+ # appear. This saves us the rather expensive #find_index computation where
  # on the regular cart track to insert the cart.

  # The additional cart index exists so carts can be cancelled by their
  # evaluation. Cancelled carts have their evaluation set to false.

  class Track < Array
- def bisect_left
- bsearch_index{ |item| yield item } || length
+ def find_index(ref)
+ # For a track size < 64, a linear search is faster than a binary one.
+ if (len = size) < 64
+ idx = 0
+ while idx < len
+ break if self[idx][TIME] <= ref
+ idx += 1
+ end
+ idx
+ else
+ bsearch_index{ |cart| cart[TIME] <= ref } || length
+ end
+ end
+
+ def insert(cart)
+ super find_index(cart[TIME]), cart
  end

  def next
@@ -34,17 +48,32 @@ module Concurrently
  @immediate_track = Track.new
  end

- def schedule_immediately(evaluation, result = nil, cancellable = true)
+ def __schedule_immediately__(evaluation, result = nil, cancellable = true)
  cart = [evaluation, false, result]
  evaluation.instance_variable_set :@__cart__, cart if cancellable
  @immediate_track << cart
  end
+ alias schedule_immediately __schedule_immediately__
+
+ Debug.overwrite(self) do
+ def schedule_immediately(evaluation, result = nil, cancellable = true)
+ evaluation.instance_variable_set :@scheduled_caller, caller if cancellable
+ __schedule_immediately__(evaluation, result, cancellable)
+ end
+ end

- def schedule_deferred(evaluation, seconds, result = nil)
+ def __schedule_deferred__(evaluation, seconds, result = nil)
  cart = [evaluation, @loop.lifetime+seconds, result]
  evaluation.instance_variable_set :@__cart__, cart
- index = @deferred_track.bisect_left{ |tcart| tcart[TIME] <= cart[TIME] }
- @deferred_track.insert(index, cart)
+ @deferred_track.insert(cart)
+ end
+ alias schedule_deferred __schedule_deferred__
+
+ Debug.overwrite(self) do
+ def schedule_deferred(evaluation, seconds, result = nil)
+ evaluation.instance_variable_set :@scheduled_caller, caller
+ __schedule_deferred__(evaluation, seconds, result)
+ end
  end

  def cancel(evaluation, only_if_deferred = false)
@@ -61,7 +90,7 @@ module Concurrently

  if @deferred_track.size > 0
  now = @loop.lifetime
- index = @deferred_track.bisect_left{ |cart| cart[TIME] <= now }
+ index = @deferred_track.find_index now

  processable_count = @deferred_track.size-index
  while processable_count > 0
@@ -94,12 +123,9 @@ module Concurrently
  when Proc::Fiber # this will only happen when calling Concurrently::Proc#call_and_forget
  @current_evaluation = nil
  evaluation.resume result
- when Proc::Evaluation
+ when Evaluation
  @current_evaluation = evaluation
  evaluation.__resume__ result
- when Evaluation
- @current_evaluation = nil
- Fiber.yield result
  else
  raise Error, "#{evaluation.inspect} cannot be resumed"
  end
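The reworked `Track#find_index` above switches between a linear scan and `Array#bsearch_index` depending on the track size, with carts kept sorted by descending time. A standalone sketch of the same selection logic; the `TIME` index value and the sample carts are assumptions for the example:

```ruby
TIME = 1 # index of the time slot in a cart triple [evaluation, time, result]

# Hybrid search: linear scan for small tracks, binary search otherwise.
def find_insertion_index(track, ref_time)
  if (len = track.size) < 64
    idx = 0
    idx += 1 while idx < len && track[idx][TIME] > ref_time
    idx
  else
    track.bsearch_index{ |cart| cart[TIME] <= ref_time } || track.length
  end
end

track = [[:a, 5.0, nil], [:b, 3.0, nil], [:c, 1.0, nil]] # descending by time
track.insert find_insertion_index(track, 2.0), [:d, 2.0, nil]
p track.map{ |cart| cart[TIME] } # => [5.0, 3.0, 2.0, 1.0]
```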
@@ -5,12 +5,18 @@ module Concurrently
  #
  # mruby's Proc does not support instance variables. So, whe have to make
  # it a normal class that does not inherit from Proc :(
- class Proc; end
+ #
+ # It is continued to be implemented in the mruby folder of lib/
+ class Proc
+ def original_call(*args)
+ @proc.call *args
+ end
+ end
  else
  class Proc < ::Proc
  # @private
  # Calls the concurrent proc like a normal proc
- alias_method :__proc_call__, :call
+ alias_method :original_call, :call
  end
  end

@@ -25,13 +31,15 @@ module Concurrently
  # execution of its thread.
  #
  # Errors raised inside concurrent evaluations are re-raised when getting
- # their result with {Evaluation#await_result}. They can also be watched by
- # registering callbacks for the `:error` event as shown in the example below.
- # This is useful as a central hook to all errors inside concurrent
- # evaluations for monitoring or logging purposes. Also, concurrent procs
- # evaluated with {Proc#call_and_forget} are evaluated in the background with
- # no access to their evaluation and will fail silently. The callbacks are the
- # only way to be notified about errors inside them.
+ # their result with {Evaluation#await_result}. They are also logged to
+ # stderr by default. This behavior can be controlled by {.error_log_output=}.
+ # In addition, errors can be watched by registering callbacks for the
+ # `:error` event as shown in the example below. This is useful as a central
+ # access point to errors happening inside concurrent evaluations for recovery
+ # purposes. Also, concurrent procs evaluated with {Proc#call_and_forget} are
+ # evaluated in the background with no access to their evaluation and if they
+ # fail they do so silently. The callbacks are the only way to gain access to
+ # their errors.
  #
  # The callbacks can be registered for all procs or only for one specific
  # proc:
@@ -68,6 +76,45 @@ module Concurrently
  class Proc
  include CallbacksAttachable

+ # @private
+ def self.default_error_log_message(proc, error)
+ <<MSG
+ Concurrent proc exited with error!
+ Source location: #{proc.source_location.join ':'}
+ Error: (#{error.class}) #{error}
+ Backtrace: #{error.backtrace.join "\n"}
+ MSG
+ end
+
+ # @api public
+ # @since 1.2.0
+ #
+ # Sets the output to which errors in concurrent procs are written to.
+ #
+ # @param [IO|Logger|false|nil] output
+ #
+ # By default, errors are written to stderr. To disable logging of errors,
+ # set the output to `nil` or `false`.
+ #
+ # @example
+ # require 'logger'
+ # Concurrently::Proc.error_log_output = Logger.new(STDOUT)
+ def self.error_log_output=(output)
+ if Object.const_defined? :Logger and Logger === output
+ @error_handler.cancel if @error_handler
+ @error_handler = on(:error){ |error| output.error Proc.default_error_log_message self, error }
+ elsif IO === output
+ @error_handler.cancel if @error_handler
+ @error_handler = on(:error){ |error| output.puts Proc.default_error_log_message self, error }
+ elsif !output
+ remove_instance_variable(:@error_handler).cancel if @error_handler
+ else
+ raise Error, "output no logger or IO"
+ end
+ end
+
+ self.error_log_output = STDERR
+
  # A new instance of {Proc}
  #
  # @param [Class] evaluation_class It can be given a custom class to create
@@ -78,6 +125,26 @@ module Concurrently
  @evaluation_class = evaluation_class
  end

+ # @private
+ # Calls the concurrent proc from a fiber
+ alias_method :__original_call__, :original_call
+
+ Debug.overwrite(self) do
+ def original_call(*args)
+ Debug.log_begin Fiber.current, source_location.join(':')
+ result = __original_call__ *args
+ rescue Evaluation::Cancelled => e
+ Debug.log_cancel Fiber.current
+ raise e
+ rescue Exception => e
+ Debug.log_error Fiber.current
+ raise e
+ else
+ Debug.log_end Fiber.current
+ result
+ end
+ end
+
  # Evaluates the concurrent proc in a blocking manner.
  #
  # Evaluating the proc this way executes its block of code immediately
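As the new documentation above describes, errors in concurrent procs are written to stderr by default and can additionally be watched via the `:error` callback. A minimal sketch combining the two, using only the interfaces shown above:

```ruby
require 'concurrently'

# Disable the default stderr output and handle errors centrally instead.
Concurrently::Proc.error_log_output = nil
Concurrently::Proc.on(:error) do |error|
  warn "concurrent proc failed: #{error.class}: #{error}"
end

concurrent_proc{ raise "boom" }.call_and_forget
wait 0 # give the background evaluation a chance to run and fail
```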
@@ -18,7 +18,7 @@ module Concurrently
  def initialize(fiber)
  super
  @concluded = false
- @awaiting_result = {}
+ @waiting_evaluations = {}
  @data = {}
  end

@@ -76,6 +76,49 @@ module Concurrently
  @data.keys
  end

+ # @private
+ #
+ # Suspends the evaluation. This is a method called internally only.
+ def __suspend__(event_loop_fiber)
+ @waiting = true
+ # Yield back to the event loop fiber or the evaluation evaluating this one.
+ # Pass along itself to indicate it is not yet fully evaluated.
+ Proc::Fiber.yield self
+ ensure
+ @waiting = false
+ end
+
+ # @private
+ #
+ # Resumes the evaluation. This is a method called internally only.
+ def __resume__(result)
+ @scheduled = false
+ @fiber.resume result
+ end
+
+ Debug.overwrite(self) do
+ def __resume__(result)
+ @scheduled = false
+ @fiber.resume result, @scheduled_caller
+ end
+ end
+
+ # @private
+ #
+ # Manages bookkeeping of evaluations awaiting the result of this
+ # evaluation. This is a method called internally only.
+ def __add_waiting_evaluation__(evaluation, result_override = false)
+ @waiting_evaluations.store evaluation, result_override
+ end
+
+ # @private
+ #
+ # Manages bookkeeping of evaluations awaiting the result of this
+ # evaluation. This is a method called internally only.
+ def __remove_waiting_evaluation__(evaluation)
+ @waiting_evaluations.delete evaluation
+ end
+
  # Waits for the evaluation to be concluded with a result.
  #
  # The result can be awaited from multiple places at once. All of them are
@@ -171,19 +214,15 @@ module Concurrently
  # evaluation.await_result{ |result| raise "invalid result" if result != :result }
  # # => raises "invalid result"
  def await_result(opts = {}) # &with_result
- if @concluded
- result = @result
- else
- result = begin
- evaluation = Concurrently::Evaluation.current
- @awaiting_result.store evaluation, false
- await_resume! opts
- rescue Exception => error
- error
- ensure
- @awaiting_result.delete evaluation
- end
- end
+ result = if @concluded
+ @result
+ else
+ begin
+ Concurrently::Evaluation.current.__await_result_of__ self, opts
+ rescue Exception => error
+ error
+ end
+ end

  result = yield result if block_given?

@@ -195,9 +234,9 @@ module Concurrently
  # Checks if the evaluation is concluded
  #
  # @return [Boolean]
- def concluded?
- @concluded
- end
+ attr_reader :concluded
+ alias concluded? concluded
+ undef concluded

  # Cancels the concurrent evaluation prematurely by injecting a result.
  #
@@ -228,29 +267,23 @@ module Concurrently
  # # (6)
  def conclude_to(result)
  if @concluded
- raise self.class::Error, "already concluded"
+ raise self.class::Error, "already concluded\n#{Debug.notice_for @fiber}"
  end

  @result = result
  @concluded = true

  if Fiber.current != @fiber
- # Cancel its fiber by resuming it with itself as argument
+ # Cancel its fiber
+ run_queue = Concurrently::EventLoop.current.run_queue
+ previous_evaluation = run_queue.current_evaluation
+ run_queue.current_evaluation = self
  @fiber.resume Cancelled
+ run_queue.current_evaluation = previous_evaluation
  end

- @awaiting_result.each{ |evaluation, override| evaluation.resume! (override or result) }
+ @waiting_evaluations.each{ |evaluation, override| evaluation.resume! (override or result) }
  :concluded
  end
-
- # Schedules the evaluation to be resumed
- #
- # For details see: {Concurrently::Evaluation#resume!}
- #
- # @raise [Evaluation::Error] if the evaluation is already concluded
- def resume!(*)
- raise Evaluation::Error, "already concluded to #{@result.inspect}" if @concluded
- super
- end
  end
  end
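The `#conclude_to` changes above keep the bookkeeping of waiting evaluations in `@waiting_evaluations` and resume each of them with the injected result. A short usage sketch based on the documented behavior; the `:cancelled` result is just an example value:

```ruby
require 'concurrently'

evaluation = concurrent_proc{ wait 10; :finished }.call_detached

# Conclude the evaluation prematurely from another concurrent proc.
concurrently{ evaluation.conclude_to :cancelled }

evaluation.await_result # => :cancelled
evaluation.concluded?   # => true
```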
@@ -36,8 +36,10 @@ module Concurrently
  "there is a bug in Concurrently."
  else
  begin
- result = proc.__proc_call__ *args
- (evaluation = evaluation_bucket[0]) and evaluation.conclude_to result
+ result = proc.original_call *args
+ if (evaluation = evaluation_bucket[0]) and not evaluation.concluded?
+ evaluation.conclude_to result
+ end
  result
  rescue Proc::Evaluation::Cancelled
  # raised in Kernel#await_resume!
@@ -45,9 +47,10 @@ module Concurrently
  rescue Proc::Evaluation::RescueableError => error
  # Rescue all errors not critical for other concurrent evaluations
  # and don't let them leak to the loop to keep it up and running.
- STDERR.puts error
  proc.trigger :error, error
- (evaluation = evaluation_bucket[0]) and evaluation.conclude_to error
+ if (evaluation = evaluation_bucket[0]) and not evaluation.concluded?
+ evaluation.conclude_to error
+ end
  error
  end
  end
@@ -56,9 +59,25 @@ module Concurrently

  # Yield back to the event loop fiber or the fiber evaluating this one
  # and wait for the next proc to evaluate.
- proc, args, evaluation_bucket = Fiber.yield result
+ proc, args, evaluation_bucket = Proc::Fiber.yield result
  end
  end
  end
+
+ Debug.overwrite(self) do
+ def self.yield(*)
+ Debug.log_suspend Fiber.current, caller
+ super
+ ensure
+ Debug.log_resume Fiber.current, caller
+ end
+
+ def resume(result, stacktrace = caller)
+ Debug.log_suspend Fiber.current, stacktrace
+ super result
+ ensure
+ Debug.log_resume Fiber.current, stacktrace
+ end
+ end
  end
  end
@@ -1,8 +1,4 @@
- # @api public
- # @since 1.0.0
- #
- # The namespace this library lives in
  module Concurrently
  # The version
- VERSION = "1.1.1"
+ VERSION = "1.2.0"
  end
@@ -9,7 +9,7 @@ module Kernel
  #
  # This is a shortcut for {Concurrently::Proc#call_detached}.
  #
- # @return [nil]
+ # @return [Evaluation]
  #
  # @example
  # concurrently(a,b,c) do |a,b,c|
@@ -120,20 +120,11 @@ module Kernel
  run_queue.schedule_deferred(evaluation, seconds, timeout_result)
  end

- evaluation.instance_variable_set :@waiting, true
- result = case evaluation
- when Concurrently::Proc::Evaluation
- # Yield back to the event loop fiber or the evaluation evaluating this one.
- # Pass along itself to indicate it is not yet fully evaluated.
- Fiber.yield evaluation
- else
- event_loop.fiber.resume
- end
- evaluation.instance_variable_set :@waiting, false
+ result = evaluation.__suspend__ event_loop.fiber

  if Concurrently::Proc::Evaluation::Cancelled.equal? result
  run_queue.cancel evaluation # in case the evaluation has already been scheduled to resume
- raise Concurrently::Proc::Evaluation::Cancelled, ''
+ raise Concurrently::Proc::Evaluation::Cancelled, '' # TODO: add empty backtrace as last argument once murby supports it
  elsif Concurrently::Evaluation::TimeoutError.equal? result
  raise Concurrently::Evaluation::TimeoutError, "evaluation timed out after #{seconds} second(s)"
  else
@@ -242,10 +233,10 @@ module Kernel
  else
  begin
  curr_eval = Concurrently::Evaluation.current
- evaluations.each{ |e| e.instance_eval{ @awaiting_result.store curr_eval, self } }
+ evaluations.each{ |eval| eval.__add_waiting_evaluation__ curr_eval, eval }
  await_resume! opts
  ensure
- evaluations.each{ |e| e.instance_eval{ @awaiting_result.delete curr_eval } }
+ evaluations.each{ |eval| eval.__remove_waiting_evaluation__ curr_eval }
  end
  end
  end
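The corrected `@return` tag above reflects that `#concurrently` is documented as a shortcut for `Concurrently::Proc#call_detached` and therefore hands back an evaluation. A minimal sketch relying only on that documented return value:

```ruby
require 'concurrently'

evaluation = concurrently do
  wait 0.1
  :done
end

evaluation.await_result # => :done
```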
@@ -10,12 +10,12 @@ module Concurrently
  @proc = proc
  end

- def arity
- @proc.arity
+ def source_location
+ @proc.source_location || ['main', 0]
  end

- def __proc_call__(*args)
- @proc.call *args
+ def arity
+ @proc.arity
  end
  end
  end
@@ -25,9 +25,12 @@ DESC
  spec.license = 'Apache-2.0'
  spec.authors = ['Christopher Aue']

- # patch build process so we can set source files with spec.rbfiles
- @generate_functions = true
- @objs << objfile("#{build_dir}/gem_init")
+ unless system("git merge-base --is-ancestor 5a9eedf5417266b82e3695ae0c29797182a5d04e HEAD")
+ # mruby commit 5a9eedf fixed the usage of spec.rbfiles. mruby 1.3.0
+ # did not have that commit, yet. Add the patch for this case:
+ @generate_functions = true
+ @objs << objfile("#{build_dir}/gem_init")
+ end

  spec.rbfiles =
  Dir["#{spec.dir}/ext/all/**/*.rb"].sort +
@@ -36,13 +39,16 @@ DESC
  Dir["#{spec.dir}/lib/mruby/**/*.rb"].sort
  spec.test_rbfiles = Dir["#{spec.dir}/test/mruby/*.rb"]

+ spec.add_dependency 'mruby-hash-ext', :core => 'mruby-hash-ext'
  spec.add_dependency 'mruby-array-ext', :core => 'mruby-array-ext'
  spec.add_dependency 'mruby-numeric-ext', :core => 'mruby-numeric-ext'
+ spec.add_dependency 'mruby-proc-ext', :core => 'mruby-proc-ext'
+ spec.add_dependency 'mruby-kernel-ext', :core => 'mruby-kernel-ext'
  spec.add_dependency 'mruby-enumerator', :core => 'mruby-enumerator'
  spec.add_dependency 'mruby-fiber', :core => 'mruby-fiber'
  spec.add_dependency 'mruby-time', :core => 'mruby-time'
  spec.add_dependency 'mruby-io'
- spec.add_dependency 'mruby-callbacks_attachable', '~> 2.2', github: 'christopheraue/m-ruby-callbacks_attachable'
+ spec.add_dependency 'mruby-callbacks_attachable', '~> 3.0', github: 'christopheraue/m-ruby-callbacks_attachable'

  # use mruby-poll only on unix-like OSes
  if ENV['VisualStudioVersion'] || ENV['VSINSTALLDIR']
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: concurrently
  version: !ruby/object:Gem::Version
- version: 1.1.1
+ version: 1.2.0
  platform: ruby
  authors:
  - Christopher Aue
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-07-15 00:00:00.000000000 Z
+ date: 2017-12-12 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: nio4r
@@ -44,14 +44,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.2'
+ version: '3.0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.2'
+ version: '3.0'
  description: "Concurrently is a concurrency framework for Ruby and mruby based on\nfibers.
  With it code can be evaluated independently in its own execution\ncontext similar
  to a thread:\n\n hello = concurrently do\n wait 0.2 # seconds\n \"hello\"\n
@@ -73,7 +73,7 @@ files:
  - RELEASE_NOTES.md
  - Rakefile
  - concurrently.gemspec
- - ext/Ruby/thread.rb
+ - ext/CRuby/thread.rb
  - ext/all/array.rb
  - ext/mruby/fiber.rb
  - ext/mruby/io.rb
@@ -81,10 +81,13 @@ files:
  - guides/Overview.md
  - guides/Performance.md
  - guides/Troubleshooting.md
- - lib/Ruby/concurrently.rb
- - lib/Ruby/concurrently/event_loop.rb
- - lib/Ruby/concurrently/event_loop/io_selector.rb
- - lib/Ruby/concurrently/proc/evaluation/error.rb
+ - lib/CRuby/concurrently.rb
+ - lib/CRuby/concurrently/event_loop.rb
+ - lib/CRuby/concurrently/event_loop/io_selector.rb
+ - lib/CRuby/concurrently/proc/evaluation/error.rb
+ - lib/CRuby/thread.rb
+ - lib/all/concurrently.rb
+ - lib/all/concurrently/debug.rb
  - lib/all/concurrently/error.rb
  - lib/all/concurrently/evaluation.rb
  - lib/all/concurrently/evaluation/error.rb
@@ -105,7 +108,7 @@ files:
  - lib/mruby/kernel.rb
  - mrbgem.rake
  - mruby_builds/build_config.rb
- - perf/Ruby/stage.rb
+ - perf/CRuby/stage.rb
  - perf/benchmark_call_methods.rb
  - perf/benchmark_call_methods_waiting.rb
  - perf/benchmark_wait_methods.rb
@@ -128,7 +131,7 @@ metadata: {}
  post_install_message:
  rdoc_options: []
  require_paths:
- - lib/Ruby
+ - lib/CRuby
  required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -141,7 +144,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.6.11
+ rubygems_version: 2.6.13
  signing_key:
  specification_version: 4
  summary: A concurrency framework based on fibers