asynchronic 3.0.1 → 4.0.1

Files changed (41)
  1. checksums.yaml +5 -5
  2. data/.travis.yml +7 -10
  3. data/README.md +1 -2
  4. data/asynchronic.gemspec +2 -3
  5. data/lib/asynchronic.rb +8 -0
  6. data/lib/asynchronic/data_store/in_memory.rb +17 -15
  7. data/lib/asynchronic/data_store/key.rb +3 -3
  8. data/lib/asynchronic/data_store/lazy_value.rb +5 -3
  9. data/lib/asynchronic/data_store/redis.rb +22 -14
  10. data/lib/asynchronic/data_store/scoped_store.rb +18 -19
  11. data/lib/asynchronic/environment.rb +3 -3
  12. data/lib/asynchronic/error.rb +2 -3
  13. data/lib/asynchronic/garbage_collector.rb +2 -1
  14. data/lib/asynchronic/job.rb +12 -12
  15. data/lib/asynchronic/notifier/broadcaster.rb +8 -4
  16. data/lib/asynchronic/process.rb +42 -40
  17. data/lib/asynchronic/queue_engine/in_memory.rb +17 -11
  18. data/lib/asynchronic/queue_engine/ost.rb +7 -5
  19. data/lib/asynchronic/queue_engine/synchronic.rb +19 -7
  20. data/lib/asynchronic/version.rb +1 -1
  21. data/lib/asynchronic/worker.rb +7 -10
  22. data/spec/data_store/data_store_examples.rb +17 -9
  23. data/spec/data_store/in_memory_spec.rb +0 -2
  24. data/spec/data_store/key_spec.rb +1 -1
  25. data/spec/data_store/lazy_value_examples.rb +7 -5
  26. data/spec/data_store/redis_spec.rb +4 -10
  27. data/spec/expectations.rb +2 -2
  28. data/spec/facade_spec.rb +3 -3
  29. data/spec/jobs.rb +10 -10
  30. data/spec/minitest_helper.rb +5 -12
  31. data/spec/process/life_cycle_examples.rb +25 -23
  32. data/spec/process/life_cycle_in_memory_spec.rb +0 -1
  33. data/spec/process/life_cycle_redis_spec.rb +0 -1
  34. data/spec/queue_engine/in_memory_spec.rb +1 -3
  35. data/spec/queue_engine/ost_spec.rb +1 -7
  36. data/spec/queue_engine/queue_engine_examples.rb +17 -9
  37. data/spec/queue_engine/synchronic_spec.rb +1 -1
  38. data/spec/worker/in_memory_spec.rb +1 -2
  39. data/spec/worker/redis_spec.rb +0 -6
  40. data/spec/worker/worker_examples.rb +6 -4
  41. metadata +11 -20
data/lib/asynchronic/notifier/broadcaster.rb
@@ -8,23 +8,27 @@ module Asynchronic
       end
 
       def publish(pid, event, data=nil)
-        @broadcaster.publish DataStore::Key[pid][event], data
+        broadcaster.publish DataStore::Key[pid][event], data
       end
 
       def subscribe(pid, event, &block)
-        @broadcaster.subscribe DataStore::Key[pid][event] do |data|
+        broadcaster.subscribe DataStore::Key[pid][event] do |data|
           block.call data
         end
       end
 
       def unsubscribe(subscription_id)
-        @broadcaster.unsubscribe subscription_id
+        broadcaster.unsubscribe subscription_id
       end
 
       def unsubscribe_all
-        @broadcaster.unsubscribe_all
+        broadcaster.unsubscribe_all
       end
 
+      private
+
+      attr_reader :broadcaster
+
     end
   end
 end
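
The only change to the Broadcaster notifier is that instance-variable access moved behind a private reader; the publish/subscribe surface is unchanged. A minimal usage sketch, assuming an already-configured environment whose notifier is this Broadcaster and that #subscribe returns the id that #unsubscribe expects (the pid and event are placeholders):

    # Placeholders: 'some-pid' and :status_changed stand in for a real process id and event.
    notifier = Asynchronic.environment.notifier

    subscription_id = notifier.subscribe('some-pid', :status_changed) do |status|
      puts "status is now #{status}"
    end

    notifier.publish 'some-pid', :status_changed, :queued
    notifier.unsubscribe subscription_id
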
data/lib/asynchronic/process.rb
@@ -13,11 +13,36 @@ module Asynchronic
 
     ATTRIBUTE_NAMES = [:type, :name, :queue, :status, :dependencies, :data, :result, :error, :connection_name] | TIME_TRACKING_MAP.values.uniq
 
+    AUTOMATIC_ABORTED_ERROR_MESSAGE = 'Automatic aborted before execution'
     CANCELED_ERROR_MESSAGE = 'Canceled'
     DEAD_ERROR_MESSAGE = 'Process connection broken'
 
     attr_reader :id
 
+    def self.create(environment, type, params={})
+      id = params.delete(:id) || SecureRandom.uuid
+
+      Asynchronic.logger.debug('Asynchronic') { "Created process #{type} - #{id} - #{params}" }
+
+      new(environment, id) do
+        self.type = type
+        self.name = (params.delete(:alias) || type).to_s
+        self.queue = params.delete(:queue) || type.queue || parent_queue
+        self.dependencies = Array(params.delete(:dependencies)) | Array(params.delete(:dependency)) | infer_dependencies(params)
+        self.params = params
+        self.data = {}
+        pending!
+      end
+    end
+
+    def self.all(environment)
+      environment.data_store.keys
+        .select { |k| k.sections.count == 2 && k.match(/created_at$/) }
+        .sort_by { |k| environment.data_store[k] }
+        .reverse
+        .map { |k| Process.new environment, k.remove_last }
+    end
+
     def initialize(environment, id, &block)
       @environment = environment
       @id = DataStore::Key[id]
@@ -83,9 +108,11 @@ module Asynchronic
     end
 
     def processes
-      data_store.scoped(:processes).keys.
-        select { |k| k.sections.count == 2 && k.match(/\|name$/) }.
-        sort.map { |k| Process.new environment, id[:processes][k.remove_last] }
+      data_store.scoped(:processes)
+        .keys
+        .select { |k| k.sections.count == 2 && k.match(/\|name$/) }
+        .sort
+        .map { |k| Process.new environment, id[:processes][k.remove_last] }
     end
 
     def parent
@@ -97,22 +124,20 @@ module Asynchronic
     end
 
     def real_error
-      return nil unless error
+      return nil if !aborted?
 
-      processes.each do |child|
-        return child.real_error if child.error
-      end
+      first_aborted_child = processes.select(&:aborted?).sort_by(&:finalized_at).first
 
-      error.message
+      first_aborted_child ? first_aborted_child.real_error : error.message
     end
 
     def dependencies
       return [] if parent.nil? || data_store[:dependencies].empty?
-
+
       parent_processes = parent.processes.each_with_object({}) do |process, hash|
        hash[process.name] = process
       end
-
+
       data_store[:dependencies].map { |d| parent_processes[d.to_s] }
     end
 
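
In 3.x, #real_error walked children in definition order and keyed off #error; in 4.x it only applies once a process is aborted and follows the earliest aborted child by finalized_at. A hedged sketch of reading it, assuming a configured environment (the process id is a placeholder):

    pid = 'some-process-id' # placeholder
    process = Asynchronic.environment.load_process(pid)

    if process.aborted?
      # Returns the error message of the first child to abort (ordered by finalized_at),
      # or this process' own error message when no child aborted.
      puts process.real_error
    end
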
data/lib/asynchronic/process.rb (continued)
@@ -136,7 +161,7 @@ module Asynchronic
         wakeup_children
       end
       Asynchronic.logger.info('Asynchronic') { "Wakeup finalized #{type} (#{id})" }
-
+
       parent.wakeup if parent && finalized?
     end
 
@@ -152,29 +177,6 @@ module Asynchronic
       self.data = self.data.merge key => value
     end
 
-    def self.create(environment, type, params={})
-      id = params.delete(:id) || SecureRandom.uuid
-
-      Asynchronic.logger.debug('Asynchronic') { "Created process #{type} - #{id} - #{params}" }
-
-      new(environment, id) do
-        self.type = type
-        self.name = (params.delete(:alias) || type).to_s
-        self.queue = params.delete(:queue) || type.queue || parent_queue
-        self.dependencies = Array(params.delete(:dependencies)) | Array(params.delete(:dependency)) | infer_dependencies(params)
-        self.params = params
-        self.data = {}
-        pending!
-      end
-    end
-
-    def self.all(environment)
-      environment.data_store.keys.
-        select { |k| k.sections.count == 2 && k.match(/created_at$/) }.
-        sort_by { |k| environment.data_store[k] }.reverse.
-        map { |k| Process.new environment, k.remove_last }
-    end
-
     private
 
     attr_reader :environment
@@ -195,10 +197,10 @@ module Asynchronic
 
     def status=(status)
       Asynchronic.logger.info('Asynchronic') { "#{status.to_s.capitalize} #{type} (#{id})" }
-
+
       data_store[:status] = status
       data_store[TIME_TRACKING_MAP[status]] = Time.now if TIME_TRACKING_MAP.key? status
-
+
       environment.notifier.publish id, :status_changed, status
       environment.notifier.publish id, :finalized if finalized?
     end
@@ -215,8 +217,8 @@ module Asynchronic
       end
     end
 
-    def abort!(exception=nil)
-      self.error = Error.new exception if exception
+    def abort!(exception)
+      self.error = Error.new exception
       aborted!
     end
 
@@ -224,13 +226,13 @@ module Asynchronic
       self.connection_name = Asynchronic.connection_name
 
       if root.aborted?
-        abort!
+        abort! AUTOMATIC_ABORTED_ERROR_MESSAGE
       else
         running!
         self.result = job.call
         waiting!
       end
-
+
 
     rescue Exception => ex
       message = "Failed process #{type} (#{id})\n#{ex.class} #{ex.message}\n#{ex.backtrace.join("\n")}"
data/lib/asynchronic/queue_engine/in_memory.rb
@@ -2,27 +2,25 @@ module Asynchronic
   module QueueEngine
     class InMemory
 
-      attr_reader :default_queue
-
       def initialize(options={})
-        @default_queue = options[:default_queue]
+        @options = options
        @queues ||= Hash.new { |h,k| h[k] = Queue.new }
       end
 
       def default_queue
-        @default_queue ||= Asynchronic.default_queue
+        @default_queue ||= options.fetch(:default_queue, Asynchronic.default_queue)
       end
 
       def [](name)
-        @queues[name]
+        queues[name]
       end
 
-      def queues
-        @queues.keys.map(&:to_sym)
+      def queue_names
+        queues.keys.map(&:to_sym)
       end
 
       def clear
-        @queues.clear
+        queues.clear
       end
 
       def listener
@@ -37,12 +35,16 @@ module Asynchronic
         [Asynchronic.connection_name]
       end
 
+      private
+
+      attr_reader :queues, :options
+
 
       class Queue
 
         extend Forwardable
 
-        def_delegators :@queue, :size, :empty?, :to_a
+        def_delegators :queue, :size, :empty?, :to_a
 
         def initialize
           @queue = []
@@ -50,13 +52,17 @@ module Asynchronic
         end
 
         def pop
-          @mutex.synchronize { @queue.shift }
+          mutex.synchronize { queue.shift }
         end
 
         def push(message)
-          @mutex.synchronize { @queue.push message }
+          mutex.synchronize { queue.push message }
         end
 
+        private
+
+        attr_reader :queue, :mutex
+
       end
 
 
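
Both queue engines rename the public list of queue names from #queues to #queue_names, since #queues now refers to the private hash of queue objects. A sketch against the in-memory engine (the queue name and message are placeholders):

    engine = Asynchronic::QueueEngine::InMemory.new default_queue: :critical

    engine[:critical].push 'some-pid'   # fetch-or-create a queue by name, unchanged
    engine.queue_names                  # => [:critical], was #queues in 3.x
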
data/lib/asynchronic/queue_engine/ost.rb
@@ -5,22 +5,22 @@ module Asynchronic
       attr_reader :redis, :default_queue
 
       def initialize(options={})
-        @redis = Redic.new(*Array(options[:redis]))
+        @redis = Asynchronic.establish_redis_connection options
         @default_queue = options.fetch(:default_queue, Asynchronic.default_queue)
         @queues ||= Hash.new { |h,k| h[k] = Queue.new k, redis }
         @keep_alive_thread = notify_keep_alive
       end
 
       def [](name)
-        @queues[name]
+        queues[name]
       end
 
-      def queues
-        (@queues.values.map(&:key) | redis.call!('KEYS', 'ost:*')).map { |q| q.to_s[4..-1].to_sym }
+      def queue_names
+        (queues.values.map(&:key) | redis.call!('KEYS', 'ost:*')).map { |q| q.to_s[4..-1].to_sym }
       end
 
       def clear
-        @queues.clear
+        queues.clear
         redis.call!('KEYS', 'ost:*').each { |k| redis.call!('DEL', k) }
       end
 
@@ -41,6 +41,8 @@ module Asynchronic
 
       private
 
+      attr_reader :queues
+
       def notify_keep_alive
         Thread.new do
           loop do
data/lib/asynchronic/queue_engine/synchronic.rb
@@ -5,7 +5,7 @@ module Asynchronic
       attr_reader :stubs
 
       def initialize(options={})
-        @environment = options[:environment]
+        @options = options
         @stubs = {}
       end
 
@@ -14,7 +14,7 @@ module Asynchronic
       end
 
       def environment
-        @environment ||= Asynchronic.environment
+        @environment ||= options.fetch(:environment, Asynchronic.environment)
       end
 
       def [](name)
@@ -22,7 +22,7 @@ module Asynchronic
       end
 
       def stub(job, &block)
-        @stubs[job] = block
+        stubs[job] = block
       end
 
       def asynchronic?
@@ -33,6 +33,10 @@ module Asynchronic
         [Asynchronic.connection_name]
       end
 
+      private
+
+      attr_reader :options
+
 
       class Queue
 
@@ -41,11 +45,11 @@ module Asynchronic
         end
 
         def push(message)
-          process = @engine.environment.load_process(message)
+          process = engine.environment.load_process(message)
 
-          if @engine.stubs[process.type]
+          if engine.stubs[process.type]
             job = process.job
-            block = @engine.stubs[process.type]
+            block = engine.stubs[process.type]
             process.define_singleton_method :job do
               MockJob.new job, process, &block
             end
@@ -54,6 +58,10 @@ module Asynchronic
           process.execute
         end
 
+        private
+
+        attr_reader :engine
+
       end
 
 
@@ -66,12 +74,16 @@ module Asynchronic
         end
 
         def call
-          @block.call @process
+          block.call process
        end
 
         def before_finalize
         end
 
+        private
+
+        attr_reader :process, :block
+
       end
 
     end
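
The Synchronic engine now resolves its environment lazily from the options hash; stubbing is unchanged from the caller's point of view. A sketch, where SomeJob is a hypothetical job class and the stub block receives the process being executed, as MockJob#call above suggests:

    engine = Asynchronic::QueueEngine::Synchronic.new

    # SomeJob is hypothetical; use whatever job class you actually enqueue.
    engine.stub SomeJob do |process|
      :stubbed_result   # returned instead of running SomeJob for real
    end
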
data/lib/asynchronic/version.rb
@@ -1,3 +1,3 @@
 module Asynchronic
-  VERSION = '3.0.1'
+  VERSION = '4.0.1'
 end
data/lib/asynchronic/worker.rb
@@ -1,24 +1,21 @@
 class Asynchronic::Worker
 
-  attr_reader :queue
-  attr_reader :queue_name
-  attr_reader :env
-  attr_reader :listener
+  attr_reader :queue, :queue_name, :environment, :listener
 
-  def initialize(queue_name, env)
+  def initialize(queue_name, environment)
     @queue_name = queue_name
-    @queue = env.queue_engine[queue_name]
-    @env = env
-    @listener = env.queue_engine.listener
+    @queue = environment.queue_engine[queue_name]
+    @environment = environment
+    @listener = environment.queue_engine.listener
   end
 
   def start
     Asynchronic.logger.info('Asynchronic') { "Starting worker of #{queue_name} (#{Process.pid})" }
 
     Signal.trap('QUIT') { stop }
-
+
     listener.listen(queue) do |pid|
-      env.load_process(pid).execute
+      environment.load_process(pid).execute
     end
   end
 
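
The worker simply renames env to environment, in both the constructor and the reader. A sketch of starting one, assuming an environment is already configured (the queue name is a placeholder):

    environment = Asynchronic.environment
    worker = Asynchronic::Worker.new :critical, environment

    worker.environment   # 4.x reader; 3.x exposed this as #env
    worker.start         # traps QUIT and blocks, executing each pid popped from :critical
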
data/spec/data_store/data_store_examples.rb
@@ -1,5 +1,11 @@
 module DataStoreExamples
-
+
+  extend Minitest::Spec::DSL
+
+  after do
+    data_store.clear
+  end
+
   it 'Get/Set value' do
     data_store[:key] = 123
     data_store[:key].must_equal 123
@@ -32,8 +38,8 @@ module DataStoreExamples
     data_store.delete_cascade Asynchronic::DataStore::Key[:key_1]
 
     data_store.keys.sort.must_equal [
-      Asynchronic::DataStore::Key[:key_2],
-      Asynchronic::DataStore::Key[:key_2][:key_2_1],
+      Asynchronic::DataStore::Key[:key_2],
+      Asynchronic::DataStore::Key[:key_2][:key_2_1],
       Asynchronic::DataStore::Key[:key_2][:key_2_2]
     ]
   end
@@ -110,18 +116,20 @@ module DataStoreExamples
 
   it 'Synchronization' do
     sum = 0
-    threads = 1.upto(100).map do |i|
+
+    threads = 10.times.map do
       Thread.new do
-        data_store.synchronize('xxx') do
+        data_store.synchronize('lock_key') do
           temp = sum
-          sleep 0
+          sleep 0.01
           sum = temp + 1
         end
       end
     end
+
     threads.each(&:join)
-
-    sum.must_equal 100
+
+    sum.must_equal threads.count
   end
-
+
 end
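
The shared Synchronization example now derives its expectation from the thread count and adds a short sleep to force interleaving. The same lock can be exercised directly; a sketch against the in-memory store, assuming its constructor takes no required arguments (not shown in this diff):

    data_store = Asynchronic::DataStore::InMemory.new   # assumption: no-arg constructor

    counter = 0
    threads = 10.times.map do
      Thread.new do
        # The named lock serializes the read-modify-write, so no increments are lost.
        data_store.synchronize('counter_lock') { counter += 1 }
      end
    end
    threads.each(&:join)

    counter   # => 10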