asynchronic 0.0.1 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -0
  3. data/README.md +0 -70
  4. data/Rakefile +7 -0
  5. data/asynchronic.gemspec +5 -1
  6. data/lib/asynchronic/data_store/in_memory.rb +47 -0
  7. data/lib/asynchronic/data_store/key.rb +15 -0
  8. data/lib/asynchronic/data_store/lookup.rb +27 -0
  9. data/lib/asynchronic/data_store/redis.rb +52 -0
  10. data/lib/asynchronic/environment.rb +57 -0
  11. data/lib/asynchronic/error.rb +13 -0
  12. data/lib/asynchronic/hash.rb +31 -0
  13. data/lib/asynchronic/job.rb +46 -0
  14. data/lib/asynchronic/process.rb +117 -48
  15. data/lib/asynchronic/queue_engine/in_memory.rb +72 -0
  16. data/lib/asynchronic/queue_engine/ost.rb +73 -0
  17. data/lib/asynchronic/runtime.rb +40 -0
  18. data/lib/asynchronic/version.rb +1 -1
  19. data/lib/asynchronic/worker.rb +27 -18
  20. data/lib/asynchronic.rb +17 -32
  21. data/spec/coverage_helper.rb +0 -6
  22. data/spec/data_store/data_store_examples.rb +62 -0
  23. data/spec/data_store/in_memory_spec.rb +10 -0
  24. data/spec/data_store/key_spec.rb +36 -0
  25. data/spec/data_store/lookup_spec.rb +92 -0
  26. data/spec/data_store/redis_spec.rb +14 -0
  27. data/spec/expectations.rb +89 -0
  28. data/spec/facade_spec.rb +61 -0
  29. data/spec/jobs.rb +123 -33
  30. data/spec/minitest_helper.rb +12 -14
  31. data/spec/process/life_cycle_examples.rb +329 -0
  32. data/spec/process/life_cycle_in_memory_spec.rb +11 -0
  33. data/spec/process/life_cycle_redis_spec.rb +15 -0
  34. data/spec/queue_engine/in_memory_spec.rb +11 -0
  35. data/spec/queue_engine/ost_spec.rb +15 -0
  36. data/spec/queue_engine/queue_engine_examples.rb +47 -0
  37. data/spec/worker/in_memory_spec.rb +11 -0
  38. data/spec/worker/redis_spec.rb +16 -0
  39. data/spec/worker/worker_examples.rb +49 -0
  40. metadata +111 -18
  41. data/lib/asynchronic/persistent.rb +0 -61
  42. data/lib/asynchronic/pipeline.rb +0 -23
  43. data/spec/integration_spec.rb +0 -122
  44. data/spec/persistent_spec.rb +0 -88
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: c7a82bc550c9f5c81ebf6ec87c33051ab31d645d
-  data.tar.gz: 0571213c9a3a35d9770f4f81032fb9e35c8c1dd4
+  metadata.gz: 15caa715564ca133dd45cad9dce01db2de3cf7a6
+  data.tar.gz: 006fe0ebb38821c8cd1842e4b8060e14779d71c5
 SHA512:
-  metadata.gz: 0b8a197203e07e67f36af15d8dd4736517f0aba1fff0696f462a76a9e61a881f1935dc4401f3d0e330c77d4b382ae762fa71ae3d327ee7b7599726ac67105a34
-  data.tar.gz: 4ecbd031498315ae41331ddc09c8a1a72e6eba232f194c695a3feb638c1bb1820e7d225cedaae8899e291a1207984ed8b2bb84d9706a82fc71cbda9569eea05d
+  metadata.gz: c41cf2f761f07c3b17a0c05099361bbe90f2f33506bb29a349074f079827bd1ee588e12319158a1c59e399bbd0f641c616579b401f4aac2a82175be59916e73e
+  data.tar.gz: a64a7b0e1a5422bee3fa8a465d166ef37342ef5294865c24208a211423aed838f9f031b0320755cfde35788c2e6d34691b1fc762173af33bd3ed430359ffa112
data/.travis.yml CHANGED
@@ -2,6 +2,7 @@ language: ruby
 rvm:
 - 1.9.3
 - 2.0
+- 2.1
 - jruby
 services:
 - redis-server
data/README.md CHANGED
@@ -24,76 +24,6 @@ Or install it yourself as:
 
 ## Usage
 
-### Basic usage
-
-    class Job
-      extend Asynchronic::Pipeline
-
-      step :step_name do
-        ...
-      end
-    end
-
-    Job.run
-
-    Asynchronic::Worker.start
-
-### Enque job in specific queue
-
-    class Job
-      extend Asynchronic::Pipeline
-
-      queue :queue_name
-
-      step :step_name do
-        ...
-      end
-    end
-
-    Job.run
-
-    Asynchronic::Worker.start :queue_name
-
-### Pipeline with shared context
-
-    class Job
-      extend Asynchronic::Pipeline
-
-      step :first do |ctx|
-        ctx[:c] = ctx[:a] + ctx[:b]
-        100
-      end
-
-      step :second do |ctx, input|
-        input * ctx[:c] # 300
-      end
-    end
-
-    Job.run a: 1, b: 2
-
-    Asynchronic::Worker.start
-
-### Specify queue for each step
-
-    class Job
-      extend Asynchronic::Pipeline
-
-      step :first_queue, queue: :queue1 do
-        ...
-      end
-
-      step :second_queue, queue: ->(ctx){ctx[:dynamic_queue]} do
-        ...
-      end
-    end
-
-    Job.run dynamic_queue: :queue2
-
-    [:queue1, :queue2].map do |queue|
-      Thread.new do
-        Asynchronic::Worker.start queue
-      end
-    end
 
 ## Contributing
 
data/Rakefile CHANGED
@@ -7,4 +7,11 @@ Rake::TestTask.new(:spec) do |t|
   t.verbose = false
 end
 
+task :console do
+  require 'pry'
+  require 'asynchronic'
+  ARGV.clear
+  Pry.start
+end
+
 task default: :spec
data/asynchronic.gemspec CHANGED
@@ -18,11 +18,15 @@ Gem::Specification.new do |spec|
   spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
   spec.require_paths = ['lib']
 
+  spec.add_dependency 'redis', '~> 3.0'
   spec.add_dependency 'ost', '~> 0.1'
-
+  spec.add_dependency 'class_config', '~> 0.0'
+
   spec.add_development_dependency 'bundler', '~> 1.3'
   spec.add_development_dependency 'rake'
   spec.add_development_dependency 'minitest', '~> 4.7'
+  spec.add_development_dependency 'minitest-great_expectations', '~> 0.0'
   spec.add_development_dependency 'turn', '~> 0.9'
   spec.add_development_dependency 'simplecov'
+  spec.add_development_dependency 'pry'
 end
data/lib/asynchronic/data_store/in_memory.rb ADDED
@@ -0,0 +1,47 @@
+module Asynchronic
+  module DataStore
+    class InMemory
+
+      def initialize
+        @hash = {}
+        @mutex = Mutex.new
+      end
+
+      def get(key)
+        @hash[key.to_s]
+      end
+
+      def set(key, value)
+        @mutex.synchronize { @hash[key.to_s] = value }
+      end
+
+      def merge(key, hash)
+        scoped_key = Key.new key
+        hash.each do |k,v|
+          set scoped_key[k].to_s, v
+        end
+      end
+
+      def to_hash(key)
+        children_key = "#{key}:"
+        keys(children_key).inject({}) do |hash, k|
+          hash[k[children_key.size..-1]] = get k
+          hash
+        end
+      end
+
+      def keys(key=nil)
+        key ? keys.select { |k| k.start_with? key.to_s } : @hash.keys
+      end
+
+      def clear(key=nil)
+        if key
+          @hash.delete_if { |k,v| k.start_with? key.to_s }
+        else
+          @hash.clear
+        end
+      end
+
+    end
+  end
+end
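
For orientation, a minimal usage sketch of this store's API (behavior inferred from the code above; the keys and values are illustrative only):

    store = Asynchronic::DataStore::InMemory.new
    store.set 'job:1:status', :queued            # thread-safe write
    store.merge 'job:1:data', input: 'file.csv'  # stored under 'job:1:data:input'
    store.get 'job:1:status'                     # => :queued
    store.to_hash 'job:1:data'                   # => {"input"=>"file.csv"}
    store.keys 'job:1'                           # every key scoped under 'job:1'
    store.clear 'job:1'                          # delete only that subtree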
data/lib/asynchronic/data_store/key.rb ADDED
@@ -0,0 +1,15 @@
+module Asynchronic
+  module DataStore
+    class Key < String
+
+      def initialize(key)
+        super key.to_s
+      end
+
+      def [](key)
+        self.class.new "#{self}:#{key}"
+      end
+
+    end
+  end
+end
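
Key is just a String that builds colon-delimited namespaces by chaining []:

    key = Asynchronic::DataStore::Key.new :job
    key['123']                 # => "job:123"
    key['123'][:data][:input]  # => "job:123:data:input"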
data/lib/asynchronic/data_store/lookup.rb ADDED
@@ -0,0 +1,27 @@
+module Asynchronic
+  module DataStore
+    class Lookup
+
+      KEYS = [:status, :data, :jobs, :error, :created_at, :queued_at, :started_at, :finalized_at]
+
+      def initialize(job)
+        @job = job
+      end
+
+      def id
+        if @job.parent
+          DataStore::Key.new(@job.parent)[:jobs][@job.id]
+        else
+          DataStore::Key.new(:job)[@job.id]
+        end
+      end
+
+      KEYS.each do |key|
+        define_method key do
+          id[key]
+        end
+      end
+
+    end
+  end
+end
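
Lookup maps a job to its key namespace in the data store. A sketch of the keys it yields (job ids are really UUIDs; shortened here for readability):

    # root job with id "123":
    lookup.id           # => "job:123"
    lookup.status       # => "job:123:status"
    lookup.created_at   # => "job:123:created_at"

    # child job nests under its parent's id:
    lookup.id           # => "job:123:jobs:<child-uuid>"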
data/lib/asynchronic/data_store/redis.rb ADDED
@@ -0,0 +1,52 @@
+module Asynchronic
+  module DataStore
+    class Redis
+
+      attr_reader :connection
+
+      def initialize(*args)
+        @connection = ::Redis.new *args
+      end
+
+      def get(key)
+        value = connection.get root[key]
+        value ? Marshal.load(value) : nil
+      end
+
+      def set(key, value)
+        connection.set root[key], Marshal.dump(value)
+      end
+
+      def merge(key, hash)
+        scoped_key = Key.new key
+        hash.each do |k,v|
+          set scoped_key[k], v
+        end
+      end
+
+      def to_hash(key)
+        children_key = "#{key}:"
+        keys(children_key).inject({}) do |hash, k|
+          hash[k[children_key.size..-1]] = get k
+          hash
+        end
+      end
+
+      def keys(key=nil)
+        keys = key ? connection.keys("#{root[key]}*") : connection.keys
+        keys.map { |k| k[(root.size + 1)..-1] }
+      end
+
+      def clear(key=nil)
+        keys(key).each { |k| connection.del root[k] }
+      end
+
+      private
+
+      def root
+        Key.new :asynchronic
+      end
+
+    end
+  end
+end
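
Same interface as the in-memory store, with two differences: values round-trip through Marshal, and every key lives under the asynchronic: namespace. A sketch, assuming a Redis server reachable at the given URL:

    store = Asynchronic::DataStore::Redis.new url: 'redis://localhost:6379'
    store.set 'job:1:status', :queued  # SET asynchronic:job:1:status <marshal blob>
    store.get 'job:1:status'           # => :queued (restored via Marshal.load)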
data/lib/asynchronic/environment.rb ADDED
@@ -0,0 +1,57 @@
+module Asynchronic
+  class Environment
+
+    attr_reader :queue_engine
+    attr_reader :data_store
+
+    def initialize(queue_engine, data_store)
+      @queue_engine = queue_engine
+      @data_store = data_store
+    end
+
+    def [](key)
+      data_store.get key
+    end
+
+    def []=(key, value)
+      data_store.set key, value
+    end
+
+    def queue(name)
+      queue_engine[name]
+    end
+
+    def default_queue
+      queue(queue_engine.default_queue)
+    end
+
+    def enqueue(msg, queue=nil)
+      queue(queue || queue_engine.default_queue).push msg
+    end
+
+    def build_job(job_class, options={})
+      Asynchronic.logger.debug('Asynchronic') { "Building job #{job_class} - #{options}" }
+      job_class.new(options).tap do |job|
+        self[job.lookup.id] = job
+        self[job.lookup.created_at] = Time.now
+      end
+    end
+
+    def build_process(job_class, options={})
+      Process.new build_job(job_class, options), self
+    end
+
+    def load_process(pid)
+      Process.new self[pid], self
+    end
+
+    def processes
+      data_store.keys.
+        select { |k| k.match Regexp.new("job:#{Asynchronic::UUID_REGEXP}:created_at$") }.
+        sort_by { |k| data_store.get k }.
+        reverse.
+        map { |k| load_process k.gsub(':created_at', '') }
+    end
+
+  end
+end
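
Environment is the wiring point between a queue engine and a data store. A sketch of the flow (MyJob is a hypothetical Asynchronic::Job subclass, used only for illustration):

    env = Asynchronic::Environment.new Asynchronic::QueueEngine::InMemory.new,
                                       Asynchronic::DataStore::InMemory.new

    process = env.build_process MyJob        # persists the job and its created_at
    pid = process.enqueue input: 'file.csv'  # pushes the pid and returns it
    env.load_process(pid).status             # => :queued
    env.processes                            # all root processes, newest first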
data/lib/asynchronic/error.rb ADDED
@@ -0,0 +1,13 @@
+module Asynchronic
+  class Error
+
+    attr_reader :message
+    attr_reader :backtrace
+
+    def initialize(source)
+      @message = source.respond_to?(:message) ? source.message : source.to_s
+      @backtrace = source.respond_to?(:backtrace) ? source.backtrace : []
+    end
+
+  end
+end
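
Error normalizes whatever it is given, an exception or a plain message, into a value the data stores can persist:

    Asynchronic::Error.new(RuntimeError.new('boom')).message  # => "boom"
    Asynchronic::Error.new('boom').backtrace                  # => []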
data/lib/asynchronic/hash.rb ADDED
@@ -0,0 +1,31 @@
+class Hash
+  def with_indiferent_access
+    HashWithIndiferentAccess.new self
+  end
+end
+
+class HashWithIndiferentAccess < Hash
+
+  def initialize(hash=nil)
+    merge! hash if hash
+  end
+
+  def [](key)
+    if key?(key) || !transformable_key?(key)
+      super
+    else
+      super transform_key(key)
+    end
+  end
+
+  private
+
+  def transformable_key?(key)
+    key.is_a?(String) || key.is_a?(Symbol)
+  end
+
+  def transform_key(key)
+    key.is_a?(String) ? key.to_sym : key.to_s
+  end
+
+end
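
The lookup falls back to the other key type only when the literal key is absent (note the method name really is spelled with_indiferent_access):

    h = { name: 'job', 'queue' => 'default' }.with_indiferent_access
    h['name']  # => "job"     (string key falls back to :name)
    h[:queue]  # => "default" (symbol key falls back to 'queue')
    h[:name]   # => "job"     (the literal key wins when present)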
data/lib/asynchronic/job.rb ADDED
@@ -0,0 +1,46 @@
+module Asynchronic
+  class Job
+
+    attr_reader :id
+    attr_reader :name
+    attr_reader :queue
+    attr_reader :parent
+    attr_reader :dependencies
+    attr_reader :local
+
+    def initialize(options={})
+      @id = SecureRandom.uuid
+      @name = options.key?(:alias) ? options[:alias].to_s : self.class.to_s
+      @queue = options[:queue] || self.class.queue
+      @parent = options[:parent]
+      @dependencies = Array(options[:dependencies] || options[:dependency]).map(&:to_s)
+      @local = options[:local] || {}
+
+      raise 'Cant have dependencies without parent job' if dependencies.any? && parent.nil?
+    end
+
+    def lookup
+      DataStore::Lookup.new self
+    end
+
+    def self.queue(queue=nil)
+      queue ? @queue = queue : @queue
+    end
+
+    def self.implementation
+      @implementation
+    end
+
+    def self.enqueue(data={})
+      process = Asynchronic.environment.build_process self
+      process.enqueue data
+    end
+
+    private
+
+    def self.define(&block)
+      @implementation = block
+    end
+
+    end
+end
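
Concrete jobs subclass Asynchronic::Job, optionally pin a queue, and supply their body with define; the stored block is run by Runtime.evaluate (data/lib/asynchronic/runtime.rb, listed above but not shown in this excerpt). A sketch; the zero-argument block is an assumption, since the runtime's calling convention is not visible here:

    class ImportJob < Asynchronic::Job
      queue :imports

      define do
        # job body, evaluated by Asynchronic::Runtime
      end
    end

    # requires Asynchronic.environment to be configured
    ImportJob.enqueue file: 'data.csv'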
data/lib/asynchronic/process.rb CHANGED
@@ -1,79 +1,148 @@
 module Asynchronic
   class Process
 
-    include Persistent
+    STATUSES = [:pending, :queued, :running, :waiting, :completed, :aborted]
 
-    Child = Struct.new :status, :output
+    TIME_TRACKING_MAP = {
+      queued: :queued_at,
+      running: :started_at,
+      completed: :finalized_at,
+      aborted: :finalized_at
+    }
 
-    attr_reader :pipeline
-    attr_reader :context
-    attr_reader :children
-
-    def initialize(pipeline, context={})
-      @pipeline = pipeline
-      @context = context
-      @children = pipeline.steps.map { Child.new :pending }
+    extend Forwardable
+
+    def_delegators :job, :id, :name, :queue
+    def_delegators :data, :[]
+
+    attr_reader :job
+    attr_reader :env
+
+    def initialize(job, env)
+      @job = job
+      @env = env
     end
 
-    def enqueue(queue=nil)
-      q = queue || pipeline.queue || Asynchronic.default_queue
-      Ost[q.is_a?(Proc) ? q.call(context) : q].push id
+    def pid
+      lookup.id
     end
 
-    def run
-      current_child.tap do |i|
-        log "Running: #{id} (child: #{i})" do
-          children[i].status = :running
-          save
-
-          current_input = previous_child?(i) ? children[previous_child(i)].output : nil
-          children[i].output = pipeline.steps[i].block.call(context, current_input)
-          children[i].status = :finalized
-          save
-
-          if next_child?(i)
-            enqueue(pipeline.steps[next_child(i)].options[:queue])
+    def data
+      parent ? parent.data : env.data_store.to_hash(lookup.data).with_indiferent_access
+    end
+
+    def merge(data)
+      parent ? parent.merge(data) : env.data_store.merge(lookup.data, data)
+    end
+
+    def enqueue(data={})
+      merge data
+      env.enqueue lookup.id, queue
+      update_status :queued
+
+      lookup.id
+    end
+
+    def execute
+      run
+      wakeup
+    end
+
+    def wakeup
+      if waiting?
+        if processes.any?(&:aborted?)
+          abort Error.new "Error caused by #{processes.select(&:aborted?).map{|p| p.job.name}.join(', ')}"
+        else
+          if processes.all?(&:completed?)
+            update_status :completed
           else
-            archive
+            processes.select(&:ready?).each { |p| p.enqueue }
           end
         end
       end
+
+      parent.wakeup if parent && finalized?
     end
 
-    def self.enqueue(pipeline, context={})
-      process = Process.create pipeline, context
-      process.enqueue(pipeline.steps.first.options[:queue])
-      process.id
+    def error
+      env[lookup.error]
     end
 
-    private
+    def status
+      env[lookup.status] || :pending
+    end
+
+    STATUSES.each do |status|
+      define_method "#{status}?" do
+        self.status == status
+      end
+    end
 
-    def current_child
-      children.index { |c| c.status == :pending }
+    def ready?
+      pending? && dependencies.all?(&:completed?)
     end
 
-    def previous_child(index=current_step)
-      index - 1
+    def finalized?
+      completed? || aborted?
     end
 
-    def previous_child?(index=current_step)
-      previous_child(index) >= 0
+    def processes(name=nil)
+      processes = env.data_store.keys(lookup.jobs).
+        select { |k| k.match Regexp.new("^#{lookup.jobs[Asynchronic::UUID_REGEXP]}$") }.
+        map { |k| env.load_process k }
+
+      name ? processes.detect { |p| p.name == name.to_s } : processes
+    end
+
+    def parent
+      @parent ||= Process.new env[job.parent], env if job.parent
+    end
+
+    def dependencies
+      @dependencies ||= parent.processes.select { |p| job.dependencies.include? p.name }
+    end
+
+    def created_at
+      env[lookup.created_at]
+    end
+
+    def queued_at
+      env[lookup.queued_at]
+    end
+
+    def started_at
+      env[lookup.started_at]
+    end
+
+    def finalized_at
+      env[lookup.finalized_at]
+    end
+
+    private
+
+    def run
+      update_status :running
+      Runtime.evaluate self
+      update_status :waiting
+    rescue Exception => ex
+      message = "Failed job #{job.name} (#{lookup.id})\n#{ex.class} #{ex.message}\n#{ex.backtrace.join("\n")}"
+      Asynchronic.logger.error('Asynchronic') { message }
+      abort ex
     end
 
-    def next_child(index=current_step)
-      index + 1
+    def update_status(status)
+      Asynchronic.logger.info('Asynchronic') { "#{status.to_s.capitalize} #{job.name} (#{lookup.id})" }
+      env[lookup.status] = status
+      env[lookup.send(TIME_TRACKING_MAP[status])] = Time.now if TIME_TRACKING_MAP.key? status
     end
 
-    def next_child?(index=current_step)
-      next_child(index) < children.count
+    def abort(exception)
+      env[lookup.error] = Error.new(exception)
+      update_status :aborted
     end
 
-    def log(message)
-      start = Time.now
-      Asynchronic.logger.info('Asynchronic') { "#{message} - Start" }
-      result = yield
-      Asynchronic.logger.info('Asynchronic') { "#{message} - End (Time: #{Time.now - start})" }
-      result
+    def lookup
+      @lookup ||= job.lookup
     end
 
   end
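
The resulting life cycle, traced through the public API above: enqueue moves a process from :pending to :queued; a worker then calls execute, which runs the job (:running, then :waiting) and wakes it up to re-enqueue ready children, complete, or abort. A sketch, reusing the hypothetical env and MyJob from the Environment example:

    pid = env.build_process(MyJob).enqueue  # :pending -> :queued
    process = env.load_process pid
    process.queued?     # => true
    # after a worker calls process.execute:
    process.finalized?  # completed? || aborted?
    process.error       # Asynchronic::Error when aborted, nil otherwise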
data/lib/asynchronic/queue_engine/in_memory.rb ADDED
@@ -0,0 +1,72 @@
+module Asynchronic
+  module QueueEngine
+    class InMemory
+
+      attr_reader :default_queue
+
+      def initialize(options={})
+        @default_queue = options.fetch(:default_queue, Asynchronic.default_queue)
+        @queues ||= Hash.new { |h,k| h[k] = Queue.new }
+      end
+
+      def [](name)
+        @queues[name]
+      end
+
+      def queues
+        @queues.keys.map(&:to_sym)
+      end
+
+      def clear
+        @queues.clear
+      end
+
+      def listener
+        Listener.new
+      end
+
+
+      class Queue
+
+        extend Forwardable
+
+        def_delegators :@queue, :size, :empty?, :to_a
+
+        def initialize
+          @queue = []
+          @mutex = Mutex.new
+        end
+
+        def pop
+          @mutex.synchronize { @queue.shift }
+        end
+
+        def push(message)
+          @mutex.synchronize { @queue.push message }
+        end
+
+      end
+
+
+      class Listener
+
+        def listen(queue, &block)
+          @stopping = false
+
+          loop do
+            break if @stopping
+            item = queue.pop
+            next unless item
+            block.call item
+          end
+        end
+
+        def stop
+          @stopping = true
+        end
+
+      end
+
+    end
+  end
+end
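
A sketch of driving this engine by hand; the reworked Worker (changed above, content not shown in this excerpt) presumably wraps the same listen/stop loop:

    engine = Asynchronic::QueueEngine::InMemory.new default_queue: :jobs
    engine[:jobs].push 'pid-1'

    listener = engine.listener
    thread = Thread.new do
      listener.listen(engine[:jobs]) { |pid| puts "got #{pid}" }
    end
    sleep 0.1       # give the listener a moment (listen resets @stopping on entry)
    listener.stop
    thread.join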