asynchronic 0.0.1 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -0
  3. data/README.md +0 -70
  4. data/Rakefile +7 -0
  5. data/asynchronic.gemspec +5 -1
  6. data/lib/asynchronic/data_store/in_memory.rb +47 -0
  7. data/lib/asynchronic/data_store/key.rb +15 -0
  8. data/lib/asynchronic/data_store/lookup.rb +27 -0
  9. data/lib/asynchronic/data_store/redis.rb +52 -0
  10. data/lib/asynchronic/environment.rb +57 -0
  11. data/lib/asynchronic/error.rb +13 -0
  12. data/lib/asynchronic/hash.rb +31 -0
  13. data/lib/asynchronic/job.rb +46 -0
  14. data/lib/asynchronic/process.rb +117 -48
  15. data/lib/asynchronic/queue_engine/in_memory.rb +72 -0
  16. data/lib/asynchronic/queue_engine/ost.rb +73 -0
  17. data/lib/asynchronic/runtime.rb +40 -0
  18. data/lib/asynchronic/version.rb +1 -1
  19. data/lib/asynchronic/worker.rb +27 -18
  20. data/lib/asynchronic.rb +17 -32
  21. data/spec/coverage_helper.rb +0 -6
  22. data/spec/data_store/data_store_examples.rb +62 -0
  23. data/spec/data_store/in_memory_spec.rb +10 -0
  24. data/spec/data_store/key_spec.rb +36 -0
  25. data/spec/data_store/lookup_spec.rb +92 -0
  26. data/spec/data_store/redis_spec.rb +14 -0
  27. data/spec/expectations.rb +89 -0
  28. data/spec/facade_spec.rb +61 -0
  29. data/spec/jobs.rb +123 -33
  30. data/spec/minitest_helper.rb +12 -14
  31. data/spec/process/life_cycle_examples.rb +329 -0
  32. data/spec/process/life_cycle_in_memory_spec.rb +11 -0
  33. data/spec/process/life_cycle_redis_spec.rb +15 -0
  34. data/spec/queue_engine/in_memory_spec.rb +11 -0
  35. data/spec/queue_engine/ost_spec.rb +15 -0
  36. data/spec/queue_engine/queue_engine_examples.rb +47 -0
  37. data/spec/worker/in_memory_spec.rb +11 -0
  38. data/spec/worker/redis_spec.rb +16 -0
  39. data/spec/worker/worker_examples.rb +49 -0
  40. metadata +111 -18
  41. data/lib/asynchronic/persistent.rb +0 -61
  42. data/lib/asynchronic/pipeline.rb +0 -23
  43. data/spec/integration_spec.rb +0 -122
  44. data/spec/persistent_spec.rb +0 -88
data/lib/asynchronic/queue_engine/ost.rb ADDED
@@ -0,0 +1,73 @@
+module Asynchronic
+  module QueueEngine
+    class Ost
+
+      attr_reader :default_queue
+
+      def initialize(options={})
+        ::Ost.connect options[:redis] if options.key?(:redis)
+        @default_queue = options.fetch(:default_queue, Asynchronic.default_queue)
+        @queues ||= Hash.new { |h,k| h[k] = Queue.new k }
+      end
+
+      def [](name)
+        @queues[name]
+      end
+
+      def queues
+        (@queues.values.map(&:key) | redis.keys('ost:*')).map { |q| q.to_s[4..-1].to_sym }
+      end
+
+      def clear
+        @queues.clear
+        redis.keys('ost:*').each { |k| redis.del k }
+      end
+
+      def listener
+        Listener.new
+      end
+
+      private
+
+      def redis
+        @redis ||= Redis.connect(::Ost.options)
+      end
+
+
+      class Queue < ::Ost::Queue
+
+        def pop
+          key.rpop
+        end
+
+        def empty?
+          !redis.exists(key)
+        end
+
+        def size
+          items.count
+        end
+
+        def to_a
+          items.reverse
+        end
+
+      end
+
+
+      class Listener
+
+        def listen(queue, &block)
+          @current_queue = queue
+          queue.each &block
+        end
+
+        def stop
+          @current_queue.stop
+        end
+
+      end
+
+    end
+  end
+end
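
As an aside, the new engine is built from an options hash and can be plugged into the configuration introduced in data/lib/asynchronic.rb further down. A minimal sketch, assuming a local Redis and ClassConfig-generated writers; the URL and queue name are illustrative, not from the package:

require 'asynchronic'

# Connect Ost to Redis and register the engine globally (values are examples only).
Asynchronic.queue_engine = Asynchronic::QueueEngine::Ost.new(
  redis: { url: 'redis://localhost:6379' },
  default_queue: :asynchronic
)

queue = Asynchronic.queue_engine[:asynchronic]  # lazily builds an Ost-backed Queue
queue.empty?                                    # true while the 'ost:asynchronic' key is absent
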
data/lib/asynchronic/runtime.rb ADDED
@@ -0,0 +1,40 @@
+module Asynchronic
+  class Runtime
+
+    attr_reader :process
+
+    def initialize(process)
+      @process = process
+    end
+
+    def evaluate
+      begin
+        @data = process.data
+        process.job.local.each { |k,v| define_singleton_method(k) { v } }
+        instance_eval &process.job.class.implementation
+      ensure
+        process.merge @data
+      end
+    end
+
+    def self.evaluate(process)
+      new(process).evaluate
+    end
+
+    private
+
+    def define_job(job_class, options={})
+      defaults = {
+        parent: process.job.lookup.id,
+        queue: job_class.queue || process.queue
+      }
+
+      process.env.build_job job_class, defaults.merge(options)
+    end
+
+    def data
+      @data
+    end
+
+  end
+end
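
To make the evaluation flow concrete: Runtime#evaluate snapshots process.data, instance_evals the block returned by the job class' implementation method, and merges the snapshot back in the ensure clause. A purely illustrative sketch; the job class and its block are hypothetical, not from the gem:

# Hypothetical job class exposing its work as a block. Runtime#evaluate runs
# this block with the Runtime instance as self, so `data` and `define_job`
# resolve to the private methods defined above.
class DoubleJob
  def self.implementation
    proc { data[:result] = data[:input].to_i * 2 }
  end
end

# Asynchronic::Runtime.evaluate(process) would then read process.data, run the
# block, and merge the mutated hash back into the process.
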
data/lib/asynchronic/version.rb CHANGED
@@ -1,3 +1,3 @@
 module Asynchronic
-  VERSION = "0.0.1"
+  VERSION = '0.1.0'
 end
data/lib/asynchronic/worker.rb CHANGED
@@ -1,27 +1,36 @@
-module Asynchronic
-  class Worker
+class Asynchronic::Worker
 
-    attr_reader :queue
+  attr_reader :queue
+  attr_reader :queue_name
+  attr_reader :env
+  attr_reader :listener
 
-    def initialize(queue=nil)
-      @queue = queue || Asynchronic.default_queue
-    end
+  def initialize(queue_name, env)
+    @queue_name = queue_name
+    @queue = env.queue_engine[queue_name]
+    @env = env
+    @listener = env.queue_engine.listener
+  end
 
-    def start
-      Signal.trap('INT') { stop }
+  def start
+    Asynchronic.logger.info('Asynchronic') { "Starting worker of #{queue_name} (#{Process.pid})" }
 
-      Ost[@queue].pop do |pid|
-        Process.find(pid).run
-      end
+    Signal.trap('INT') { stop }
+
+    listener.listen(queue) do |pid|
+      env.load_process(pid).execute
     end
+  end
 
-    def stop
-      Ost[@queue].stop
-    end
+  def stop
+    Asynchronic.logger.info('Asynchronic') { "Stopping worker of #{@queue_name} (#{Process.pid})" }
+    listener.stop
+  end
 
-    def self.start(queue=nil)
-      new(queue).tap(&:start)
-    end
-
+  def self.start(queue_name, &block)
+    worker = new queue_name, Asynchronic.environment
+    Thread.new { block.call(worker) } if block_given?
+    worker.start
   end
+
 end
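
The class-level start now wires the worker to Asynchronic.environment and blocks while listening; the optional block runs on a separate Thread and receives the worker, which is convenient for stopping it programmatically. A usage sketch; the queue name and sleep are illustrative:

# Start a worker on the :asynchronic queue; the block runs in a background
# thread and can stop the worker, e.g. from a test or a supervisor script.
Asynchronic::Worker.start :asynchronic do |worker|
  sleep 5        # give the worker a moment to drain the queue (example only)
  worker.stop    # triggers listener.stop, which unblocks worker.start
end
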
data/lib/asynchronic.rb CHANGED
@@ -1,48 +1,33 @@
-require 'ost'
+require 'forwardable'
 require 'securerandom'
-require 'base64'
+require 'redis'
+require 'ost'
+require 'class_config'
 require 'logger'
-require 'fileutils'
 
-Dir.glob(File.expand_path('asynchronic/*.rb', File.dirname(__FILE__))).sort.each { |f| require f }
+Dir.glob(File.expand_path('asynchronic/**/*.rb', File.dirname(__FILE__))).sort.each { |f| require f }
 
 module Asynchronic
 
-  def self.default_queue
-    @default_queue ||= :asynchronic
-  end
-
-  def self.default_queue=(name)
-    @default_queue = name
-  end
-
-  def self.logger
-    @logger ||= Logger.new($stdout)
-  end
+  UUID_REGEXP = '[a-z\d]{8}-[a-z\d]{4}-[a-z\d]{4}-[a-z\d]{4}-[a-z\d]{12}'
 
-  def self.logger=(logger)
-    @logger = logger
-  end
+  extend ClassConfig
 
-  def self.connect_redis(options)
-    Ost.connect options
-    @redis = Redis.new options
-  end
+  attr_config :default_queue, :asynchronic
+  attr_config :queue_engine, QueueEngine::InMemory.new
+  attr_config :data_store, DataStore::InMemory.new
+  attr_config :logger, Logger.new($stdout)
 
-  def self.redis
-    @redis ||= Redis.current
+  def self.environment
+    Environment.new queue_engine, data_store
   end
 
-  def self.archiving_path
-    @archiving_path ||= File.join(Dir.home, '.asynchronic', 'data')
-  end
-
-  def self.archiving_path=(path)
-    @archiving_path = path
+  def self.[](pid)
+    environment.load_process pid
   end
 
-  def self.archiving_file(name)
-    File.join archiving_path, "#{name}.bin"
+  def self.processes
+    environment.processes
   end
 
 end
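
With extend ClassConfig, each attr_config declares a configurable attribute with a default, replacing the hand-rolled accessors and the connect_redis/archiving_path helpers. A configuration sketch, assuming ClassConfig generates plain writer methods; the concrete values are illustrative:

# Override the defaults before building an environment or starting workers.
Asynchronic.default_queue = :my_queue
Asynchronic.data_store    = Asynchronic::DataStore::Redis.new
Asynchronic.queue_engine  = Asynchronic::QueueEngine::Ost.new
Asynchronic.logger        = Logger.new('asynchronic.log')

Asynchronic.environment   # Environment built from the configured engine and store
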
data/spec/coverage_helper.rb CHANGED
@@ -1,8 +1,2 @@
 require 'simplecov'
-require 'coveralls'
-
-SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
-  SimpleCov::Formatter::HTMLFormatter,
-  Coveralls::SimpleCov::Formatter
-]
 SimpleCov.start
data/spec/data_store/data_store_examples.rb ADDED
@@ -0,0 +1,62 @@
+module DataStoreExamples
+
+  it 'Get/Set value' do
+    data_store.set 'test_key', 123
+    data_store.get('test_key').must_equal 123
+  end
+
+  it 'Key not found' do
+    data_store.get('test_key').must_be_nil
+  end
+
+  it 'Keys' do
+    data_store.keys.must_be_empty
+    data_store.set 'test_key', 123
+    data_store.keys.must_equal ['test_key']
+  end
+
+  it 'Merge' do
+    data_store.set 'a:1', 0
+    data_store.merge 'a', '1' => 1, '2' => 2
+
+    data_store.get('a:1').must_equal 1
+    data_store.get('a:2').must_equal 2
+  end
+
+  it 'To hash' do
+    data_store.set 'a', 0
+    data_store.set 'a:1', 1
+    data_store.set 'a:2', 2
+    data_store.set 'b:3', 3
+
+    data_store.to_hash('a').must_equal '1' => 1, '2' => 2
+  end
+
+  it 'Nested keys' do
+    data_store.set 'a', 0
+    data_store.set 'a:1', 1
+    data_store.set 'a:2', 2
+    data_store.set 'b:3', 3
+
+    data_store.keys('a').must_equal_contents %w(a a:1 a:2)
+    data_store.keys('a:').must_equal_contents %w(a:1 a:2)
+  end
+
+  it 'Clear' do
+    data_store.set 'test_key', 123
+    data_store.clear
+    data_store.keys.must_be_empty
+  end
+
+  it 'Nested clear' do
+    data_store.set 'a', 0
+    data_store.set 'a:1', 1
+    data_store.set 'a:2', 2
+    data_store.set 'b:3', 3
+
+    data_store.clear 'a:'
+
+    data_store.keys.must_equal_contents %w(a b:3)
+  end
+
+end
data/spec/data_store/in_memory_spec.rb ADDED
@@ -0,0 +1,10 @@
+require 'minitest_helper'
+require_relative './data_store_examples'
+
+describe Asynchronic::DataStore::InMemory do
+
+  let(:data_store) { Asynchronic::DataStore::InMemory.new }
+
+  include DataStoreExamples
+
+end
data/spec/data_store/key_spec.rb ADDED
@@ -0,0 +1,36 @@
+require 'minitest_helper'
+
+describe Asynchronic::DataStore::Key do
+
+  it 'Return the namespace' do
+    key = Asynchronic::DataStore::Key.new('foo')
+    key.must_equal 'foo'
+  end
+
+  it 'Prepend the namespace' do
+    key = Asynchronic::DataStore::Key.new('foo')
+    key['bar'].must_equal 'foo:bar'
+  end
+
+  it 'Work in more than one level' do
+    key_1 = Asynchronic::DataStore::Key.new('foo')
+    key_2 = Asynchronic::DataStore::Key.new(key_1['bar'])
+    key_2['baz'].must_equal 'foo:bar:baz'
+  end
+
+  it 'Be chainable' do
+    key = Asynchronic::DataStore::Key.new('foo')
+    key['bar']['baz'].must_equal 'foo:bar:baz'
+  end
+
+  it 'Accept symbols' do
+    key = Asynchronic::DataStore::Key.new(:foo)
+    key[:bar].must_equal 'foo:bar'
+  end
+
+  it 'Accept numbers' do
+    key = Asynchronic::DataStore::Key.new('foo')
+    key[3].must_equal 'foo:3'
+  end
+
+end
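
The spec pins down the Key contract: a key compares equal to its string form, [] appends a ':'-separated segment, and the result is itself chainable. A minimal sketch that satisfies these examples (hypothetical class name; not the gem's actual data_store/key.rb):

# Illustrative only: a String subclass whose [] namespaces and stays chainable.
class NamespacedKey < String
  SEPARATOR = ':'

  def initialize(key)
    super key.to_s
  end

  def [](key)
    self.class.new "#{self}#{SEPARATOR}#{key}"
  end
end

NamespacedKey.new(:foo)[:bar][3]  # => "foo:bar:3"
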
data/spec/data_store/lookup_spec.rb ADDED
@@ -0,0 +1,92 @@
+require 'minitest_helper'
+
+describe Asynchronic::DataStore::Lookup do
+
+  describe 'One level' do
+
+    let(:job) { Asynchronic::Job.new }
+    let(:lookup) { Asynchronic::DataStore::Lookup.new job }
+
+    it 'Id' do
+      lookup.id.must_equal "job:#{job.id}"
+    end
+
+    it 'Status' do
+      lookup.status.must_equal "job:#{job.id}:status"
+    end
+
+    it 'Data' do
+      lookup.data.must_equal "job:#{job.id}:data"
+    end
+
+    it 'Jobs' do
+      lookup.jobs.must_equal "job:#{job.id}:jobs"
+    end
+
+    it 'Error' do
+      lookup.error.must_equal "job:#{job.id}:error"
+    end
+
+    it 'Created At' do
+      lookup.created_at.must_equal "job:#{job.id}:created_at"
+    end
+
+    it 'Queued At' do
+      lookup.queued_at.must_equal "job:#{job.id}:queued_at"
+    end
+
+    it 'Started At' do
+      lookup.started_at.must_equal "job:#{job.id}:started_at"
+    end
+
+    it 'Finalized At' do
+      lookup.finalized_at.must_equal "job:#{job.id}:finalized_at"
+    end
+
+  end
+
+  describe 'Two levels' do
+
+    let(:parent) { "job:#{SecureRandom.uuid}" }
+    let(:job) { Asynchronic::Job.new parent: parent }
+    let(:lookup) { Asynchronic::DataStore::Lookup.new job }
+
+    it 'Id' do
+      lookup.id.must_equal "#{parent}:jobs:#{job.id}"
+    end
+
+    it 'Status' do
+      lookup.status.must_equal "#{parent}:jobs:#{job.id}:status"
+    end
+
+    it 'Data' do
+      lookup.data.must_equal "#{parent}:jobs:#{job.id}:data"
+    end
+
+    it 'Jobs' do
+      lookup.jobs.must_equal "#{parent}:jobs:#{job.id}:jobs"
+    end
+
+    it 'Error' do
+      lookup.error.must_equal "#{parent}:jobs:#{job.id}:error"
+    end
+
+    it 'Created At' do
+      lookup.created_at.must_equal "#{parent}:jobs:#{job.id}:created_at"
+    end
+
+    it 'Queued At' do
+      lookup.queued_at.must_equal "#{parent}:jobs:#{job.id}:queued_at"
+    end
+
+    it 'Started At' do
+      lookup.started_at.must_equal "#{parent}:jobs:#{job.id}:started_at"
+    end
+
+    it 'Finalized At' do
+      lookup.finalized_at.must_equal "#{parent}:jobs:#{job.id}:finalized_at"
+    end
+
+  end
+
+end
data/spec/data_store/redis_spec.rb ADDED
@@ -0,0 +1,14 @@
+require 'minitest_helper'
+require_relative './data_store_examples'
+
+describe Asynchronic::DataStore::Redis do
+
+  let(:data_store) { Asynchronic::DataStore::Redis.new }
+
+  before do
+    data_store.clear
+  end
+
+  include DataStoreExamples
+
+end
data/spec/expectations.rb ADDED
@@ -0,0 +1,89 @@
+module MiniTest::Assertions
+
+  def assert_enqueued(expected_processes, queue)
+    messages = Array(expected_processes).map { |p| p.job.lookup.id }
+    queue.to_a.sort.must_equal messages.sort, "Jobs #{Array(expected_processes).map{ |p| p.job.name }}"
+  end
+
+  def assert_have(expected_hash, process)
+    process.data.keys.count.must_equal expected_hash.keys.count, "Missing keys\nExpected keys: #{expected_hash.keys}\n Actual keys: #{process.data.keys}"
+    expected_hash.each do |k,v|
+      process[k].must_equal v, "Key #{k}"
+    end
+  end
+
+  def assert_be_initialized(process)
+    process.must_be :pending?
+    process.wont_be :finalized?
+
+    process.processes.must_be_empty
+    process.data.must_be_empty
+    process.error.must_be_nil
+
+    process.created_at.must_be_instance_of Time
+    process.queued_at.must_be_nil
+    process.started_at.must_be_nil
+    process.finalized_at.must_be_nil
+  end
+
+  def assert_be_pending(process)
+    process.must_be :pending?
+    process.wont_be :finalized?
+
+    process.created_at.must_be_instance_of Time
+    process.queued_at.must_be_nil
+    process.started_at.must_be_nil
+    process.finalized_at.must_be_nil
+  end
+
+  def assert_be_queued(process)
+    process.must_be :queued?
+    process.wont_be :finalized?
+
+    process.created_at.must_be_instance_of Time
+    process.queued_at.must_be_instance_of Time
+    process.started_at.must_be_nil
+    process.finalized_at.must_be_nil
+  end
+
+  def assert_be_waiting(process)
+    process.must_be :waiting?
+    process.wont_be :finalized?
+
+    process.created_at.must_be_instance_of Time
+    process.queued_at.must_be_instance_of Time
+    process.started_at.must_be_instance_of Time
+    process.finalized_at.must_be_nil
+  end
+
+  def assert_be_completed(process)
+    process.must_be :completed?
+    process.must_be :finalized?
+
+    process.created_at.must_be_instance_of Time
+    process.queued_at.must_be_instance_of Time
+    process.started_at.must_be_instance_of Time
+    process.finalized_at.must_be_instance_of Time
+  end
+
+  def assert_be_aborted(process)
+    process.must_be :aborted?
+    process.must_be :finalized?
+
+    process.created_at.must_be_instance_of Time
+    process.queued_at.must_be_instance_of Time
+    process.started_at.must_be_instance_of Time
+    process.finalized_at.must_be_instance_of Time
+  end
+
+end
+
+Asynchronic::QueueEngine::InMemory::Queue.infect_an_assertion :assert_enqueued, :must_enqueued
+Asynchronic::QueueEngine::Ost::Queue.infect_an_assertion :assert_enqueued, :must_enqueued
+Asynchronic::Process.infect_an_assertion :assert_have, :must_have
+Asynchronic::Process.infect_an_assertion :assert_be_initialized, :must_be_initialized, :unary
+Asynchronic::Process.infect_an_assertion :assert_be_pending, :must_be_pending, :unary
+Asynchronic::Process.infect_an_assertion :assert_be_queued, :must_be_queued, :unary
+Asynchronic::Process.infect_an_assertion :assert_be_waiting, :must_be_waiting, :unary
+Asynchronic::Process.infect_an_assertion :assert_be_completed, :must_be_completed, :unary
+Asynchronic::Process.infect_an_assertion :assert_be_aborted, :must_be_aborted, :unary
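
infect_an_assertion turns each assertion into an expectation on its subject, with :unary marking the ones that take no argument. A sketch of how they read in the specs; BasicJob and the setup are illustrative:

process = Asynchronic.environment.build_process BasicJob       # hypothetical setup
process.must_be_initialized                                     # runs assert_be_initialized(process)
process.must_have 'input' => 100                                # passes when process data is exactly {'input' => 100}
Asynchronic.environment.default_queue.must_enqueued [process]   # runs assert_enqueued([process], queue)
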
data/spec/facade_spec.rb ADDED
@@ -0,0 +1,61 @@
+require 'minitest_helper'
+
+describe Asynchronic, 'Facade' do
+
+  before do
+    Asynchronic.environment.data_store.clear
+    Asynchronic.environment.queue_engine.clear
+  end
+
+  it 'Default queue' do
+    Asynchronic.default_queue.must_equal :asynchronic
+  end
+
+  it 'Default queue_engine' do
+    Asynchronic.queue_engine.must_be_instance_of Asynchronic::QueueEngine::InMemory
+  end
+
+  it 'Default data store' do
+    Asynchronic.data_store.must_be_instance_of Asynchronic::DataStore::InMemory
+  end
+
+  it 'Default logger' do
+    Asynchronic.logger.must_be_instance_of Logger
+  end
+
+  it 'Environment' do
+    Asynchronic.environment.tap do |env|
+      env.queue_engine.must_equal Asynchronic.queue_engine
+      env.data_store.must_equal Asynchronic.data_store
+    end
+  end
+
+  it 'Load process' do
+    process = Asynchronic.environment.build_process BasicJob
+    Asynchronic[process.pid].tap do |p|
+      p.pid.must_equal process.pid
+      p.job.must_equal process.job
+    end
+  end
+
+  it 'List processes' do
+    pids = 3.times.map do
+      process = Asynchronic.environment.build_process SequentialJob
+      process.pid
+    end
+
+    Asynchronic.processes.count.must_equal 3
+    Asynchronic.processes.map(&:pid).each { |pid| pids.must_include pid }
+  end
+
+  it 'Enqueue' do
+    pid = BasicJob.enqueue input: 100
+
+    Asynchronic.environment.tap do |env|
+      env.default_queue.to_a.must_equal [pid]
+      env[pid].must_be_instance_of BasicJob
+      env.load_process(pid)[:input].must_equal 100
+    end
+  end
+
+end