asynchronic 0.0.1
- checksums.yaml +7 -0
- data/.coveralls.yml +2 -0
- data/.gitignore +18 -0
- data/.travis.yml +7 -0
- data/Gemfile +6 -0
- data/LICENSE.txt +22 -0
- data/README.md +104 -0
- data/Rakefile +10 -0
- data/asynchronic.gemspec +28 -0
- data/lib/asynchronic.rb +48 -0
- data/lib/asynchronic/persistent.rb +61 -0
- data/lib/asynchronic/pipeline.rb +23 -0
- data/lib/asynchronic/process.rb +80 -0
- data/lib/asynchronic/version.rb +3 -0
- data/lib/asynchronic/worker.rb +27 -0
- data/spec/coverage_helper.rb +8 -0
- data/spec/integration_spec.rb +122 -0
- data/spec/jobs.rb +58 -0
- data/spec/minitest_helper.rb +31 -0
- data/spec/persistent_spec.rb +88 -0
- metadata +152 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: c7a82bc550c9f5c81ebf6ec87c33051ab31d645d
  data.tar.gz: 0571213c9a3a35d9770f4f81032fb9e35c8c1dd4
SHA512:
  metadata.gz: 0b8a197203e07e67f36af15d8dd4736517f0aba1fff0696f462a76a9e61a881f1935dc4401f3d0e330c77d4b382ae762fa71ae3d327ee7b7599726ac67105a34
  data.tar.gz: 4ecbd031498315ae41331ddc09c8a1a72e6eba232f194c695a3feb638c1bb1820e7d225cedaae8899e291a1207984ed8b2bb84d9706a82fc71cbda9569eea05d
data/.coveralls.yml
ADDED
data/.gitignore
ADDED
data/.travis.yml
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,22 @@
Copyright (c) 2013 Gabriel Naiman

MIT License

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,104 @@
# Asynchronic

[![Gem Version](https://badge.fury.io/rb/asynchronic.png)](https://rubygems.org/gems/asynchronic)
[![Build Status](https://travis-ci.org/gabynaiman/asynchronic.png?branch=master)](https://travis-ci.org/gabynaiman/asynchronic)
[![Coverage Status](https://coveralls.io/repos/gabynaiman/asynchronic/badge.png?branch=master)](https://coveralls.io/r/gabynaiman/asynchronic?branch=master)
[![Code Climate](https://codeclimate.com/github/gabynaiman/asynchronic.png)](https://codeclimate.com/github/gabynaiman/asynchronic)
[![Dependency Status](https://gemnasium.com/gabynaiman/asynchronic.png)](https://gemnasium.com/gabynaiman/asynchronic)

DSL for asynchronic pipeline using queues over Redis

## Installation

Add this line to your application's Gemfile:

    gem 'asynchronic'

And then execute:

    $ bundle

Or install it yourself as:

    $ gem install asynchronic

## Usage

### Basic usage

    class Job
      extend Asynchronic::Pipeline

      step :step_name do
        ...
      end
    end

    Job.run

    Asynchronic::Worker.start

### Enqueue a job in a specific queue

    class Job
      extend Asynchronic::Pipeline

      queue :queue_name

      step :step_name do
        ...
      end
    end

    Job.run

    Asynchronic::Worker.start :queue_name

### Pipeline with shared context

    class Job
      extend Asynchronic::Pipeline

      step :first do |ctx|
        ctx[:c] = ctx[:a] + ctx[:b]
        100
      end

      step :second do |ctx, input|
        input * ctx[:c] # 300
      end
    end

    Job.run a: 1, b: 2

    Asynchronic::Worker.start

### Specify a queue for each step

    class Job
      extend Asynchronic::Pipeline

      step :first_queue, queue: :queue1 do
        ...
      end

      step :second_queue, queue: ->(ctx){ctx[:dynamic_queue]} do
        ...
      end
    end

    Job.run dynamic_queue: :queue2

    [:queue1, :queue2].map do |queue|
      Thread.new do
        Asynchronic::Worker.start queue
      end
    end

## Contributing

1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create a new Pull Request
data/Rakefile
ADDED
data/asynchronic.gemspec
ADDED
@@ -0,0 +1,28 @@
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'asynchronic/version'

Gem::Specification.new do |spec|
  spec.name          = 'asynchronic'
  spec.version       = Asynchronic::VERSION
  spec.authors       = ['Gabriel Naiman']
  spec.email         = ['gabynaiman@gmail.com']
  spec.description   = 'DSL for asynchronic pipeline'
  spec.summary       = 'DSL for asynchronic pipeline using queues over Redis'
  spec.homepage      = 'https://github.com/gabynaiman/asynchronic'
  spec.license       = 'MIT'

  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']

  spec.add_dependency 'ost', '~> 0.1'

  spec.add_development_dependency 'bundler', '~> 1.3'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency 'minitest', '~> 4.7'
  spec.add_development_dependency 'turn', '~> 0.9'
  spec.add_development_dependency 'simplecov'
end
data/lib/asynchronic.rb
ADDED
@@ -0,0 +1,48 @@
require 'ost'
require 'securerandom'
require 'base64'
require 'logger'
require 'fileutils'

Dir.glob(File.expand_path('asynchronic/*.rb', File.dirname(__FILE__))).sort.each { |f| require f }

module Asynchronic

  def self.default_queue
    @default_queue ||= :asynchronic
  end

  def self.default_queue=(name)
    @default_queue = name
  end

  def self.logger
    @logger ||= Logger.new($stdout)
  end

  def self.logger=(logger)
    @logger = logger
  end

  def self.connect_redis(options)
    Ost.connect options
    @redis = Redis.new options
  end

  def self.redis
    @redis ||= Redis.current
  end

  def self.archiving_path
    @archiving_path ||= File.join(Dir.home, '.asynchronic', 'data')
  end

  def self.archiving_path=(path)
    @archiving_path = path
  end

  def self.archiving_file(name)
    File.join archiving_path, "#{name}.bin"
  end

end
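
The file above only defines module-level configuration accessors. A minimal setup sketch, assuming a local Redis instance and using only the methods shown here (the queue name, log file and path are illustrative, not gem defaults):

    require 'asynchronic'

    # Point Ost and the Redis client at the same server (see connect_redis above).
    Asynchronic.connect_redis host: 'localhost', port: 6379

    # Override the defaults defined above with application-specific values.
    Asynchronic.default_queue  = :my_queue
    Asynchronic.logger         = Logger.new('asynchronic.log')
    Asynchronic.archiving_path = File.expand_path('tmp/asynchronic')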
data/lib/asynchronic/persistent.rb
ADDED
@@ -0,0 +1,61 @@
module Asynchronic
  module Persistent

    def self.included(base)
      base.send :include, InstanceMethods
      base.extend ClassMethods
    end

    module InstanceMethods

      def id
        @id
      end

      def save
        @id ||= SecureRandom.uuid
        nest.set Marshal.dump(self)
      end

      def delete
        return unless id
        nest.del
      end

      def archive
        return unless id
        FileUtils.mkpath(Asynchronic.archiving_path) unless Dir.exists?(Asynchronic.archiving_path)
        File.write Asynchronic.archiving_file(id), Base64.encode64(Marshal.dump(self))
        delete
      end

      def nest
        self.class.nest[id]
      end

    end

    module ClassMethods

      def create(*args, &block)
        new(*args, &block).tap(&:save)
      end

      def find(id)
        if nest[id].get
          Marshal.load nest[id].get
        elsif File.exists?(Asynchronic.archiving_file(id))
          Marshal.load(Base64.decode64(File.read(Asynchronic.archiving_file(id))))
        else
          nil
        end
      end

      def nest
        @nest ||= Nest.new self.name, Asynchronic.redis
      end

    end

  end
end
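
A short sketch of the persistence lifecycle this mixin provides, mirroring the Dummy struct used in spec/persistent_spec.rb below (Report is a hypothetical example class):

    # Any Marshal-able object can include the mixin, as the specs do with a Struct.
    Report = Struct.new :title do
      include Asynchronic::Persistent
    end

    report = Report.create 'monthly'   # new + save; assigns a random UUID as id
    found  = Report.find report.id     # loaded back from Redis via Marshal
    report.archive                     # written to archiving_file(id) and removed from Redis
    Report.find report.id              # still found, now read from the archive file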
data/lib/asynchronic/pipeline.rb
ADDED
@@ -0,0 +1,23 @@
module Asynchronic
  module Pipeline

    Step = Struct.new :name, :options, :block

    def queue(name=nil)
      name ? @queue = name : @queue
    end

    def step(name, options={}, &block)
      steps << Step.new(name, options, block)
    end

    def steps
      @steps ||= []
    end

    def run(context={})
      Process.enqueue self, context
    end

  end
end
data/lib/asynchronic/process.rb
ADDED
@@ -0,0 +1,80 @@
module Asynchronic
  class Process

    include Persistent

    Child = Struct.new :status, :output

    attr_reader :pipeline
    attr_reader :context
    attr_reader :children

    def initialize(pipeline, context={})
      @pipeline = pipeline
      @context = context
      @children = pipeline.steps.map { Child.new :pending }
    end

    def enqueue(queue=nil)
      q = queue || pipeline.queue || Asynchronic.default_queue
      Ost[q.is_a?(Proc) ? q.call(context) : q].push id
    end

    def run
      current_child.tap do |i|
        log "Running: #{id} (child: #{i})" do
          children[i].status = :running
          save

          current_input = previous_child?(i) ? children[previous_child(i)].output : nil
          children[i].output = pipeline.steps[i].block.call(context, current_input)
          children[i].status = :finalized
          save

          if next_child?(i)
            enqueue(pipeline.steps[next_child(i)].options[:queue])
          else
            archive
          end
        end
      end
    end

    def self.enqueue(pipeline, context={})
      process = Process.create pipeline, context
      process.enqueue(pipeline.steps.first.options[:queue])
      process.id
    end

    private

    def current_child
      children.index { |c| c.status == :pending }
    end

    def previous_child(index=current_child)
      index - 1
    end

    def previous_child?(index=current_child)
      previous_child(index) >= 0
    end

    def next_child(index=current_child)
      index + 1
    end

    def next_child?(index=current_child)
      next_child(index) < children.count
    end

    def log(message)
      start = Time.now
      Asynchronic.logger.info('Asynchronic') { "#{message} - Start" }
      result = yield
      Asynchronic.logger.info('Asynchronic') { "#{message} - End (Time: #{Time.now - start})" }
      result
    end

  end
end
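
Tying run and enqueue together, a hedged sketch of how a process advances one child per worker pop (MyJob is a hypothetical pipeline, not part of the gem):

    class MyJob
      extend Asynchronic::Pipeline
      step(:one) { 1 }
      step(:two) { |ctx, input| input + 1 }
    end

    pid = MyJob.run                    # Process.create + enqueue; returns the process id

    process = Asynchronic::Process.find pid
    process.children.map(&:status)     # => [:pending, :pending]

    # Each time a worker pops the id, run executes the first pending child, then
    # either enqueues the next step on its queue or archives the finished process.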
data/lib/asynchronic/worker.rb
ADDED
@@ -0,0 +1,27 @@
module Asynchronic
  class Worker

    attr_reader :queue

    def initialize(queue=nil)
      @queue = queue || Asynchronic.default_queue
    end

    def start
      Signal.trap('INT') { stop }

      Ost[@queue].pop do |pid|
        Process.find(pid).run
      end
    end

    def stop
      Ost[@queue].stop
    end

    def self.start(queue=nil)
      new(queue).tap(&:start)
    end

  end
end
data/spec/integration_spec.rb
ADDED
@@ -0,0 +1,122 @@
require 'minitest_helper'
require 'jobs'

describe 'Integration' do

  before do
    Registry.clear
  end

  def start_and_stop_worker(queue=nil)
    worker = Asynchronic::Worker.new queue
    Thread.new do
      sleep 0.1
      while Nest.new('ost')[worker.queue].exists; end
      worker.stop
    end
    worker.start
  end

  def exist_queue?(queue)
    Nest.new('ost')[queue].exists
  end

  it 'Job defaults' do
    SingleStepJob.queue.must_be_nil
    SingleStepJob.steps.count.must_equal 1
    SingleStepJob.steps[0].name.must_equal :step_name
    SingleStepJob.steps[0].options.must_equal Hash.new
    SingleStepJob.steps[0].block.class.must_equal Proc
    SingleStepJob.must_respond_to :run
  end

  it 'Process defaults' do
    pid = SingleStepJob.run

    pid.wont_be_nil

    process = Asynchronic::Process.find pid

    process.pipeline.must_equal SingleStepJob
    process.context.must_equal Hash.new
    process.children.count.must_equal 1
    process.children[0].status.must_equal :pending
    process.children[0].output.must_be_nil
  end

  describe 'Execution' do

    it 'One step job' do
      SingleStepJob.queue.must_be_nil
      refute exist_queue? Asynchronic.default_queue

      pid = SingleStepJob.run

      assert exist_queue? Asynchronic.default_queue
      Registry.must_be_empty

      start_and_stop_worker

      process = Asynchronic::Process.find pid
      process.children[0].status.must_equal :finalized
      process.children[0].output.must_equal :single_step_job

      Registry.to_a.must_equal [:single_step_job]
    end

    it 'Two steps with specific queue and context arguments' do
      TwoStepsWithSpecificQueueJob.queue.wont_be_nil
      refute exist_queue? TwoStepsWithSpecificQueueJob.queue

      pid = TwoStepsWithSpecificQueueJob.run value1: 10

      assert exist_queue? TwoStepsWithSpecificQueueJob.queue
      Registry.must_be_empty

      start_and_stop_worker TwoStepsWithSpecificQueueJob.queue

      process = Asynchronic::Process.find pid
      process.context.must_equal value1: 10, value2: 5
      process.children[0].status.must_equal :finalized
      process.children[0].output.must_equal 11
      process.children[1].status.must_equal :finalized
      process.children[1].output.must_equal 55

      Registry.to_a.must_equal [11, 55]
    end

    it 'Steps with different queues (fixed and contextual)' do
      MultipleQueuesJob.queue.must_be_nil
      refute exist_queue? :queue1
      refute exist_queue? :queue2

      pid = MultipleQueuesJob.run dynamic_queue: :queue2

      assert exist_queue? :queue1
      refute exist_queue? :queue2
      Registry.must_be_empty

      start_and_stop_worker :queue1

      process = Asynchronic::Process.find pid
      process.children[0].status.must_equal :finalized
      process.children[1].status.must_equal :pending

      refute exist_queue? :queue1
      assert exist_queue? :queue2
      Registry.to_a.must_equal [:first_queue]

      start_and_stop_worker :queue2

      process = Asynchronic::Process.find pid
      process.children[0].status.must_equal :finalized
      process.children[1].status.must_equal :finalized

      refute exist_queue? :queue1
      refute exist_queue? :queue2
      Registry.to_a.must_equal [:first_queue, :second_queue]
    end

  end

end
data/spec/jobs.rb
ADDED
@@ -0,0 +1,58 @@
class Registry
  extend Enumerable

  def self.add(arg)
    Asynchronic.logger.debug('Asynchronic') { "Registry: #{arg}" }
    elements << arg
    arg
  end

  def self.clear
    elements.clear
  end

  def self.each(&block)
    elements.each(&block)
  end

  def self.empty?
    !any?
  end

  private

  def self.elements
    @elements ||= []
  end
end

class SingleStepJob
  extend Asynchronic::Pipeline
  step :step_name do
    Registry.add :single_step_job
  end
end

class TwoStepsWithSpecificQueueJob
  extend Asynchronic::Pipeline
  queue :specific_queue
  step :first do |ctx|
    ctx[:value2] = ctx[:value1] / 2
    Registry.add ctx[:value1] + 1
  end
  step :second do |ctx, input|
    Registry.add input * ctx[:value2]
  end
end

class MultipleQueuesJob
  extend Asynchronic::Pipeline
  step :first_queue, queue: :queue1 do
    Registry.add :first_queue
  end
  step :second_queue, queue: ->(ctx){ctx[:dynamic_queue]} do
    Registry.add :second_queue
  end
end
data/spec/minitest_helper.rb
ADDED
@@ -0,0 +1,31 @@
require 'coverage_helper'
require 'minitest/autorun'
require 'turn'
require 'asynchronic'

Turn.config do |c|
  c.format = :pretty
  c.natural = true
end

logger = Logger.new($stdout)
logger.level = Logger::ERROR
Asynchronic.logger = logger

Asynchronic.connect_redis host: 'localhost', port: 6379

Asynchronic.default_queue = 'asynchronic_test'

Asynchronic.archiving_path = File.expand_path('../tmp', File.dirname(__FILE__))

class MiniTest::Spec

  before do
    Redis.current.flushdb
  end

  def redis
    Redis.current
  end

end
data/spec/persistent_spec.rb
ADDED
@@ -0,0 +1,88 @@
require 'minitest_helper'

describe Asynchronic::Persistent do

  Dummy = Struct.new :string, :hash, :array do
    include Asynchronic::Persistent
  end

  def dummy_attributes
    ['text', {key1: 'value1', key2: 'value2'}, [1,2,3]]
  end

  def assert_dummy(obj)
    obj.string.must_equal 'text'
    obj.hash.must_equal key1: 'value1', key2: 'value2'
    obj.array.must_equal [1,2,3]
  end

  describe 'Instance methods' do

    let(:dummy) { Dummy.new *dummy_attributes }

    it 'Nest instance identifier' do
      dummy.define_singleton_method(:id) { '123456' }
      dummy.nest.must_equal 'Dummy:123456'
    end

    it 'Save' do
      dummy.id.must_equal nil
      dummy.save
      dummy.id.wont_equal nil

      redis.keys.must_include dummy.nest

      assert_dummy Marshal.load(redis.get(dummy.nest))
    end

    it 'Delete' do
      dummy.save
      redis.keys.must_include dummy.nest

      dummy.delete
      redis.keys.wont_include dummy.nest
    end

    it 'Archive' do
      dummy.save
      redis.keys.must_include dummy.nest
      refute File.exists?(Asynchronic.archiving_file(dummy.id))

      dummy.archive
      redis.keys.wont_include dummy.nest
      assert File.exists?(Asynchronic.archiving_file(dummy.id))

      assert_dummy Marshal.load(Base64.decode64(File.read(Asynchronic.archiving_file(dummy.id))))
    end

  end

  describe 'Class methods' do

    it 'Nest class identifier' do
      Dummy.nest.must_equal 'Dummy'
    end

    it 'Create' do
      dummy = Dummy.create *dummy_attributes

      redis.keys.must_include dummy.nest
      assert_dummy Marshal.load(redis.get(dummy.nest))
    end

    it 'Find' do
      dummy = Dummy.create *dummy_attributes

      assert_dummy Dummy.find(dummy.id)
    end

    it 'Find archived' do
      dummy = Dummy.create *dummy_attributes
      dummy.archive

      assert_dummy Dummy.find(dummy.id)
    end

  end

end
metadata
ADDED
@@ -0,0 +1,152 @@
--- !ruby/object:Gem::Specification
name: asynchronic
version: !ruby/object:Gem::Version
  version: 0.0.1
platform: ruby
authors:
- Gabriel Naiman
autorequire:
bindir: bin
cert_chain: []
date: 2013-11-29 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: ost
  prerelease: false
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.1'
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.1'
  type: :runtime
- !ruby/object:Gem::Dependency
  name: bundler
  prerelease: false
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.3'
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.3'
  type: :development
- !ruby/object:Gem::Dependency
  name: rake
  prerelease: false
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :development
- !ruby/object:Gem::Dependency
  name: minitest
  prerelease: false
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '4.7'
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '4.7'
  type: :development
- !ruby/object:Gem::Dependency
  name: turn
  prerelease: false
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.9'
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.9'
  type: :development
- !ruby/object:Gem::Dependency
  name: simplecov
  prerelease: false
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :development
description: DSL for asynchronic pipeline
email:
- gabynaiman@gmail.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- ".coveralls.yml"
- ".gitignore"
- ".travis.yml"
- Gemfile
- LICENSE.txt
- README.md
- Rakefile
- asynchronic.gemspec
- lib/asynchronic.rb
- lib/asynchronic/persistent.rb
- lib/asynchronic/pipeline.rb
- lib/asynchronic/process.rb
- lib/asynchronic/version.rb
- lib/asynchronic/worker.rb
- spec/coverage_helper.rb
- spec/integration_spec.rb
- spec/jobs.rb
- spec/minitest_helper.rb
- spec/persistent_spec.rb
homepage: https://github.com/gabynaiman/asynchronic
licenses:
- MIT
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.1.5
signing_key:
specification_version: 4
summary: DSL for asynchronic pipeline using queues over Redis
test_files:
- spec/coverage_helper.rb
- spec/integration_spec.rb
- spec/jobs.rb
- spec/minitest_helper.rb
- spec/persistent_spec.rb