gush 0.0.1
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +21 -0
- data/.rspec +1 -0
- data/Gemfile +8 -0
- data/LICENSE.txt +22 -0
- data/README.md +128 -0
- data/Rakefile +1 -0
- data/bin/gush +12 -0
- data/gush.gemspec +32 -0
- data/lib/gush.rb +47 -0
- data/lib/gush/cli.rb +245 -0
- data/lib/gush/client.rb +146 -0
- data/lib/gush/configuration.rb +42 -0
- data/lib/gush/errors.rb +3 -0
- data/lib/gush/job.rb +161 -0
- data/lib/gush/logger_builder.rb +15 -0
- data/lib/gush/metadata.rb +24 -0
- data/lib/gush/null_logger.rb +6 -0
- data/lib/gush/version.rb +3 -0
- data/lib/gush/worker.rb +100 -0
- data/lib/gush/workflow.rb +154 -0
- data/spec/Gushfile.rb +0 -0
- data/spec/lib/gush/client_spec.rb +125 -0
- data/spec/lib/gush/configuration_spec.rb +27 -0
- data/spec/lib/gush/job_spec.rb +114 -0
- data/spec/lib/gush/logger_builder_spec.rb +25 -0
- data/spec/lib/gush/null_logger_spec.rb +15 -0
- data/spec/lib/gush/worker_spec.rb +96 -0
- data/spec/lib/gush/workflow_spec.rb +246 -0
- data/spec/lib/gush_spec.rb +39 -0
- data/spec/redis.conf +2 -0
- data/spec/spec_helper.rb +79 -0
- metadata +256 -0
data/lib/gush/workflow.rb
ADDED
@@ -0,0 +1,154 @@
+require 'securerandom'
+require 'gush/metadata'
+
+module Gush
+  class Workflow
+    include Gush::Metadata
+
+    attr_accessor :id, :nodes, :stopped
+
+    def initialize(id, options = {})
+      @id = id
+      @nodes = []
+      @dependencies = []
+      @logger_builder = default_logger_builder
+      @stopped = false
+
+      unless options[:configure] == false
+        configure
+        create_dependencies
+      end
+    end
+
+    def default_logger_builder
+      LoggerBuilder
+    end
+
+    def configure
+    end
+
+    def stop!
+      @stopped = true
+    end
+
+    def start!
+      @stopped = false
+    end
+
+    def logger_builder(klass)
+      @logger_builder = klass
+    end
+
+    def build_logger_for_job(job, jid)
+      @logger_builder.new(self, job, jid).build
+    end
+
+    def create_dependencies
+      @dependencies.each do |dependency|
+        from = find_job(dependency[:from])
+        to = find_job(dependency[:to])
+
+        to.incoming << dependency[:from]
+        from.outgoing << dependency[:to]
+      end
+    end
+
+    def find_job(name)
+      @nodes.find { |node| node.name == name.to_s || node.class.to_s == name.to_s }
+    end
+
+    def finished?
+      nodes.all?(&:finished)
+    end
+
+    def running?
+      nodes.any? {|j| j.enqueued? || j.running? } && !stopped?
+    end
+
+    def failed?
+      nodes.any?(&:failed)
+    end
+
+    def stopped?
+      stopped
+    end
+
+    def run(klass, deps = {})
+      node = klass.new(name: klass.to_s)
+      @nodes << node
+
+      deps_after = [*deps[:after]]
+      deps_after.each do |dep|
+        @dependencies << {from: dep.to_s, to: klass.to_s }
+      end
+
+      deps_before = [*deps[:before]]
+      deps_before.each do |dep|
+        @dependencies << {from: klass.to_s, to: dep.to_s }
+      end
+    end
+
+    def status
+      case
+      when failed?
+        "Failed"
+      when running?
+        "Running"
+      when finished?
+        "Finished"
+      when stopped?
+        "Stopped"
+      else
+        "Pending"
+      end
+    end
+
+    def started_at
+      first_job ? first_job.started_at : nil
+    end
+
+    def finished_at
+      last_job ? last_job.finished_at : nil
+    end
+
+    def to_hash
+      name = self.class.to_s
+      {
+        name: name,
+        id: @id,
+        total: @nodes.count,
+        finished: @nodes.count(&:finished?),
+        klass: name,
+        nodes: @nodes.map(&:as_json),
+        status: status,
+        stopped: stopped,
+        started_at: started_at,
+        finished_at: finished_at,
+        logger_builder: @logger_builder.to_s
+      }
+    end
+
+    def to_json(options = {})
+      JSON.dump(to_hash)
+    end
+
+    def next_jobs
+      @nodes.select do |job|
+        job.can_be_started?(self)
+      end
+    end
+
+    def self.descendants
+      ObjectSpace.each_object(Class).select { |klass| klass < self }
+    end
+
+    private
+    def first_job
+      nodes.min_by{ |n| n.started_at || Time.now.to_i }
+    end
+
+    def last_job
+      nodes.max_by{ |n| n.finished_at || 0 } if nodes.all?(&:finished?)
+    end
+  end
+end
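The Workflow class above builds a dependency graph: `run` registers a job node and records its `after:`/`before:` relations, and `create_dependencies` then wires each node's `incoming` and `outgoing` lists. A minimal usage sketch, assuming job classes inherit from Gush::Job (the PrepareJob/PublishJob names below are hypothetical; only the Workflow API is taken from the file above):

require 'securerandom'
require 'gush'

# Hypothetical job classes; real jobs would implement whatever work method
# the gem's Worker expects (not shown in this file).
class PrepareJob < Gush::Job; end
class PublishJob < Gush::Job; end

class SampleWorkflow < Gush::Workflow
  def configure
    run PrepareJob
    run PublishJob, after: PrepareJob   # records the PrepareJob -> PublishJob edge
  end
end

flow = SampleWorkflow.new(SecureRandom.uuid)   # initialize calls configure and create_dependencies
flow.find_job("PublishJob").incoming           # => ["PrepareJob"]
flow.status                                    # => "Pending" (nothing enqueued, running or finished yet)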
data/spec/Gushfile.rb
ADDED
File without changes
data/spec/lib/gush/client_spec.rb
ADDED
@@ -0,0 +1,125 @@
+require 'spec_helper'
+
+describe Gush::Client do
+  describe "#find_workflow" do
+    context "when workflow doesn't exist" do
+      it "returns raises WorkflowNotFound" do
+        expect {
+          client.find_workflow('nope')
+        }.to raise_error(WorkflowNotFound)
+      end
+    end
+
+    context "when given workflow exists" do
+      it "returns Workflow object" do
+        expected_workflow = TestWorkflow.new(SecureRandom.uuid)
+        client.persist_workflow(expected_workflow)
+        workflow = client.find_workflow(expected_workflow.id)
+
+        expect(workflow.id).to eq(expected_workflow.id)
+        expect(workflow.nodes.map(&:name)).to match_array(expected_workflow.nodes.map(&:name))
+      end
+    end
+  end
+
+  describe "#start_workflow" do
+    it "enqueues next jobs from the workflow" do
+      id = SecureRandom.uuid
+      workflow = TestWorkflow.new(id)
+      client.persist_workflow(workflow)
+      expect {
+        client.start_workflow(id)
+      }.to change{Gush::Worker.jobs.count}.from(0).to(1)
+    end
+
+    it "removes stopped flag when the workflow is started" do
+      id = SecureRandom.uuid
+      workflow = TestWorkflow.new(id)
+      workflow.stop!
+      client.persist_workflow(workflow)
+      expect {
+        client.start_workflow(id)
+      }.to change{client.find_workflow(id).stopped?}.from(true).to(false)
+    end
+
+    it "marks the enqueued jobs as enqueued" do
+      id = SecureRandom.uuid
+      workflow = TestWorkflow.new(id)
+      client.persist_workflow(workflow)
+      client.start_workflow(id)
+      job = client.find_workflow(id).find_job("Prepare")
+      expect(job.enqueued?).to eq(true)
+    end
+  end
+
+  describe "#stop_workflow" do
+    it "marks the workflow as stopped" do
+      id = SecureRandom.uuid
+      workflow = TestWorkflow.new(id)
+      client.persist_workflow(workflow)
+      expect {
+        client.stop_workflow(id)
+      }.to change{client.find_workflow(id).stopped?}.from(false).to(true)
+    end
+  end
+
+  describe "#persist_workflow" do
+    it "persists JSON dump of the Workflow and its jobs" do
+      job = double("job", to_json: 'json')
+      workflow = double("workflow", id: 'abcd', nodes: [job, job, job], to_json: '"json"')
+      expect(client).to receive(:persist_job).exactly(3).times.with(workflow.id, job)
+      client.persist_workflow(workflow)
+      expect(redis.keys("gush.workflows.abcd").length).to eq(1)
+    end
+  end
+
+  describe "#destroy_workflow" do
+    it "removes all Redis keys related to the workflow" do
+      id = SecureRandom.uuid
+      workflow = TestWorkflow.new(id)
+      client.persist_workflow(workflow)
+      expect(redis.keys("gush.workflows.#{id}").length).to eq(1)
+      expect(redis.keys("gush.jobs.#{id}.*").length).to eq(5)
+
+      client.destroy_workflow(workflow)
+
+      expect(redis.keys("gush.workflows.#{id}").length).to eq(0)
+      expect(redis.keys("gush.jobs.#{id}.*").length).to eq(0)
+    end
+  end
+
+  describe "#persist_job" do
+    it "persists JSON dump of the job in Redis" do
+      job = double("job", to_json: 'json')
+      client.persist_job('deadbeef', job)
+      expect(redis.keys("gush.jobs.deadbeef.*").length).to eq(1)
+    end
+  end
+
+  describe "#all_workflows" do
+    it "returns all registered workflows" do
+      workflow = TestWorkflow.new(SecureRandom.uuid)
+      client.persist_workflow(workflow)
+      workflows = client.all_workflows
+      expect(workflows.map(&:id)).to eq([workflow.id])
+    end
+  end
+
+  it "should be able to handle outdated data format" do
+    workflow_id = SecureRandom.uuid
+    workflow = TestWorkflow.new(workflow_id)
+    client.persist_workflow(workflow)
+
+    # malform the data
+    hash = Yajl::Parser.parse(redis.get("gush.workflows.#{workflow_id}"), symbolize_keys: true)
+    hash.delete(:logger_builder)
+    hash.delete(:stopped)
+    redis.set("gush.workflows.#{workflow_id}", Yajl::Encoder.new.encode(hash))
+
+    expect {
+      workflow = client.find_workflow(workflow_id)
+      expect(workflow.stopped?).to be false
+      expect(workflow.instance_variable_get(:@logger_builder)).to be Gush::LoggerBuilder
+    }.not_to raise_error
+  end
+end
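The client specs above also pin down the Redis key layout: each persisted workflow lives under `gush.workflows.<id>` and its jobs under `gush.jobs.<id>.*`. A small inspection sketch using the standard redis-rb client (the key patterns come from the expectations above; the connection URL and id are placeholders):

require 'redis'

redis = Redis.new(url: "redis://localhost:6379")
workflow_id = "deadbeef"   # placeholder workflow id

redis.keys("gush.workflows.#{workflow_id}")   # one key holding the workflow's JSON dump
redis.keys("gush.jobs.#{workflow_id}.*")      # one key per job belonging to that workflow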
data/spec/lib/gush/configuration_spec.rb
ADDED
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe Gush::Configuration do
+
+  it "has defaults set" do
+    subject.gushfile = GUSHFILE
+    expect(subject.redis_url).to eq("redis://localhost:6379")
+    expect(subject.concurrency).to eq(5)
+    expect(subject.namespace).to eq('gush')
+    expect(subject.gushfile).to eq(GUSHFILE.realpath)
+    expect(subject.environment).to eq('development')
+  end
+
+  describe "#configure" do
+    it "allows setting options through a block" do
+      Gush.configure do |config|
+        config.redis_url = "redis://localhost"
+        config.concurrency = 25
+        config.environment = 'production'
+      end
+
+      expect(Gush.configuration.redis_url).to eq("redis://localhost")
+      expect(Gush.configuration.concurrency).to eq(25)
+      expect(Gush.configuration.environment).to eq('production')
+    end
+  end
+end
data/spec/lib/gush/job_spec.rb
ADDED
@@ -0,0 +1,114 @@
+require 'spec_helper'
+
+describe Gush::Job do
+
+  describe "#fail!" do
+    it "sets finished and failed to true and records time" do
+      job = described_class.new(name: "a-job")
+      job.fail!
+      expect(job.failed_at).to eq(Time.now.to_i)
+      expect(job.failed).to eq(true)
+      expect(job.finished).to eq(true)
+      expect(job.running).to eq(false)
+      expect(job.enqueued).to eq(false)
+    end
+  end
+
+  describe "#finish!" do
+    it "sets finished to false and failed to false and records time" do
+      job = described_class.new(name: "a-job")
+      job.finish!
+      expect(job.finished_at).to eq(Time.now.to_i)
+      expect(job.failed).to eq(false)
+      expect(job.running).to eq(false)
+      expect(job.finished).to eq(true)
+      expect(job.enqueued).to eq(false)
+    end
+  end
+
+  describe "#enqueue!" do
+    it "resets flags to false and sets enqueued to true" do
+      job = described_class.new(name: "a-job")
+      job.finished_at = 123
+      job.failed_at = 123
+      job.enqueue!
+      expect(job.started_at).to eq(nil)
+      expect(job.finished_at).to eq(nil)
+      expect(job.failed_at).to eq(nil)
+      expect(job.failed).to eq(false)
+      expect(job.finished).to eq(false)
+      expect(job.enqueued).to eq(true)
+      expect(job.running).to eq(false)
+    end
+  end
+
+  describe "#start!" do
+    it "resets flags to false and sets running to true" do
+      job = described_class.new(name: "a-job")
+      job.enqueue!
+      job.start!
+      expect(job.started_at).to eq(Time.now.to_i)
+      expect(job.enqueued).to eq(false)
+      expect(job.running).to eq(true)
+    end
+  end
+
+  describe "#as_json" do
+    context "finished and enqueued set to true" do
+      it "returns correct hash" do
+        job = described_class.new(name: "a-job", finished: true, enqueued: true)
+        expected = {
+          name: "a-job",
+          klass: "Gush::Job",
+          finished: true,
+          enqueued: true,
+          failed: false,
+          incoming: [],
+          outgoing: [],
+          failed_at: nil,
+          started_at: nil,
+          finished_at: nil,
+          running: false
+        }
+        expect(job.as_json).to eq(expected)
+      end
+    end
+  end
+
+  describe ".from_hash" do
+    it "properly restores state of the job from hash" do
+      job = described_class.from_hash({
+        klass: 'Gush::Job',
+        name: 'gob',
+        finished: true,
+        failed: true,
+        enqueued: true,
+        incoming: ['a', 'b'],
+        outgoing: ['c'],
+        failed_at: 123,
+        finished_at: 122,
+        started_at: 55
+      })
+
+      expect(job.name).to eq('gob')
+      expect(job.class).to eq(Gush::Job)
+      expect(job.finished).to eq(true)
+      expect(job.failed).to eq(true)
+      expect(job.enqueued).to eq(true)
+      expect(job.incoming).to eq(['a', 'b'])
+      expect(job.outgoing).to eq(['c'])
+      expect(job.failed_at).to eq(123)
+      expect(job.finished_at).to eq(122)
+      expect(job.started_at).to eq(55)
+    end
+  end
+
+  describe "#logger" do
+    it "returns a logger for the job" do
+      job = described_class.new(name: "a-job", finished: true, running: true)
+      job.logger = TestLoggerBuilder.new(:workflow, job, 1234).build
+      expect(job.logger).to be_a TestLogger
+      expect(job.logger.name).to eq(job.name)
+    end
+  end
+end
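Taken together, these specs describe the Job state machine: `enqueue!` resets flags and timestamps, `start!` marks the job running, and `finish!`/`fail!` terminate it while recording a timestamp. A condensed sketch of that lifecycle (the state noted in the comments mirrors the expectations above):

job = Gush::Job.new(name: "a-job")

job.enqueue!   # enqueued: true; started_at/finished_at/failed_at cleared
job.start!     # running: true, enqueued: false; started_at recorded
job.finish!    # finished: true, running: false, failed: false; finished_at recorded
# or, when the job raises:
job.fail!      # failed: true and finished: true; failed_at recorded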
data/spec/lib/gush/logger_builder_spec.rb
ADDED
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe Gush::LoggerBuilder do
+  it 'takes a job as an argument' do
+    builder = Gush::LoggerBuilder.new(:workflow, :job, :jid)
+    expect(builder.job).to eq(:job)
+  end
+
+  it 'takes a workflow as an argument' do
+    builder = Gush::LoggerBuilder.new(:workflow, :job, :jid)
+    expect(builder.workflow).to eq(:workflow)
+  end
+
+  it 'takes a jid as an argument' do
+    builder = Gush::LoggerBuilder.new(:workflow, :job, :jid)
+    expect(builder.jid).to eq(:jid)
+  end
+
+  describe "#build" do
+    it 'returns a logger for a job' do
+      expect(Gush::LoggerBuilder.new(:workflow, :job, :jid).build).to be_a Gush::NullLogger
+    end
+  end
+end
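Workflows obtain per-job loggers through a builder: `Workflow#build_logger_for_job` instantiates the configured builder class with `(workflow, job, jid)` and calls `#build`, and the default builder returns a `Gush::NullLogger`. A hedged sketch of a custom builder, assuming only the constructor arity and the `workflow`/`job`/`jid` readers exercised in the spec above (the `StdoutLoggerBuilder` class itself is hypothetical):

require 'logger'

class StdoutLoggerBuilder < Gush::LoggerBuilder
  def build
    logger = Logger.new($stdout)
    logger.progname = "#{workflow.id}/#{job.name}/#{jid}"   # readers assumed from the spec above
    logger
  end
end

# Selected per workflow via the logger_builder setter from workflow.rb, e.g.:
#
#   class SampleWorkflow < Gush::Workflow
#     def configure
#       logger_builder StdoutLoggerBuilder
#       ...
#     end
#   end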