gush 0.4.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +1 -1
- data/.travis.yml +2 -2
- data/CHANGELOG.md +33 -1
- data/README.md +195 -80
- data/gush.gemspec +6 -5
- data/lib/gush.rb +0 -14
- data/lib/gush/cli.rb +3 -18
- data/lib/gush/cli/overview.rb +1 -1
- data/lib/gush/client.rb +8 -32
- data/lib/gush/configuration.rb +4 -6
- data/lib/gush/graph.rb +2 -1
- data/lib/gush/job.rb +12 -16
- data/lib/gush/worker.rb +21 -49
- data/lib/gush/workflow.rb +9 -5
- data/spec/{Gushfile.rb → Gushfile} +0 -0
- data/spec/features/integration_spec.rb +62 -23
- data/spec/gush/client_spec.rb +1 -1
- data/spec/gush/configuration_spec.rb +0 -3
- data/spec/gush/job_spec.rb +3 -3
- data/spec/gush/worker_spec.rb +33 -41
- data/spec/gush/workflow_spec.rb +4 -2
- data/spec/gush_spec.rb +4 -4
- data/spec/spec_helper.rb +23 -9
- metadata +26 -12
data/lib/gush/workflow.rb
CHANGED
@@ -102,10 +102,9 @@ module Gush
     end
 
     def run(klass, opts = {})
-
-
-
-        name: client.next_free_job_id(id,klass.to_s),
+      node = klass.new({
+        workflow_id: id,
+        name: client.next_free_job_id(id, klass.to_s),
         params: opts.fetch(:params, {})
       })
 
@@ -125,7 +124,12 @@ module Gush
     end
 
     def reload
-      self.class.find(id)
+      flow = self.class.find(id)
+
+      self.jobs = flow.jobs
+      self.stopped = flow.stopped
+
+      self
     end
 
     def initial_jobs
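The `reload` hunk above changes the method's contract: instead of returning a freshly looked-up workflow, 1.0.0 copies the refreshed `jobs` and `stopped` state onto the receiver and returns `self`. A minimal sketch of what that means for calling code, assuming a configured Gush backend and the `TestWorkflow` defined in the specs:

```ruby
flow = TestWorkflow.create
flow.start!

# 0.4.1: reload returned the result of `self.class.find(id)`, so callers had
# to reassign the variable. 1.0.0: reload refreshes the receiver in place and
# returns self, so both styles below observe the updated jobs and stopped flag.
flow.reload
flow = flow.reload   # still works, as the integration specs do
flow.finished?
```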
data/spec/{Gushfile.rb → Gushfile}
File without changes

data/spec/features/integration_spec.rb
CHANGED
@@ -1,12 +1,13 @@
 require 'spec_helper'
+require 'pry'
 
 describe "Workflows" do
   context "when all jobs finish successfuly" do
     it "marks workflow as completed" do
       flow = TestWorkflow.create
-
-
-
+      perform_enqueued_jobs do
+        flow.start!
+      end
 
       flow = flow.reload
       expect(flow).to be_finished
@@ -20,39 +21,39 @@ describe "Workflows" do
 
       expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(['Prepare']))
 
-
+      perform_one
       expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(["FetchFirstJob", "FetchSecondJob"]))
 
-
+      perform_one
       expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(["FetchSecondJob", "PersistFirstJob"]))
 
-
+      perform_one
       expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(["PersistFirstJob"]))
 
-
+      perform_one
       expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(["NormalizeJob"]))
 
-
+      perform_one
 
-      expect(
+      expect(ActiveJob::Base.queue_adapter.enqueued_jobs).to be_empty
     end
 
     it "passes payloads down the workflow" do
       class UpcaseJob < Gush::Job
-        def
+        def perform
          output params[:input].upcase
        end
      end
 
       class PrefixJob < Gush::Job
-        def
+        def perform
          output params[:prefix].capitalize
        end
      end
 
       class PrependJob < Gush::Job
-        def
-          string = "#{payloads['PrefixJob']
+        def perform
+          string = "#{payloads.find { |j| j[:class] == 'PrefixJob'}[:output]}: #{payloads.find { |j| j[:class] == 'UpcaseJob'}[:output]}"
          output string
        end
      end
@@ -68,13 +69,13 @@ describe "Workflows" do
       flow = PayloadWorkflow.create
       flow.start!
 
-
+      perform_one
       expect(flow.reload.find_job("UpcaseJob").output_payload).to eq("SOME TEXT")
 
-
+      perform_one
       expect(flow.reload.find_job("PrefixJob").output_payload).to eq("A prefix")
 
-
+      perform_one
       expect(flow.reload.find_job("PrependJob").output_payload).to eq("A prefix: SOME TEXT")
 
 
@@ -82,14 +83,14 @@ describe "Workflows" do
 
     it "passes payloads from workflow that runs multiple same class jobs with nameized payloads" do
       class RepetitiveJob < Gush::Job
-        def
+        def perform
          output params[:input]
        end
      end
 
       class SummaryJob < Gush::Job
-        def
-          output payloads[
+        def perform
+          output payloads.map { |payload| payload[:output] }
        end
      end
 
@@ -106,17 +107,55 @@ describe "Workflows" do
       flow = PayloadWorkflow.create
       flow.start!
 
-
+      perform_one
       expect(flow.reload.find_job(flow.jobs[0].name).output_payload).to eq('first')
 
-
+      perform_one
       expect(flow.reload.find_job(flow.jobs[1].name).output_payload).to eq('second')
 
-
+      perform_one
       expect(flow.reload.find_job(flow.jobs[2].name).output_payload).to eq('third')
 
-
+      perform_one
       expect(flow.reload.find_job(flow.jobs[3].name).output_payload).to eq(%w(first second third))
+    end
+
+    it "does not execute `configure` on each job for huge workflows" do
+      INTERNAL_SPY = double('spy')
+      INTERNAL_CONFIGURE_SPY = double('configure spy')
+      expect(INTERNAL_SPY).to receive(:some_method).exactly(110).times
+
+      # One time when persisting, second time when reloading in the spec
+      expect(INTERNAL_CONFIGURE_SPY).to receive(:some_method).exactly(2).times
+
+      class SimpleJob < Gush::Job
+        def perform
+          INTERNAL_SPY.some_method
+        end
+      end
+
+      class GiganticWorkflow < Gush::Workflow
+        def configure
+          INTERNAL_CONFIGURE_SPY.some_method
+
+          10.times do
+            main = run(SimpleJob)
+            10.times do
+              run(SimpleJob, after: main)
+            end
+          end
+        end
+      end
+
+      flow = GiganticWorkflow.create
+      flow.start!
+
+      110.times do
+        perform_one
+      end
 
+      flow = flow.reload
+      expect(flow).to be_finished
+      expect(flow).to_not be_failed
     end
   end
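The payload hunks above imply a change in how a job sees its ancestors' outputs: the truncated 0.4.1 lines look `payloads` up by class name, while in 1.0.0 `payloads` is treated as an array of hashes carrying at least `:class` and `:output`. A condensed sketch of the 1.0.0 access pattern, using the same job names as the specs:

```ruby
class PrependJob < Gush::Job
  def perform
    # Each entry in `payloads` describes one finished upstream job.
    prefix = payloads.find { |j| j[:class] == 'PrefixJob' }[:output]
    text   = payloads.find { |j| j[:class] == 'UpcaseJob' }[:output]
    output "#{prefix}: #{text}"
  end
end

class SummaryJob < Gush::Job
  def perform
    # With several upstream jobs of the same class, collect every output.
    output payloads.map { |payload| payload[:output] }
  end
end
```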
data/spec/gush/client_spec.rb
CHANGED
@@ -41,7 +41,7 @@ describe Gush::Client do
       workflow = TestWorkflow.create
       expect {
         client.start_workflow(workflow)
-      }.to change{
+      }.to change{ActiveJob::Base.queue_adapter.enqueued_jobs.size}.from(0).to(1)
     end
 
     it "removes stopped flag when the workflow is started" do
data/spec/gush/configuration_spec.rb
CHANGED
@@ -8,7 +8,6 @@ describe Gush::Configuration do
     expect(subject.concurrency).to eq(5)
     expect(subject.namespace).to eq('gush')
     expect(subject.gushfile).to eq(GUSHFILE.realpath)
-    expect(subject.environment).to eq('development')
   end
 
   describe "#configure" do
@@ -16,12 +15,10 @@ describe Gush::Configuration do
       Gush.configure do |config|
         config.redis_url = "redis://localhost"
         config.concurrency = 25
-        config.environment = 'production'
       end
 
       expect(Gush.configuration.redis_url).to eq("redis://localhost")
       expect(Gush.configuration.concurrency).to eq(25)
-      expect(Gush.configuration.environment).to eq('production')
     end
   end
 end
data/spec/gush/job_spec.rb
CHANGED
@@ -62,7 +62,7 @@ describe Gush::Job do
   describe "#as_json" do
     context "finished and enqueued set to true" do
       it "returns correct hash" do
-        job = described_class.new(
+        job = described_class.new(workflow_id: 123, name: "a-job", finished_at: 123, enqueued_at: 120)
         expected = {
           name: "a-job",
           klass: "Gush::Job",
@@ -73,7 +73,8 @@ describe Gush::Job do
           finished_at: 123,
           enqueued_at: 120,
           params: {},
-          output_payload: nil
+          output_payload: nil,
+          workflow_id: 123
         }
         expect(job.as_json).to eq(expected)
       end
@@ -83,7 +84,6 @@ describe Gush::Job do
   describe ".from_hash" do
     it "properly restores state of the job from hash" do
       job = described_class.from_hash(
-        double('flow'),
         {
           klass: 'Gush::Job',
           name: 'gob',
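The `#as_json` hunks show two 1.0.0 changes to job serialization: the constructor takes a `workflow_id` (and `.from_hash` no longer takes a separate flow argument), and the serialized hash carries `workflow_id`. A small sketch of the expected shape, based only on the keys visible in the spec above:

```ruby
job = Gush::Job.new(workflow_id: 123, name: "a-job", finished_at: 123, enqueued_at: 120)

job.as_json
# => {
#      name: "a-job",
#      klass: "Gush::Job",
#      ...                      # remaining keys as asserted in the spec
#      finished_at: 123,
#      enqueued_at: 120,
#      params: {},
#      output_payload: nil,
#      workflow_id: 123
#    }
```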
data/spec/gush/worker_spec.rb
CHANGED
@@ -4,68 +4,60 @@ describe Gush::Worker do
   subject { described_class.new }
 
   let!(:workflow) { TestWorkflow.create }
-  let(:job)
+  let!(:job) { client.find_job(workflow.id, "Prepare") }
   let(:config) { Gush.configuration.to_json }
-  let!(:client) {
-
-  before :each do
-    allow(subject).to receive(:client).and_return(client)
-    allow(subject).to receive(:enqueue_outgoing_jobs)
-
-    allow(client).to receive(:find_workflow).with(workflow.id).and_return(workflow)
-    expect(client).to receive(:persist_job).at_least(1).times
-    expect(client).to receive(:worker_report).with(hash_including(status: :started)).ordered
-  end
+  let!(:client) { Gush::Client.new }
 
   describe "#perform" do
     context "when job fails" do
       it "should mark it as failed" do
-
-
-
-
-
-
-
-
-
-
-
-
-
+        class FailingJob < Gush::Job
+          def perform
+            invalid.code_to_raise.error
+          end
+        end
+
+        class FailingWorkflow < Gush::Workflow
+          def configure
+            run FailingJob
+          end
+        end
+
+        workflow = FailingWorkflow.create
         expect do
-          subject.perform(workflow.id, "
-        end.to raise_error(
+          subject.perform(workflow.id, "FailingJob")
+        end.to raise_error(NameError)
+        expect(client.find_job(workflow.id, "FailingJob")).to be_failed
       end
     end
 
     context "when job completes successfully" do
       it "should mark it as succedeed" do
         expect(subject).to receive(:mark_as_finished)
-        expect(client).to receive(:worker_report).with(hash_including(status: :finished)).ordered
 
         subject.perform(workflow.id, "Prepare")
       end
+    end
 
-
-
+    it "calls job.perform method" do
+      SPY = double()
+      expect(SPY).to receive(:some_method)
 
-
+      class OkayJob < Gush::Job
+        def perform
+          SPY.some_method
+        end
       end
-      end
 
-
-
-
-
-
-      end
+      class OkayWorkflow < Gush::Workflow
+        def configure
+          run OkayJob
+        end
+      end
 
-
-      allow(client).to receive(:worker_report)
-      expect(client).to receive(:worker_report).with(hash_including(status: :finished)).ordered
+      workflow = OkayWorkflow.create
 
-      subject.perform(workflow.id,
+      subject.perform(workflow.id, 'OkayJob')
     end
   end
 end
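The rewritten worker spec no longer stubs a client or a `worker_report`; it builds a real workflow and calls the worker directly. Per the spec, `Gush::Worker#perform` in 1.0.0 takes the workflow id and a job name, and a job that raises is marked failed while the error is re-raised. A hedged sketch of that direct-invocation pattern, reusing the spec's own `FailingWorkflow`:

```ruby
workflow = FailingWorkflow.create
worker   = Gush::Worker.new

begin
  worker.perform(workflow.id, "FailingJob")
rescue NameError
  # The worker re-raises, but the job's failed state has already been persisted.
end

Gush::Client.new.find_job(workflow.id, "FailingJob").failed?  # => true, per the spec
```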
data/spec/gush/workflow_spec.rb
CHANGED
@@ -114,7 +114,8 @@ describe Gush::Workflow do
             "enqueued_at"=>nil,
             "failed_at"=>nil,
             "params" => {},
-            "output_payload" => nil
+            "output_payload" => nil,
+            "workflow_id" => an_instance_of(String)
           },
           {
             "name"=>a_string_starting_with('PersistFirstJob'),
@@ -126,7 +127,8 @@ describe Gush::Workflow do
             "enqueued_at"=>nil,
             "failed_at"=>nil,
             "params" => {},
-            "output_payload" => nil
+            "output_payload" => nil,
+            "workflow_id" => an_instance_of(String)
           }
         ]
       }
data/spec/gush_spec.rb
CHANGED
@@ -4,16 +4,16 @@ describe Gush do
   describe ".gushfile" do
     let(:path) { Pathname("/tmp/Gushfile.rb") }
 
-    context "Gushfile
-      it "
+    context "Gushfile is missing from pwd" do
+      it "returns nil" do
         path.delete if path.exist?
         Gush.configuration.gushfile = path
 
-        expect
+        expect(Gush.gushfile).to eq(nil)
       end
     end
 
-    context "Gushfile
+    context "Gushfile exists" do
       it "returns Pathname to it" do
         FileUtils.touch(path)
         Gush.configuration.gushfile = path
data/spec/spec_helper.rb
CHANGED
@@ -1,9 +1,10 @@
 require 'gush'
 require 'fakeredis'
-require '
+require 'json'
+require 'pry'
 
-
-
+ActiveJob::Base.queue_adapter = :test
+ActiveJob::Base.logger = nil
 
 class Prepare < Gush::Job; end
 class FetchFirstJob < Gush::Job; end
@@ -13,7 +14,7 @@ class PersistSecondJob < Gush::Job; end
 class NormalizeJob < Gush::Job; end
 class BobJob < Gush::Job; end
 
-GUSHFILE = Pathname.new(__FILE__).parent.join("Gushfile
+GUSHFILE = Pathname.new(__FILE__).parent.join("Gushfile")
 
 class TestWorkflow < Gush::Workflow
   def configure
@@ -47,6 +48,15 @@ module GushHelpers
     @redis ||= Redis.new(url: REDIS_URL)
   end
 
+  def perform_one
+    job = ActiveJob::Base.queue_adapter.enqueued_jobs.first
+    if job
+      Gush::Worker.new.perform(*job[:args])
+      ActiveJob::Base.queue_adapter.performed_jobs << job
+      ActiveJob::Base.queue_adapter.enqueued_jobs.shift
+    end
+  end
+
   def jobs_with_id(jobs_array)
     jobs_array.map {|job_name| job_with_id(job_name) }
   end
@@ -59,17 +69,18 @@ end
 RSpec::Matchers.define :have_jobs do |flow, jobs|
   match do |actual|
     expected = jobs.map do |job|
-      hash_including(
+      hash_including(args: include(flow, job))
     end
-    expect(
+    expect(ActiveJob::Base.queue_adapter.enqueued_jobs).to match_array(expected)
   end
 
   failure_message do |actual|
-    "expected queue to have #{jobs}, but instead has: #{
+    "expected queue to have #{jobs}, but instead has: #{ActiveJob::Base.queue_adapter.enqueued_jobs.map{ |j| j[:args][1]}}"
   end
 end
 
 RSpec.configure do |config|
+  config.include ActiveJob::TestHelper
   config.include GushHelpers
 
   config.mock_with :rspec do |mocks|
@@ -77,16 +88,19 @@ RSpec.configure do |config|
   end
 
   config.before(:each) do
+    clear_enqueued_jobs
+    clear_performed_jobs
+
     Gush.configure do |config|
       config.redis_url = REDIS_URL
-      config.environment = 'test'
       config.gushfile = GUSHFILE
     end
   end
 
 
   config.after(:each) do
-
+    clear_enqueued_jobs
+    clear_performed_jobs
     redis.flushdb
   end
 end
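The helpers added here are what the integration specs above build on: `perform_one` pops the oldest job from the ActiveJob test adapter's queue and runs it synchronously through `Gush::Worker`, and the updated `have_jobs` matcher inspects the adapter's `enqueued_jobs`. A short usage sketch, assuming `GushHelpers` is included as configured above:

```ruby
it "walks a workflow one enqueued job at a time" do
  flow = TestWorkflow.create
  flow.start!

  expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(['Prepare']))

  perform_one  # runs Prepare via Gush::Worker and removes it from the queue
  expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(['FetchFirstJob', 'FetchSecondJob']))
end
```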