gush 0.3 → 0.3.1

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: c9e3da7035bd13a0f0d4dbf499e5f2d78dac77ec
- data.tar.gz: 02b741c597ffda3ee14e196ee634990bc70f8173
+ metadata.gz: 5c0f45a09eb1260f6bcb00f1503181ddb4705735
+ data.tar.gz: 13eaec20c3b1165127b5cb7761c99cdd5abc1f41
  SHA512:
- metadata.gz: 7f93715bccd0be2bdd89ebf19c5f83aca62c57d1166e388eaea3fa991b11846fd1f8f9406dad9835f48e377d032ef4ca6f303dcf08d8b56a7f5caeb5b6f4544e
- data.tar.gz: e1f067418266f041a7eef3411fcb346b81e3cd851f9f73f0526b5a9f130aca21a196a90ca161f2387eb34ca2cd3ab5d98204aa3fff236924e1a09b48479e5eef
+ metadata.gz: 1d1060e1575b133245e78280f96818a83c599fbd7f9b08a93d14c54c212cf3d1f4ae0f75a81883fcb49758b059a1bc51063e46c9121405408280570c124906ef
+ data.tar.gz: 7bc51f1ad03adc625ebe497757c86fc588514d8a98d93e278ab961d1817653c21cf706858ad080d4e4fbc29e3d792b0a8d33598f67f126eccd0910d39f222843
data/README.md CHANGED
@@ -210,13 +210,13 @@ flow.status
  - of a specific workflow:

  ```
- bundle gush show <workflow_id>
+ bundle exec gush show <workflow_id>
  ```

  - of all created workflows:

  ```
- bundle gush list
+ bundle exec gush list
  ```

data/gush.gemspec CHANGED
@@ -4,7 +4,7 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

  Gem::Specification.new do |spec|
  spec.name = "gush"
- spec.version = "0.3"
+ spec.version = "0.3.1"
  spec.authors = ["Piotrek Okoński"]
  spec.email = ["piotrek@okonski.org"]
  spec.summary = "Fast and distributed workflow runner using only Sidekiq and Redis"
data/lib/gush/cli.rb CHANGED
@@ -99,8 +99,6 @@ module Gush
  def viz(name)
  client
  workflow = name.constantize.new
- workflow.configure
- workflow.resolve_dependencies
  graph = Graph.new(workflow)
  graph.viz
  Launchy.open graph.path
data/lib/gush/client.rb CHANGED
@@ -16,12 +16,10 @@ module Gush

  def create_workflow(name)
  begin
- flow = name.constantize.new
- flow.save
+ name.constantize.create
  rescue NameError
  raise WorkflowNotFound.new("Workflow with given name doesn't exist")
  end
-
  flow
  end

@@ -46,7 +44,18 @@
  persist_workflow(workflow)
  end

- def next_free_id
+ def next_free_job_id(workflow_id,job_klass)
+ job_identifier = nil
+ loop do
+ id = SecureRandom.uuid
+ job_identifier = "#{job_klass}-#{id}"
+ break if !redis.exists("gush.jobs.#{workflow_id}.#{job_identifier}")
+ end
+
+ job_identifier
+ end
+
+ def next_free_workflow_id
  id = nil
  loop do
  id = SecureRandom.uuid
@@ -83,13 +92,20 @@
  end

  def persist_job(workflow_id, job)
- redis.set("gush.jobs.#{workflow_id}.#{job.class.to_s}", job.to_json)
+ redis.set("gush.jobs.#{workflow_id}.#{job.name}", job.to_json)
  end

  def load_job(workflow_id, job_id)
  workflow = find_workflow(workflow_id)
- data = redis.get("gush.jobs.#{workflow_id}.#{job_id}")
+ job_name_match = /(?<klass>\w*[^-])-(?<identifier>.*)/.match(job_id)
+ hypen = '-' if job_name_match.nil?
+
+ keys = redis.keys("gush.jobs.#{workflow_id}.#{job_id}#{hypen}*")
+ return nil if keys.nil?
+
+ data = redis.get(keys.first)
  return nil if data.nil?
+
  data = Gush::JSON.decode(data, symbolize_keys: true)
  Gush::Job.from_hash(workflow, data)
  end
@@ -100,7 +116,7 @@
  end

  def destroy_job(workflow_id, job)
- redis.del("gush.jobs.#{workflow_id}.#{job.class.to_s}")
+ redis.del("gush.jobs.#{workflow_id}.#{job.name}")
  end

  def worker_report(message)
@@ -118,7 +134,7 @@
  sidekiq.push(
  'class' => Gush::Worker,
  'queue' => configuration.namespace,
- 'args' => [workflow_id, job.class.to_s]
+ 'args' => [workflow_id, job.name]
  )
  end

@@ -128,6 +144,7 @@

  def workflow_from_hash(hash, nodes = nil)
  flow = hash[:klass].constantize.new
+ flow.jobs = []
  flow.stopped = hash.fetch(:stopped, false)
  flow.id = hash[:id]
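
In 0.3.1 each job instance gets its own Redis key of the form `gush.jobs.<workflow_id>.<JobClass>-<uuid>`: `next_free_job_id` keeps generating UUID-suffixed names until one is unused, and `load_job` accepts either a full `Class-uuid` name or a bare class name (appending `-` and globbing the keys). A standalone sketch of that key scheme, with made-up workflow and class names and an in-memory stand-in for the Redis lookups:

```ruby
require 'securerandom'

taken = []  # stands in for redis.exists checks against existing "gush.jobs.*" keys

# Mirrors Client#next_free_job_id: keep generating until the key is unused.
def next_free_job_id(workflow_id, job_klass, taken)
  loop do
    job_identifier = "#{job_klass}-#{SecureRandom.uuid}"
    return job_identifier unless taken.include?("gush.jobs.#{workflow_id}.#{job_identifier}")
  end
end

id = next_free_job_id('deadbeef', 'FetchFirstJob', taken)
puts id  # => e.g. "FetchFirstJob-0b7c..." stored under "gush.jobs.deadbeef.FetchFirstJob-0b7c..."

# Client#load_job accepts either form:
#   load_job('deadbeef', 'FetchFirstJob')  scans keys matching "gush.jobs.deadbeef.FetchFirstJob-*"
#   load_job('deadbeef', id)               scans the exact key for this one instance
```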
 
data/lib/gush/graph.rb CHANGED
@@ -2,7 +2,7 @@ module Gush
  class Graph
  attr_reader :workflow, :filename, :path, :start, :end_node

- def initialize(workflow, options: {})
+ def initialize(workflow, options = {})
  @workflow = workflow
  @filename = options.fetch(:filename, "graph.png")
  @path = options.fetch(:path, Pathname.new(Dir.tmpdir).join(filename))
@@ -31,17 +31,17 @@
  private
  def add_job(graph, job)
  name = job.class.to_s
- graph.add_nodes(name)
+ graph.add_nodes(job.name, label: name)

  if job.incoming.empty?
- graph.add_edges(start, name)
+ graph.add_edges(start, job.name)
  end

  if job.outgoing.empty?
- graph.add_edges(name, end_node)
+ graph.add_edges(job.name, end_node)
  else
- job.outgoing.each do |out|
- graph.add_edges(name, out)
+ job.outgoing.each do |id|
+ graph.add_edges(job.name, id)
  end
  end
  end
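
Since job names now carry a UUID suffix, nodes are registered under the unique name and labelled with the class, so two instances of the same job class render as two separate nodes. An illustrative sketch using the ruby-graphviz API that Gush::Graph wraps (node ids here are invented):

```ruby
require 'tmpdir'
require 'graphviz'  # ruby-graphviz, the library Gush::Graph builds on

graph = GraphViz.new(:G, type: :digraph)

# Two jobs of the same class: distinct node ids, identical visible labels.
first   = graph.add_nodes('RepetitiveJob-1f2e', label: 'RepetitiveJob')
second  = graph.add_nodes('RepetitiveJob-9a7b', label: 'RepetitiveJob')
summary = graph.add_nodes('SummaryJob-0c3d', label: 'SummaryJob')

graph.add_edges(first, summary)
graph.add_edges(second, summary)

graph.output(png: File.join(Dir.tmpdir, 'graph.png'))
```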
data/lib/gush/job.rb CHANGED
@@ -1,8 +1,8 @@
  module Gush
  class Job
  attr_accessor :workflow_id, :incoming, :outgoing, :params,
- :finished_at, :failed_at, :started_at, :enqueued_at, :payloads
- attr_reader :name, :output_payload, :params
+ :finished_at, :failed_at, :started_at, :enqueued_at, :payloads_hash, :klass
+ attr_reader :name, :output_payload, :params, :payloads

  def initialize(workflow, opts = {})
  @workflow = workflow
@@ -37,6 +37,12 @@
  @output_payload = data
  end

+ def payloads
+ payload_h = {}
+ payloads_hash.each {|k,val| payload_h[k.to_s] = val.map {|h| h[:payload] }}
+ payload_h
+ end
+
  def work
  end

@@ -98,6 +104,10 @@
  end

  private
+ def logger
+ Sidekiq.logger
+ end
+
  def current_timestamp
  Time.now.to_i
  end
@@ -111,6 +121,7 @@
  @started_at = opts[:started_at]
  @enqueued_at = opts[:enqueued_at]
  @params = opts[:params] || {}
+ @klass = opts[:klass]
  @output_payload = opts[:output_payload]
  end
  end
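
The worker now assigns a `payloads_hash` grouped by producing class, and the new `payloads` reader flattens it into class name => list of outputs, which is what jobs consume in `work`. A plain-Ruby sketch of that flattening with invented sample data:

```ruby
# Shape handed to the job by Worker#incoming_payloads (see worker.rb below):
payloads_hash = {
  'RepetitiveJob' => [
    { id: 'RepetitiveJob-aaaa', payload: 'first' },
    { id: 'RepetitiveJob-bbbb', payload: 'second' }
  ],
  'PrefixJob' => [
    { id: 'PrefixJob-cccc', payload: 'A prefix' }
  ]
}

# What Job#payloads computes from it:
payloads = {}
payloads_hash.each { |k, val| payloads[k.to_s] = val.map { |h| h[:payload] } }

payloads['RepetitiveJob']   # => ["first", "second"]
payloads['PrefixJob'].first # => "A prefix"
```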
data/lib/gush/json.rb CHANGED
@@ -1,6 +1,5 @@
  module Gush
  class JSON
-
  def self.encode(data)
  Yajl::Encoder.new.encode(data)
  end
@@ -9,4 +8,4 @@ module Gush
  Yajl::Parser.parse(data, options)
  end
  end
- end
+ end
data/lib/gush/worker.rb CHANGED
@@ -9,7 +9,7 @@ module Gush
  def perform(workflow_id, job_id)
  setup_job(workflow_id, job_id)

- job.payloads = incoming_payloads
+ job.payloads_hash = incoming_payloads

  start = Time.now
  report(:started, start)
@@ -20,19 +20,15 @@
  mark_as_started
  begin
  job.work
- rescue Exception => e
- failed = true
- error = e
- end
-
- unless failed
- report(:finished, start)
+ rescue Exception => error
+ mark_as_failed
+ report(:failed, start, error.message)
+ raise error
+ else
  mark_as_finished
+ report(:finished, start)

  enqueue_outgoing_jobs
- else
- mark_as_failed
- report(:failed, start, error.message)
  end
  end

@@ -51,9 +47,10 @@
  def incoming_payloads
  payloads = {}
  job.incoming.each do |job_name|
- payloads[job_name] = client.load_job(workflow.id, job_name).output_payload
+ job = client.load_job(workflow.id, job_name)
+ payloads[job.klass.to_s] ||= []
+ payloads[job.klass.to_s] << {:id => job.name, :payload => job.output_payload}
  end
-
  payloads
  end
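
The restructured rescue/else means a failing job is marked as failed, reported, and the exception is re-raised, so the queue backend (Sidekiq) sees the failure instead of the worker silently swallowing it; the success path now runs in the `else` branch. A plain-Ruby sketch of that control flow (the lambdas stand in for real jobs, the puts lines for the worker's bookkeeping calls):

```ruby
# Sketch of the new success/failure paths in Worker#perform.
def perform_sketch(job)
  job.call
rescue StandardError => error
  puts "mark_as_failed; report(:failed, #{error.message.inspect})"
  raise error  # propagate so the queue backend records the failure
else
  puts 'mark_as_finished; report(:finished)'
  puts 'enqueue_outgoing_jobs'
end

perform_sketch(-> { 'ok' })            # success path: else branch runs

begin
  perform_sketch(-> { raise 'boom' })  # failure path: reported, then re-raised
rescue RuntimeError => e
  puts "re-raised to the caller: #{e.message}"
end
```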
 
data/lib/gush/workflow.rb CHANGED
@@ -11,6 +11,8 @@ module Gush
  @persisted = false
  @stopped = false
  @arguments = args
+
+ setup
  end

  def self.find(id)
@@ -23,10 +25,17 @@
  flow
  end

+ def continue
+ client = Gush::Client.new
+ failed_jobs = jobs.select(&:failed?)
+
+ failed_jobs.each do |job|
+ client.enqueue_job(id, job)
+ end
+ end
+
  def save
- configure(*@arguments)
- resolve_dependencies
- client.persist_workflow(self)
+ persist!
  end

  def configure(*args)
@@ -63,7 +72,13 @@
  end

  def find_job(name)
- jobs.find { |node| node.name == name.to_s || node.class.to_s == name.to_s }
+ match_data = /(?<klass>\w*[^-])-(?<identifier>.*)/.match(name.to_s)
+ if match_data.nil?
+ job = jobs.find { |node| node.class.to_s == name.to_s }
+ else
+ job = jobs.find { |node| node.name.to_s == name.to_s }
+ end
+ job
  end

  def finished?
@@ -90,7 +105,7 @@
  options =

  node = klass.new(self, {
- name: klass.to_s,
+ name: client.next_free_job_id(id,klass.to_s),
  params: opts.fetch(:params, {})
  })

@@ -98,13 +113,15 @@

  deps_after = [*opts[:after]]
  deps_after.each do |dep|
- @dependencies << {from: dep.to_s, to: klass.to_s }
+ @dependencies << {from: dep.to_s, to: node.name.to_s }
  end

  deps_before = [*opts[:before]]
  deps_before.each do |dep|
- @dependencies << {from: klass.to_s, to: dep.to_s }
+ @dependencies << {from: node.name.to_s, to: dep.to_s }
  end
+
+ node.name
  end

  def reload
@@ -164,11 +181,16 @@
  end

  def id
- @id ||= client.next_free_id
+ @id ||= client.next_free_workflow_id
  end

  private

+ def setup
+ configure(*@arguments)
+ resolve_dependencies
+ end
+
  def client
  @client ||= Client.new
  end
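
With `run` now returning the generated job name, a workflow can schedule several instances of the same job class and pass the returned names to `after:`; the new integration spec below exercises exactly this. A minimal sketch of such a workflow, assuming gush 0.3.1 with Redis and Sidekiq configured and job classes like those defined in the spec:

```ruby
require 'gush'

class RepetitiveJob < Gush::Job
  def work
    output params[:input]
  end
end

class SummaryJob < Gush::Job
  def work
    # payloads groups outputs by producing class, so this is all three inputs
    output payloads['RepetitiveJob']
  end
end

class PayloadWorkflow < Gush::Workflow
  def configure
    names = %w(first second third).map do |input|
      run RepetitiveJob, params: { input: input }  # run now returns "RepetitiveJob-<uuid>"
    end
    run SummaryJob, after: names  # depend on every RepetitiveJob instance by name
  end
end

flow = PayloadWorkflow.create  # initialize now calls setup (configure + resolve_dependencies)
flow.start!
```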
@@ -18,19 +18,19 @@ describe "Workflows" do
  flow = TestWorkflow.create
  flow.start!

- expect(Gush::Worker).to have_jobs(flow.id, ["Prepare"])
+ expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(['Prepare']))

  Gush::Worker.perform_one
- expect(Gush::Worker).to have_jobs(flow.id, ["FetchFirstJob", "FetchSecondJob"])
+ expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(["FetchFirstJob", "FetchSecondJob"]))

  Gush::Worker.perform_one
- expect(Gush::Worker).to have_jobs(flow.id, ["FetchSecondJob", "PersistFirstJob"])
+ expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(["FetchSecondJob", "PersistFirstJob"]))

  Gush::Worker.perform_one
- expect(Gush::Worker).to have_jobs(flow.id, ["PersistFirstJob"])
+ expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(["PersistFirstJob"]))

  Gush::Worker.perform_one
- expect(Gush::Worker).to have_jobs(flow.id, ["NormalizeJob"])
+ expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(["NormalizeJob"]))

  Gush::Worker.perform_one

@@ -52,7 +52,7 @@ describe "Workflows" do

  class PrependJob < Gush::Job
  def work
- string = "#{payloads["PrefixJob"]}: #{payloads["UpcaseJob"]}"
+ string = "#{payloads['PrefixJob'].first}: #{payloads['UpcaseJob'].first}"
  output string
  end
  end
@@ -76,5 +76,47 @@

  Gush::Worker.perform_one
  expect(flow.reload.find_job("PrependJob").output_payload).to eq("A prefix: SOME TEXT")
+
+
+ end
+
+ it "passes payloads from workflow that runs multiple same class jobs with nameized payloads" do
+ class RepetitiveJob < Gush::Job
+ def work
+ output params[:input]
+ end
+ end
+
+ class SummaryJob < Gush::Job
+ def work
+ output payloads['RepetitiveJob']
+ end
+ end
+
+ class PayloadWorkflow < Gush::Workflow
+ def configure
+ jobs = []
+ jobs << run(RepetitiveJob, params: {input: 'first'})
+ jobs << run(RepetitiveJob, params: {input: 'second'})
+ jobs << run(RepetitiveJob, params: {input: 'third'})
+ run SummaryJob, after: jobs
+ end
+ end
+
+ flow = PayloadWorkflow.create
+ flow.start!
+
+ Gush::Worker.perform_one
+ expect(flow.reload.find_job(flow.jobs[0].name).output_payload).to eq('first')
+
+ Gush::Worker.perform_one
+ expect(flow.reload.find_job(flow.jobs[1].name).output_payload).to eq('second')
+
+ Gush::Worker.perform_one
+ expect(flow.reload.find_job(flow.jobs[2].name).output_payload).to eq('third')
+
+ Gush::Worker.perform_one
+ expect(flow.reload.find_job(flow.jobs[3].name).output_payload).to eq(%w(first second third))
+
  end
  end
@@ -0,0 +1,8 @@
+ require 'spec_helper'
+
+ describe Gush::Graph do
+
+ describe "#create" do
+
+ end
+ end
@@ -34,9 +34,9 @@ describe Gush::Client do
  end

  it "removes stopped flag when the workflow is started" do
- workflow = TestWorkflow.new
+ workflow = TestWorkflow.create
  workflow.mark_as_stopped
- workflow.save
+ workflow.persist!
  expect {
  client.start_workflow(workflow)
  }.to change{client.find_workflow(workflow.id).stopped?}.from(true).to(false)
@@ -85,7 +85,9 @@ describe Gush::Client do

  describe "#persist_job" do
  it "persists JSON dump of the job in Redis" do
- job = double("job", to_json: 'json')
+
+ job = BobJob.new(name: 'bob')
+
  client.persist_job('deadbeef', job)
  expect(redis.keys("gush.jobs.deadbeef.*").length).to eq(1)
  end
File without changes
@@ -0,0 +1,43 @@
+ require 'spec_helper'
+
+ describe Gush::Graph do
+ subject { described_class.new(TestWorkflow.create) }
+ let(:filename) { "graph.png" }
+
+ describe "#viz" do
+ it "runs GraphViz to render graph" do
+ node = double("node", :[]= => true)
+ edge = double("edge", :[]= => true)
+ graph = double("graph", node: node, edge: edge)
+ path = Pathname.new(Dir.tmpdir).join(filename)
+ expect(graph).to receive(:start).with(shape: 'diamond', fillcolor: '#CFF09E')
+ expect(graph).to receive(:end).with(shape: 'diamond', fillcolor: '#F56991')
+
+ expect(graph).to receive(:output).with(png: path.to_s)
+
+ expect(graph).to receive(:add_nodes).with(/Prepare/, label: "Prepare")
+ expect(graph).to receive(:add_nodes).with(/FetchFirstJob/, label: "FetchFirstJob")
+ expect(graph).to receive(:add_nodes).with(/FetchSecondJob/, label: "FetchSecondJob")
+ expect(graph).to receive(:add_nodes).with(/NormalizeJob/, label: "NormalizeJob")
+ expect(graph).to receive(:add_nodes).with(/PersistFirstJob/, label: "PersistFirstJob")
+
+ expect(graph).to receive(:add_edges).with(nil, /Prepare/)
+ expect(graph).to receive(:add_edges).with(/Prepare/, /FetchFirstJob/)
+ expect(graph).to receive(:add_edges).with(/Prepare/, /FetchSecondJob/)
+ expect(graph).to receive(:add_edges).with(/FetchFirstJob/, /PersistFirstJob/)
+ expect(graph).to receive(:add_edges).with(/FetchSecondJob/, /NormalizeJob/)
+ expect(graph).to receive(:add_edges).with(/PersistFirstJob/, /NormalizeJob/)
+ expect(graph).to receive(:add_edges).with(/NormalizeJob/, nil)
+
+ expect(GraphViz).to receive(:new).and_yield(graph)
+
+ subject.viz
+ end
+ end
+
+ describe "#path" do
+ it "returns string path to the rendered graph" do
+ expect(subject.path).to eq(Pathname.new(Dir.tmpdir).join(filename).to_s)
+ end
+ end
+ end
@@ -98,6 +98,7 @@ describe Gush::Job do

  expect(job.name).to eq('gob')
  expect(job.class).to eq(Gush::Job)
+ expect(job.klass).to eq("Gush::Job")
  expect(job.finished?).to eq(true)
  expect(job.failed?).to eq(true)
  expect(job.enqueued?).to eq(true)
@@ -0,0 +1,21 @@
+ require 'spec_helper'
+
+ describe Gush::JSON do
+ subject { described_class }
+
+ describe ".encode" do
+ it "encodes data to JSON" do
+ expect(subject.encode({a: 123})).to eq("{\"a\":123}")
+ end
+ end
+
+ describe ".decode" do
+ it "decodes JSON to data" do
+ expect(subject.decode("{\"a\":123}")).to eq({"a" => 123})
+ end
+
+ it "passes options to the internal parser" do
+ expect(subject.decode("{\"a\":123}", symbolize_keys: true)).to eq({a: 123})
+ end
+ end
+ end
@@ -23,7 +23,9 @@ describe Gush::Worker do
  allow(job).to receive(:work).and_raise(StandardError)
  expect(client).to receive(:worker_report).with(hash_including(status: :failed)).ordered

- subject.perform(workflow.id, "Prepare")
+ expect do
+ subject.perform(workflow.id, "Prepare")
+ end.to raise_error(StandardError)
  expect(workflow.find_job("Prepare")).to be_failed
  end

@@ -31,7 +33,9 @@
  allow(job).to receive(:work).and_raise(StandardError)
  expect(client).to receive(:worker_report).with(hash_including(status: :failed)).ordered

- subject.perform(workflow.id, "Prepare")
+ expect do
+ subject.perform(workflow.id, "Prepare")
+ end.to raise_error(StandardError)
  end
  end

@@ -4,9 +4,6 @@ describe Gush::Workflow do
  subject { TestWorkflow.create }

  describe "#initialize" do
- end
-
- describe "#save" do
  it "passes constructor arguments to the method" do
  klass = Class.new(Gush::Workflow) do
  def configure(*args)
@@ -15,12 +12,24 @@
  end
  end

+ expect_any_instance_of(klass).to receive(:configure).with("arg1", "arg2")
  flow = klass.new("arg1", "arg2")

- expect(flow).to receive(:configure).with("arg1", "arg2")
- flow.save
  end
+ end
+
+ describe "#status" do
+ context "when failed" do
+ it "returns :failed" do
+ flow = TestWorkflow.create
+ flow.find_job("Prepare").fail!
+ flow.persist!
+ expect(flow.reload.status).to eq(:failed)
+ end
+ end
+ end

+ describe "#save" do
  context "workflow not persisted" do
  it "sets persisted to true" do
  flow = TestWorkflow.new
@@ -46,6 +55,20 @@
  end
  end

+ describe "#continue" do
+ it "enqueues failed jobs" do
+ flow = TestWorkflow.create
+ flow.find_job('Prepare').fail!
+
+ expect(flow.jobs.select(&:failed?)).not_to be_empty
+
+ flow.continue
+
+ expect(flow.jobs.select(&:failed?)).to be_empty
+ expect(flow.find_job('Prepare').failed_at).to be_nil
+ end
+ end
+
  describe "#mark_as_stopped" do
  it "marks workflow as stopped" do
  expect{ subject.mark_as_stopped }.to change{subject.stopped?}.from(false).to(true)
@@ -70,42 +93,42 @@

  result = JSON.parse(klass.create("arg1", "arg2").to_json)
  expected = {
- "id" => an_instance_of(String),
- "name" => klass.to_s,
- "klass" => klass.to_s,
- "status" => "running",
- "total" => 2,
- "finished" => 0,
- "started_at" => nil,
- "finished_at" => nil,
- "stopped" => false,
- "arguments" => ["arg1", "arg2"],
- "jobs" => [
- {
- "name"=>"FetchFirstJob",
- "klass"=>"FetchFirstJob",
- "incoming"=>[],
- "outgoing"=>["PersistFirstJob"],
- "finished_at"=>nil,
- "started_at"=>nil,
- "enqueued_at"=>nil,
- "failed_at"=>nil,
- "params" => {},
- "output_payload" => nil
- },
- {
- "name"=>"PersistFirstJob",
- "klass"=>"PersistFirstJob",
- "incoming"=>["FetchFirstJob"],
- "outgoing"=>[],
- "finished_at"=>nil,
- "started_at"=>nil,
- "enqueued_at"=>nil,
- "failed_at"=>nil,
- "params" => {},
- "output_payload" => nil
- }
- ]
+ "id" => an_instance_of(String),
+ "name" => klass.to_s,
+ "klass" => klass.to_s,
+ "status" => "running",
+ "total" => 2,
+ "finished" => 0,
+ "started_at" => nil,
+ "finished_at" => nil,
+ "stopped" => false,
+ "arguments" => ["arg1", "arg2"],
+ "jobs" => [
+ {
+ "name"=>a_string_starting_with('FetchFirstJob'),
+ "klass"=>"FetchFirstJob",
+ "incoming"=>[],
+ "outgoing"=>[a_string_starting_with('PersistFirstJob')],
+ "finished_at"=>nil,
+ "started_at"=>nil,
+ "enqueued_at"=>nil,
+ "failed_at"=>nil,
+ "params" => {},
+ "output_payload" => nil
+ },
+ {
+ "name"=>a_string_starting_with('PersistFirstJob'),
+ "klass"=>"PersistFirstJob",
+ "incoming"=>["FetchFirstJob"],
+ "outgoing"=>[],
+ "finished_at"=>nil,
+ "started_at"=>nil,
+ "enqueued_at"=>nil,
+ "failed_at"=>nil,
+ "params" => {},
+ "output_payload" => nil
+ }
+ ]
  }
  expect(result).to match(expected)
  end
@@ -145,7 +168,7 @@

  tree.resolve_dependencies

- expect(tree.jobs.first.outgoing).to match_array([klass2.to_s])
+ expect(tree.jobs.first.outgoing).to match_array(jobs_with_id([klass2.to_s]))
  end

  it "allows `before` to accept an array of jobs" do
@@ -159,7 +182,7 @@

  tree.resolve_dependencies

- expect(tree.jobs.first.incoming).to match_array([klass2.to_s])
+ expect(tree.jobs.first.incoming).to match_array(jobs_with_id([klass2.to_s]))
  end

  it "attaches job as a child of the job in `after` key" do
@@ -170,7 +193,7 @@
  tree.run(klass2, after: klass1)
  tree.resolve_dependencies
  job = tree.jobs.first
- expect(job.outgoing).to match_array([klass2.to_s])
+ expect(job.outgoing).to match_array(jobs_with_id([klass2.to_s]))
  end

  it "attaches job as a parent of the job in `before` key" do
@@ -181,7 +204,7 @@
  tree.run(klass2, before: klass1)
  tree.resolve_dependencies
  job = tree.jobs.first
- expect(job.incoming).to match_array([klass2.to_s])
+ expect(job.incoming).to match_array(jobs_with_id([klass2.to_s]))
  end
  end

@@ -25,7 +25,7 @@ describe Gush do

  describe ".root" do
  it "returns root directory of Gush" do
- expected = Pathname.new(__FILE__).parent.parent.parent
+ expected = Pathname.new(__FILE__).parent.parent
  expect(Gush.root).to eq(expected)
  end
  end
data/spec/spec_helper.rb CHANGED
@@ -11,6 +11,7 @@ class FetchSecondJob < Gush::Job; end
  class PersistFirstJob < Gush::Job; end
  class PersistSecondJob < Gush::Job; end
  class NormalizeJob < Gush::Job; end
+ class BobJob < Gush::Job; end

  GUSHFILE = Pathname.new(__FILE__).parent.join("Gushfile.rb")

@@ -39,6 +40,14 @@ module GushHelpers
  def redis
  @redis ||= Redis.new(url: REDIS_URL)
  end
+
+ def jobs_with_id(jobs_array)
+ jobs_array.map {|job_name| job_with_id(job_name) }
+ end
+
+ def job_with_id(job_name)
+ /#{job_name}-(?<identifier>.*)/
+ end
  end

  RSpec::Matchers.define :have_jobs do |flow, jobs|
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: gush
  version: !ruby/object:Gem::Version
- version: '0.3'
+ version: 0.3.1
  platform: ruby
  authors:
  - Piotrek Okoński
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2015-11-22 00:00:00.000000000 Z
+ date: 2016-01-20 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: sidekiq
@@ -209,12 +209,15 @@ files:
  - lib/gush/workflow.rb
  - spec/Gushfile.rb
  - spec/features/integration_spec.rb
- - spec/lib/gush/client_spec.rb
- - spec/lib/gush/configuration_spec.rb
- - spec/lib/gush/job_spec.rb
- - spec/lib/gush/worker_spec.rb
- - spec/lib/gush/workflow_spec.rb
- - spec/lib/gush_spec.rb
+ - spec/gush/cli_spec.rb
+ - spec/gush/client_spec.rb
+ - spec/gush/configuration_spec.rb
+ - spec/gush/graph_spec.rb
+ - spec/gush/job_spec.rb
+ - spec/gush/json_spec.rb
+ - spec/gush/worker_spec.rb
+ - spec/gush/workflow_spec.rb
+ - spec/gush_spec.rb
  - spec/spec_helper.rb
  homepage: https://github.com/pokonski/gush
  licenses:
@@ -236,18 +239,20 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.4.5
+ rubygems_version: 2.4.8
  signing_key:
  specification_version: 4
  summary: Fast and distributed workflow runner using only Sidekiq and Redis
  test_files:
  - spec/Gushfile.rb
  - spec/features/integration_spec.rb
- - spec/lib/gush/client_spec.rb
- - spec/lib/gush/configuration_spec.rb
- - spec/lib/gush/job_spec.rb
- - spec/lib/gush/worker_spec.rb
- - spec/lib/gush/workflow_spec.rb
- - spec/lib/gush_spec.rb
+ - spec/gush/cli_spec.rb
+ - spec/gush/client_spec.rb
+ - spec/gush/configuration_spec.rb
+ - spec/gush/graph_spec.rb
+ - spec/gush/job_spec.rb
+ - spec/gush/json_spec.rb
+ - spec/gush/worker_spec.rb
+ - spec/gush/workflow_spec.rb
+ - spec/gush_spec.rb
  - spec/spec_helper.rb
- has_rdoc: