gush 0.0.1 → 0.1

data/lib/gush/workflow.rb CHANGED
@@ -1,46 +1,61 @@
 require 'securerandom'
-require 'gush/metadata'
 
 module Gush
   class Workflow
-    include Gush::Metadata
+    attr_accessor :id, :jobs, :stopped, :persisted
 
-    attr_accessor :id, :nodes, :stopped
-
-    def initialize(id, options = {})
+    def initialize(should_run_configure = true)
       @id = id
-      @nodes = []
+      @jobs = []
       @dependencies = []
-      @logger_builder = default_logger_builder
+      @persisted = false
       @stopped = false
 
-      unless options[:configure] == false
+      if should_run_configure
         configure
         create_dependencies
       end
     end
 
-    def default_logger_builder
-      LoggerBuilder
+    def self.find(id)
+      Gush::Client.new.find_workflow(id)
+    end
+
+    def self.create(*args)
+      flow = new(*args)
+      flow.save
+      flow
+    end
+
+    def save
+      if @id.nil?
+        assign_id
+      end
+
+      client.persist_workflow(self)
     end
 
     def configure
     end
 
-    def stop!
+    def mark_as_stopped
       @stopped = true
     end
 
     def start!
-      @stopped = false
+      client.start_workflow(self)
+    end
+
+    def persist!
+      client.persist_workflow(self)
     end
 
-    def logger_builder(klass)
-      @logger_builder = klass
+    def mark_as_persisted
+      @persisted = true
     end
 
-    def build_logger_for_job(job, jid)
-      @logger_builder.new(self, job, jid).build
+    def mark_as_started
+      @stopped = false
     end
 
     def create_dependencies
@@ -54,19 +69,19 @@ module Gush
     end
 
     def find_job(name)
-      @nodes.find { |node| node.name == name.to_s || node.class.to_s == name.to_s }
+      @jobs.find { |node| node.name == name.to_s || node.class.to_s == name.to_s }
     end
 
     def finished?
-      nodes.all?(&:finished)
+      jobs.all?(&:finished?)
     end
 
     def running?
-      nodes.any? {|j| j.enqueued? || j.running? } && !stopped?
+      !stopped? && jobs.any? {|j| j.enqueued? || j.running? }
     end
 
     def failed?
-      nodes.any?(&:failed)
+      jobs.any?(&:failed?)
     end
 
     def stopped?
@@ -74,8 +89,8 @@ module Gush
     end
 
     def run(klass, deps = {})
-      node = klass.new(name: klass.to_s)
-      @nodes << node
+      node = klass.new(self, name: klass.to_s)
+      @jobs << node
 
       deps_after = [*deps[:after]]
       deps_after.each do |dep|
@@ -88,18 +103,26 @@ module Gush
       end
     end
 
+    def reload
+      self.class.find(@id)
+    end
+
+    def initial_jobs
+      jobs.select(&:has_no_dependencies?)
+    end
+
     def status
       case
       when failed?
-        "Failed"
+        :failed
       when running?
-        "Running"
+        :running
      when finished?
-        "Finished"
+        :finished
       when stopped?
-        "Stopped"
+        :stopped
       else
-        "Pending"
+        :pending
       end
     end
 
@@ -116,26 +139,19 @@ module Gush
       {
         name: name,
         id: @id,
-        total: @nodes.count,
-        finished: @nodes.count(&:finished?),
+        total: @jobs.count,
+        finished: @jobs.count(&:finished?),
         klass: name,
-        nodes: @nodes.map(&:as_json),
+        jobs: @jobs.map(&:as_json),
         status: status,
         stopped: stopped,
         started_at: started_at,
-        finished_at: finished_at,
-        logger_builder: @logger_builder.to_s
+        finished_at: finished_at
       }
     end
 
     def to_json(options = {})
-      JSON.dump(to_hash)
-    end
-
-    def next_jobs
-      @nodes.select do |job|
-        job.can_be_started?(self)
-      end
+      Gush::JSON.encode(to_hash)
     end
 
     def self.descendants
@@ -143,12 +159,21 @@ module Gush
     end
 
     private
+
+    def assign_id
+      @id = client.next_free_id
+    end
+
+    def client
+      @client ||= Client.new
+    end
+
     def first_job
-      nodes.min_by{ |n| n.started_at || Time.now.to_i }
+      jobs.min_by{ |n| n.started_at || Time.now.to_i }
     end
 
     def last_job
-      nodes.max_by{ |n| n.finished_at || 0 } if nodes.all?(&:finished?)
+      jobs.max_by{ |n| n.finished_at || 0 } if jobs.all?(&:finished?)
     end
   end
 end
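
For orientation, a minimal usage sketch of the reworked Workflow API above (SampleWorkflow and its job classes are hypothetical; create, save, start!, reload, mark_as_stopped and the symbol statuses are the methods added or changed in this diff):

    class SampleWorkflow < Gush::Workflow
      def configure
        run FetchJob                        # hypothetical job classes, wired with the existing `run` DSL
        run NormalizeJob, after: FetchJob
      end
    end

    flow = SampleWorkflow.create   # builds the flow, assigns an id and persists it via the client
    flow.start!                    # now delegates to Gush::Client#start_workflow
    flow = flow.reload             # re-reads persisted state through Workflow.find
    flow.status                    # => :pending, :running, :finished, :failed or :stopped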
data/lib/gush.rb CHANGED
@@ -7,14 +7,14 @@ require "redis"
 require "securerandom"
 require "sidekiq"
 
+require "gush/json"
 require "gush/cli"
+require "gush/cli/overview"
+require "gush/graph"
 require "gush/client"
 require "gush/configuration"
 require "gush/errors"
 require "gush/job"
-require "gush/logger_builder"
-require "gush/metadata"
-require "gush/null_logger"
 require "gush/version"
 require "gush/worker"
 require "gush/workflow"
@@ -0,0 +1,31 @@
+require 'spec_helper'
+
+
+describe "Workflows" do
+  it "runs the whole workflow in proper order" do
+    flow = TestWorkflow.create
+    flow.start!
+
+    expect(Gush::Worker).to have_jobs(flow.id, ["Prepare"])
+
+    Gush::Worker.perform_one
+    expect(Gush::Worker).to have_jobs(flow.id, ["FetchFirstJob", "FetchSecondJob"])
+
+    Gush::Worker.perform_one
+    expect(Gush::Worker).to have_jobs(flow.id, ["FetchSecondJob", "PersistFirstJob"])
+
+    Gush::Worker.perform_one
+    expect(Gush::Worker).to have_jobs(flow.id, ["PersistFirstJob", "NormalizeJob"])
+
+    Gush::Worker.perform_one
+    expect(Gush::Worker).to have_jobs(flow.id, ["NormalizeJob"])
+
+    Gush::Worker.perform_one
+
+    expect(Gush::Worker.jobs).to be_empty
+
+    flow = flow.reload
+    expect(flow).to be_finished
+    expect(flow).to_not be_failed
+  end
+end
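
The have_jobs matcher used above comes from the gem's spec support code, which is not part of this diff; a rough sketch of what such a matcher can look like, assuming Sidekiq's fake testing mode where Gush::Worker.jobs exposes queued jobs as hashes with an "args" array of (workflow id, job name, config):

    RSpec::Matchers.define :have_jobs do |flow_id, jobs|
      match do |worker|
        # Collect the job names queued for this workflow id and
        # compare them with the expected list, ignoring order.
        queued = worker.jobs
                       .select { |job| job["args"].first == flow_id }
                       .map    { |job| job["args"][1] }
        queued.sort == jobs.sort
      end
    end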
@@ -1,12 +1,16 @@
 require 'spec_helper'
 
 describe Gush::Client do
+  let(:client) do
+    Gush::Client.new(Gush::Configuration.new(gushfile: GUSHFILE, redis_url: REDIS_URL))
+  end
+
   describe "#find_workflow" do
     context "when workflow doesn't exist" do
       it "returns raises WorkflowNotFound" do
         expect {
           client.find_workflow('nope')
-        }.to raise_error(WorkflowNotFound)
+        }.to raise_error(Gush::WorkflowNotFound)
       end
     end
 
@@ -17,57 +21,54 @@ describe Gush::Client do
         workflow = client.find_workflow(expected_workflow.id)
 
         expect(workflow.id).to eq(expected_workflow.id)
-        expect(workflow.nodes.map(&:name)).to match_array(expected_workflow.nodes.map(&:name))
+        expect(workflow.jobs.map(&:name)).to match_array(expected_workflow.jobs.map(&:name))
       end
     end
   end
 
   describe "#start_workflow" do
     it "enqueues next jobs from the workflow" do
-      id = SecureRandom.uuid
-      workflow = TestWorkflow.new(id)
+      workflow = TestWorkflow.new
       client.persist_workflow(workflow)
       expect {
-        client.start_workflow(id)
+        client.start_workflow(workflow)
       }.to change{Gush::Worker.jobs.count}.from(0).to(1)
     end
 
     it "removes stopped flag when the workflow is started" do
-      id = SecureRandom.uuid
-      workflow = TestWorkflow.new(id)
-      workflow.stop!
+      workflow = TestWorkflow.new
+      workflow.mark_as_stopped
       client.persist_workflow(workflow)
       expect {
-        client.start_workflow(id)
-      }.to change{client.find_workflow(id).stopped?}.from(true).to(false)
+        client.start_workflow(workflow)
+      }.to change{client.find_workflow(workflow.id).stopped?}.from(true).to(false)
     end
 
     it "marks the enqueued jobs as enqueued" do
-      id = SecureRandom.uuid
-      workflow = TestWorkflow.new(id)
+      workflow = TestWorkflow.new
       client.persist_workflow(workflow)
-      client.start_workflow(id)
-      job = client.find_workflow(id).find_job("Prepare")
+      client.start_workflow(workflow)
+      job = workflow.reload.find_job("Prepare")
       expect(job.enqueued?).to eq(true)
     end
   end
 
   describe "#stop_workflow" do
     it "marks the workflow as stopped" do
-      id = SecureRandom.uuid
-      workflow = TestWorkflow.new(id)
+      workflow = TestWorkflow.new
      client.persist_workflow(workflow)
       expect {
-        client.stop_workflow(id)
-      }.to change{client.find_workflow(id).stopped?}.from(false).to(true)
+        client.stop_workflow(workflow.id)
+      }.to change{client.find_workflow(workflow.id).stopped?}.from(false).to(true)
     end
   end
 
   describe "#persist_workflow" do
     it "persists JSON dump of the Workflow and its jobs" do
       job = double("job", to_json: 'json')
-      workflow = double("workflow", id: 'abcd', nodes: [job, job, job], to_json: '"json"')
+      workflow = double("workflow", id: 'abcd', jobs: [job, job, job], to_json: '"json"')
       expect(client).to receive(:persist_job).exactly(3).times.with(workflow.id, job)
+      expect(workflow).to receive(:mark_as_persisted)
       client.persist_workflow(workflow)
       expect(redis.keys("gush.workflows.abcd").length).to eq(1)
     end
@@ -75,16 +76,15 @@ describe Gush::Client do
 
   describe "#destroy_workflow" do
     it "removes all Redis keys related to the workflow" do
-      id = SecureRandom.uuid
-      workflow = TestWorkflow.new(id)
+      workflow = TestWorkflow.new
       client.persist_workflow(workflow)
-      expect(redis.keys("gush.workflows.#{id}").length).to eq(1)
-      expect(redis.keys("gush.jobs.#{id}.*").length).to eq(5)
+      expect(redis.keys("gush.workflows.#{workflow.id}").length).to eq(1)
+      expect(redis.keys("gush.jobs.#{workflow.id}.*").length).to eq(5)
 
       client.destroy_workflow(workflow)
 
-      expect(redis.keys("gush.workflows.#{id}").length).to eq(0)
-      expect(redis.keys("gush.jobs.#{id}.*").length).to eq(0)
+      expect(redis.keys("gush.workflows.#{workflow.id}").length).to eq(0)
+      expect(redis.keys("gush.jobs.#{workflow.id}.*").length).to eq(0)
     end
   end
 
@@ -106,20 +106,17 @@ describe Gush::Client do
     end
 
     it "should be able to handle outdated data format" do
-      workflow_id = SecureRandom.uuid
-      workflow = TestWorkflow.new(workflow_id)
+      workflow = TestWorkflow.new
       client.persist_workflow(workflow)
 
       # malform the data
-      hash = Yajl::Parser.parse(redis.get("gush.workflows.#{workflow_id}"), symbolize_keys: true)
-      hash.delete(:logger_builder)
+      hash = Gush::JSON.decode(redis.get("gush.workflows.#{workflow.id}"), symbolize_keys: true)
       hash.delete(:stopped)
-      redis.set("gush.workflows.#{workflow_id}", Yajl::Encoder.new.encode(hash))
+      redis.set("gush.workflows.#{workflow.id}", Gush::JSON.encode(hash))
 
       expect {
-        workflow = client.find_workflow(workflow_id)
+        workflow = client.find_workflow(workflow.id)
         expect(workflow.stopped?).to be false
-        expect(workflow.instance_variable_get(:@logger_builder)).to be Gush::LoggerBuilder
       }.not_to raise_error
     end
   end
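
Taken together, the client calls exercised in this spec boil down to roughly the following direct usage (GUSHFILE and REDIS_URL are constants from the spec suite, and TestWorkflow is the suite's example workflow):

    config   = Gush::Configuration.new(gushfile: GUSHFILE, redis_url: REDIS_URL)
    client   = Gush::Client.new(config)

    workflow = TestWorkflow.new            # build without persisting
    client.persist_workflow(workflow)      # writes gush.workflows.<id> and gush.jobs.<id>.* keys
    client.start_workflow(workflow)        # enqueues the initial jobs on Gush::Worker
    client.stop_workflow(workflow.id)      # sets the stopped flag
    client.find_workflow(workflow.id)      # => Gush::Workflow, raises Gush::WorkflowNotFound if missing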
@@ -7,10 +7,10 @@ describe Gush::Job do
       job = described_class.new(name: "a-job")
       job.fail!
       expect(job.failed_at).to eq(Time.now.to_i)
-      expect(job.failed).to eq(true)
-      expect(job.finished).to eq(true)
-      expect(job.running).to eq(false)
-      expect(job.enqueued).to eq(false)
+      expect(job.failed?).to eq(true)
+      expect(job.finished?).to eq(true)
+      expect(job.running?).to eq(false)
+      expect(job.enqueued?).to eq(false)
     end
   end
 
@@ -19,10 +19,10 @@ describe Gush::Job do
       job = described_class.new(name: "a-job")
       job.finish!
       expect(job.finished_at).to eq(Time.now.to_i)
-      expect(job.failed).to eq(false)
-      expect(job.running).to eq(false)
-      expect(job.finished).to eq(true)
-      expect(job.enqueued).to eq(false)
+      expect(job.failed?).to eq(false)
+      expect(job.running?).to eq(false)
+      expect(job.finished?).to eq(true)
+      expect(job.enqueued?).to eq(false)
     end
   end
 
@@ -35,28 +35,27 @@ describe Gush::Job do
       expect(job.started_at).to eq(nil)
       expect(job.finished_at).to eq(nil)
       expect(job.failed_at).to eq(nil)
-      expect(job.failed).to eq(false)
-      expect(job.finished).to eq(false)
-      expect(job.enqueued).to eq(true)
-      expect(job.running).to eq(false)
+      expect(job.failed?).to eq(false)
+      expect(job.finished?).to eq(false)
+      expect(job.enqueued?).to eq(true)
+      expect(job.running?).to eq(false)
     end
   end
 
   describe "#start!" do
-    it "resets flags to false and sets running to true" do
+    it "resets flags and marks as running" do
       job = described_class.new(name: "a-job")
-      job.enqueue!
       job.start!
       expect(job.started_at).to eq(Time.now.to_i)
-      expect(job.enqueued).to eq(false)
-      expect(job.running).to eq(true)
+      expect(job.enqueued?).to eq(false)
+      expect(job.running?).to eq(true)
     end
   end
 
   describe "#as_json" do
     context "finished and enqueued set to true" do
       it "returns correct hash" do
-        job = described_class.new(name: "a-job", finished: true, enqueued: true)
+        job = described_class.new(double('flow'), name: "a-job", finished_at: 123, enqueued_at: 120)
         expected = {
           name: "a-job",
           klass: "Gush::Job",
@@ -67,7 +66,8 @@ describe Gush::Job do
           outgoing: [],
           failed_at: nil,
           started_at: nil,
-          finished_at: nil,
+          finished_at: 123,
+          enqueued_at: 120,
           running: false
         }
         expect(job.as_json).to eq(expected)
@@ -77,38 +77,34 @@ describe Gush::Job do
 
   describe ".from_hash" do
     it "properly restores state of the job from hash" do
-      job = described_class.from_hash({
-        klass: 'Gush::Job',
-        name: 'gob',
-        finished: true,
-        failed: true,
-        enqueued: true,
-        incoming: ['a', 'b'],
-        outgoing: ['c'],
-        failed_at: 123,
-        finished_at: 122,
-        started_at: 55
-      })
+      job = described_class.from_hash(
+        double('flow'),
+        {
+          klass: 'Gush::Job',
+          name: 'gob',
+          finished: true,
+          failed: true,
+          enqueued: true,
+          incoming: ['a', 'b'],
+          outgoing: ['c'],
+          failed_at: 123,
+          finished_at: 122,
+          started_at: 55,
+          enqueued_at: 444
+        }
+      )
 
       expect(job.name).to eq('gob')
       expect(job.class).to eq(Gush::Job)
-      expect(job.finished).to eq(true)
-      expect(job.failed).to eq(true)
-      expect(job.enqueued).to eq(true)
+      expect(job.finished?).to eq(true)
+      expect(job.failed?).to eq(true)
+      expect(job.enqueued?).to eq(true)
       expect(job.incoming).to eq(['a', 'b'])
      expect(job.outgoing).to eq(['c'])
       expect(job.failed_at).to eq(123)
       expect(job.finished_at).to eq(122)
       expect(job.started_at).to eq(55)
-    end
-  end
-
-  describe "#logger" do
-    it "returns a logger for the job" do
-      job = described_class.new(name: "a-job", finished: true, running: true)
-      job.logger = TestLoggerBuilder.new(:workflow, job, 1234).build
-      expect(job.logger).to be_a TestLogger
-      expect(job.logger.name).to eq(job.name)
+      expect(job.enqueued_at).to eq(444)
     end
   end
 end
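
A condensed sketch of the serialization round-trip the reworked spec covers; the hash keys match the ones asserted above, and the first argument to from_hash is now the owning workflow (a double in the spec):

    hash = {
      klass: 'Gush::Job', name: 'gob',
      finished: true, failed: false, enqueued: false,
      incoming: [], outgoing: [],
      failed_at: nil, started_at: 55, finished_at: 122, enqueued_at: 444
    }

    job = Gush::Job.from_hash(workflow, hash)   # workflow: the owning Gush::Workflow
    job.finished?    # => true  (boolean readers are now predicate methods)
    job.enqueued_at  # => 444   (new timestamp tracked alongside started_at/finished_at)
    job.as_json      # round-trips the same keys, now including :enqueued_at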
@@ -1,96 +1,67 @@
 require 'spec_helper'
 
 describe Gush::Worker do
-  let(:workflow_id) { '1234' }
-  let(:workflow) { TestWorkflow.new(workflow_id) }
+  subject { described_class.new }
+
+  let!(:workflow) { TestWorkflow.create }
   let(:job) { workflow.find_job("Prepare") }
-  let(:config) { client.configuration.to_json }
+  let(:config) { Gush.configuration.to_json }
+  let!(:client) { double("client") }
 
   before :each do
-    allow(client).to receive(:find_workflow).with(workflow_id).and_return(workflow)
-    allow(Gush::Client).to receive(:new).and_return(client)
+    allow(subject).to receive(:client).and_return(client)
+    allow(subject).to receive(:enqueue_outgoing_jobs)
+
+    allow(client).to receive(:find_workflow).with(workflow.id).and_return(workflow)
+    expect(client).to receive(:persist_job).at_least(1).times
+    expect(client).to receive(:worker_report).with(hash_including(status: :started)).ordered
   end
 
   describe "#perform" do
     context "when job fails" do
-      before :each do
-        expect(job).to receive(:work).and_raise(StandardError)
-        job.enqueue!
-        job.start!
-      end
-
       it "should mark it as failed" do
-        allow(client).to receive(:persist_job)
-        Gush::Worker.new.perform(workflow_id, "Prepare", config)
-
-        expect(client).to have_received(:persist_job).with(workflow_id, job).at_least(1).times do |_, job|
-          expect(job).to be_failed
-        end
+        allow(job).to receive(:work).and_raise(StandardError)
+        expect(client).to receive(:worker_report).with(hash_including(status: :failed)).ordered
 
+        subject.perform(workflow.id, "Prepare", config)
+        expect(workflow.find_job("Prepare")).to be_failed
       end
 
       it "reports that job failed" do
-        allow(client).to receive(:worker_report)
-        Gush::Worker.new.perform(workflow_id, "Prepare", config)
-        expect(client).to have_received(:worker_report).with(hash_including(status: :failed))
-      end
+        allow(job).to receive(:work).and_raise(StandardError)
+        expect(client).to receive(:worker_report).with(hash_including(status: :failed)).ordered
 
-      it "logs the exception" do
-        logger = TestLogger.new(1234, 'Prepare')
-        expect(logger).to receive(:<<).with(instance_of(String)).at_least(1).times
-        expect(workflow).to receive(:build_logger_for_job).and_return(logger)
-
-        Gush::Worker.new.perform(workflow_id, "Prepare", config)
+        subject.perform(workflow.id, "Prepare", config)
      end
     end
 
     context "when job completes successfully" do
       it "should mark it as succedeed" do
-        allow(client).to receive(:persist_job)
-
-        Gush::Worker.new.perform(workflow_id, "Prepare", config)
+        expect(subject).to receive(:mark_as_finished)
+        expect(client).to receive(:worker_report).with(hash_including(status: :finished)).ordered
 
-        expect(client).to have_received(:persist_job).at_least(1).times.with(workflow_id, job) do |_, job|
-          expect(job).to be_succeeded
-        end
+        subject.perform(workflow.id, "Prepare", config)
       end
 
       it "reports that job succedeed" do
-        allow(client).to receive(:worker_report)
-        Gush::Worker.new.perform(workflow_id, "Prepare", config)
+        expect(client).to receive(:worker_report).with(hash_including(status: :finished)).ordered
 
-        expect(client).to have_received(:worker_report).with(hash_including(status: :finished))
+        subject.perform(workflow.id, "Prepare", config)
       end
     end
 
-    [:before_work, :work, :after_work].each do |method|
-      it "calls job.#{method} hook" do
-        expect(job).to receive(method)
-        Gush::Worker.new.perform(workflow_id, "Prepare", config)
-      end
-    end
+    it "calls job.work method" do
+      expect(job).to receive(:work)
+      expect(client).to receive(:worker_report).with(hash_including(status: :finished)).ordered
 
-    it "sets up a logger for the job" do
-      Gush::Worker.new.perform(workflow_id, "Prepare", config)
-      job.start!
-      expect(job.logger).to be_a TestLogger
-    end
-
-    it "sets a job id" do
-      job_id = 1234
-      worker = Gush::Worker.new
-
-      allow(worker).to receive(:jid).and_return(job_id)
-
-      worker.perform(workflow_id, "Prepare", config)
-      job.enqueue!
-      expect(job.jid).to eq job_id
+      subject.perform(workflow.id, "Prepare", config)
     end
 
     it "reports when the job is started" do
       allow(client).to receive(:worker_report)
-      Gush::Worker.new.perform(workflow_id, "Prepare", config)
-      expect(client).to have_received(:worker_report).with(hash_including(status: :started))
+      expect(client).to receive(:worker_report).with(hash_including(status: :finished)).ordered
+
+      subject.perform(workflow.id, "Prepare", config)
     end
   end
 end
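
For reference, the perform arguments the reworked spec drives are unchanged: a workflow id, a job name, and the JSON-encoded configuration. In production that call would typically be made asynchronously through the standard Sidekiq API (an assumption, not shown in this diff):

    Gush::Worker.perform_async(flow.id, "Prepare", Gush.configuration.to_json)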