dwf 0.1.9 → 0.1.13

data/lib/dwf/item.rb CHANGED
@@ -1,12 +1,16 @@
 # frozen_string_literal: true
 
 require_relative 'client'
+require_relative 'concerns/checkable'
 
 module Dwf
   class Item
+    include Concerns::Checkable
+
     attr_reader :workflow_id, :id, :params, :queue, :klass, :started_at,
-                :enqueued_at, :finished_at, :failed_at, :callback_type, :output_payload
-    attr_accessor :incoming, :outgoing
+                :enqueued_at, :finished_at, :failed_at, :output_payload
+    attr_writer :payloads
+    attr_accessor :incoming, :outgoing, :callback_type
 
     def initialize(options = {})
       assign_attributes(options)
@@ -16,9 +20,17 @@ module Dwf
       Module.const_get(hash[:klass]).new(hash)
     end
 
+    def start_initial!
+      cb_build_in? ? persist_and_perform_async! : start_batch!
+    end
+
+    def start_batch!
+      enqueue_and_persist!
+      Dwf::Callback.new.start(self)
+    end
+
     def persist_and_perform_async!
-      enqueue!
-      persist!
+      enqueue_and_persist!
       perform_async
     end
 
@@ -28,14 +40,18 @@ module Dwf
       callback_type == Dwf::Workflow::BUILD_IN
     end
 
+    def workflow
+      @workflow ||= client.find_workflow(workflow_id)
+    end
+
     def reload
       item = client.find_job(workflow_id, name)
       assign_attributes(item.to_hash)
     end
 
-    def perform_async
-      Dwf::Worker.set(queue: queue || client.config.namespace)
-                 .perform_async(workflow_id, name)
+    def perform_async(options = {})
+      Dwf::Worker.set(options.merge(queue: queue || client.config.namespace))
+                 .perform_async(workflow_id, name)
     end
 
     def name
@@ -51,20 +67,11 @@ module Dwf
     end
 
     def parents_succeeded?
-      incoming.all? do |name|
-        client.find_job(workflow_id, name).succeeded?
-      end
+      incoming.all? { |name| client.find_node(name, workflow_id).succeeded? }
     end
 
     def payloads
-      incoming.map do |job_name|
-        job = client.find_job(workflow_id, job_name)
-        {
-          id: job.name,
-          class: job.klass.to_s,
-          output: job.output_payload
-        }
-      end
+      @payloads ||= build_payloads
     end
 
     def enqueue!
@@ -109,33 +116,21 @@ module Dwf
       !failed_at.nil?
     end
 
-    def succeeded?
-      finished? && !failed?
-    end
-
-    def started?
-      !started_at.nil?
-    end
-
-    def running?
-      started? && !finished?
-    end
-
-    def ready_to_start?
-      !running? && !enqueued? && !finished? && !failed? && parents_succeeded?
-    end
-
     def current_timestamp
       Time.now.to_i
     end
 
     def enqueue_outgoing_jobs
+      return workflow.enqueue_outgoing_jobs if leaf?
+
       outgoing.each do |job_name|
-        client.check_or_lock(workflow_id, job_name)
-        out = client.find_job(workflow_id, job_name)
-        out.persist_and_perform_async! if out.ready_to_start?
-        client.release_lock(workflow_id, job_name)
+        client.check_or_lock(workflow_id, job_name) do
+          out = client.find_node(job_name, workflow_id)
+          out.persist_and_perform_async! if out.ready_to_start?
+        end
       end
+    rescue RedisMutex::LockError
+      perform_async(wait: 2)
     end
 
     def to_hash
@@ -152,7 +147,8 @@ module Dwf
         params: params,
         workflow_id: workflow_id,
         callback_type: callback_type,
-        output_payload: output_payload
+        output_payload: output_payload,
+        payloads: @payloads
       }
     end
 
@@ -166,6 +162,11 @@ module Dwf
 
     private
 
+    def enqueue_and_persist!
+      enqueue!
+      persist!
+    end
+
     def client
       @client ||= Dwf::Client.new
     end
@@ -184,6 +185,21 @@ module Dwf
       @started_at = options[:started_at]
       @callback_type = options[:callback_type]
       @output_payload = options[:output_payload]
+      @payloads = options[:payloads]
+    end
+
+    def build_payloads
+      data = incoming.map do |job_name|
+        node = client.find_node(job_name, workflow_id)
+        next if node.output_payload.nil?
+
+        {
+          id: node.name,
+          class: node.klass.to_s,
+          output: node.output_payload
+        }
+      end.compact
+      data.empty? ? nil : data
     end
   end
 end
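
The reworked Item delegates the state predicates it dropped (started?, succeeded?, ready_to_start?) to the shared Concerns::Checkable mixin, memoises payloads, and retries enqueue_outgoing_jobs with perform_async(wait: 2) when the Redis lock is contended. A minimal sketch of a job consuming parent payloads follows; it assumes perform is the job entry point and output(...) is the setter behind output_payload, neither of which appears in this diff.

require 'dwf'

# Hypothetical job class, for illustration only.
class SumParents < Dwf::Item
  def perform
    # payloads is now memoised (@payloads ||= build_payloads) and may be nil
    # when no parent produced an output_payload.
    total = (payloads || []).sum { |payload| payload[:output].to_i }
    output(total) # assumed setter for output_payload
  end
end
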
data/lib/dwf/utils.rb CHANGED
@@ -34,5 +34,11 @@ module Dwf
         obj
       end
     end
+
+    def self.workflow_name?(name)
+      node_name = name.include?('|') ? name.split('|').first : name
+
+      Module.const_get(node_name) <= Workflow
+    end
   end
 end
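
The new Utils.workflow_name? helper decides whether a node name refers to a Workflow subclass by resolving the constant before the '|' separator. A quick illustration, assuming the two constants below are defined by the caller:

require 'dwf'

class ChildFlow < Dwf::Workflow; end
class PlainJob < Dwf::Item; end

Dwf::Utils.workflow_name?('ChildFlow')           # => true
Dwf::Utils.workflow_name?('ChildFlow|some-uuid') # => true, only the part before '|' is resolved
Dwf::Utils.workflow_name?('PlainJob|some-uuid')  # => nil (falsy: Dwf::Item is not a Workflow)
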
data/lib/dwf/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Dwf
-  VERSION = '0.1.8'
+  VERSION = '0.1.13'
 end
data/lib/dwf/workflow.rb CHANGED
@@ -2,15 +2,18 @@
 
 require_relative 'client'
 require_relative 'worker'
-require_relative 'callback'
+require_relative 'concerns/checkable'
 
 module Dwf
   class Workflow
+    include Concerns::Checkable
+
     CALLBACK_TYPES = [
       BUILD_IN = 'build-in',
       SK_BATCH = 'sk-batch'
     ].freeze
-    attr_reader :dependencies, :jobs, :started_at, :finished_at, :persisted, :stopped,
+    attr_accessor :jobs, :stopped, :id, :incoming, :outgoing, :parent_id
+    attr_reader :dependencies, :started_at, :finished_at, :persisted, :arguments, :klass,
                 :callback_type
 
     class << self
@@ -19,51 +22,89 @@ module Dwf
        flow.save
        flow
      end
+
+      def find(id)
+        Dwf::Client.new.find_workflow(id)
+      end
     end
 
-    def initialize(options = {})
+    def initialize(*args)
       @dependencies = []
-      @id = id
+      @id = build_id
       @jobs = []
       @persisted = false
      @stopped = false
-      @callback_type = options[:callback_type] || BUILD_IN
+      @arguments = *args
+      @parent_id = nil
+      @klass = self.class
+      @callback_type = BUILD_IN
+      @incoming = []
+      @outgoing = []
 
       setup
     end
 
+    def persist!
+      client.persist_workflow(self)
+      jobs.each(&:persist!)
+      mark_as_persisted
+      true
+    end
+
+    def name
+      "#{self.class.name}|#{id}"
+    end
+
+    def sub_workflow?
+      !parent_id.nil?
+    end
+
+    def callback_type=(type = BUILD_IN)
+      @callback_type = type
+      jobs.each { |job| job.callback_type = type }
+    end
+
+    alias save persist!
+
     def start!
+      mark_as_started
+      persist!
       initial_jobs.each do |job|
-        cb_build_in? ? job.persist_and_perform_async! : Dwf::Callback.new.start(job)
+        job.payloads = payloads if sub_workflow?
+        job.start_initial!
       end
     end
 
-    def save
-      client.persist_workflow(self)
-      jobs.each(&:persist!)
-      mark_as_persisted
-      true
+    def payloads
+      @payloads ||= build_payloads
+    end
+
+    def start_initial!
+      cb_build_in? ? start! : Callback.new.start(self)
+    end
+
+    alias persist_and_perform_async! start!
+
+    def reload
+      flow = self.class.find(id)
+      self.stopped = flow.stopped
+      self.jobs = flow.jobs
+
+      self
     end
 
     def cb_build_in?
       callback_type == BUILD_IN
     end
 
-    def id
-      @id ||= client.build_workflow_id
+    def build_id
+      client.build_workflow_id
     end
 
-    def configure; end
+    def configure(*arguments); end
 
     def run(klass, options = {})
-      node = klass.new(
-        workflow_id: id,
-        id: client.build_job_id(id, klass.to_s),
-        params: options.fetch(:params, {}),
-        queue: options[:queue],
-        callback_type: callback_type
-      )
-
+      node = build_node(klass, options)
       jobs << node
 
       build_dependencies_structure(node, options)
@@ -95,7 +136,10 @@ module Dwf
         stopped: stopped,
         started_at: started_at,
         finished_at: finished_at,
-        callback_type: callback_type
+        callback_type: callback_type,
+        incoming: incoming,
+        outgoing: outgoing,
+        parent_id: parent_id
       }
     end
 
@@ -107,13 +151,7 @@ module Dwf
       jobs.all?(&:finished?)
     end
 
-    def started?
-      !!started_at
-    end
-
-    def running?
-      started? && !finished?
-    end
+    alias enqueued? started?
 
     def failed?
       jobs.any?(&:failed?)
@@ -123,6 +161,10 @@ module Dwf
       stopped
     end
 
+    def parents_succeeded?
+      incoming.all? { |name| client.find_node(name, parent_id).succeeded? }
+    end
+
     def status
       return :failed if failed?
       return :running if running?
@@ -132,6 +174,17 @@ module Dwf
       :running
     end
 
+    def enqueue_outgoing_jobs
+      return unless sub_workflow?
+
+      outgoing.each do |job_name|
+        client.check_or_lock(parent_id, job_name) do
+          node = client.find_node(job_name, parent_id)
+          node.persist_and_perform_async! if node.ready_to_start?
+        end
+      end
+    end
+
     def mark_as_persisted
       @persisted = true
     end
@@ -140,28 +193,94 @@ module Dwf
       @stopped = false
     end
 
+    def leaf_nodes
+      jobs.select(&:leaf?)
+    end
+
+    def output_payload
+      leaf_nodes.map do |node|
+        data = node.output_payload
+        next if data.nil?
+
+        data
+      end.compact
+    end
 
     private
 
+    def build_node(klass, options)
+      if klass < Dwf::Workflow
+        node = options[:params].nil? ? klass.new : klass.new(options[:params])
+        node.parent_id = id
+        node.callback_type = callback_type
+        node.save
+        node
+      else
+        klass.new(
+          workflow_id: id,
+          id: client.build_job_id(id, klass.to_s),
+          params: options.fetch(:params, {}),
+          queue: options[:queue],
+          callback_type: callback_type
+        )
+      end
+    end
+
     def initial_jobs
       jobs.select(&:no_dependencies?)
     end
 
     def setup
-      configure
+      configure(*arguments)
      resolve_dependencies
     end
 
+    def find_node(node_name)
+      if Utils.workflow_name?(node_name)
+        find_subworkflow(node_name)
+      else
+        find_job(node_name)
+      end
+    end
+
+    def find_subworkflow(node_name)
+      fname, = node_name.split('|')
+      jobs.find { |j| j.klass.name == fname }
+    end
+
     def resolve_dependencies
       @dependencies.each do |dependency|
-        from = find_job(dependency[:from])
-        to = find_job(dependency[:to])
+        from = find_node(dependency[:from])
+        to = find_node(dependency[:to])
 
-        to.incoming << dependency[:from]
-        from.outgoing << dependency[:to]
+        to.incoming << from.name
+        from.outgoing << to.name
       end
     end
 
+    def invalid_callback?
+      cb_build_in? && jobs.any? { |job| job.class < Workflow }
+    end
+
+    def build_payloads
+      return unless sub_workflow?
+
+      data = incoming.map do |job_name|
+        next if Utils.workflow_name?(job_name)
+
+        node = client.find_node(job_name, parent_id)
+        next if node.output_payload.nil?
+
+        {
+          id: node.name,
+          class: node.klass.to_s,
+          output: node.output_payload
+        }
+      end.compact
+
+      data.empty? ? nil : data
+    end
+
     def build_dependencies_structure(node, options)
       deps_after = [*options[:after]]
 
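
The Workflow changes above add nested workflows (build_node assigns parent_id and propagates callback_type when run is given a Workflow subclass) and forward the arguments passed to create/new on to configure. A sketch of how this might be wired up; the class names and the perform entry point for items are illustrative assumptions, not part of this diff.

require 'dwf'

class FetchData < Dwf::Item
  def perform; end
end

class NotifyUser < Dwf::Item
  def perform; end
end

class CleanupFlow < Dwf::Workflow
  def configure
    run FetchData
  end
end

class MainFlow < Dwf::Workflow
  def configure(user_id)
    run FetchData, params: { user_id: user_id }
    # A Workflow subclass can now be run as a node; build_node saves it with
    # parent_id set to this workflow's id and links dependencies by node name.
    run CleanupFlow, after: FetchData
    run NotifyUser, after: CleanupFlow
  end
end

wf = MainFlow.create(42)                    # arguments now reach configure(42)
wf.callback_type = Dwf::Workflow::SK_BATCH  # optional; propagated to every job via callback_type=
wf.start!
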
@@ -2,6 +2,8 @@
 
 require 'spec_helper'
 require 'mock_redis'
+FirstWorkflow = Class.new(Dwf::Workflow)
+SecondWorkflow = Class.new(Dwf::Workflow)
 
 describe Dwf::Client, client: true do
   let(:client) { described_class.new }
@@ -30,12 +32,76 @@ describe Dwf::Client, client: true do
     end
 
     context 'find by item name' do
-      it {
+      it do
         item = client.find_job(workflow_id, job.name)
         expect(item.workflow_id).to eq workflow_id
         expect(item.id).to eq id
         expect(item.name).to eq job.name
-      }
+      end
+    end
+  end
+
+  describe '#find_workflow' do
+    before do
+      wf = Dwf::Workflow.new
+      wf.id = workflow_id
+      wf.save
+      j = Dwf::Item.new(id: id, workflow_id: workflow_id)
+      j.persist!
+    end
+
+    it do
+      wf = client.find_workflow(workflow_id)
+
+      expect(wf).not_to be_nil
+      expect(wf.jobs.first).to be_kind_of(Dwf::Item)
+    end
+
+    it do
+      expect do
+        client.find_workflow(SecureRandom.uuid)
+      end.to raise_error Dwf::WorkflowNotFound
+    end
+  end
+
+  describe '#find_node' do
+    context 'find job' do
+      let!(:job) do
+        j = Dwf::Item.new(workflow_id: workflow_id, id: id)
+        j.persist!
+        j
+      end
+
+      it do
+        item = client.find_node(Dwf::Item.name, workflow_id)
+        expect(item.workflow_id).to eq workflow_id
+        expect(item.id).to eq id
+        expect(item.name).to eq job.name
+      end
+    end
+
+    context 'find_workflow' do
+      let!(:wf1) { FirstWorkflow.create }
+      let!(:wf2) do
+        wf = SecondWorkflow.new
+        wf.parent_id = wf1.id
+        wf.save
+        wf
+      end
+
+      context 'find with class name and parent id' do
+        it do
+          wf = client.find_node(wf2.class.name, wf1.id)
+          expect(wf).to be_kind_of(SecondWorkflow)
+        end
+      end
+
+      context 'find with name and parent id' do
+        it do
+          wf = client.find_node(wf2.name, wf1.id)
+          expect(wf).to be_kind_of(SecondWorkflow)
        end
      end
     end
   end
 
@@ -53,13 +119,44 @@ describe Dwf::Client, client: true do
     end
   end
 
+  describe '#find_sub_workflow' do
+    let!(:wf1) { FirstWorkflow.create }
+    let!(:wf2) do
+      wf = SecondWorkflow.new
+      wf.parent_id = wf1.id
+      wf.save
+      wf
+    end
+
+    it do
+      wf = client.find_sub_workflow(wf2.class.name, wf1.id)
+      expect(wf).to be_kind_of(SecondWorkflow)
+    end
+  end
+
+  describe '#sub_workflows' do
+    let!(:wf1) { FirstWorkflow.create }
+    let!(:wf2) do
+      wf = SecondWorkflow.new
+      wf.parent_id = wf1.id
+      wf.save
+      wf
+    end
+
+    it do
+      wfs = client.sub_workflows(wf1.id)
+      expect(wfs).not_to be_empty
+      expect(wfs.first).to be_kind_of(SecondWorkflow)
+    end
+  end
+
   describe '#persist_workflow' do
     let(:workflow) { Dwf::Workflow.new }
 
     it do
       expect(redis.exists?("dwf.workflows.#{workflow.id}")).to be_falsy
       client.persist_workflow(workflow)
-      expect(redis.exists?("dwf.workflows.#{workflow.id}")).to be_truthy
+      expect(redis.keys("dwf.workflows.#{workflow.id}*").any?).to be_truthy
     end
   end
 
@@ -73,24 +170,18 @@ describe Dwf::Client, client: true do
 
       before do
        allow(client).to receive(:set)
-        redis.set("wf_enqueue_outgoing_jobs_#{workflow_id}-#{job_name}", 'running')
-        client.check_or_lock(workflow_id, job_name)
-      end
-
-      it { expect(client).not_to have_received(:set) }
-    end
-
-    context 'job is not running' do
-      let(:job_name) { 'ahihi' }
-
-      before do
-        allow(redis).to receive(:set)
-        client.check_or_lock(workflow_id, job_name)
+        allow(RedisMutex).to receive(:with_lock)
+        client.check_or_lock(workflow_id, job_name) {}
       end
 
      it do
-        expect(redis).to have_received(:set)
-          .with("wf_enqueue_outgoing_jobs_#{workflow_id}-#{job_name}", 'running')
+        expect(RedisMutex)
+          .to have_received(:with_lock)
+          .with(
+            "wf_enqueue_outgoing_jobs_#{workflow_id}-#{job_name}",
+            sleep: 0.3,
+            block: 2
+          )
      end
    end
  end
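
The updated #check_or_lock spec stubs RedisMutex.with_lock instead of a raw redis.set, which implies the client now takes a block and delegates locking roughly as sketched below. This is an assumption drawn from the expectations above, not the gem's actual source; on the caller side, Item#enqueue_outgoing_jobs rescues RedisMutex::LockError and re-enqueues itself with perform_async(wait: 2).

require 'redis-mutex'

# Hypothetical shape of Dwf::Client#check_or_lock implied by the spec expectations.
def check_or_lock(workflow_id, job_name, &block)
  RedisMutex.with_lock(
    "wf_enqueue_outgoing_jobs_#{workflow_id}-#{job_name}",
    sleep: 0.3, # poll interval while another process holds the lock
    block: 2,   # raise RedisMutex::LockError after ~2s of waiting
    &block
  )
end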