ductwork 0.3.1 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +31 -0
- data/app/models/ductwork/job.rb +5 -2
- data/app/models/ductwork/pipeline.rb +90 -56
- data/app/models/ductwork/step.rb +1 -0
- data/lib/ductwork/configuration.rb +68 -23
- data/lib/ductwork/processes/job_worker.rb +5 -1
- data/lib/ductwork/processes/job_worker_runner.rb +1 -1
- data/lib/ductwork/processes/pipeline_advancer.rb +16 -9
- data/lib/ductwork/version.rb +1 -1
- data/lib/generators/ductwork/install/templates/config/ductwork.yml +2 -0
- data/lib/generators/ductwork/install/templates/db/create_ductwork_availabilities.rb +3 -0
- data/lib/generators/ductwork/install/templates/db/create_ductwork_executions.rb +2 -0
- data/lib/generators/ductwork/install/templates/db/create_ductwork_jobs.rb +2 -2
- data/lib/generators/ductwork/install/templates/db/create_ductwork_pipelines.rb +1 -1
- data/lib/generators/ductwork/install/templates/db/create_ductwork_results.rb +2 -0
- data/lib/generators/ductwork/install/templates/db/create_ductwork_runs.rb +2 -0
- data/lib/generators/ductwork/install/templates/db/create_ductwork_steps.rb +3 -0
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e10d600812e131829fe76b4a694bf755a707d4a4c66e29afd21701ee17e2fa8b
+  data.tar.gz: 101e1a25cac0c898f678b0da11f2c3e0d679bfb9c11741ed99c8e4f5bb8e3782
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: be40286349648fa967100abdcecc2fd3519e616881e635362903fb90656c6568e59a005e1015ac477bcae7eff61cda7b05378b35603302f2ca2ea08f3558de6d
+  data.tar.gz: ed9f573f3d6f2913c2d22dc0081064cb6d67eb743f92a1e573260d31c56592837d093292029a7de82b6ca40ebe022dc2092522167b84f52eb805ff9768fbe6f8
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,36 @@
 # Ductwork Changelog
 
+## [0.5.0]
+
+- core: add "waiting" status to `Step` model
+- core: add "waiting" status to `Pipeline` model
+- fix: change `jobs.input_args` and `jobs.output_payload` column type to `text`
+- fix: change `pipelines.definition` column type to `text` - this prevents larger definitions from being clipped if there is a size limit on the string column
+- feat: add missing unique index on `ductwork_results` and `ductwork_runs` tables
+- feat: add missing composite index on `ductwork_executions` table for `Ductwork::Job.claim_latest` method
+- feat: add missing composite index on `ductwork_availabilities` table for `Ductwork::Job.claim_latest` method
+- feat: use array instead of ActiveRecord relation when advancing pipelines - this has major performance benefits but comes with memory-usage implications (see comments)
+- fix: add condition to query to return correct pipelines that need advancing
+- fix: release pipeline claim only if successfully claimed
+- chore: add pipeline ID to misc log lines
+- feat: add missing composite indexes on `ductwork_steps` table
+
+## [0.4.0]
+
+- chore: change job worker thread name format
+- feat: add and respect pipeline-level `pipeline_advancer.polling_timeout` configuration in pipeline advancer
+- feat: respect `job_worker.polling_timeout` configuration in job runner
+- feat: add pipeline-level `job_worker.polling_timeout` configuration
+- feat: check pipeline and step-level max retry configurations when retrying a job
+- feat: add pipeline and step-level `job_worker.max_retry` configurations
+- feat: add ability to set `job_worker.count` config manually
+- chore: move configuration specs under their own directory
+- feat: halt pipeline instead of erroring if max step depth is exceeded
+- chore: move specs under directory
+- feat: allow setting `pipeline_advancer.steps_max_depth` configuration manually
+- feat: raise `Ductwork::Pipeline::StepDepthError` error if return payload count exceeds the configuration
+- feat: add `pipeline_advancer.steps.max_depth` configuration
+
 ## [0.3.1]
 
 - chore: bump dependencies and update necessary files
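Two of the 0.4.0 entries above mention setting `job_worker.count` and `pipeline_advancer.steps_max_depth` manually. A minimal sketch of what that could look like, based on the writer methods added in the configuration diff further down; the initializer location is an assumption, not part of this release:

    # config/initializers/ductwork.rb -- hypothetical location
    # The writers come from the configuration object's attr_writer list below;
    # a manually assigned value takes precedence over config/ductwork.yml.
    Ductwork.configuration.job_worker_count = 4
    Ductwork.configuration.steps_max_depth = 100   # -1 (DEFAULT_STEPS_MAX_DEPTH) means unlimited
    Ductwork.configuration.job_worker_max_retry = 5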
data/app/models/ductwork/job.rb
CHANGED
@@ -142,9 +142,12 @@ module Ductwork
       end
     end
 
-    def execution_failed!(execution, run, error) # rubocop:
+    def execution_failed!(execution, run, error) # rubocop:todo Metrics
       halted = false
       pipeline = step.pipeline
+      max_retry = Ductwork
+        .configuration
+        .job_worker_max_retry(pipeline: pipeline.klass, step: klass)
 
       Ductwork::Record.transaction do
         execution.update!(completed_at: Time.current)
@@ -156,7 +159,7 @@ module Ductwork
         error_backtrace: error.backtrace
       )
 
-      if execution.retry_count <
+      if execution.retry_count < max_retry
         new_execution = executions.create!(
           retry_count: execution.retry_count + 1,
           started_at: FAILED_EXECUTION_TIMEOUT.from_now
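The net effect of the two hunks above is that the retry ceiling is no longer a single value but is resolved per pipeline and step before the transaction. A small sketch of the branch that is shown (pipeline and step names are hypothetical; the exhausted-retries branch is outside this hunk):

    # Sketch only, consolidating the two hunks above; names are hypothetical.
    max_retry = Ductwork
      .configuration
      .job_worker_max_retry(pipeline: "OrderPipeline", step: "ChargeCard")

    if execution.retry_count < max_retry
      # schedule another attempt, pushed out by FAILED_EXECUTION_TIMEOUT
      executions.create!(
        retry_count: execution.retry_count + 1,
        started_at: FAILED_EXECUTION_TIMEOUT.from_now
      )
    end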
data/app/models/ductwork/pipeline.rb
CHANGED

@@ -14,6 +14,7 @@ module Ductwork
     enum :status,
          pending: "pending",
          in_progress: "in_progress",
+         waiting: "waiting",
          halted: "halted",
          completed: "completed"
 
@@ -87,22 +88,19 @@ module Ductwork
     end
 
     def advance!
-      # NOTE:
-      #
-      #
-      #
-
-
-
-        .fetch(:edges, {})
-        .select { |k| k.in?(advancing.pluck(:klass)) }
-      end
+      # NOTE: if we've expanded the pipeline there could be a lot of
+      # advancing records which may cause memory issues. something to
+      # watch out for here and maybe add in config to use AR relation
+      # at certain counts or even memory limits.
+      advancing_steps = steps.advancing.pluck(:id, :klass)
+      advancing_ids = advancing_steps.map(&:first)
+      edges = find_edges(advancing_steps)
 
       Ductwork::Record.transaction do
         if edges.nil? || edges.values.all?(&:empty?)
-          conditionally_complete_pipeline(
+          conditionally_complete_pipeline(advancing_ids)
         else
-          advance_to_next_steps_by_type(edges,
+          advance_to_next_steps_by_type(edges, advancing_ids)
         end
       end
     end
@@ -120,10 +118,25 @@ module Ductwork
       Ductwork::Job.enqueue(next_step, input_arg)
     end
 
-    def
-
+    def find_edges(advancing_steps)
+      if advancing_steps.any?
+        klasses = advancing_steps.map(&:last)
 
-
+        parsed_definition.fetch(:edges, {}).select { |k| k.in?(klasses) }
+      end
+    end
+
+    def conditionally_complete_pipeline(advancing_ids)
+      steps
+        .where(id: advancing_ids)
+        .update_all(status: :completed, completed_at: Time.current)
+
+      remaining = steps
+        .where(status: %w[in_progress pending advancing])
+        .where.not(id: advancing_ids)
+        .exists?
+
+      if !remaining
         update!(status: :completed, completed_at: Time.current)
 
         Ductwork.logger.info(
@@ -134,9 +147,11 @@ module Ductwork
       end
     end
 
-    def advance_to_next_steps_by_type(edges,
+    def advance_to_next_steps_by_type(edges, advancing_ids)
+      steps.where(id: advancing_ids).update_all(status: :completed, completed_at: Time.current)
+
       if edges.all? { |_, v| v.dig(-1, :type) == "combine" }
-        conditionally_combine_next_steps(edges,
+        conditionally_combine_next_steps(edges, advancing_ids)
       else
         edges.each do |step_klass, step_edges|
           edge = step_edges[-1]
@@ -145,27 +160,25 @@ module Ductwork
           step_type = edge[:type] == "chain" ? "default" : edge[:type]
 
           if step_type == "collapse"
-            conditionally_collapse_next_steps(step_klass, edge,
+            conditionally_collapse_next_steps(step_klass, edge, advancing_ids)
           else
-            advance_non_merging_steps(step_klass,
+            advance_non_merging_steps(step_klass, edge, advancing_ids)
           end
         end
       end
-      advancing.update!(status: :completed, completed_at: Time.current)
       log_pipeline_advanced(edges)
     end
 
-    def advance_non_merging_steps(step_klass,
-
-
-
-      # this enum value "default" :sad:
-      step_type = edge[:type] == "chain" ? "default" : edge[:type]
+    def advance_non_merging_steps(step_klass, edge, advancing_ids)
+      # NOTE: "chain" is used by ActiveRecord so we have to call
+      # this enum value "default" :sad:
+      step_type = edge[:type] == "chain" ? "default" : edge[:type]
 
+      steps.where(id: advancing_ids, klass: step_klass).find_each do |step|
         if step_type.in?(%w[default divide])
-          advance_to_next_steps(step_type,
+          advance_to_next_steps(step_type, step.id, edge)
         elsif step_type == "expand"
-          expand_to_next_steps(step_type,
+          expand_to_next_steps(step_type, step.id, edge)
         else
           Ductwork.logger.error(
             msg: "Invalid step type",
@@ -177,21 +190,34 @@ module Ductwork
         end
       end
 
-    def advance_to_next_steps(step_type,
-      edge[:to].
-
-
-
-
-
-
-
+    def advance_to_next_steps(step_type, step_id, edge)
+      too_many = edge[:to].tally.any? do |to_klass, count|
+        depth = Ductwork
+          .configuration
+          .steps_max_depth(pipeline: klass, step: to_klass)
+
+        depth != -1 && count > depth
+      end
+
+      if too_many
+        halted!
+      else
+        edge[:to].each do |to_klass|
+          next_step = steps.create!(
+            klass: to_klass,
+            status: :in_progress,
+            step_type: step_type,
+            started_at: Time.current
+          )
+          return_value = Ductwork::Job.find_by(step_id:).return_value
+          Ductwork::Job.enqueue(next_step, return_value)
+        end
       end
     end
 
-    def conditionally_combine_next_steps(edges,
+    def conditionally_combine_next_steps(edges, advancing_ids)
       if steps.where(status: %w[pending in_progress], klass: edges.keys).none?
-        combine_next_steps(edges,
+        combine_next_steps(edges, advancing_ids)
       else
         Ductwork.logger.debug(
           msg: "Not all divided steps have completed; not combining",
@@ -201,14 +227,15 @@ module Ductwork
       end
     end
 
-    def combine_next_steps(edges,
+    def combine_next_steps(edges, advancing_ids)
      klass = edges.values.sample.dig(-1, :to).sole
      step_type = "combine"
-      groups =
+      groups = steps
+        .where(id: advancing_ids)
        .group(:klass)
        .count
        .keys
-        .map { |k|
+        .map { |k| steps.where(id: advancing_ids).where(klass: k) }
 
      groups.first.zip(*groups[1..]).each do |group|
        input_arg = Ductwork::Job
@@ -218,19 +245,29 @@ module Ductwork
       end
     end
 
-    def expand_to_next_steps(step_type,
-
-
-
-
-
-
+    def expand_to_next_steps(step_type, step_id, edge)
+      next_klass = edge[:to].sole
+      return_value = Ductwork::Job
+        .find_by(step_id:)
+        .return_value
+      max_depth = Ductwork.configuration.steps_max_depth(pipeline: klass, step: next_klass)
+
+      if max_depth != -1 && return_value.count > max_depth
+        halted!
+      else
+        Array(return_value).each do |input_arg|
+          create_step_and_enqueue_job(
+            klass: next_klass,
+            step_type: step_type,
+            input_arg: input_arg
+          )
+        end
       end
     end
 
-    def conditionally_collapse_next_steps(step_klass, edge,
+    def conditionally_collapse_next_steps(step_klass, edge, advancing_ids)
       if steps.where(status: %w[pending in_progress], klass: step_klass).none?
-        collapse_next_steps(edge[:to].sole,
+        collapse_next_steps(edge[:to].sole, advancing_ids)
       else
         Ductwork.logger.debug(
           msg: "Not all expanded steps have completed; not collapsing",
@@ -240,14 +277,11 @@ module Ductwork
       end
     end
 
-    def collapse_next_steps(klass,
+    def collapse_next_steps(klass, advancing_ids)
      step_type = "collapse"
      input_arg = []
 
-
-      # could be A LOT of jobs so we want to use batch methods
-      # to avoid creating too many in-memory objects
-      Ductwork::Job.where(step_id: advancing.ids).find_each do |job|
+      Ductwork::Job.where(step_id: advancing_ids).find_each do |job|
        input_arg << job.return_value
      end
 
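The advancing logic above reads `parsed_definition.fetch(:edges, {})`, then looks at `[:type]` and `[:to]` of the last edge per key. The serialized definition itself is not part of this diff, so the following is only a sketch of a shape consistent with those reads; the step class names are hypothetical:

    # Hypothetical edges shape, inferred from how advance! and its helpers
    # traverse it in the hunks above; not taken from the gem's source.
    edges = {
      "ExtractStep"   => [{ type: "chain",  to: ["TransformStep"] }], # "chain" is stored as the "default" step type
      "TransformStep" => [{ type: "expand", to: ["LoadStep"] }]       # "expand" fans out one step per returned value
    }

    # find_edges keeps only the keys whose klass is currently advancing:
    advancing_klasses = ["ExtractStep"]
    edges.select { |k| advancing_klasses.include?(k) }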
data/app/models/ductwork/step.rb
CHANGED
data/lib/ductwork/configuration.rb
CHANGED

@@ -12,15 +12,17 @@ module Ductwork
     DEFAULT_LOGGER_SOURCE = "default" # `Logger` instance writing to STDOUT
     DEFAULT_PIPELINE_POLLING_TIMEOUT = 1 # second
     DEFAULT_PIPELINE_SHUTDOWN_TIMEOUT = 20 # seconds
+    DEFAULT_STEPS_MAX_DEPTH = -1 # unlimited count
     DEFAULT_SUPERVISOR_POLLING_TIMEOUT = 1 # second
     DEFAULT_SUPERVISOR_SHUTDOWN_TIMEOUT = 30 # seconds
     DEFAULT_LOGGER = ::Logger.new($stdout)
     PIPELINES_WILDCARD = "*"
 
-    attr_writer :
-                :job_worker_max_retry,
+    attr_writer :job_worker_count, :job_worker_polling_timeout,
+                :job_worker_shutdown_timeout, :job_worker_max_retry,
                 :logger_level,
                 :pipeline_polling_timeout, :pipeline_shutdown_timeout,
+                :steps_max_depth,
                 :supervisor_polling_timeout, :supervisor_shutdown_timeout
 
     def initialize(path: DEFAULT_FILE_PATH)
@@ -49,6 +51,8 @@ module Ductwork
     end
 
     def job_worker_count(pipeline)
+      return @job_worker_count if instance_variable_defined?(:@job_worker_count)
+
       raw_count = config.dig(:job_worker, :count) || DEFAULT_JOB_WORKER_COUNT
 
       if raw_count.is_a?(Hash)
@@ -58,12 +62,36 @@ module Ductwork
       end
     end
 
-    def job_worker_max_retry
-      @job_worker_max_retry
+    def job_worker_max_retry(pipeline: nil, step: nil) # rubocop:disable Metrics
+      return @job_worker_max_retry if instance_variable_defined?(:@job_worker_max_retry)
+
+      pipeline ||= :default
+      step ||= :default
+      base_config = config.dig(:job_worker, :max_retry)
+
+      if base_config.is_a?(Hash) && base_config[pipeline.to_sym].is_a?(Hash)
+        pipeline_config = config.dig(:job_worker, :max_retry, pipeline.to_sym)
+
+        pipeline_config[step.to_sym] || pipeline_config[:default] || DEFAULT_JOB_WORKER_MAX_RETRY
+      elsif base_config.is_a?(Hash)
+        base_config[pipeline.to_sym] || base_config[:default] || DEFAULT_JOB_WORKER_MAX_RETRY
+      else
+        base_config || DEFAULT_JOB_WORKER_MAX_RETRY
+      end
     end
 
-    def job_worker_polling_timeout
-
+    def job_worker_polling_timeout(pipeline = nil)
+      pipeline ||= :default
+      default = DEFAULT_JOB_WORKER_POLLING_TIMEOUT
+      base_config = config.dig(:job_worker, :polling_timeout)
+
+      if instance_variable_defined?(:@job_worker_polling_timeout)
+        @job_worker_polling_timeout
+      elsif base_config.is_a?(Hash)
+        base_config[pipeline.to_sym] || base_config[:default] || default
+      else
+        base_config || default
+      end
     end
 
     def job_worker_shutdown_timeout
@@ -78,14 +106,46 @@ module Ductwork
       @logger_source ||= fetch_logger_source
     end
 
-    def pipeline_polling_timeout
-
+    def pipeline_polling_timeout(pipeline = nil)
+      pipeline ||= :default
+      default = DEFAULT_PIPELINE_POLLING_TIMEOUT
+      base_config = config.dig(:pipeline_advancer, :polling_timeout)
+
+      if instance_variable_defined?(:@pipeline_polling_timeout)
+        @pipeline_polling_timeout
+      elsif base_config.is_a?(Hash)
+        base_config[pipeline.to_sym] || base_config[:default] || default
+      else
+        base_config || default
+      end
     end
 
     def pipeline_shutdown_timeout
       @pipeline_shutdown_timeout ||= fetch_pipeline_shutdown_timeout
     end
 
+    def steps_max_depth(pipeline: nil, step: nil) # rubocop:disable Metrics
+      return @steps_max_depth if instance_variable_defined?(:@steps_max_depth)
+
+      pipeline ||= :default
+      step ||= :default
+      base_config = config.dig(:pipeline_advancer, :steps, :max_depth)
+
+      if base_config.is_a?(Hash) && base_config[pipeline.to_sym].is_a?(Hash)
+        pipeline_config = config.dig(:pipeline_advancer, :steps, :max_depth, pipeline.to_sym)
+
+        pipeline_config[step.to_sym] ||
+          pipeline_config[:default] ||
+          DEFAULT_STEPS_MAX_DEPTH
+      elsif base_config.is_a?(Hash)
+        base_config[pipeline.to_sym] ||
+          base_config[:default] ||
+          DEFAULT_STEPS_MAX_DEPTH
+      else
+        base_config || DEFAULT_STEPS_MAX_DEPTH
+      end
+    end
+
     def supervisor_polling_timeout
       @supervisor_polling_timeout ||= fetch_supervisor_polling_timeout
     end
@@ -98,16 +158,6 @@ module Ductwork
 
     attr_reader :config
 
-    def fetch_job_worker_max_retry
-      config.dig(:job_worker, :max_retry) ||
-        DEFAULT_JOB_WORKER_MAX_RETRY
-    end
-
-    def fetch_job_worker_polling_timeout
-      config.dig(:job_worker, :polling_timeout) ||
-        DEFAULT_JOB_WORKER_POLLING_TIMEOUT
-    end
-
     def fetch_job_worker_shutdown_timeout
       config.dig(:job_worker, :shutdown_timeout) ||
         DEFAULT_JOB_WORKER_SHUTDOWN_TIMEOUT
@@ -121,11 +171,6 @@ module Ductwork
       config.dig(:logger, :source) || DEFAULT_LOGGER_SOURCE
     end
 
-    def fetch_pipeline_polling_timeout
-      config.dig(:pipeline_advancer, :polling_timeout) ||
-        DEFAULT_PIPELINE_POLLING_TIMEOUT
-    end
-
     def fetch_pipeline_shutdown_timeout
       config.dig(:pipeline_advancer, :shutdown_timeout) ||
         DEFAULT_PIPELINE_SHUTDOWN_TIMEOUT
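The readers above resolve values in a fixed order: a manually assigned writer value wins, then a pipeline-and-step entry, then the pipeline's `default`, then the top-level `default`, then the built-in constant. A sketch of that lookup for `max_retry`; the keys are inferred from the `config.dig` calls above, while the pipeline/step names and values are hypothetical:

    # Assuming config/ductwork.yml resolves to a hash shaped like:
    #   job_worker:
    #     max_retry:
    #       default: 3
    #       OrderPipeline:
    #         default: 5
    #         ChargeCard: 10
    config = Ductwork.configuration

    config.job_worker_max_retry                                                  # => 3  (top-level default)
    config.job_worker_max_retry(pipeline: "OrderPipeline")                       # => 5  (pipeline default)
    config.job_worker_max_retry(pipeline: "OrderPipeline", step: "ChargeCard")   # => 10 (step override)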
data/lib/ductwork/processes/job_worker.rb
CHANGED

@@ -35,7 +35,7 @@ module Ductwork
           role: :job_worker,
           pipeline: pipeline
         )
-        sleep(
+        sleep(polling_timeout)
       end
     end
 
@@ -62,6 +62,10 @@ module Ductwork
           end
         end
       end
+
+      def polling_timeout
+        Ductwork.configuration.job_worker_polling_timeout(pipeline)
+      end
     end
   end
 end
data/lib/ductwork/processes/pipeline_advancer.rb
CHANGED

@@ -15,6 +15,7 @@ module Ductwork
         Ductwork::Pipeline
           .in_progress
           .where(klass: klass, claimed_for_advancing_at: nil)
+          .where(steps: Ductwork::Step.where(status: :advancing))
           .where.not(steps: Ductwork::Step.where.not(status: %w[advancing completed]))
           .order(:last_advanced_at)
           .limit(1)
@@ -30,6 +31,7 @@ module Ductwork
         if rows_updated == 1
           Ductwork.logger.debug(
             msg: "Pipeline claimed",
+            pipeline_id: id,
             pipeline: klass,
             role: :pipeline_advancer
           )
@@ -39,33 +41,34 @@ module Ductwork
 
           Ductwork.logger.debug(
             msg: "Pipeline advanced",
+            pipeline_id: id,
             pipeline: klass,
             role: :pipeline_advancer
           )
+
+          # release the pipeline and set last advanced at so it doesnt block.
+          # we're not using a queue so we have to use a db timestamp
+          pipeline.update!(
+            claimed_for_advancing_at: nil,
+            last_advanced_at: Time.current
+          )
         else
           Ductwork.logger.debug(
             msg: "Did not claim pipeline, avoided race condition",
+            pipeline_id: id,
            pipeline: klass,
            role: :pipeline_advancer
          )
        end
-
-        # release the pipeline and set last advanced at so it doesnt block.
-        # we're not using a queue so we have to use a db timestamp
-        Ductwork::Pipeline.find(id).update!(
-          claimed_for_advancing_at: nil,
-          last_advanced_at: Time.current
-        )
      else
        Ductwork.logger.debug(
          msg: "No pipeline needs advancing",
          pipeline: klass,
-          id: id,
          role: :pipeline_advancer
        )
      end
 
-      sleep(
+      sleep(polling_timeout)
     end
 
     run_hooks_for(:stop)
@@ -82,6 +85,10 @@ module Ductwork
           end
         end
       end
+
+      def polling_timeout
+        Ductwork.configuration.pipeline_polling_timeout(klass)
+      end
     end
   end
 end
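The fix called out in the changelog ("release pipeline claim only if successfully claimed") is visible above: the release now lives inside the `rows_updated == 1` branch. The claim statement itself sits above the shown hunks, so the following is only a sketch of the compare-and-set pattern the branch implies, not the gem's actual claim query:

    # Sketch of an atomic claim; the real query is outside the hunks above.
    rows_updated = Ductwork::Pipeline
      .where(id: id, claimed_for_advancing_at: nil)
      .update_all(claimed_for_advancing_at: Time.current)

    if rows_updated == 1
      pipeline = Ductwork::Pipeline.find(id)
      pipeline.advance!
      # release the claim and bump last_advanced_at so the pipeline can be
      # picked up again later (mirrors the release in the hunk above)
      pipeline.update!(claimed_for_advancing_at: nil, last_advanced_at: Time.current)
    end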
data/lib/ductwork/version.rb
CHANGED
data/lib/generators/ductwork/install/templates/db/create_ductwork_availabilities.rb
CHANGED

@@ -12,5 +12,8 @@ class CreateDuctworkAvailabilities < ActiveRecord::Migration[<%= Rails::VERSION:
 
     add_index :ductwork_availabilities, :execution_id, unique: true
     add_index :ductwork_availabilities, %i[id process_id]
+    add_index :ductwork_availabilities,
+              %i[completed_at started_at created_at],
+              name: "index_ductwork_availabilities_on_claim_latest"
   end
 end

data/lib/generators/ductwork/install/templates/db/create_ductwork_jobs.rb
CHANGED

@@ -7,8 +7,8 @@ class CreateDuctworkJobs < ActiveRecord::Migration[<%= Rails::VERSION::MAJOR %>.
       table.string :klass, null: false
       table.timestamp :started_at, null: false
       table.timestamp :completed_at
-      table.
-      table.
+      table.text :input_args, null: false
+      table.text :output_payload
       table.timestamps null: false
     end
 

data/lib/generators/ductwork/install/templates/db/create_ductwork_pipelines.rb
CHANGED

@@ -4,7 +4,7 @@ class CreateDuctworkPipelines < ActiveRecord::Migration[<%= Rails::VERSION::MAJO
   def change
     create_table :ductwork_pipelines do |table|
       table.string :klass, null: false
-      table.
+      table.text :definition, null: false
       table.string :definition_sha1, null: false
       table.timestamp :triggered_at, null: false
       table.timestamp :completed_at

data/lib/generators/ductwork/install/templates/db/create_ductwork_steps.rb
CHANGED

@@ -12,6 +12,9 @@ class CreateDuctworkSteps < ActiveRecord::Migration[<%= Rails::VERSION::MAJOR %>
       table.timestamps null: false
     end
 
+    add_index :ductwork_steps, %i[pipeline_id status klass]
+    add_index :ductwork_steps, %i[pipeline_id klass status]
+    add_index :ductwork_steps, %i[status klass]
     add_index :ductwork_steps, %i[pipeline_id status]
   end
 end
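The new composite indexes on `ductwork_steps` line up with step queries introduced earlier in this diff; a rough mapping, with a hypothetical step class name:

    # Queries elsewhere in this diff that the new indexes appear to serve
    # (step class name hypothetical):
    pipeline.steps.where(status: %w[pending in_progress], klass: "ChargeCard").none?
    #   -> matches the column order of add_index %i[pipeline_id status klass]
    Ductwork::Step.where(status: :advancing)
    #   -> served by add_index %i[status klass] in the advancer's eligibility subquery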