job-workflow 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/.rubocop.yml +91 -0
- data/CHANGELOG.md +23 -0
- data/LICENSE.txt +21 -0
- data/README.md +47 -0
- data/Rakefile +55 -0
- data/Steepfile +10 -0
- data/guides/API_REFERENCE.md +112 -0
- data/guides/BEST_PRACTICES.md +113 -0
- data/guides/CACHE_STORE_INTEGRATION.md +145 -0
- data/guides/CONDITIONAL_EXECUTION.md +66 -0
- data/guides/DEPENDENCY_WAIT.md +386 -0
- data/guides/DRY_RUN.md +390 -0
- data/guides/DSL_BASICS.md +216 -0
- data/guides/ERROR_HANDLING.md +187 -0
- data/guides/GETTING_STARTED.md +524 -0
- data/guides/INSTRUMENTATION.md +131 -0
- data/guides/LIFECYCLE_HOOKS.md +415 -0
- data/guides/NAMESPACES.md +75 -0
- data/guides/OPENTELEMETRY_INTEGRATION.md +86 -0
- data/guides/PARALLEL_PROCESSING.md +302 -0
- data/guides/PRODUCTION_DEPLOYMENT.md +110 -0
- data/guides/QUEUE_MANAGEMENT.md +141 -0
- data/guides/README.md +174 -0
- data/guides/SCHEDULED_JOBS.md +165 -0
- data/guides/STRUCTURED_LOGGING.md +268 -0
- data/guides/TASK_OUTPUTS.md +240 -0
- data/guides/TESTING_STRATEGY.md +56 -0
- data/guides/THROTTLING.md +198 -0
- data/guides/TROUBLESHOOTING.md +53 -0
- data/guides/WORKFLOW_COMPOSITION.md +675 -0
- data/guides/WORKFLOW_STATUS_QUERY.md +288 -0
- data/lib/job-workflow.rb +3 -0
- data/lib/job_workflow/argument_def.rb +16 -0
- data/lib/job_workflow/arguments.rb +40 -0
- data/lib/job_workflow/auto_scaling/adapter/aws_adapter.rb +66 -0
- data/lib/job_workflow/auto_scaling/adapter.rb +31 -0
- data/lib/job_workflow/auto_scaling/configuration.rb +85 -0
- data/lib/job_workflow/auto_scaling/executor.rb +43 -0
- data/lib/job_workflow/auto_scaling.rb +69 -0
- data/lib/job_workflow/cache_store_adapters.rb +46 -0
- data/lib/job_workflow/context.rb +352 -0
- data/lib/job_workflow/dry_run_config.rb +31 -0
- data/lib/job_workflow/dsl.rb +236 -0
- data/lib/job_workflow/error_hook.rb +24 -0
- data/lib/job_workflow/hook.rb +24 -0
- data/lib/job_workflow/hook_registry.rb +66 -0
- data/lib/job_workflow/instrumentation/log_subscriber.rb +194 -0
- data/lib/job_workflow/instrumentation/opentelemetry_subscriber.rb +221 -0
- data/lib/job_workflow/instrumentation.rb +257 -0
- data/lib/job_workflow/job_status.rb +92 -0
- data/lib/job_workflow/logger.rb +86 -0
- data/lib/job_workflow/namespace.rb +36 -0
- data/lib/job_workflow/output.rb +81 -0
- data/lib/job_workflow/output_def.rb +14 -0
- data/lib/job_workflow/queue.rb +74 -0
- data/lib/job_workflow/queue_adapter.rb +38 -0
- data/lib/job_workflow/queue_adapters/abstract.rb +87 -0
- data/lib/job_workflow/queue_adapters/null_adapter.rb +127 -0
- data/lib/job_workflow/queue_adapters/solid_queue_adapter.rb +224 -0
- data/lib/job_workflow/runner.rb +173 -0
- data/lib/job_workflow/schedule.rb +46 -0
- data/lib/job_workflow/semaphore.rb +71 -0
- data/lib/job_workflow/task.rb +83 -0
- data/lib/job_workflow/task_callable.rb +43 -0
- data/lib/job_workflow/task_context.rb +70 -0
- data/lib/job_workflow/task_dependency_wait.rb +66 -0
- data/lib/job_workflow/task_enqueue.rb +50 -0
- data/lib/job_workflow/task_graph.rb +43 -0
- data/lib/job_workflow/task_job_status.rb +70 -0
- data/lib/job_workflow/task_output.rb +51 -0
- data/lib/job_workflow/task_retry.rb +64 -0
- data/lib/job_workflow/task_throttle.rb +46 -0
- data/lib/job_workflow/version.rb +5 -0
- data/lib/job_workflow/workflow.rb +87 -0
- data/lib/job_workflow/workflow_status.rb +112 -0
- data/lib/job_workflow.rb +59 -0
- data/rbs_collection.lock.yaml +172 -0
- data/rbs_collection.yaml +14 -0
- data/sig/generated/job-workflow.rbs +2 -0
- data/sig/generated/job_workflow/argument_def.rbs +14 -0
- data/sig/generated/job_workflow/arguments.rbs +26 -0
- data/sig/generated/job_workflow/auto_scaling/adapter/aws_adapter.rbs +32 -0
- data/sig/generated/job_workflow/auto_scaling/adapter.rbs +22 -0
- data/sig/generated/job_workflow/auto_scaling/configuration.rbs +50 -0
- data/sig/generated/job_workflow/auto_scaling/executor.rbs +29 -0
- data/sig/generated/job_workflow/auto_scaling.rbs +47 -0
- data/sig/generated/job_workflow/cache_store_adapters.rbs +28 -0
- data/sig/generated/job_workflow/context.rbs +155 -0
- data/sig/generated/job_workflow/dry_run_config.rbs +16 -0
- data/sig/generated/job_workflow/dsl.rbs +117 -0
- data/sig/generated/job_workflow/error_hook.rbs +18 -0
- data/sig/generated/job_workflow/hook.rbs +18 -0
- data/sig/generated/job_workflow/hook_registry.rbs +47 -0
- data/sig/generated/job_workflow/instrumentation/log_subscriber.rbs +102 -0
- data/sig/generated/job_workflow/instrumentation/opentelemetry_subscriber.rbs +113 -0
- data/sig/generated/job_workflow/instrumentation.rbs +138 -0
- data/sig/generated/job_workflow/job_status.rbs +46 -0
- data/sig/generated/job_workflow/logger.rbs +56 -0
- data/sig/generated/job_workflow/namespace.rbs +24 -0
- data/sig/generated/job_workflow/output.rbs +39 -0
- data/sig/generated/job_workflow/output_def.rbs +12 -0
- data/sig/generated/job_workflow/queue.rbs +49 -0
- data/sig/generated/job_workflow/queue_adapter.rbs +18 -0
- data/sig/generated/job_workflow/queue_adapters/abstract.rbs +56 -0
- data/sig/generated/job_workflow/queue_adapters/null_adapter.rbs +73 -0
- data/sig/generated/job_workflow/queue_adapters/solid_queue_adapter.rbs +111 -0
- data/sig/generated/job_workflow/runner.rbs +66 -0
- data/sig/generated/job_workflow/schedule.rbs +34 -0
- data/sig/generated/job_workflow/semaphore.rbs +37 -0
- data/sig/generated/job_workflow/task.rbs +60 -0
- data/sig/generated/job_workflow/task_callable.rbs +30 -0
- data/sig/generated/job_workflow/task_context.rbs +52 -0
- data/sig/generated/job_workflow/task_dependency_wait.rbs +42 -0
- data/sig/generated/job_workflow/task_enqueue.rbs +27 -0
- data/sig/generated/job_workflow/task_graph.rbs +27 -0
- data/sig/generated/job_workflow/task_job_status.rbs +42 -0
- data/sig/generated/job_workflow/task_output.rbs +29 -0
- data/sig/generated/job_workflow/task_retry.rbs +30 -0
- data/sig/generated/job_workflow/task_throttle.rbs +20 -0
- data/sig/generated/job_workflow/version.rbs +5 -0
- data/sig/generated/job_workflow/workflow.rbs +48 -0
- data/sig/generated/job_workflow/workflow_status.rbs +55 -0
- data/sig/generated/job_workflow.rbs +8 -0
- data/sig-private/activejob.rbs +35 -0
- data/sig-private/activesupport.rbs +23 -0
- data/sig-private/aws.rbs +32 -0
- data/sig-private/opentelemetry.rbs +40 -0
- data/sig-private/solid_queue.rbs +108 -0
- data/tmp/.keep +0 -0
- metadata +190 -0
|
@@ -0,0 +1,352 @@
|
|
|
1
|
+
# frozen_string_literal: true

module JobWorkflow
  # Runtime execution context for one workflow run. Carries the workflow
  # definition, the resolved arguments, per-task iteration state
  # (TaskContext), accumulated task outputs, and task job statuses, and
  # exposes the helpers task blocks call while running (throttle,
  # instrument, skip_in_dry_run, each_value, ...).
  class Context # rubocop:disable Metrics/ClassLength
    attr_reader :workflow #: Workflow
    attr_reader :arguments #: Arguments
    attr_reader :output #: Output
    attr_reader :job_status #: JobStatus

    class << self
      # Builds a fresh Context from symbol-keyed data (first run of a job);
      # arguments start from the workflow's declared defaults.
      #: (Hash[Symbol, untyped]) -> Context
      def from_hash(hash)
        workflow = hash.fetch(:workflow)
        new(
          job: hash[:job],
          workflow:,
          arguments: Arguments.new(data: workflow.build_arguments_hash),
          task_context: TaskContext.new(**(hash[:task_context] || {}).symbolize_keys),
          output: Output.from_hash_array(hash.fetch(:task_outputs, [])),
          job_status: JobStatus.from_hash_array(hash.fetch(:task_job_statuses, []))
        )
      end

      # Rebuilds a Context from string-keyed serialized job data; the task
      # referenced by "task_name" is resolved back through the workflow.
      #: (Hash[String, untyped]) -> Context
      def deserialize(hash)
        workflow = hash.fetch("workflow")
        new(
          job: hash["job"],
          workflow: hash.fetch("workflow"),
          arguments: Arguments.new(data: workflow.build_arguments_hash),
          task_context: TaskContext.deserialize(
            hash["task_context"].merge(
              "task" => workflow.fetch_task(
                hash.fetch(
                  "task_context",
                  {} #: Hash[String, untyped]
                )["task_name"]&.to_sym
              )
            )
          ),
          output: Output.deserialize(hash),
          job_status: JobStatus.deserialize(hash)
        )
      end
    end

    #: (
    #   workflow: Workflow,
    #   arguments: Arguments,
    #   task_context: TaskContext,
    #   output: Output,
    #   job_status: JobStatus,
    #   ?job: DSL?
    # ) -> void
    def initialize(workflow:, arguments:, task_context:, output:, job_status:, job: nil) # rubocop:disable Metrics/ParameterLists
      # Guard against attaching a job instance to a foreign workflow.
      raise "job does not match the provided workflow" if job&.then { |j| j.class._workflow != workflow }

      self.job = job
      self.workflow = workflow
      self.arguments = arguments
      self.task_context = task_context
      self.output = output
      self.job_status = job_status
      self.enabled_with_each_value = false
      self.throttle_index = 0
      self.skip_in_dry_run_index = 0
    end

    # Serializes the resumable state; sub-jobs carry only their own task's
    # output while the parent job carries everything.
    #: () -> Hash[String, untyped]
    def serialize
      sub_job? ? serialize_for_sub_job : serialize_for_job
    end

    # Merges caller-supplied arguments over the current ones and returns
    # self for chaining.
    #: (Hash[Symbol, untyped]) -> Context
    def _update_arguments(other_arguments)
      self.arguments = arguments.merge(other_arguments.symbolize_keys)
      self
    end

    #: (DSL) -> void
    def _job=(job)
      self.job = job
    end

    #: () -> DSL?
    def _job
      job
    end

    # Active Job id of the attached job; raises when no job is attached.
    #: () -> String
    def job_id
      local_job = job
      raise "job is not set" if local_job.nil?

      local_job.job_id
    end

    # A sub-job is a job enqueued for a single task iteration: its parent
    # job id differs from its own job id.
    #: () -> bool
    def sub_job?
      parent_job_id != job_id
    end

    # Concurrency key used by limits_concurrency; nil outside a task.
    #: () -> String?
    def concurrency_key
      task = task_context.task
      return if task.nil?

      [task_context.parent_job_id, task.task_name].compact.join("/")
    end

    # Yields this context once per `each` value of the task (resuming from
    # the persisted index). Nesting is rejected because the iteration
    # counters below are shared mutable state.
    #: (Task) -> Enumerator[Context]
    def _with_each_value(task)
      raise "Nested _with_each_value calls are not allowed" if enabled_with_each_value

      self.enabled_with_each_value = true
      Enumerator.new do |y|
        with_task_context(task, y)
      ensure
        self.enabled_with_each_value = false
      end
    end

    # Runs the block under the task-level throttle semaphore, if any.
    #: () { () -> void } -> void
    def _with_task_throttle(&)
      task = task_context.task || (raise "with_throttle can be called only within iterate_each_value")

      semaphore = task.throttle.semaphore
      return yield if semaphore.nil?

      semaphore.with(&)
    end

    # Ad-hoc throttle inside a task block. Each call site gets a distinct
    # auto-generated key (via throttle_index) unless an explicit key is given.
    #: (limit: Integer, ?key: String?, ?ttl: Integer) { () -> void } -> void
    def throttle(limit:, key: nil, ttl: 180, &)
      task = task_context.task || (raise "throttle can be called only in task")

      semaphore = Semaphore.new(
        concurrency_key: key || "#{task.throttle_prefix_key}:#{throttle_index}",
        concurrency_limit: limit,
        concurrency_duration: ttl.seconds
      )

      self.throttle_index += 1

      semaphore.with(&)
    end

    # Instruments a custom operation with ActiveSupport::Notifications.
    # This creates a span in OpenTelemetry (if enabled) and logs the event.
    #
    # @example Basic usage
    #   ```ruby
    #   ctx.instrument("api_call", endpoint: "/users") do
    #     HTTP.get("https://api.example.com/users")
    #   end
    #   ```
    #
    # @example With automatic operation name
    #   ```ruby
    #   ctx.instrument do
    #     # operation name defaults to "custom"
    #     expensive_operation()
    #   end
    #   ```
    #
    #: (?String, **untyped) { () -> untyped } -> untyped
    def instrument(operation = "custom", **payload, &)
      task = task_context.task
      # NOTE(review): `job.class.name` assumes a job is attached — confirm
      # instrument is only reachable from within a running job.
      full_payload = {
        job_id: job_id,
        job_name: job.class.name,
        task_name: task&.task_name,
        each_index: task_context.index,
        operation:,
        **payload
      }
      Instrumentation.instrument_custom(operation, full_payload, &)
    end

    #: () -> bool
    def dry_run?
      task_context.dry_run
    end

    # Skips the block when dry-running, returning `fallback` instead; each
    # call site gets a sequential index for instrumentation purposes.
    #: (?Symbol?, ?fallback: untyped) { () -> untyped } -> untyped
    def skip_in_dry_run(dry_run_name = nil, fallback: nil)
      local_job = job
      task = task_context.task

      raise "job is not set" if local_job.nil?
      raise "skip_in_dry_run can be called only within with_task_context" if task.nil?

      current_index = skip_in_dry_run_index
      self.skip_in_dry_run_index += 1
      Instrumentation.instrument_dry_run(local_job, self, dry_run_name, current_index, dry_run?) do
        dry_run? ? fallback : yield
      end
    end

    # Current iteration value; only valid inside an each_values block.
    #: () -> untyped
    def each_value
      raise "each_value can be called only within each_values block" unless task_context.enabled?

      task_context.value
    end

    # Output recorded for the current task/iteration, if any.
    #: () -> TaskOutput?
    def each_task_output
      task = task_context.task
      raise "each_task_output can be called only _with_task block" if task.nil?
      raise "each_task_output can be called only _with_each_value block" unless task_context.enabled?

      task_name = task.task_name
      each_index = task_context.index
      output.fetch(task_name:, each_index:)
    end

    #: () -> TaskContext
    def _task_context
      task_context
    end

    #: (TaskOutput) -> void
    def _add_task_output(task_output)
      output.add_task_output(task_output)
    end

    # For sub-jobs only: copies the parent job's task outputs into this
    # context so dependent tasks can read them.
    #: () -> void
    def _load_parent_task_output
      return unless sub_job?

      workflow_status = WorkflowStatus.find(parent_job_id)
      parent_context = workflow_status.context
      parent_context.output.flat_task_outputs.each { |task_output| output.add_task_output(task_output) }
    end

    private

    attr_accessor :job #: DSL?
    attr_writer :workflow #: Workflow
    attr_writer :arguments #: Arguments
    attr_writer :output #: Output
    attr_writer :job_status #: JobStatus
    attr_accessor :task_context #: TaskContext
    attr_accessor :enabled_with_each_value #: bool
    attr_accessor :throttle_index #: Integer
    attr_accessor :skip_in_dry_run_index #: Integer

    # Falls back to the own job id when no parent is recorded (top-level job).
    #: () -> String
    def parent_job_id
      _task_context.parent_job_id || job_id
    end

    #: () -> Hash[String, untyped]
    def serialize_for_job
      {
        "task_context" => _task_context.serialize,
        "task_outputs" => output.flat_task_outputs.map(&:serialize),
        "task_job_statuses" => job_status.flat_task_job_statuses.map(&:serialize)
      }
    end

    # A sub-job serializes only its own task's output; the parent job holds
    # the rest.
    #: () -> Hash[String, untyped]
    def serialize_for_sub_job
      task_output = output.fetch(task_name: task_context.task&.task_name, each_index: task_context.index)
      {
        "task_context" => _task_context.serialize,
        "task_outputs" => [task_output].compact.map(&:serialize),
        "task_job_statuses" => []
      }
    end

    # Drives one task: for each remaining value/index it computes the
    # dry-run flag, applies the retry policy and timeout, and yields this
    # context into the enumerator.
    #: (Task, Enumerator::Yielder) -> void
    def with_task_context(task, yielder) # rubocop:disable Metrics/MethodLength
      reset_task_context_if_task_changed(task)

      with_each_index_and_value(task) do |value, index|
        dry_run = calculate_dry_run(task)
        with_retry(task) do |retry_count|
          self.task_context = TaskContext.new(task:, parent_job_id:, index:, value:, retry_count:, dry_run:)
          with_task_timeout do
            yielder << self
          end
        end
      ensure
        clear_after_each_index_and_value
      end
    end

    # When the parent job moves to a different task, start from a fresh
    # TaskContext; sub-jobs keep the position they were deserialized with.
    #: (Task) -> void
    def reset_task_context_if_task_changed(task)
      return if sub_job?

      self.task_context = TaskContext.new if task_context.task&.task_name != task.task_name
    end

    # Iterates the task's `each` values, skipping indexes already processed
    # on resume; a sub-job handles exactly one index and then stops.
    #: (Task) { (untyped, Integer) -> void } -> void
    def with_each_index_and_value(task)
      task.each.call(self).each.with_index do |value, index|
        next if index < task_context.index

        yield value, index

        break if sub_job?
      end
    end

    # Per-iteration counters must restart for the next value.
    #: () -> void
    def clear_after_each_index_and_value
      self.throttle_index = 0
      self.skip_in_dry_run_index = 0
    end

    #: () { () -> void } -> void
    def with_task_timeout
      task = task_context.task || (raise "with_task_timeout can be called only within with_task_context")

      timeout = task.timeout
      return yield if timeout.nil?

      Timeout.timeout(timeout) { yield } # rubocop:disable Style/ExplicitBlockArgument
    end

    # Retries the block per the task's retry policy, resuming from the
    # persisted retry_count; re-raises once the retry budget is exhausted.
    #: (Task) { (Integer) -> void } -> void
    def with_retry(task)
      task_retry = task.task_retry
      0.upto(task_retry.count) do |retry_count|
        next if retry_count < task_context.retry_count

        yield retry_count
        break
      rescue StandardError => e
        next_retry_count = retry_count + 1
        raise e if next_retry_count >= task_retry.count

        wait_next_retry(task, task_retry, next_retry_count, e)
      end
    end

    # Emits a retry notification, then sleeps the configured backoff delay.
    #: (Task, TaskRetry, Integer, StandardError) -> void
    def wait_next_retry(task, task_retry, next_retry_count, error)
      delay = task_retry.delay_for(next_retry_count)
      Instrumentation.notify_task_retry(task, self, job_id, next_retry_count, delay, error)
      sleep(delay)
    end

    # A task dry-runs when either the workflow-level or the task-level
    # dry_run config evaluates truthy for this context.
    #: (Task) -> bool
    def calculate_dry_run(task)
      workflow.dry_run_config.evaluate(self) || task.dry_run_config.evaluate(self)
    end
  end
end
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
# frozen_string_literal: true

module JobWorkflow
  # Normalizes the `dry_run` DSL option (nil / true / false / Proc) into a
  # single predicate object evaluated against a Context at run time.
  class DryRunConfig
    attr_reader :evaluator #: ^(Context) -> bool

    class << self
      # Converts a primitive `dry_run` value into a DryRunConfig.
      # nil -> always-false default; booleans -> constant predicates;
      # a Proc is used as the predicate directly; anything else raises.
      #: (bool | ^(Context) -> bool | nil) -> DryRunConfig
      def from_primitive_value(value)
        return new if value.nil?
        return new(evaluator: value) if value.is_a?(Proc)

        unless [true, false].include?(value)
          raise ArgumentError, "dry_run must be true, false, or Proc"
        end

        constant = value
        new(evaluator: ->(_ctx) { constant })
      end
    end

    # Defaults to "never dry-run" when no evaluator is supplied.
    #: (?evaluator: ^(Context) -> bool) -> void
    def initialize(evaluator: ->(_ctx) { false })
      @evaluator = evaluator
    end

    # Evaluates the predicate against the given context.
    #: (Context) -> bool
    def evaluate(context)
      evaluator.call(context)
    end
  end
end
|
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
# frozen_string_literal: true

module JobWorkflow
  # Class-level workflow DSL mixed into Active Job classes. Provides the
  # declaration macros (`argument`, `task`, `namespace`, lifecycle hooks,
  # `schedule`, `dry_run`) plus the instance plumbing that serializes and
  # deserializes the workflow Context alongside the job payload.
  module DSL
    extend ActiveSupport::Concern

    include ActiveJob::Continuable

    # Registry of every class that mixed in the DSL.
    mattr_accessor :_included_classes, default: Set.new

    # @rbs! extend ClassMethods

    # @rbs!
    #   def self._included_classes: () -> Set[singleton(DSL)]
    #
    #   def class: () -> ClassMethods
    #
    #   def job_id: () -> String
    #
    #   def queue_name: () -> String
    #
    #   def set: (Hash[Symbol, untyped]) -> self
    #
    #   def step: (Symbol, ?start: ActiveJob::Continuation::_Succ, ?isolated: bool) -> void
    #           | (Symbol, ?start: ActiveJob::Continuation::_Succ, ?isolated: bool) { (ActiveJob::Continuation::Step) -> void } -> void

    included do
      DSL._included_classes << self

      # Each including class gets its own Workflow definition.
      class_attribute :_workflow, default: Workflow.new
    end

    # Active Job entry point: builds the Context on first run (reuses it on
    # continuation/retry), merges in the given arguments, and runs the workflow.
    #: (Hash[untyped, untyped]) -> void
    def perform(arguments)
      self._context ||= Context.from_hash({ job: self, workflow: self.class._workflow })
      context = self._context #: Context
      Runner.new(context: context._update_arguments(arguments)).run
    end

    # Task outputs of the run; raises when called before perform/deserialize.
    #: () -> Output
    def output
      context = self._context
      raise "context is not set." if context.nil?

      context.output
    end

    #: (Context) -> void
    def _context=(context)
      @_context = context
    end

    #: () -> Context?
    def _context
      @_context
    end

    # Adds the workflow context to the job's serialized Active Job payload.
    #: () -> Hash[String, untyped]
    def serialize
      super.merge({ "job_workflow_context" => _context&.serialize }.compact)
    end

    # Restores the workflow context (if present) after Active Job
    # deserializes the rest of the job data.
    #: (Hash[String, untyped]) -> void
    def deserialize(job_data)
      super

      job_data["job_workflow_context"]&.then do |context_data|
        self._context = Context.deserialize(
          context_data.merge("job" => self, "workflow" => self.class._workflow)
        )
      end
    end

    module ClassMethods
      # @rbs!
      #   def class_attribute: (Symbol, default: untyped) -> void
      #
      #   def _workflow: () -> Workflow
      #
      #   def new: (Hash[untyped, untyped]) -> DSL
      #
      #   def name: () -> String
      #
      #   def enqueue: (Hash[untyped, untyped]) -> void
      #
      #   def queue_name: () -> String
      #
      #   def queue_as: () -> String
      #
      #   def limits_concurrency: (
      #     to: Integer,
      #     key: ^(untyped) -> untyped,
      #     ?duration: ActiveSupport::Duration?,
      #     ?group: String?,
      #     ?on_conflict: Symbol?
      #   ) -> void

      # Builds a job instance bound to a copy of the given context (used to
      # enqueue sub-jobs), honoring the task's queue override when present.
      #: (Context) -> DSL
      def from_context(context) # rubocop:disable Metrics/AbcSize
        new_context = context.dup
        task = new_context._task_context.task
        job = new(new_context.arguments.to_h)
        new_context._job = job
        job._context = new_context
        job.set(queue: task.enqueue.queue) if !task.nil? && !task.enqueue.queue.nil?
        job
      end

      # Declares a workflow argument with its type and optional default.
      #: (Symbol argument_name, String type, ?default: untyped) -> void
      def argument(argument_name, type, default: nil)
        validate_namespace!
        _workflow.add_argument(ArgumentDef.new(name: argument_name, type:, default:))
      end

      # Groups the task definitions made inside the block under a namespace.
      #: (Symbol) { () -> void } -> void
      def namespace(namespace_name, &)
        _workflow.add_namespace(Namespace.new(name: namespace_name), &)
      end

      # rubocop:disable Metrics/ParameterLists
      #
      # Declares a task. The block is the task body; the keyword options map
      # one-to-one onto Task's configuration (iteration, enqueueing, retry,
      # declared outputs, dependencies, conditional execution, throttling,
      # timeout, dependency waiting, and dry-run behavior).
      #: (
      #   Symbol task_name,
      #   ?each: ^(Context) -> untyped,
      #   ?enqueue: true | false | ^(Context) -> bool | Hash[Symbol, untyped],
      #   ?retry: Integer | Hash[Symbol, untyped],
      #   ?output: Hash[Symbol, String],
      #   ?depends_on: Array[Symbol],
      #   ?condition: ^(Context) -> bool,
      #   ?throttle: Integer | Hash[Symbol, untyped],
      #   ?timeout: Numeric?,
      #   ?dependency_wait: Hash[Symbol, untyped],
      #   ?dry_run: bool | ^(Context) -> bool
      # ) { (untyped) -> void } -> void
      def task(
        task_name,
        each: ->(_ctx) { [TaskContext::NULL_VALUE] },
        enqueue: nil,
        retry: 0,
        output: {},
        depends_on: [],
        condition: ->(_ctx) { true },
        throttle: {},
        timeout: nil,
        dependency_wait: {},
        dry_run: false,
        &block
      )
        new_task = Task.new(
          job_name: name,
          name: task_name,
          namespace: _workflow.namespace,
          block: block,
          enqueue:,
          each:,
          # `retry` is a Ruby keyword, so the parameter has to be read back
          # through the binding rather than referenced by bare name.
          task_retry: binding.local_variable_get(:retry),
          output:,
          depends_on:,
          condition:,
          throttle:,
          timeout:,
          dependency_wait:,
          dry_run:
        )
        _workflow.add_task(new_task)
        if new_task.enqueue.should_limits_concurrency? # rubocop:disable Style/GuardClause
          concurrency = new_task.enqueue.concurrency #: Integer
          limits_concurrency(to: concurrency, key: ->(ctx) { ctx.concurrency_key }) # rubocop:disable Style/SymbolProc
        end
      end
      # rubocop:enable Metrics/ParameterLists

      # Hook that runs before the named tasks (all tasks when none are given).
      #: (*Symbol) { (Context) -> void } -> void
      def before(*task_names, &block)
        validate_namespace!
        _workflow.add_hook(:before, task_names:, block:)
      end

      # Hook that runs after the named tasks (all tasks when none are given).
      #: (*Symbol) { (Context) -> void } -> void
      def after(*task_names, &block)
        validate_namespace!
        _workflow.add_hook(:after, task_names:, block:)
      end

      # Hook that wraps the named tasks; the block receives a TaskCallable
      # to invoke the wrapped task.
      #: (*Symbol) { (Context, TaskCallable) -> void } -> void
      def around(*task_names, &block)
        validate_namespace!
        _workflow.add_hook(:around, task_names:, block:)
      end

      # Hook invoked when the named tasks raise an error.
      #: (*Symbol) { (Context, StandardError, Task) -> void } -> void
      def on_error(*task_names, &block)
        validate_namespace!
        _workflow.add_hook(:error, task_names:, block:)
      end

      # Sets the workflow-level dry-run flag, either as a literal value or
      # as a predicate block (the block wins when both are given).
      #: (?bool) ?{ (Context) -> bool } -> void
      def dry_run(value = nil, &block)
        validate_namespace!
        _workflow.dry_run_config = block || value
      end

      # rubocop:disable Metrics/ParameterLists
      # Registers a recurring schedule (cron-style expression) for this job.
      #: (
      #   String expression,
      #   ?key: (String | Symbol)?,
      #   ?queue: String?,
      #   ?priority: Integer?,
      #   ?args: Hash[Symbol, untyped],
      #   ?description: String?
      # ) -> void
      def schedule(expression, key: nil, queue: nil, priority: nil, args: {}, description: nil)
        validate_namespace!
        _workflow.add_schedule(
          Schedule.new(
            expression:,
            class_name: name,
            key:,
            queue:,
            priority:,
            args:,
            description:
          )
        )
      end
      # rubocop:enable Metrics/ParameterLists

      private

      # Top-level DSL calls must not appear inside a `namespace` block.
      #: () -> void
      def validate_namespace!
        raise "cannot be defined within a namespace." unless _workflow.namespace.default?
      end
    end
  end
end
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# frozen_string_literal: true

module JobWorkflow
  # An error hook registered via `on_error`: pairs a callback with the set
  # of task names it is scoped to. An empty name set marks the hook as
  # global, i.e. applicable to every task.
  class ErrorHook
    attr_reader :task_names #: Set[Symbol]
    attr_reader :block #: ^(Context, StandardError, Task) -> void

    #: (task_names: Array[Symbol], block: ^(Context, StandardError, Task) -> void) -> void
    def initialize(task_names:, block:)
      @task_names = Set.new(task_names)
      @block = block
    end

    # True when this hook should run for the given task.
    #: (Symbol) -> bool
    def applies_to?(task_name)
      return true if global?

      task_names.include?(task_name)
    end

    # A hook declared without task names applies to all tasks.
    #: () -> bool
    def global?
      task_names.empty?
    end
  end
end
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# frozen_string_literal: true

module JobWorkflow
  # A lifecycle hook (before/after/around): pairs a callback with the set
  # of task names it is scoped to. An empty name set marks the hook as
  # global, i.e. applicable to every task.
  class Hook
    attr_reader :task_names #: Set[Symbol]
    attr_reader :block #: ^(Context, ?TaskCallable) -> void

    #: (task_names: Array[Symbol], block: ^(Context, ?TaskCallable) -> void) -> void
    def initialize(task_names:, block:)
      @block = block
      @task_names = Set.new(task_names)
    end

    # True when this hook should run for the given task.
    #: (Symbol) -> bool
    def applies_to?(task_name)
      global? || task_names.member?(task_name)
    end

    # A hook declared without task names applies to all tasks.
    #: () -> bool
    def global?
      task_names.empty?
    end
  end
end
|