job-workflow 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/.rubocop.yml +91 -0
- data/CHANGELOG.md +23 -0
- data/LICENSE.txt +21 -0
- data/README.md +47 -0
- data/Rakefile +55 -0
- data/Steepfile +10 -0
- data/guides/API_REFERENCE.md +112 -0
- data/guides/BEST_PRACTICES.md +113 -0
- data/guides/CACHE_STORE_INTEGRATION.md +145 -0
- data/guides/CONDITIONAL_EXECUTION.md +66 -0
- data/guides/DEPENDENCY_WAIT.md +386 -0
- data/guides/DRY_RUN.md +390 -0
- data/guides/DSL_BASICS.md +216 -0
- data/guides/ERROR_HANDLING.md +187 -0
- data/guides/GETTING_STARTED.md +524 -0
- data/guides/INSTRUMENTATION.md +131 -0
- data/guides/LIFECYCLE_HOOKS.md +415 -0
- data/guides/NAMESPACES.md +75 -0
- data/guides/OPENTELEMETRY_INTEGRATION.md +86 -0
- data/guides/PARALLEL_PROCESSING.md +302 -0
- data/guides/PRODUCTION_DEPLOYMENT.md +110 -0
- data/guides/QUEUE_MANAGEMENT.md +141 -0
- data/guides/README.md +174 -0
- data/guides/SCHEDULED_JOBS.md +165 -0
- data/guides/STRUCTURED_LOGGING.md +268 -0
- data/guides/TASK_OUTPUTS.md +240 -0
- data/guides/TESTING_STRATEGY.md +56 -0
- data/guides/THROTTLING.md +198 -0
- data/guides/TROUBLESHOOTING.md +53 -0
- data/guides/WORKFLOW_COMPOSITION.md +675 -0
- data/guides/WORKFLOW_STATUS_QUERY.md +288 -0
- data/lib/job-workflow.rb +3 -0
- data/lib/job_workflow/argument_def.rb +16 -0
- data/lib/job_workflow/arguments.rb +40 -0
- data/lib/job_workflow/auto_scaling/adapter/aws_adapter.rb +66 -0
- data/lib/job_workflow/auto_scaling/adapter.rb +31 -0
- data/lib/job_workflow/auto_scaling/configuration.rb +85 -0
- data/lib/job_workflow/auto_scaling/executor.rb +43 -0
- data/lib/job_workflow/auto_scaling.rb +69 -0
- data/lib/job_workflow/cache_store_adapters.rb +46 -0
- data/lib/job_workflow/context.rb +352 -0
- data/lib/job_workflow/dry_run_config.rb +31 -0
- data/lib/job_workflow/dsl.rb +236 -0
- data/lib/job_workflow/error_hook.rb +24 -0
- data/lib/job_workflow/hook.rb +24 -0
- data/lib/job_workflow/hook_registry.rb +66 -0
- data/lib/job_workflow/instrumentation/log_subscriber.rb +194 -0
- data/lib/job_workflow/instrumentation/opentelemetry_subscriber.rb +221 -0
- data/lib/job_workflow/instrumentation.rb +257 -0
- data/lib/job_workflow/job_status.rb +92 -0
- data/lib/job_workflow/logger.rb +86 -0
- data/lib/job_workflow/namespace.rb +36 -0
- data/lib/job_workflow/output.rb +81 -0
- data/lib/job_workflow/output_def.rb +14 -0
- data/lib/job_workflow/queue.rb +74 -0
- data/lib/job_workflow/queue_adapter.rb +38 -0
- data/lib/job_workflow/queue_adapters/abstract.rb +87 -0
- data/lib/job_workflow/queue_adapters/null_adapter.rb +127 -0
- data/lib/job_workflow/queue_adapters/solid_queue_adapter.rb +224 -0
- data/lib/job_workflow/runner.rb +173 -0
- data/lib/job_workflow/schedule.rb +46 -0
- data/lib/job_workflow/semaphore.rb +71 -0
- data/lib/job_workflow/task.rb +83 -0
- data/lib/job_workflow/task_callable.rb +43 -0
- data/lib/job_workflow/task_context.rb +70 -0
- data/lib/job_workflow/task_dependency_wait.rb +66 -0
- data/lib/job_workflow/task_enqueue.rb +50 -0
- data/lib/job_workflow/task_graph.rb +43 -0
- data/lib/job_workflow/task_job_status.rb +70 -0
- data/lib/job_workflow/task_output.rb +51 -0
- data/lib/job_workflow/task_retry.rb +64 -0
- data/lib/job_workflow/task_throttle.rb +46 -0
- data/lib/job_workflow/version.rb +5 -0
- data/lib/job_workflow/workflow.rb +87 -0
- data/lib/job_workflow/workflow_status.rb +112 -0
- data/lib/job_workflow.rb +59 -0
- data/rbs_collection.lock.yaml +172 -0
- data/rbs_collection.yaml +14 -0
- data/sig/generated/job-workflow.rbs +2 -0
- data/sig/generated/job_workflow/argument_def.rbs +14 -0
- data/sig/generated/job_workflow/arguments.rbs +26 -0
- data/sig/generated/job_workflow/auto_scaling/adapter/aws_adapter.rbs +32 -0
- data/sig/generated/job_workflow/auto_scaling/adapter.rbs +22 -0
- data/sig/generated/job_workflow/auto_scaling/configuration.rbs +50 -0
- data/sig/generated/job_workflow/auto_scaling/executor.rbs +29 -0
- data/sig/generated/job_workflow/auto_scaling.rbs +47 -0
- data/sig/generated/job_workflow/cache_store_adapters.rbs +28 -0
- data/sig/generated/job_workflow/context.rbs +155 -0
- data/sig/generated/job_workflow/dry_run_config.rbs +16 -0
- data/sig/generated/job_workflow/dsl.rbs +117 -0
- data/sig/generated/job_workflow/error_hook.rbs +18 -0
- data/sig/generated/job_workflow/hook.rbs +18 -0
- data/sig/generated/job_workflow/hook_registry.rbs +47 -0
- data/sig/generated/job_workflow/instrumentation/log_subscriber.rbs +102 -0
- data/sig/generated/job_workflow/instrumentation/opentelemetry_subscriber.rbs +113 -0
- data/sig/generated/job_workflow/instrumentation.rbs +138 -0
- data/sig/generated/job_workflow/job_status.rbs +46 -0
- data/sig/generated/job_workflow/logger.rbs +56 -0
- data/sig/generated/job_workflow/namespace.rbs +24 -0
- data/sig/generated/job_workflow/output.rbs +39 -0
- data/sig/generated/job_workflow/output_def.rbs +12 -0
- data/sig/generated/job_workflow/queue.rbs +49 -0
- data/sig/generated/job_workflow/queue_adapter.rbs +18 -0
- data/sig/generated/job_workflow/queue_adapters/abstract.rbs +56 -0
- data/sig/generated/job_workflow/queue_adapters/null_adapter.rbs +73 -0
- data/sig/generated/job_workflow/queue_adapters/solid_queue_adapter.rbs +111 -0
- data/sig/generated/job_workflow/runner.rbs +66 -0
- data/sig/generated/job_workflow/schedule.rbs +34 -0
- data/sig/generated/job_workflow/semaphore.rbs +37 -0
- data/sig/generated/job_workflow/task.rbs +60 -0
- data/sig/generated/job_workflow/task_callable.rbs +30 -0
- data/sig/generated/job_workflow/task_context.rbs +52 -0
- data/sig/generated/job_workflow/task_dependency_wait.rbs +42 -0
- data/sig/generated/job_workflow/task_enqueue.rbs +27 -0
- data/sig/generated/job_workflow/task_graph.rbs +27 -0
- data/sig/generated/job_workflow/task_job_status.rbs +42 -0
- data/sig/generated/job_workflow/task_output.rbs +29 -0
- data/sig/generated/job_workflow/task_retry.rbs +30 -0
- data/sig/generated/job_workflow/task_throttle.rbs +20 -0
- data/sig/generated/job_workflow/version.rbs +5 -0
- data/sig/generated/job_workflow/workflow.rbs +48 -0
- data/sig/generated/job_workflow/workflow_status.rbs +55 -0
- data/sig/generated/job_workflow.rbs +8 -0
- data/sig-private/activejob.rbs +35 -0
- data/sig-private/activesupport.rbs +23 -0
- data/sig-private/aws.rbs +32 -0
- data/sig-private/opentelemetry.rbs +40 -0
- data/sig-private/solid_queue.rbs +108 -0
- data/tmp/.keep +0 -0
- metadata +190 -0

data/lib/job_workflow/instrumentation.rb
@@ -0,0 +1,257 @@
+# frozen_string_literal: true
+
+module JobWorkflow
+  # Instrumentation provides ActiveSupport::Notifications-based event instrumentation for JobWorkflow workflows and tasks.
+  #
+  # @example Subscribing to events
+  # ```ruby
+  # ActiveSupport::Notifications.subscribe("task.start.job_workflow") do |name, start, finish, id, payload|
+  #   puts "Task #{payload[:task_name]} started"
+  # end
+  # ```
+  module Instrumentation # rubocop:disable Metrics/ModuleLength
+    NAMESPACE = "job_workflow"
+
+    module Events
+      WORKFLOW = "workflow.#{NAMESPACE}".freeze
+      WORKFLOW_START = "workflow.start.#{NAMESPACE}".freeze
+      WORKFLOW_COMPLETE = "workflow.complete.#{NAMESPACE}".freeze
+      TASK = "task.#{NAMESPACE}".freeze
+      TASK_START = "task.start.#{NAMESPACE}".freeze
+      TASK_COMPLETE = "task.complete.#{NAMESPACE}".freeze
+      TASK_ERROR = "task.error.#{NAMESPACE}".freeze
+      TASK_SKIP = "task.skip.#{NAMESPACE}".freeze
+      TASK_ENQUEUE = "task.enqueue.#{NAMESPACE}".freeze
+      TASK_RETRY = "task.retry.#{NAMESPACE}".freeze
+      THROTTLE_ACQUIRE = "throttle.acquire.#{NAMESPACE}".freeze
+      THROTTLE_ACQUIRE_START = "throttle.acquire.start.#{NAMESPACE}".freeze
+      THROTTLE_ACQUIRE_COMPLETE = "throttle.acquire.complete.#{NAMESPACE}".freeze
+      THROTTLE_RELEASE = "throttle.release.#{NAMESPACE}".freeze
+      DEPENDENT_WAIT = "dependent.wait.#{NAMESPACE}".freeze
+      DEPENDENT_WAIT_START = "dependent.wait.start.#{NAMESPACE}".freeze
+      DEPENDENT_WAIT_COMPLETE = "dependent.wait.complete.#{NAMESPACE}".freeze
+      DEPENDENT_RESCHEDULE = "dependent.reschedule.#{NAMESPACE}".freeze
+      QUEUE_PAUSE = "queue.pause.#{NAMESPACE}".freeze
+      QUEUE_RESUME = "queue.resume.#{NAMESPACE}".freeze
+      CUSTOM = "custom.#{NAMESPACE}".freeze
+      DRY_RUN = "dry_run.#{NAMESPACE}".freeze
+      DRY_RUN_SKIP = "dry_run.skip.#{NAMESPACE}".freeze
+      DRY_RUN_EXECUTE = "dry_run.execute.#{NAMESPACE}".freeze
+    end
+
+    class << self
+      #: (DSL) { () -> untyped } -> untyped
+      def instrument_workflow(job, &)
+        payload = build_workflow_payload(job)
+        instrument(Events::WORKFLOW_START, payload)
+        instrument(Events::WORKFLOW, payload, &)
+      ensure
+        instrument(Events::WORKFLOW_COMPLETE, payload)
+      end
+
+      #: (DSL, Task, Context) { () -> untyped } -> untyped
+      def instrument_task(job, task, ctx, &)
+        payload = build_task_payload(job, task, ctx)
+        instrument(Events::TASK_START, payload)
+        instrument(Events::TASK, payload, &)
+      ensure
+        instrument(Events::TASK_COMPLETE, payload)
+      end
+
+      #: (DSL, Task, String) -> void
+      def notify_task_skip(job, task, reason)
+        instrument(Events::TASK_SKIP, build_task_skip_payload(job, task, reason))
+      end
+
+      #: (DSL, Task, Integer) -> void
+      def notify_task_enqueue(job, task, sub_job_count)
+        instrument(Events::TASK_ENQUEUE, build_task_enqueue_payload(job, task, sub_job_count))
+      end
+
+      #: (Task, Context, String, Integer, Float, StandardError) -> void
+      def notify_task_retry(task, ctx, job_id, attempt, delay, error) # rubocop:disable Metrics/ParameterLists
+        instrument(Events::TASK_RETRY, build_task_retry_payload(task, ctx, job_id, attempt, delay, error))
+      end
+
+      #: (DSL, Task) { () -> untyped } -> untyped
+      def instrument_dependent_wait(job, task, &)
+        payload = build_dependent_payload(job, task)
+        instrument(Events::DEPENDENT_WAIT_START, payload)
+        instrument(Events::DEPENDENT_WAIT, payload, &)
+      ensure
+        instrument(Events::DEPENDENT_WAIT_COMPLETE, payload)
+      end
+
+      #: (DSL, Task, Numeric, Integer) -> void
+      def notify_dependent_reschedule(job, task, reschedule_delay, poll_count)
+        instrument(
+          Events::DEPENDENT_RESCHEDULE,
+          build_dependent_reschedule_payload(job, task, reschedule_delay, poll_count)
+        )
+      end
+
+      #: (Semaphore) { () -> untyped } -> untyped
+      def instrument_throttle(semaphore, &)
+        payload = build_throttle_payload(semaphore)
+        instrument(Events::THROTTLE_ACQUIRE_START, payload)
+        instrument(Events::THROTTLE_ACQUIRE, payload, &)
+      ensure
+        instrument(Events::THROTTLE_ACQUIRE_COMPLETE, payload)
+      end
+
+      #: (Semaphore) -> void
+      def notify_throttle_release(semaphore)
+        instrument(Events::THROTTLE_RELEASE, build_throttle_payload(semaphore))
+      end
+
+      #: (String) -> void
+      def notify_queue_pause(queue_name)
+        instrument(Events::QUEUE_PAUSE, build_queue_payload(queue_name))
+      end
+
+      #: (String) -> void
+      def notify_queue_resume(queue_name)
+        instrument(Events::QUEUE_RESUME, build_queue_payload(queue_name))
+      end
+
+      #: (String, Hash[Symbol, untyped]) { () -> untyped } -> untyped
+      def instrument_custom(operation, payload = {}, &)
+        event_name = "#{operation}.#{NAMESPACE}"
+        instrument(event_name, payload, &)
+      end
+
+      #: (DSL, Context, Symbol?, Integer, bool) { () -> untyped } -> untyped
+      def instrument_dry_run(job, ctx, dry_run_name, skip_in_dry_run_index, dry_run, &)
+        start_event = dry_run ? Events::DRY_RUN_SKIP : Events::DRY_RUN_EXECUTE
+        payload = build_skip_in_dry_run_payload(job, ctx, dry_run_name, skip_in_dry_run_index, dry_run)
+        instrument(start_event, payload)
+        instrument(Events::DRY_RUN, payload, &)
+      end
+
+      private
+
+      #: (String, Hash[Symbol, untyped]) ?{ () -> untyped } -> untyped
+      def instrument(event_name, payload = {}, &)
+        ActiveSupport::Notifications.instrument(event_name, payload, &)
+      end
+
+      #: (DSL) -> Hash[Symbol, untyped]
+      def build_workflow_payload(job)
+        {
+          job:,
+          job_id: job.job_id,
+          job_name: job.class.name
+        }
+      end
+
+      #: (DSL, Task, Context) -> Hash[Symbol, untyped]
+      def build_task_payload(job, task, ctx)
+        task_ctx = ctx._task_context
+        {
+          job:,
+          job_id: job.job_id,
+          job_name: job.class.name,
+          task:,
+          task_name: task.task_name,
+          context: ctx,
+          each_index: task_ctx.index,
+          retry_count: task_ctx.retry_count
+        }
+      end
+
+      #: (DSL, Task, String) -> Hash[Symbol, untyped]
+      def build_task_skip_payload(job, task, reason)
+        {
+          job:,
+          job_id: job.job_id,
+          job_name: job.class.name,
+          task:,
+          task_name: task.task_name,
+          reason:
+        }
+      end
+
+      #: (DSL, Task, Integer) -> Hash[Symbol, untyped]
+      def build_task_enqueue_payload(job, task, sub_job_count)
+        {
+          job:,
+          job_id: job.job_id,
+          job_name: job.class.name,
+          task:,
+          task_name: task.task_name,
+          sub_job_count:
+        }
+      end
+
+      #: (Task, Context, String, Integer, Float, StandardError) -> Hash[Symbol, untyped]
+      def build_task_retry_payload(task, ctx, job_id, attempt, delay, error) # rubocop:disable Metrics/ParameterLists
+        task_ctx = ctx._task_context
+        {
+          task:,
+          task_name: task.task_name,
+          job_id:,
+          each_index: task_ctx.index,
+          attempt:,
+          max_attempts: task.task_retry.count,
+          delay_seconds: delay.round(3),
+          error:,
+          error_class: error.class.name,
+          error_message: error.message
+        }
+      end
+
+      #: (DSL, Task) -> Hash[Symbol, untyped]
+      def build_dependent_payload(job, task)
+        {
+          job:,
+          job_id: job.job_id,
+          job_name: job.class.name,
+          task:,
+          dependent_task_name: task.task_name
+        }
+      end
+
+      #: (DSL, Task, Numeric, Integer) -> Hash[Symbol, untyped]
+      def build_dependent_reschedule_payload(job, task, reschedule_delay, poll_count)
+        {
+          job:,
+          job_id: job.job_id,
+          job_name: job.class.name,
+          task:,
+          dependent_task_name: task.task_name,
+          reschedule_delay:,
+          poll_count:
+        }
+      end
+
+      #: (Semaphore) -> Hash[Symbol, untyped]
+      def build_throttle_payload(semaphore)
+        {
+          semaphore:,
+          concurrency_key: semaphore.concurrency_key,
+          concurrency_limit: semaphore.concurrency_limit
+        }
+      end
+
+      #: (String) -> Hash[Symbol, untyped]
+      def build_queue_payload(queue_name)
+        {
+          queue_name:
+        }
+      end
+
+      #: (DSL, Context, Symbol?, Integer, bool) -> Hash[Symbol, untyped]
+      def build_skip_in_dry_run_payload(job, ctx, dry_run_name, dry_run_index, dry_run)
+        {
+          job_id: job.job_id,
+          job_name: job.class.name,
+          task_name: ctx._task_context.task&.task_name,
+          each_index: ctx._task_context.index,
+          dry_run_name:,
+          dry_run_index:,
+          dry_run:
+        }
+      end
+    end
+  end
+end
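
The `Events` constants and payload builders above define the whole subscription surface. A minimal consumer sketch, assuming only what this file shows (the event names and the `:task_name`/`:job_name`/`:each_index`/`:retry_count` payload keys); since `instrument_task` wraps the task body, the start/finish pair brackets the actual execution:

```ruby
require "active_support/notifications"

# "task.job_workflow" wraps the task body (see instrument_task above),
# so finish - start is the task's wall-clock duration in seconds.
ActiveSupport::Notifications.subscribe("task.job_workflow") do |_name, start, finish, _id, payload|
  duration_ms = ((finish - start) * 1000).round(1)
  puts "#{payload[:job_name]}##{payload[:task_name]} finished in #{duration_ms}ms " \
       "(each_index: #{payload[:each_index]}, retries: #{payload[:retry_count]})"
end

# Retry events carry the error and backoff delay (see build_task_retry_payload).
ActiveSupport::Notifications.subscribe("task.retry.job_workflow") do |*_args, payload|
  warn "retry ##{payload[:attempt]}/#{payload[:max_attempts]} for #{payload[:task_name]}: " \
       "#{payload[:error_class]} (next attempt in #{payload[:delay_seconds]}s)"
end
```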

data/lib/job_workflow/job_status.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+module JobWorkflow
+  class JobStatus
+    class << self
+      #: (Array[Hash[untyped, untyped]]) -> JobStatus
+      def from_hash_array(array)
+        new(task_job_statuses: array.map { |hash| TaskJobStatus.from_hash(hash) })
+      end
+
+      #: (Hash[String, untyped]) -> JobStatus
+      def deserialize(hash)
+        new(task_job_statuses: hash.fetch("task_job_statuses", []).map { |shash| TaskJobStatus.deserialize(shash) })
+      end
+    end
+
+    #: (?task_job_statuses: Array[TaskJobStatus]) -> void
+    def initialize(task_job_statuses: [])
+      self.task_job_statuses = {}
+      task_job_statuses.each { |task_job_status| update_task_job_status(task_job_status) }
+    end
+
+    #: (task_name: Symbol) -> Array[TaskJobStatus]
+    def fetch_all(task_name:)
+      task_job_statuses.fetch(task_name, []).compact
+    end
+
+    #: (task_name: Symbol, index: Integer) -> TaskJobStatus?
+    def fetch(task_name:, index:)
+      task_job_statuses.fetch(task_name, [])[index]
+    end
+
+    #: (task_name: Symbol) -> Array[String]
+    def finished_job_ids(task_name:)
+      fetch_all(task_name:).filter(&:finished?).map(&:job_id)
+    end
+
+    #: () -> Array[TaskJobStatus]
+    def flat_task_job_statuses
+      task_job_statuses.values.flatten
+    end
+
+    # @note
+    # - If the array is empty, the task is not enqueued and is considered completed.
+    # - If we add a task existence check in the future, we'll check here.
+    #
+    #: (Symbol) -> bool
+    def needs_waiting?(task_name)
+      task_job_statuses.fetch(task_name, []).all?(&:finished?)
+    end
+
+    #: (TaskJobStatus) -> void
+    def update_task_job_status(task_job_status)
+      task_job_statuses[task_job_status.task_name] ||= []
+      task_job_statuses[task_job_status.task_name][task_job_status.each_index] = task_job_status
+    end
+
+    #: (task_name: Symbol, jobs: Array[DSL]) -> void
+    def update_task_job_statuses_from_jobs(task_name:, jobs:)
+      jobs.each.with_index do |job, index|
+        update_task_job_status(
+          TaskJobStatus.new(
+            task_name:,
+            job_id: job.job_id,
+            each_index: index,
+            status: :pending
+          )
+        )
+      end
+    end
+
+    #: (Symbol) -> void
+    def update_task_job_statuses_from_db(task_name)
+      statuses = task_job_statuses.fetch(task_name, []).reject(&:finished?).index_by(&:job_id)
+      return if statuses.empty?
+
+      task_jobs = QueueAdapter.current.fetch_job_statuses(statuses.keys)
+
+      statuses.each do |job_id, task_job_status|
+        task_job = task_jobs[job_id]
+        next unless task_job
+
+        task_job_status.update_status(QueueAdapter.current.job_status(task_job))
+        update_task_job_status(task_job_status)
+      end
+    end
+
+    private
+
+    attr_accessor :task_job_statuses #: Hash[Symbol, Array[TaskJobStatus]]
+  end
+end
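
`JobStatus` keys one array of `TaskJobStatus` entries per task name, positioned by `each_index`, so fan-out results stay individually addressable. A small sketch, assuming `TaskJobStatus.new` takes the keywords used in `update_task_job_statuses_from_jobs` above:

```ruby
status = JobWorkflow::JobStatus.new(
  task_job_statuses: [
    JobWorkflow::TaskJobStatus.new(task_name: :extract, job_id: "job-1", each_index: 0, status: :pending),
    JobWorkflow::TaskJobStatus.new(task_name: :extract, job_id: "job-2", each_index: 1, status: :pending)
  ]
)

status.fetch_all(task_name: :extract).size         # => 2
status.fetch(task_name: :extract, index: 1).job_id # => "job-2"
status.finished_job_ids(task_name: :extract)       # => [] until update_task_job_statuses_from_db runs
```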

data/lib/job_workflow/logger.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+module JobWorkflow
+  # Logger provides structured JSON logging for JobWorkflow workflows.
+  #
+  # @example Basic usage
+  # ```ruby
+  # JobWorkflow.logger = ActiveSupport::Logger.new($stdout)
+  # JobWorkflow.logger.formatter = JobWorkflow::Logger::JsonFormatter.new
+  # ```
+  #
+  # @example With custom log tags
+  # ```ruby
+  # JobWorkflow.logger.formatter = JobWorkflow::Logger::JsonFormatter.new(log_tags: [:request_id])
+  # ```
+  module Logger
+    #: (ActiveSupport::Logger) -> void
+    attr_writer :logger
+
+    #: () -> ActiveSupport::Logger
+    def logger
+      @logger ||= build_default_logger
+    end
+
+    private
+
+    #: () -> ActiveSupport::Logger
+    def build_default_logger
+      logger = ActiveSupport::Logger.new($stdout)
+      logger.formatter = Logger::JsonFormatter.new
+      logger
+    end
+
+    # JSON formatter for structured logging output.
+    # @rbs inherits ::Logger::Formatter
+    class JsonFormatter < ::Logger::Formatter
+      include ActiveSupport::TaggedLogging::Formatter
+
+      #: (?log_tags: Array[Symbol]) -> void
+      def initialize(log_tags: [])
+        @log_tags = log_tags
+        super()
+      end
+
+      #: (String, Time, String, String | Hash[untyped, untyped]) -> String
+      def call(severity, time, progname, msg)
+        base_hash = build_base_hash(severity, time, progname)
+        tags_hash = build_tags_hash
+        msg_hash = build_msg_hash(msg)
+        "#{JSON.generate({ **base_hash, **tags_hash, **msg_hash })}\n"
+      end
+
+      private
+
+      attr_reader :log_tags #: Array[Symbol]
+
+      #: (String, Time, String) -> Hash[Symbol, untyped]
+      def build_base_hash(severity, time, progname)
+        time_in_zone = time.in_time_zone(Time.zone || "UTC")
+        { time: time_in_zone.iso8601(6), level: severity, progname: progname }
+      end
+
+      #: () -> Hash[Symbol, untyped]
+      def build_tags_hash
+        log_tags.zip(current_tags).to_h
+      end
+
+      #: (String | Hash[untyped, untyped]) -> Hash[Symbol, untyped]
+      def build_msg_hash(msg)
+        case msg
+        when Hash
+          msg.symbolize_keys
+        else
+          parse_json_or_message(msg.to_s)
+        end
+      end
+
+      #: (String) -> Hash[Symbol, untyped]
+      def parse_json_or_message(msg)
+        JSON.parse(msg, symbolize_names: true)
+      rescue JSON::ParserError
+        { message: msg }
+      end
+    end
+  end
+end
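
`JsonFormatter#call` merges three hashes per line: the time/level/progname base, any configured log tags, and the message (symbolized if it is a Hash, JSON-parsed when possible, otherwise wrapped in `message:`). A sketch of the resulting output, assuming the gem and ActiveSupport are loaded:

```ruby
logger = ActiveSupport::Logger.new($stdout)
logger.formatter = JobWorkflow::Logger::JsonFormatter.new

logger.info("plain text")
# {"time":"2024-01-01T00:00:00.000000Z","level":"INFO","progname":null,"message":"plain text"}

logger.info({ event: "task.complete", task_name: "extract" })
# {"time":"...","level":"INFO","progname":null,"event":"task.complete","task_name":"extract"}

logger.info('{"already":"json"}')
# {"time":"...","level":"INFO","progname":null,"already":"json"}
```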

data/lib/job_workflow/namespace.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module JobWorkflow
+  class Namespace
+    attr_reader :name #: Symbol
+    attr_reader :parent #: Namespace?
+
+    class << self
+      #: () -> Namespace
+      def default
+        new(name: :"")
+      end
+    end
+
+    #: (name: Symbol, ?parent: Namespace?) -> void
+    def initialize(name:, parent: nil)
+      @name = name #: Symbol
+      @parent = parent #: Namespace?
+    end
+
+    #: () -> bool
+    def default?
+      name.empty?
+    end
+
+    #: (Namespace) -> Namespace
+    def update_parent(parent)
+      self.class.new(name:, parent:)
+    end
+
+    #: () -> Symbol
+    def full_name
+      [parent&.full_name, name.to_s].compact.reject(&:empty?).join(":").to_sym
+    end
+  end
+end
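
`Namespace` is an immutable linked chain: `update_parent` returns a new node rather than mutating, and `full_name` walks up the chain, dropping the empty default name. For example:

```ruby
root  = JobWorkflow::Namespace.new(name: :etl)
child = JobWorkflow::Namespace.new(name: :import, parent: root)

child.full_name                          # => :"etl:import"
JobWorkflow::Namespace.default.default?  # => true

# Re-parenting onto the default namespace drops the empty segment:
child.update_parent(JobWorkflow::Namespace.default).full_name # => :import
```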

data/lib/job_workflow/output.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module JobWorkflow
+  class Output
+    class << self
+      #: (Array[Hash[untyped, untyped]]) -> Output
+      def from_hash_array(array)
+        task_outputs = array.map do |hash|
+          normalized_hash = hash.transform_keys(&:to_sym)
+          task_name = normalized_hash[:task_name]
+          each_index = normalized_hash[:each_index]
+          data = normalized_hash[:data]
+          TaskOutput.new(task_name:, each_index:, data:)
+        end
+        new(task_outputs:)
+      end
+
+      #: (Hash[String, untyped]) -> Output
+      def deserialize(hash)
+        new(task_outputs: hash.fetch("task_outputs", []).map { |shash| TaskOutput.deserialize(shash) })
+      end
+    end
+
+    #: (?task_outputs: Array[TaskOutput]) -> void
+    def initialize(task_outputs: [])
+      self.task_outputs = {}
+      task_outputs.each { |task_output| add_task_output(task_output) }
+    end
+
+    #: (task_name: Symbol?) -> Array[TaskOutput]
+    def fetch_all(task_name:)
+      fixed_type_task_name = task_name #: Symbol
+      task_outputs.fetch(fixed_type_task_name, []).compact
+    end
+
+    #: (task_name: Symbol?, each_index: Integer) -> TaskOutput?
+    def fetch(task_name:, each_index:)
+      fixed_type_task_name = task_name #: Symbol
+      task_outputs.fetch(fixed_type_task_name, [])[each_index]
+    end
+
+    #: (Symbol | String) -> Array[TaskOutput?]
+    def [](task_name)
+      task_outputs.fetch(task_name.to_sym, [])
+    end
+
+    #: (TaskOutput) -> void
+    def add_task_output(task_output)
+      task_outputs[task_output.task_name] ||= []
+      task_outputs[task_output.task_name][task_output.each_index] = task_output
+    end
+
+    #: (Array[String], Workflow) -> void
+    def update_task_outputs_from_db(job_ids, workflow)
+      jobs = SolidQueue::Job.where(active_job_id: job_ids)
+      return if jobs.empty?
+
+      update_task_outputs_from_jobs(jobs.to_a, workflow)
+    end
+
+    #: (Array[SolidQueue::Job], Workflow) -> void
+    def update_task_outputs_from_jobs(jobs, workflow)
+      jobs.each do |job|
+        context = Context.deserialize(job.arguments["job_workflow_context"].merge("workflow" => workflow))
+        task_output = context.each_task_output
+        next if task_output.nil?
+
+        add_task_output(task_output)
+      end
+    end
+
+    #: () -> Array[TaskOutput]
+    def flat_task_outputs
+      task_outputs.values.flatten.compact
+    end
+
+    private
+
+    attr_accessor :task_outputs #: Hash[Symbol, Array[TaskOutput]]
+  end
+end
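
`Output` mirrors the `JobStatus` layout: a hash from task name to an `each_index`-positioned array, with `from_hash_array` normalizing string keys. A sketch, assuming `TaskOutput` exposes the `task_name` reader used in `add_task_output` above:

```ruby
output = JobWorkflow::Output.from_hash_array([
  { "task_name" => :extract, "each_index" => 0, "data" => { "rows" => 10 } },
  { "task_name" => :extract, "each_index" => 1, "data" => { "rows" => 7 } }
])

output[:extract].length                          # => 2
output.fetch(task_name: :extract, each_index: 1) # => the second TaskOutput
output.flat_task_outputs.map(&:task_name)        # => [:extract, :extract]
```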

data/lib/job_workflow/output_def.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module JobWorkflow
+  class OutputDef
+    attr_reader :name #: Symbol
+    attr_reader :type #: String
+
+    #: (name: Symbol, type: String) -> void
+    def initialize(name:, type:)
+      @name = name
+      @type = type
+    end
+  end
+end

data/lib/job_workflow/queue.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module JobWorkflow
+  # Queue provides a unified interface for queue operations across different queue adapters.
+  #
+  # @example Pausing and resuming a queue
+  # ```ruby
+  # JobWorkflow::Queue.pause(:import_workflow)
+  # JobWorkflow::Queue.paused?(:import_workflow) # => true
+  # JobWorkflow::Queue.resume(:import_workflow)
+  # JobWorkflow::Queue.paused?(:import_workflow) # => false
+  # ```
+  #
+  # @example Getting queue metrics
+  # ```ruby
+  # JobWorkflow::Queue.latency(:import_workflow) # => 120 (seconds)
+  # JobWorkflow::Queue.size(:import_workflow) # => 42 (pending jobs)
+  # ```
+  #
+  # @example Listing workflows associated with a queue
+  # ```ruby
+  # JobWorkflow::Queue.workflows(:import_workflow) # => [ImportJob, DataSyncJob]
+  # ```
+  class Queue
+    class << self
+      #: (String | Symbol) -> bool
+      def pause(queue_name)
+        queue_name_str = queue_name.to_s
+        result = QueueAdapter.current.pause_queue(queue_name_str)
+        Instrumentation.notify_queue_pause(queue_name_str) if result
+        result
+      end
+
+      #: (String | Symbol) -> bool
+      def resume(queue_name)
+        queue_name_str = queue_name.to_s
+        result = QueueAdapter.current.resume_queue(queue_name_str)
+        Instrumentation.notify_queue_resume(queue_name_str) if result
+        result
+      end
+
+      #: (String | Symbol) -> bool
+      def paused?(queue_name)
+        QueueAdapter.current.queue_paused?(queue_name.to_s)
+      end
+
+      #: () -> Array[String]
+      def paused_queues
+        QueueAdapter.current.paused_queues
+      end
+
+      #: (String | Symbol) -> Integer?
+      def latency(queue_name)
+        QueueAdapter.current.queue_latency(queue_name.to_s)
+      end
+
+      #: (String | Symbol) -> Integer
+      def size(queue_name)
+        QueueAdapter.current.queue_size(queue_name.to_s)
+      end
+
+      #: (String | Symbol) -> bool
+      def clear(queue_name)
+        QueueAdapter.current.clear_queue(queue_name.to_s)
+      end
+
+      #: (String | Symbol) -> Array[singleton(DSL)]
+      def workflows(queue_name)
+        queue_name_str = queue_name.to_s
+        DSL._included_classes.filter { |job_class| job_class.queue_name == queue_name_str }.to_a
+      end
+    end
+  end
+end
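
Note that `pause`/`resume` emit their notifications only when the adapter reports success, so subscribers observe actual state transitions rather than attempts. A sketch of an audit hook built on that guarantee:

```ruby
ActiveSupport::Notifications.subscribe("queue.pause.job_workflow") do |*_args, payload|
  puts "queue paused: #{payload[:queue_name]}"
end

JobWorkflow::Queue.pause(:import_workflow) # notifies only if the adapter returned true
JobWorkflow::Queue.paused_queues           # => ["import_workflow"] (with an adapter that supports pausing)
```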

data/lib/job_workflow/queue_adapter.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require_relative "queue_adapters/abstract"
+require_relative "queue_adapters/null_adapter"
+require_relative "queue_adapters/solid_queue_adapter"
+
+module JobWorkflow
+  module QueueAdapter
+    # @rbs!
+    #   def self._current: () -> QueueAdapters::Abstract
+    #   def self._current=: (QueueAdapters::Abstract?) -> void
+
+    mattr_accessor :_current
+
+    class << self
+      #: () -> QueueAdapters::Abstract
+      def current
+        self._current ||= detect_adapter
+      end
+
+      #: () -> void
+      def reset!
+        self._current = nil
+      end
+
+      private
+
+      #: () -> QueueAdapters::Abstract
+      def detect_adapter
+        if defined?(SolidQueue)
+          QueueAdapters::SolidQueueAdapter.new
+        else
+          QueueAdapters::NullAdapter.new
+        end
+      end
+    end
+  end
+end
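
Adapter detection is lazy and memoized in the `mattr_accessor`, so it runs once per process; `reset!` forces re-detection, and the public `_current=` writer can swap in a specific adapter, e.g. in a test suite. A sketch:

```ruby
JobWorkflow::QueueAdapter.current
# => a SolidQueueAdapter when SolidQueue is loaded, a NullAdapter otherwise

# Pin a specific adapter (mattr_accessor exposes the _current writer):
JobWorkflow::QueueAdapter._current = JobWorkflow::QueueAdapters::NullAdapter.new
JobWorkflow::QueueAdapter.current # => the NullAdapter just assigned

JobWorkflow::QueueAdapter.reset!  # re-detect on next access
```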