solidflow 0.1.0
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +70 -0
- data/Rakefile +10 -0
- data/app/jobs/solidflow/jobs/run_execution_job.rb +21 -0
- data/app/jobs/solidflow/jobs/run_task_job.rb +107 -0
- data/app/jobs/solidflow/jobs/timer_sweep_job.rb +32 -0
- data/app/models/solidflow/application_record.rb +7 -0
- data/app/models/solidflow/event.rb +23 -0
- data/app/models/solidflow/execution.rb +36 -0
- data/app/models/solidflow/signal_message.rb +16 -0
- data/app/models/solidflow/timer.rb +19 -0
- data/bin/solidflow +7 -0
- data/db/migrate/001_create_solidflow_core_tables.rb +71 -0
- data/lib/solid_flow/cli.rb +68 -0
- data/lib/solid_flow/determinism.rb +39 -0
- data/lib/solid_flow/engine.rb +23 -0
- data/lib/solid_flow/errors.rb +52 -0
- data/lib/solid_flow/idempotency.rb +34 -0
- data/lib/solid_flow/instrumentation.rb +16 -0
- data/lib/solid_flow/registries/task_registry.rb +32 -0
- data/lib/solid_flow/registries/workflow_registry.rb +33 -0
- data/lib/solid_flow/replay.rb +258 -0
- data/lib/solid_flow/runner.rb +468 -0
- data/lib/solid_flow/serializers/oj.rb +24 -0
- data/lib/solid_flow/signals.rb +47 -0
- data/lib/solid_flow/stores/active_record.rb +461 -0
- data/lib/solid_flow/stores/base.rb +87 -0
- data/lib/solid_flow/task.rb +152 -0
- data/lib/solid_flow/testing.rb +28 -0
- data/lib/solid_flow/version.rb +5 -0
- data/lib/solid_flow/wait.rb +76 -0
- data/lib/solid_flow/workflow.rb +263 -0
- data/lib/solid_flow.rb +76 -0
- data/lib/solidflow.rb +3 -0
- metadata +213 -0
data/lib/solid_flow/stores/active_record.rb
@@ -0,0 +1,461 @@
# frozen_string_literal: true

require "ostruct"

module SolidFlow
  module Stores
    class ActiveRecord < Base
      THREAD_EXECUTION = :__solidflow_current_execution__

      def start_execution(workflow_class:, input:, graph_signature:)
        execution_id = SolidFlow.configuration.id_generator.call
        now = time_provider.call
        first_step = workflow_class.steps.first&.name
        input_payload = input.respond_to?(:deep_stringify_keys) ? input.deep_stringify_keys : input

        execution = SolidFlow::Execution.create!(
          id: execution_id,
          workflow: workflow_class.workflow_name,
          state: "running",
          ctx: input_payload,
          cursor_step: first_step&.to_s,
          cursor_index: 0,
          graph_signature: graph_signature,
          metadata: {},
          started_at: now,
          updated_at: now
        )

        append_event(
          execution_id: execution.id,
          type: :workflow_started,
          payload: { input: input }
        )

        enqueue_execution(
          execution_id: execution.id,
          reason: :start
        )

        build_handle(execution)
      end

      def signal_execution(execution_id:, workflow_class:, signal_name:, payload:)
        execution = SolidFlow::Execution.find(execution_id)

        message = execution.signal_messages.create!(
          signal_name: signal_name.to_s,
          payload: payload,
          metadata: {},
          status: "pending",
          received_at: time_provider.call
        )

        append_event(
          execution_id: execution.id,
          type: :signal_received,
          payload: {
            signal: signal_name,
            payload: payload,
            metadata: message.metadata,
            received_at: message.received_at
          }
        )

        enqueue_execution(
          execution_id: execution.id,
          reason: :signal
        )

        SolidFlow.instrument(
          "solidflow.signal.received",
          execution_id: execution.id,
          workflow: workflow_class.workflow_name,
          signal: signal_name,
          payload:
        )
      end

      def query_execution(execution_id:, workflow_class:)
        execution = SolidFlow::Execution.find(execution_id)
        events = load_history(execution_id)
        state = Replay.new(
          workflow_class:,
          events:,
          execution_record: serialize_execution(execution)
        ).call

        yield(state)
      end

      def with_execution(execution_id, lock: true)
        SolidFlow::Execution.transaction do
          relation = SolidFlow::Execution.where(id: execution_id)
          relation = relation.lock("FOR UPDATE SKIP LOCKED") if lock
          execution = relation.first
          raise Errors::ExecutionNotFound, execution_id unless execution

          previous = Thread.current[THREAD_EXECUTION]
          Thread.current[THREAD_EXECUTION] = execution

          yield serialize_execution(execution)
        ensure
          Thread.current[THREAD_EXECUTION] = previous
        end
      end

      def load_history(execution_id)
        SolidFlow::Event.where(execution_id:)
                        .ordered
                        .map(&:to_replay_event)
      end

      def append_event(execution_id:, type:, payload:, idempotency_key: nil)
        execution = execution_for(execution_id)

        next_sequence = (execution.events.maximum(:sequence) || 0) + 1

        execution.events.create!(
          sequence: next_sequence,
          event_type: type.to_s,
          payload: payload,
          recorded_at: time_provider.call,
          idempotency_key:
        )
      end

      def update_execution(execution_id:, attributes:)
        execution = execution_for(execution_id)
        execution.update!(normalize_attributes(attributes))
      end

      def persist_context(execution_id:, ctx:)
        execution = execution_for(execution_id)
        execution.update!(ctx: ctx.deep_stringify_keys)
      end

      def enqueue_execution(execution_id:, reason:)
        SolidFlow.instrument(
          "solidflow.execution.enqueued",
          execution_id:,
          reason:
        )
        SolidFlow::Jobs::RunExecutionJob.perform_later(execution_id)
      end

      def schedule_task(execution_id:, step:, task:, arguments:, headers:, run_at: nil)
        job = SolidFlow::Jobs::RunTaskJob
        if run_at
          job.set(wait_until: run_at).perform_later(execution_id, step, task, arguments, headers)
        else
          job.perform_later(execution_id, step, task, arguments, headers)
        end
      end

      def record_task_result(execution_id:, workflow_class:, step:, result:, attempt:, idempotency_key:)
        execution = execution_for(execution_id)

        existing = execution.events.where(
          event_type: "task_completed",
          idempotency_key:
        ).first

        return if existing

        ctx = execution.ctx_hash
        ctx[step.to_s] = result

        steps = workflow_class.steps
        next_index = execution.cursor_index + 1
        next_step = steps[next_index]&.name
        new_state = next_step ? "running" : "completed"

        execution.assign_attributes(
          ctx: ctx.deep_stringify_keys,
          cursor_index: next_index,
          cursor_step: next_step&.to_s,
          state: new_state,
          last_error: nil
        )
        execution.save!

        append_event(
          execution_id:,
          type: :task_completed,
          payload: {
            step: step,
            result: result,
            attempt: attempt,
            ctx_snapshot: ctx,
            idempotency_key:
          },
          idempotency_key: idempotency_key
        )

        if new_state == "completed"
          append_event(
            execution_id:,
            type: :workflow_completed,
            payload: {}
          )
          SolidFlow.instrument(
            "solidflow.execution.completed",
            execution_id:,
            workflow: workflow_class.workflow_name
          )
        else
          enqueue_execution(
            execution_id:,
            reason: :task_completed
          )
        end
      end

      def record_task_failure(execution_id:, workflow_class:, step:, attempt:, error:, retryable:)
        execution = execution_for(execution_id)

        execution.update!(
          last_error: error,
          state: retryable ? "running" : "failed"
        )

        append_event(
          execution_id:,
          type: :task_failed,
          payload: {
            step: step,
            attempt: attempt,
            error: error,
            retryable: retryable
          }
        )

        if retryable
          enqueue_execution(
            execution_id:,
            reason: :task_failed
          )
        else
          append_event(
            execution_id:,
            type: :workflow_failed,
            payload: {
              step: step,
              error: error
            }
          )

          SolidFlow.instrument(
            "solidflow.execution.failed",
            execution_id:,
            step: step,
            error: error
          )
        end
      end

      def schedule_timer(execution_id:, step:, run_at:, instruction:, metadata:)
        execution = execution_for(execution_id)

        timer = execution.timers.create!(
          step: step.to_s,
          run_at: run_at,
          status: "scheduled",
          instruction: instruction,
          metadata: metadata
        )

        append_event(
          execution_id:,
          type: :timer_scheduled,
          payload: {
            step: step,
            timer_id: timer.id,
            run_at: run_at,
            metadata: metadata
          }
        )

        SolidFlow.instrument(
          "solidflow.timer.scheduled",
          execution_id: execution.id,
          timer_id: timer.id,
          step: step,
          run_at: run_at
        )
      end

      def mark_timer_fired(timer_id:)
        timer = SolidFlow::Timer.lock.find(timer_id)
        return if timer.fired?

        timer.update!(status: "fired", fired_at: time_provider.call)

        append_event(
          execution_id: timer.execution_id,
          type: :timer_fired,
          payload: {
            timer_id: timer.id,
            step: timer.step
          }
        )

        enqueue_execution(
          execution_id: timer.execution_id,
          reason: :timer_fired
        )
      end

      def persist_signal_consumed(execution_id:, signal_name:)
        message = SolidFlow::SignalMessage
                  .where(execution_id:, signal_name: signal_name.to_s, status: "pending")
                  .order(:received_at)
                  .first

        return unless message

        message.update!(
          status: "consumed",
          consumed_at: time_provider.call
        )

        append_event(
          execution_id:,
          type: :signal_consumed,
          payload: {
            signal: signal_name
          }
        )

        SolidFlow.instrument(
          "solidflow.signal.consumed",
          execution_id:,
          signal: signal_name
        )
      end

      def schedule_compensation(execution_id:, workflow_class:, step:, compensation_task:, context:)
        execution = execution_for(execution_id)

        already_scheduled = execution.events.where(event_type: "compensation_scheduled")
                                     .where("payload ->> 'step' = ?", step.to_s)
                                     .where("payload ->> 'task' = ?", compensation_task.to_s)
                                     .exists?
        return if already_scheduled

        append_event(
          execution_id:,
          type: :compensation_scheduled,
          payload: {
            step: step,
            task: compensation_task
          }
        )

        headers = {
          execution_id: execution_id,
          step_name: step,
          attempt: 1,
          idempotency_key: Idempotency.digest(execution_id, step, "compensation"),
          workflow_name: workflow_class.workflow_name,
          metadata: execution.metadata,
          compensation: true,
          compensation_task: compensation_task
        }

        SolidFlow::Jobs::RunTaskJob.perform_later(
          execution_id,
          step,
          compensation_task,
          context,
          headers
        )

        SolidFlow.instrument(
          "solidflow.compensation.scheduled",
          execution_id:,
          step: step,
          task: compensation_task
        )
      end

      def record_compensation_result(execution_id:, step:, compensation_task:, result:)
        append_event(
          execution_id:,
          type: :compensation_completed,
          payload: {
            step: step,
            task: compensation_task,
            result: result
          }
        )

        SolidFlow.instrument(
          "solidflow.compensation.completed",
          execution_id:,
          step: step,
          task: compensation_task,
          result: result
        )
      end

      def record_compensation_failure(execution_id:, step:, compensation_task:, error:)
        append_event(
          execution_id:,
          type: :compensation_failed,
          payload: {
            step: step,
            task: compensation_task,
            error: error
          }
        )

        SolidFlow.instrument(
          "solidflow.compensation.failed",
          execution_id:,
          step: step,
          task: compensation_task,
          error: error
        )
      end

      private

      def execution_for(execution_id)
        current = Thread.current[THREAD_EXECUTION]
        return current if current&.id == execution_id

        SolidFlow::Execution.find(execution_id)
      end

      def serialize_execution(execution)
        {
          id: execution.id,
          workflow: execution.workflow,
          state: execution.state,
          ctx: execution.ctx_hash,
          cursor_step: execution.cursor_step,
          cursor_index: execution.cursor_index,
          graph_signature: execution.graph_signature,
          metadata: execution.metadata || {}
        }
      end

      def normalize_attributes(attributes)
        attributes.transform_keys(&:to_s).tap do |normalized|
          normalized["cursor_step"] = normalized["cursor_step"].to_s if normalized.key?("cursor_step") && !normalized["cursor_step"].nil?
        end
      end

      def build_handle(execution)
        OpenStruct.new(
          id: execution.id,
          workflow: execution.workflow,
          state: execution.state,
          cursor_step: execution.cursor_step,
          cursor_index: execution.cursor_index
        )
      end
    end
  end
end
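The store above is normally driven by the runner and the Active Job classes under data/app/jobs, but its public surface can be exercised directly. The sketch below is illustrative only: OrderWorkflow, the :payment_confirmed signal, the graph signature string, and the collaborators passed to the constructor are assumptions, not part of the gem.

# Hypothetical usage sketch (not shipped with the gem).
store = SolidFlow::Stores::ActiveRecord.new(
  event_serializer: serializer,       # placeholder collaborators; in a real app these are
  time_provider:    -> { Time.now },  # presumably supplied via SolidFlow's configuration
  logger:           Logger.new($stdout)
)

handle = store.start_execution(
  workflow_class:  OrderWorkflow,       # hypothetical workflow class exposing .steps and .workflow_name
  input:           { order_id: 42 },
  graph_signature: "sig-placeholder"    # normally computed elsewhere in the gem
)

store.signal_execution(
  execution_id:   handle.id,
  workflow_class: OrderWorkflow,
  signal_name:    :payment_confirmed,   # hypothetical signal name
  payload:        { paid_at: Time.now }
)

store.query_execution(execution_id: handle.id, workflow_class: OrderWorkflow) do |state|
  # state is produced by replaying the event history (see data/lib/solid_flow/replay.rb)
  puts state
end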
data/lib/solid_flow/stores/base.rb
@@ -0,0 +1,87 @@
# frozen_string_literal: true

module SolidFlow
  module Stores
    class Base
      def initialize(event_serializer:, time_provider:, logger:)
        @event_serializer = event_serializer
        @time_provider = time_provider
        @logger = logger
      end

      attr_reader :event_serializer, :time_provider, :logger

      def start_execution(workflow_class:, input:, graph_signature:)
        raise NotImplementedError
      end

      def signal_execution(execution_id:, workflow_class:, signal_name:, payload:)
        raise NotImplementedError
      end

      def query_execution(execution_id:, workflow_class:)
        raise NotImplementedError
      end

      def with_execution(execution_id, lock: true)
        raise NotImplementedError
      end

      def load_history(execution_id)
        raise NotImplementedError
      end

      def append_event(execution_id:, type:, payload:)
        raise NotImplementedError
      end

      def update_execution(execution_id:, attributes:)
        raise NotImplementedError
      end

      def persist_context(execution_id:, ctx:)
        raise NotImplementedError
      end

      def enqueue_execution(execution_id:, reason:)
        raise NotImplementedError
      end

      def schedule_task(execution_id:, step:, task:, arguments:, headers:, run_at: nil)
        raise NotImplementedError
      end

      def record_task_result(execution_id:, step:, result:, attempt:, ctx_snapshot:, idempotency_key:)
        raise NotImplementedError
      end

      def record_task_failure(execution_id:, step:, attempt:, error:, retryable:)
        raise NotImplementedError
      end

      def schedule_timer(execution_id:, step:, run_at:, instruction:, metadata:)
        raise NotImplementedError
      end

      def mark_timer_fired(timer_id:)
        raise NotImplementedError
      end

      def persist_signal_consumed(execution_id:, signal_name:)
        raise NotImplementedError
      end

      def schedule_compensation(execution_id:, workflow_class:, step:, compensation_task:, context:)
        raise NotImplementedError
      end

      def record_compensation_result(execution_id:, step:, compensation_task:, result:)
        raise NotImplementedError
      end

      def record_compensation_failure(execution_id:, step:, compensation_task:, error:)
        raise NotImplementedError
      end
    end
  end
end
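Base defines the store contract as keyword-argument methods that raise NotImplementedError; the ActiveRecord store above is the only implementation shipped in this release. As a rough illustration (not part of the gem), an alternative backend would subclass Base and override the hooks it needs, for example:

# Illustrative in-memory store sketch; class name and behavior are assumptions.
module SolidFlow
  module Stores
    class Memory < Base
      def initialize(**kwargs)
        super
        @events = Hash.new { |hash, key| hash[key] = [] }
      end

      # Append an event to the per-execution in-memory log, mirroring the
      # sequence/event_type/payload shape used by the ActiveRecord store.
      def append_event(execution_id:, type:, payload:)
        @events[execution_id] << {
          sequence:    @events[execution_id].size + 1,
          event_type:  type.to_s,
          payload:     payload,
          recorded_at: time_provider.call
        }
      end

      def load_history(execution_id)
        @events[execution_id]
      end

      # The remaining contract methods would need overrides too; anything left
      # unimplemented still raises NotImplementedError from Base.
    end
  end
end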
data/lib/solid_flow/task.rb
@@ -0,0 +1,152 @@
# frozen_string_literal: true

require "active_support/core_ext/hash/keys"
require "active_support/core_ext/string/inflections"

module SolidFlow
  # Base class for all stateless tasks executed out-of-band from the workflow process.
  class Task
    THREAD_KEY = :__solidflow_task_context__

    class << self
      attr_reader :timeout_config, :retry_config, :queue_name, :task_symbol

      def inherited(subclass)
        super
        subclass.instance_variable_set(:@timeout_config, timeout_config&.dup)
        subclass.instance_variable_set(:@retry_config, retry_config&.dup)
        subclass.instance_variable_set(:@queue_name, queue_name)
        subclass.instance_variable_set(:@task_symbol, subclass.default_task_symbol)
        SolidFlow.task_registry.register(subclass.default_task_symbol, subclass)
      end

      def default_task_symbol
        name.demodulize.underscore.to_sym
      end

      def register_as(name)
        @task_symbol = name.to_sym
        SolidFlow.task_registry.register(@task_symbol, self)
      end

      def timeout(value = nil)
        if value
          @timeout_config = value
        else
          @timeout_config
        end
      end

      def retry(options = nil)
        if options
          @retry_config = options.deep_symbolize_keys
        else
          @retry_config || {}
        end
      end

      def queue(name = nil)
        if name
          @queue_name = name
        else
          @queue_name || SolidFlow.configuration.default_task_queue
        end
      end

      def execute(arguments:, headers:)
        context = TaskContext.new(
          execution_id: headers[:execution_id],
          step_name: headers[:step_name],
          attempt: headers[:attempt],
          idempotency_key: headers[:idempotency_key],
          workflow_name: headers[:workflow_name],
          metadata: headers[:metadata] || {}
        )

        SolidFlow.instrument(
          "solidflow.task.started",
          task: task_symbol,
          execution_id: context.execution_id,
          step_name: context.step_name,
          attempt: context.attempt
        )

        result = with_task_context(context) do
          new.perform(**arguments.symbolize_keys)
        end

        SolidFlow.instrument(
          "solidflow.task.completed",
          task: task_symbol,
          execution_id: context.execution_id,
          step_name: context.step_name,
          attempt: context.attempt,
          result:
        )

        result
      rescue StandardError => e
        SolidFlow.instrument(
          "solidflow.task.failed",
          task: task_symbol,
          execution_id: context.execution_id,
          step_name: context.step_name,
          attempt: context.attempt,
          error: e
        )
        raise Errors::TaskFailure.new(e.message, details: { class: e.class.name, backtrace: e.backtrace })
      end

      def current_context
        Thread.current[THREAD_KEY]
      end

      private

      def with_task_context(context)
        previous = Thread.current[THREAD_KEY]
        Thread.current[THREAD_KEY] = context
        yield
      ensure
        Thread.current[THREAD_KEY] = previous
      end
    end

    class TaskContext
      attr_reader :execution_id, :step_name, :attempt, :idempotency_key, :workflow_name, :metadata

      def initialize(execution_id:, step_name:, attempt:, idempotency_key:, workflow_name:, metadata:)
        @execution_id = execution_id
        @step_name = step_name
        @attempt = attempt
        @idempotency_key = idempotency_key
        @workflow_name = workflow_name
        @metadata = metadata
      end
    end

    def perform(**)
      raise NotImplementedError, "Tasks must implement #perform"
    end

    def current_context
      self.class.current_context
    end

    def current_execution_id
      current_context&.execution_id
    end

    def current_step_name
      current_context&.step_name
    end

    def current_attempt
      current_context&.attempt
    end

    def current_idempotency_key
      current_context&.idempotency_key
    end
  end
end
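SolidFlow::Task gives subclasses a small class-level DSL (register_as, queue, timeout, retry) and wraps perform in a TaskContext plus instrumentation via .execute. A hypothetical task definition might look like the sketch below; the class name, queue, timeout value, and retry keys are assumptions, and arguments/headers are normally built by RunTaskJob rather than by hand.

# Hypothetical task (not part of the gem) using the class-level DSL above.
class ChargeCardTask < SolidFlow::Task
  register_as :charge_card              # without this, the symbol defaults to :charge_card_task
  queue   :payments
  timeout 30
  self.retry(max_attempts: 5)           # retry options; the exact keys are interpreted by the runner

  def perform(order_id:, amount_cents:)
    # current_execution_id / current_attempt read the TaskContext set by .execute
    { charged: true, order_id: order_id, attempt: current_attempt }
  end
end

# The job layer invokes it roughly like this (placeholder values shown):
ChargeCardTask.execute(
  arguments: { "order_id" => 42, "amount_cents" => 1999 },
  headers:   { execution_id: "exec-1", step_name: :charge, attempt: 1,
               idempotency_key: "key-1", workflow_name: "order_workflow", metadata: {} }
)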