fractor 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop_todo.yml +227 -102
- data/README.adoc +113 -1940
- data/docs/.lycheeignore +16 -0
- data/docs/Gemfile +24 -0
- data/docs/README.md +157 -0
- data/docs/_config.yml +151 -0
- data/docs/_features/error-handling.adoc +1192 -0
- data/docs/_features/index.adoc +80 -0
- data/docs/_features/monitoring.adoc +589 -0
- data/docs/_features/signal-handling.adoc +202 -0
- data/docs/_features/workflows.adoc +1235 -0
- data/docs/_guides/continuous-mode.adoc +736 -0
- data/docs/_guides/cookbook.adoc +1133 -0
- data/docs/_guides/index.adoc +55 -0
- data/docs/_guides/pipeline-mode.adoc +730 -0
- data/docs/_guides/troubleshooting.adoc +358 -0
- data/docs/_pages/architecture.adoc +1390 -0
- data/docs/_pages/core-concepts.adoc +1392 -0
- data/docs/_pages/design-principles.adoc +862 -0
- data/docs/_pages/getting-started.adoc +290 -0
- data/docs/_pages/installation.adoc +143 -0
- data/docs/_reference/api.adoc +1080 -0
- data/docs/_reference/error-reporting.adoc +670 -0
- data/docs/_reference/examples.adoc +181 -0
- data/docs/_reference/index.adoc +96 -0
- data/docs/_reference/troubleshooting.adoc +862 -0
- data/docs/_tutorials/complex-workflows.adoc +1022 -0
- data/docs/_tutorials/data-processing-pipeline.adoc +740 -0
- data/docs/_tutorials/first-application.adoc +384 -0
- data/docs/_tutorials/index.adoc +48 -0
- data/docs/_tutorials/long-running-services.adoc +931 -0
- data/docs/assets/images/favicon-16.png +0 -0
- data/docs/assets/images/favicon-32.png +0 -0
- data/docs/assets/images/favicon-48.png +0 -0
- data/docs/assets/images/favicon.ico +0 -0
- data/docs/assets/images/favicon.png +0 -0
- data/docs/assets/images/favicon.svg +45 -0
- data/docs/assets/images/fractor-icon.svg +49 -0
- data/docs/assets/images/fractor-logo.svg +61 -0
- data/docs/index.adoc +131 -0
- data/docs/lychee.toml +39 -0
- data/examples/api_aggregator/README.adoc +627 -0
- data/examples/api_aggregator/api_aggregator.rb +376 -0
- data/examples/auto_detection/README.adoc +407 -29
- data/examples/continuous_chat_common/message_protocol.rb +1 -1
- data/examples/error_reporting.rb +207 -0
- data/examples/file_processor/README.adoc +170 -0
- data/examples/file_processor/file_processor.rb +615 -0
- data/examples/file_processor/sample_files/invalid.csv +1 -0
- data/examples/file_processor/sample_files/orders.xml +24 -0
- data/examples/file_processor/sample_files/products.json +23 -0
- data/examples/file_processor/sample_files/users.csv +6 -0
- data/examples/hierarchical_hasher/README.adoc +629 -41
- data/examples/image_processor/README.adoc +610 -0
- data/examples/image_processor/image_processor.rb +349 -0
- data/examples/image_processor/processed_images/sample_10_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_1_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_2_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_3_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_4_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_5_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_6_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_7_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_8_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_9_processed.jpg.json +12 -0
- data/examples/image_processor/test_images/sample_1.png +1 -0
- data/examples/image_processor/test_images/sample_10.png +1 -0
- data/examples/image_processor/test_images/sample_2.png +1 -0
- data/examples/image_processor/test_images/sample_3.png +1 -0
- data/examples/image_processor/test_images/sample_4.png +1 -0
- data/examples/image_processor/test_images/sample_5.png +1 -0
- data/examples/image_processor/test_images/sample_6.png +1 -0
- data/examples/image_processor/test_images/sample_7.png +1 -0
- data/examples/image_processor/test_images/sample_8.png +1 -0
- data/examples/image_processor/test_images/sample_9.png +1 -0
- data/examples/log_analyzer/README.adoc +662 -0
- data/examples/log_analyzer/log_analyzer.rb +579 -0
- data/examples/log_analyzer/sample_logs/apache.log +20 -0
- data/examples/log_analyzer/sample_logs/json.log +15 -0
- data/examples/log_analyzer/sample_logs/nginx.log +15 -0
- data/examples/log_analyzer/sample_logs/rails.log +29 -0
- data/examples/multi_work_type/README.adoc +576 -26
- data/examples/performance_monitoring.rb +120 -0
- data/examples/pipeline_processing/README.adoc +740 -26
- data/examples/pipeline_processing/pipeline_processing.rb +2 -2
- data/examples/priority_work_example.rb +155 -0
- data/examples/producer_subscriber/README.adoc +889 -46
- data/examples/scatter_gather/README.adoc +829 -27
- data/examples/simple/README.adoc +347 -0
- data/examples/specialized_workers/README.adoc +622 -26
- data/examples/specialized_workers/specialized_workers.rb +44 -8
- data/examples/stream_processor/README.adoc +206 -0
- data/examples/stream_processor/stream_processor.rb +284 -0
- data/examples/web_scraper/README.adoc +625 -0
- data/examples/web_scraper/web_scraper.rb +285 -0
- data/examples/workflow/README.adoc +406 -0
- data/examples/workflow/circuit_breaker/README.adoc +360 -0
- data/examples/workflow/circuit_breaker/circuit_breaker_workflow.rb +225 -0
- data/examples/workflow/conditional/README.adoc +483 -0
- data/examples/workflow/conditional/conditional_workflow.rb +215 -0
- data/examples/workflow/dead_letter_queue/README.adoc +374 -0
- data/examples/workflow/dead_letter_queue/dead_letter_queue_workflow.rb +217 -0
- data/examples/workflow/fan_out/README.adoc +381 -0
- data/examples/workflow/fan_out/fan_out_workflow.rb +202 -0
- data/examples/workflow/retry/README.adoc +248 -0
- data/examples/workflow/retry/retry_workflow.rb +195 -0
- data/examples/workflow/simple_linear/README.adoc +267 -0
- data/examples/workflow/simple_linear/simple_linear_workflow.rb +175 -0
- data/examples/workflow/simplified/README.adoc +329 -0
- data/examples/workflow/simplified/simplified_workflow.rb +222 -0
- data/exe/fractor +10 -0
- data/lib/fractor/cli.rb +288 -0
- data/lib/fractor/configuration.rb +307 -0
- data/lib/fractor/continuous_server.rb +60 -65
- data/lib/fractor/error_formatter.rb +72 -0
- data/lib/fractor/error_report_generator.rb +152 -0
- data/lib/fractor/error_reporter.rb +244 -0
- data/lib/fractor/error_statistics.rb +147 -0
- data/lib/fractor/execution_tracer.rb +162 -0
- data/lib/fractor/logger.rb +230 -0
- data/lib/fractor/main_loop_handler.rb +406 -0
- data/lib/fractor/main_loop_handler3.rb +135 -0
- data/lib/fractor/main_loop_handler4.rb +299 -0
- data/lib/fractor/performance_metrics_collector.rb +181 -0
- data/lib/fractor/performance_monitor.rb +215 -0
- data/lib/fractor/performance_report_generator.rb +202 -0
- data/lib/fractor/priority_work.rb +93 -0
- data/lib/fractor/priority_work_queue.rb +189 -0
- data/lib/fractor/result_aggregator.rb +32 -0
- data/lib/fractor/shutdown_handler.rb +168 -0
- data/lib/fractor/signal_handler.rb +80 -0
- data/lib/fractor/supervisor.rb +382 -269
- data/lib/fractor/supervisor_logger.rb +88 -0
- data/lib/fractor/version.rb +1 -1
- data/lib/fractor/work.rb +12 -0
- data/lib/fractor/work_distribution_manager.rb +151 -0
- data/lib/fractor/work_queue.rb +20 -0
- data/lib/fractor/work_result.rb +181 -9
- data/lib/fractor/worker.rb +73 -0
- data/lib/fractor/workflow/builder.rb +210 -0
- data/lib/fractor/workflow/chain_builder.rb +169 -0
- data/lib/fractor/workflow/circuit_breaker.rb +183 -0
- data/lib/fractor/workflow/circuit_breaker_orchestrator.rb +208 -0
- data/lib/fractor/workflow/circuit_breaker_registry.rb +112 -0
- data/lib/fractor/workflow/dead_letter_queue.rb +334 -0
- data/lib/fractor/workflow/execution_hooks.rb +39 -0
- data/lib/fractor/workflow/execution_strategy.rb +225 -0
- data/lib/fractor/workflow/execution_trace.rb +134 -0
- data/lib/fractor/workflow/helpers.rb +191 -0
- data/lib/fractor/workflow/job.rb +290 -0
- data/lib/fractor/workflow/job_dependency_validator.rb +120 -0
- data/lib/fractor/workflow/logger.rb +110 -0
- data/lib/fractor/workflow/pre_execution_context.rb +193 -0
- data/lib/fractor/workflow/retry_config.rb +156 -0
- data/lib/fractor/workflow/retry_orchestrator.rb +184 -0
- data/lib/fractor/workflow/retry_strategy.rb +93 -0
- data/lib/fractor/workflow/structured_logger.rb +30 -0
- data/lib/fractor/workflow/type_compatibility_validator.rb +222 -0
- data/lib/fractor/workflow/visualizer.rb +211 -0
- data/lib/fractor/workflow/workflow_context.rb +132 -0
- data/lib/fractor/workflow/workflow_executor.rb +669 -0
- data/lib/fractor/workflow/workflow_result.rb +55 -0
- data/lib/fractor/workflow/workflow_validator.rb +295 -0
- data/lib/fractor/workflow.rb +333 -0
- data/lib/fractor/wrapped_ractor.rb +66 -101
- data/lib/fractor/wrapped_ractor3.rb +161 -0
- data/lib/fractor/wrapped_ractor4.rb +242 -0
- data/lib/fractor.rb +92 -4
- metadata +179 -6
- data/tests/sample.rb.bak +0 -309
- data/tests/sample_working.rb.bak +0 -209
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Fractor
|
|
4
|
+
class Workflow
|
|
5
|
+
# Base class for workflow execution strategies.
# Defines the interface for different execution patterns.
#
# @abstract Subclasses must implement the `execute` method
class ExecutionStrategy
  attr_reader :executor, :debug

  # Initialize a new execution strategy.
  #
  # @param executor [WorkflowExecutor] the workflow executor driving this run
  # @param debug [Boolean] whether to emit debug logging
  def initialize(executor, debug: false)
    @executor = executor
    @debug = debug
  end

  # Execute a group of jobs according to the strategy.
  #
  # @param job_group [Array<Job>] jobs to execute
  # @return [Boolean] true if execution should continue
  # @raise [WorkflowError] if execution fails
  def execute(job_group)
    raise NotImplementedError, "#{self.class} must implement #execute"
  end

  # Check whether a job's guard condition (if any) permits execution.
  #
  # @param job [Job] the job to check
  # @return [Boolean] true when the job has no condition or its condition passes
  def should_execute_job?(job)
    condition = job.condition_proc
    return true unless condition

    condition.call(executor.context)
  end

  protected

  # @return [Workflow] the workflow being executed
  def workflow
    executor.workflow
  end

  # @return [WorkflowContext] the shared execution context
  def context
    executor.context
  end

  # @return [ExecutionHooks] lifecycle hooks for this run
  def hooks
    executor.hooks
  end

  # @return [ExecutionTrace, nil] the trace collector, if tracing is enabled
  def trace
    executor.trace
  end

  # @return [DeadLetterQueue, nil] the dead letter queue, if configured
  def dead_letter_queue
    executor.dead_letter_queue
  end

  # Print a debug message when debug mode is enabled.
  #
  # @param message [String] the message to log
  def log_debug(message)
    puts "[ExecutionStrategy] #{message}" if debug
  end
end
|
|
84
|
+
|
|
85
|
+
# Strategy for executing jobs sequentially, one after another.
# Jobs are executed in the order they appear in the job group.
class SequentialExecutionStrategy < ExecutionStrategy
  # Execute a group of jobs sequentially.
  #
  # @param job_group [Array<Job>] jobs to execute
  # @return [Boolean] true if execution should continue
  def execute(job_group)
    names = job_group.map(&:name).join(', ')
    log_debug "Executing #{job_group.size} jobs sequentially: #{names}"

    job_group.each { |job| execute_single_job(job) }

    true
  end

  private

  # Run one job: trace it, execute it (with retry when configured), record
  # its output, and fire the completion hook. Errors are routed to
  # #handle_job_error.
  #
  # @param job [Job] the job to execute
  def execute_single_job(job)
    return unless should_execute_job?(job)

    current_trace = trace&.start_job(job_name: job.name,
                                     worker_class: job.worker_class&.name)
    current_trace&.set_input(job_input(job))

    output = execute_job_with_retry(job, current_trace)

    current_trace&.complete!(output: output)
    context.store_job_output(job.name, output)

    # NOTE(review): reaches into the executor's internals; a public accessor
    # on WorkflowExecutor would be cleaner.
    executor.instance_variable_get(:@completed_jobs).add(job.name)
    job.state(:completed)

    hooks.trigger(:job_complete, job, output, 0)
  rescue StandardError => e
    handle_job_error(job, e)
  end

  # Input prepared by the executor for the given job.
  #
  # @param job [Job] the job
  # @return [Object] the job input
  def job_input(job)
    executor.send(:prepare_job_input, job)
  end

  # Execute a job once, or via the executor's retry path when the job has a
  # retry configuration.
  #
  # @param job [Job] the job to execute
  # @param job_trace [Object] the job trace (may be nil)
  # @return [Object] the execution result
  def execute_job_with_retry(job, job_trace)
    return executor.send(:execute_job_once, job, job_trace) unless job.retry_config

    executor.send(:execute_job_with_retry, job, job_trace)
  end

  # Mark the job failed, then either run its fallback or add it to the dead
  # letter queue and re-raise the current error.
  #
  # @param job [Job] the job that failed
  # @param error [Exception] the error
  # @raise [StandardError] re-raises the current error when no fallback exists
  def handle_job_error(job, error)
    executor.instance_variable_get(:@failed_jobs).add(job.name)
    job.state(:failed)

    unless job.fallback_job
      executor.send(:add_to_dead_letter_queue, job, error, nil)
      # Bare raise re-raises the exception currently being rescued.
      raise
    end

    executor.send(:execute_fallback_job, job, error, nil, nil)
  end
end
|
|
166
|
+
|
|
167
|
+
# Strategy for executing jobs in parallel.
# Jobs are executed concurrently using Supervisor.
class ParallelExecutionStrategy < ExecutionStrategy
  # Execute a group of jobs in parallel.
  #
  # @param job_group [Array<Job>] jobs to execute
  # @return [Boolean] true if execution should continue
  def execute(job_group)
    names = job_group.map(&:name).join(', ')
    log_debug "Executing #{job_group.size} jobs in parallel: #{names}"

    executor.send(:execute_jobs_parallel, job_group)

    # Collect the jobs the executor marked as failed during the parallel run.
    failed_names = executor.instance_variable_get(:@failed_jobs)
    failed = job_group.select { |job| failed_names.include?(job.name) }
    handle_parallel_errors(failed) if failed.any?

    true
  end

  private

  # Raise unless every failed job has a fallback to recover with.
  #
  # @param failed_jobs [Array<Job>] jobs that failed
  # @raise [WorkflowError] if any failed job lacks a fallback
  def handle_parallel_errors(failed_jobs)
    unrecoverable = failed_jobs.reject(&:fallback_job)
    return if unrecoverable.empty?

    raise WorkflowError,
          "Parallel jobs failed without fallbacks: #{unrecoverable.map(&:name).join(', ')}"
  end
end
|
|
205
|
+
|
|
206
|
+
# Strategy for executing jobs as a pipeline.
# Jobs are executed sequentially with data flowing from one to the next.
class PipelineExecutionStrategy < SequentialExecutionStrategy
  # Execute a group of jobs as a pipeline.
  #
  # @param job_group [Array<Job>] jobs to execute (must be exactly 1 for pipeline)
  # @return [Boolean] true if execution should continue
  # @raise [WorkflowError] if the group does not contain exactly one job
  def execute(job_group)
    # The pipeline contract is exactly one job per group. The previous check
    # only rejected groups larger than one, letting an empty group slip
    # through to a nil crash in the debug log below.
    unless job_group.size == 1
      raise WorkflowError,
            "Pipeline strategy expects exactly 1 job per group, got #{job_group.size}"
    end

    log_debug "Executing pipeline job: #{job_group.first.name}"

    super
  end
end
|
|
224
|
+
end
|
|
225
|
+
end
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "json"
require "time"
|
|
4
|
+
|
|
5
|
+
module Fractor
|
|
6
|
+
class Workflow
|
|
7
|
+
# Tracks execution details for workflow runs.
# Provides a detailed trace of job execution, timings, and results.
#
# NOTE(review): #to_json relies on Hash#to_json, which is only defined once
# the "json" stdlib is loaded (added alongside the existing require "time").
class ExecutionTrace
  # ISO-8601 timestamp with millisecond precision, shared by all traces.
  TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%S.%3NZ".freeze

  attr_reader :workflow_name, :execution_id, :correlation_id,
              :started_at, :completed_at, :job_traces

  # @param workflow_name [String] name of the workflow being traced
  # @param execution_id [String] unique identifier for this execution
  # @param correlation_id [String] identifier correlating related executions
  def initialize(workflow_name:, execution_id:, correlation_id:)
    @workflow_name = workflow_name
    @execution_id = execution_id
    @correlation_id = correlation_id
    @started_at = Time.now.utc
    @completed_at = nil
    @job_traces = []
  end

  # Record the start of a job execution.
  #
  # @param job_name [String] the job's name
  # @param worker_class [String, nil] the worker class name
  # @return [JobTrace] the newly started job trace
  def start_job(job_name:, worker_class:)
    job_trace = JobTrace.new(job_name: job_name, worker_class: worker_class)
    @job_traces << job_trace
    job_trace
  end

  # Mark the workflow as completed.
  def complete!
    @completed_at = Time.now.utc
  end

  # Total duration in milliseconds, or nil while still running.
  #
  # @return [Float, nil]
  def total_duration_ms
    return nil unless @completed_at

    ((@completed_at - @started_at) * 1000).round(2)
  end

  # Convert the trace to a hash for serialization.
  #
  # @return [Hash]
  def to_h
    {
      workflow: @workflow_name,
      execution_id: @execution_id,
      correlation_id: @correlation_id,
      started_at: @started_at.strftime(TIMESTAMP_FORMAT),
      completed_at: @completed_at&.strftime(TIMESTAMP_FORMAT),
      total_duration_ms: total_duration_ms,
      jobs: @job_traces.map(&:to_h),
    }
  end

  # Convert the trace to JSON (requires the "json" stdlib).
  #
  # @return [String]
  def to_json(*_args)
    to_h.to_json
  end

  # Trace data for a single job execution.
  class JobTrace
    attr_reader :job_name, :worker_class, :started_at,
                :completed_at, :status, :error

    # @param job_name [String] the job's name
    # @param worker_class [String, nil] the worker class name
    def initialize(job_name:, worker_class:)
      @job_name = job_name
      @worker_class = worker_class
      @started_at = Time.now.utc
      @completed_at = nil
      @status = :running
      @error = nil
      @input_hash = nil
      @output_hash = nil
    end

    # Mark the job as successfully completed.
    #
    # @param output [Object, nil] job output; hashed for tracking when truthy
    def complete!(output: nil)
      @completed_at = Time.now.utc
      @status = :success
      @output_hash = hash_value(output) if output
    end

    # Mark the job as failed, capturing a summary of the error.
    #
    # @param error [Exception] the error that caused the failure
    def fail!(error:)
      @completed_at = Time.now.utc
      @status = :failed
      @error = {
        class: error.class.name,
        message: error.message,
        # Only the first few frames — enough to locate the failure.
        backtrace: error.backtrace&.first(5),
      }
    end

    # Record a short hash of the job input for tracking.
    #
    # @param input [Object] the job input
    def set_input(input)
      @input_hash = hash_value(input)
    end

    # Duration in milliseconds, or nil while still running.
    #
    # @return [Float, nil]
    def duration_ms
      return nil unless @completed_at

      ((@completed_at - @started_at) * 1000).round(2)
    end

    # Convert to a hash for serialization; nil entries are dropped.
    #
    # @return [Hash]
    def to_h
      {
        name: @job_name,
        worker: @worker_class,
        started_at: @started_at.strftime(TIMESTAMP_FORMAT),
        completed_at: @completed_at&.strftime(TIMESTAMP_FORMAT),
        duration_ms: duration_ms,
        status: @status.to_s,
        input_hash: @input_hash,
        output_hash: @output_hash,
        error: @error,
      }.compact
    end

    private

    # Short, non-cryptographic hash of a value for input/output tracking.
    def hash_value(value)
      return nil unless value

      value.to_s.hash.abs.to_s(16)[0..7]
    end
  end
end
|
|
133
|
+
end
|
|
134
|
+
end
|
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Fractor
|
|
4
|
+
class Workflow
|
|
5
|
+
# Helper workers that provide common patterns to reduce boilerplate
|
|
6
|
+
module Helpers
|
|
7
|
+
# Simple worker for basic transformations.
# Just implement the transform method.
#
# Example:
#   class MyWorker < Fractor::Workflow::Helpers::SimpleWorker
#     input_type InputData
#     output_type OutputData
#
#     def transform(input)
#       OutputData.new(result: input.value * 2)
#     end
#   end
class SimpleWorker < Fractor::Worker
  # Wrap the subclass transformation in a WorkResult.
  #
  # @param work [Fractor::Work] the unit of work to process
  # @return [Fractor::WorkResult] result carrying the transformed output
  def process(work)
    transformed = transform(work.input)
    Fractor::WorkResult.new(result: transformed, work: work)
  end

  # Override this method in subclasses.
  #
  # @raise [NotImplementedError] unless overridden
  def transform(input)
    raise NotImplementedError, "Subclasses must implement #transform"
  end
end
|
|
31
|
+
|
|
32
|
+
# Worker for mapping over collections.
# Implement the map_item method.
#
# Example:
#   class ProcessItems < Fractor::Workflow::Helpers::MapWorker
#     def map_item(item)
#       item.upcase
#     end
#   end
class MapWorker < Fractor::Worker
  # Map every item of the input collection and wrap the result.
  #
  # @param work [Fractor::Work] the unit of work to process
  # @return [Fractor::WorkResult] result carrying the mapped output
  def process(work)
    source = work.input
    items = extract_collection(source)

    transformed = items.map { |element| map_item(element) }

    Fractor::WorkResult.new(result: build_output(transformed, source), work: work)
  end

  # Override in subclasses to define how to map each item.
  #
  # @raise [NotImplementedError] unless overridden
  def map_item(item)
    raise NotImplementedError, "Subclasses must implement #map_item"
  end

  # Override to specify how to extract the collection from the input.
  # Default: uses #to_a when available, else wraps the input in an array.
  def extract_collection(input)
    return input.to_a if input.respond_to?(:to_a)

    [input]
  end

  # Override to specify how to build the output from the mapped collection.
  # Default: returns the array unchanged.
  def build_output(mapped_collection, _original_input)
    mapped_collection
  end
end
|
|
69
|
+
|
|
70
|
+
# Worker for filtering collections.
# Implement the filter_item? method.
#
# Example:
#   class FilterPositive < Fractor::Workflow::Helpers::FilterWorker
#     def filter_item?(item)
#       item > 0
#     end
#   end
class FilterWorker < Fractor::Worker
  # Keep only items for which #filter_item? is truthy, then wrap the result.
  #
  # @param work [Fractor::Work] the unit of work to process
  # @return [Fractor::WorkResult] result carrying the filtered output
  def process(work)
    source = work.input
    items = extract_collection(source)

    kept = items.select { |element| filter_item?(element) }

    Fractor::WorkResult.new(result: build_output(kept, source), work: work)
  end

  # Override in subclasses to define the filter predicate.
  #
  # @raise [NotImplementedError] unless overridden
  def filter_item?(item)
    raise NotImplementedError, "Subclasses must implement #filter_item?"
  end

  # Override to specify how to extract the collection from the input.
  # Default: uses #to_a when available, else wraps the input in an array.
  def extract_collection(input)
    return input.to_a if input.respond_to?(:to_a)

    [input]
  end

  # Override to specify how to build the output from the filtered collection.
  # Default: returns the array unchanged.
  def build_output(filtered_collection, _original_input)
    filtered_collection
  end
end
|
|
105
|
+
|
|
106
|
+
# Worker for reducing/aggregating collections.
# Implement the reduce_items method.
#
# Example:
#   class SumNumbers < Fractor::Workflow::Helpers::ReduceWorker
#     def reduce_items(collection)
#       collection.sum
#     end
#   end
class ReduceWorker < Fractor::Worker
  # Reduce the input collection to a single value and wrap the result.
  #
  # @param work [Fractor::Work] the unit of work to process
  # @return [Fractor::WorkResult] result carrying the reduced output
  def process(work)
    source = work.input
    items = extract_collection(source)

    reduced = reduce_items(items)

    Fractor::WorkResult.new(result: build_output(reduced, source), work: work)
  end

  # Override in subclasses to define the reduction logic.
  #
  # @raise [NotImplementedError] unless overridden
  def reduce_items(collection)
    raise NotImplementedError, "Subclasses must implement #reduce_items"
  end

  # Override to specify how to extract the collection from the input.
  # Default: uses #to_a when available, else wraps the input in an array.
  def extract_collection(input)
    return input.to_a if input.respond_to?(:to_a)

    [input]
  end

  # Override to specify how to build the output from the reduced value.
  # Default: returns the value unchanged.
  def build_output(reduced_value, _original_input)
    reduced_value
  end
end
|
|
141
|
+
|
|
142
|
+
# Worker for validation.
# Implement the validate method.
#
# Example:
#   class ValidateAge < Fractor::Workflow::Helpers::ValidationWorker
#     def validate(input)
#       return unless input.age < 0
#       add_error("Age must be positive")
#     end
#   end
class ValidationWorker < Fractor::Worker
  def initialize
    super
    @errors = []
  end

  # Run subclass validation against the input and wrap the outcome.
  #
  # @param work [Fractor::Work] the unit of work to process
  # @return [Fractor::WorkResult] result carrying the validation outcome
  def process(work)
    input = work.input
    # Reset per work item so errors from a previous run don't leak through.
    @errors = []

    validate(input)

    Fractor::WorkResult.new(result: build_output(input, @errors), work: work)
  end

  # Override in subclasses to define the validation logic.
  # Use add_error(message) to record errors.
  #
  # @raise [NotImplementedError] unless overridden
  def validate(input)
    raise NotImplementedError, "Subclasses must implement #validate"
  end

  # Record a validation error message.
  def add_error(message)
    @errors << message
  end

  # Override to customize the output format.
  # Default: hash with a valid flag, the errors, and the original input.
  def build_output(input, errors)
    {
      valid: errors.empty?,
      errors: errors,
      input: input,
    }
  end
end
|
|
189
|
+
end
|
|
190
|
+
end
|
|
191
|
+
end
|