fractor 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop_todo.yml +227 -102
- data/README.adoc +113 -1940
- data/docs/.lycheeignore +16 -0
- data/docs/Gemfile +24 -0
- data/docs/README.md +157 -0
- data/docs/_config.yml +151 -0
- data/docs/_features/error-handling.adoc +1192 -0
- data/docs/_features/index.adoc +80 -0
- data/docs/_features/monitoring.adoc +589 -0
- data/docs/_features/signal-handling.adoc +202 -0
- data/docs/_features/workflows.adoc +1235 -0
- data/docs/_guides/continuous-mode.adoc +736 -0
- data/docs/_guides/cookbook.adoc +1133 -0
- data/docs/_guides/index.adoc +55 -0
- data/docs/_guides/pipeline-mode.adoc +730 -0
- data/docs/_guides/troubleshooting.adoc +358 -0
- data/docs/_pages/architecture.adoc +1390 -0
- data/docs/_pages/core-concepts.adoc +1392 -0
- data/docs/_pages/design-principles.adoc +862 -0
- data/docs/_pages/getting-started.adoc +290 -0
- data/docs/_pages/installation.adoc +143 -0
- data/docs/_reference/api.adoc +1080 -0
- data/docs/_reference/error-reporting.adoc +670 -0
- data/docs/_reference/examples.adoc +181 -0
- data/docs/_reference/index.adoc +96 -0
- data/docs/_reference/troubleshooting.adoc +862 -0
- data/docs/_tutorials/complex-workflows.adoc +1022 -0
- data/docs/_tutorials/data-processing-pipeline.adoc +740 -0
- data/docs/_tutorials/first-application.adoc +384 -0
- data/docs/_tutorials/index.adoc +48 -0
- data/docs/_tutorials/long-running-services.adoc +931 -0
- data/docs/assets/images/favicon-16.png +0 -0
- data/docs/assets/images/favicon-32.png +0 -0
- data/docs/assets/images/favicon-48.png +0 -0
- data/docs/assets/images/favicon.ico +0 -0
- data/docs/assets/images/favicon.png +0 -0
- data/docs/assets/images/favicon.svg +45 -0
- data/docs/assets/images/fractor-icon.svg +49 -0
- data/docs/assets/images/fractor-logo.svg +61 -0
- data/docs/index.adoc +131 -0
- data/docs/lychee.toml +39 -0
- data/examples/api_aggregator/README.adoc +627 -0
- data/examples/api_aggregator/api_aggregator.rb +376 -0
- data/examples/auto_detection/README.adoc +407 -29
- data/examples/continuous_chat_common/message_protocol.rb +1 -1
- data/examples/error_reporting.rb +207 -0
- data/examples/file_processor/README.adoc +170 -0
- data/examples/file_processor/file_processor.rb +615 -0
- data/examples/file_processor/sample_files/invalid.csv +1 -0
- data/examples/file_processor/sample_files/orders.xml +24 -0
- data/examples/file_processor/sample_files/products.json +23 -0
- data/examples/file_processor/sample_files/users.csv +6 -0
- data/examples/hierarchical_hasher/README.adoc +629 -41
- data/examples/image_processor/README.adoc +610 -0
- data/examples/image_processor/image_processor.rb +349 -0
- data/examples/image_processor/processed_images/sample_10_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_1_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_2_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_3_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_4_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_5_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_6_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_7_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_8_processed.jpg.json +12 -0
- data/examples/image_processor/processed_images/sample_9_processed.jpg.json +12 -0
- data/examples/image_processor/test_images/sample_1.png +1 -0
- data/examples/image_processor/test_images/sample_10.png +1 -0
- data/examples/image_processor/test_images/sample_2.png +1 -0
- data/examples/image_processor/test_images/sample_3.png +1 -0
- data/examples/image_processor/test_images/sample_4.png +1 -0
- data/examples/image_processor/test_images/sample_5.png +1 -0
- data/examples/image_processor/test_images/sample_6.png +1 -0
- data/examples/image_processor/test_images/sample_7.png +1 -0
- data/examples/image_processor/test_images/sample_8.png +1 -0
- data/examples/image_processor/test_images/sample_9.png +1 -0
- data/examples/log_analyzer/README.adoc +662 -0
- data/examples/log_analyzer/log_analyzer.rb +579 -0
- data/examples/log_analyzer/sample_logs/apache.log +20 -0
- data/examples/log_analyzer/sample_logs/json.log +15 -0
- data/examples/log_analyzer/sample_logs/nginx.log +15 -0
- data/examples/log_analyzer/sample_logs/rails.log +29 -0
- data/examples/multi_work_type/README.adoc +576 -26
- data/examples/performance_monitoring.rb +120 -0
- data/examples/pipeline_processing/README.adoc +740 -26
- data/examples/pipeline_processing/pipeline_processing.rb +2 -2
- data/examples/priority_work_example.rb +155 -0
- data/examples/producer_subscriber/README.adoc +889 -46
- data/examples/scatter_gather/README.adoc +829 -27
- data/examples/simple/README.adoc +347 -0
- data/examples/specialized_workers/README.adoc +622 -26
- data/examples/specialized_workers/specialized_workers.rb +44 -8
- data/examples/stream_processor/README.adoc +206 -0
- data/examples/stream_processor/stream_processor.rb +284 -0
- data/examples/web_scraper/README.adoc +625 -0
- data/examples/web_scraper/web_scraper.rb +285 -0
- data/examples/workflow/README.adoc +406 -0
- data/examples/workflow/circuit_breaker/README.adoc +360 -0
- data/examples/workflow/circuit_breaker/circuit_breaker_workflow.rb +225 -0
- data/examples/workflow/conditional/README.adoc +483 -0
- data/examples/workflow/conditional/conditional_workflow.rb +215 -0
- data/examples/workflow/dead_letter_queue/README.adoc +374 -0
- data/examples/workflow/dead_letter_queue/dead_letter_queue_workflow.rb +217 -0
- data/examples/workflow/fan_out/README.adoc +381 -0
- data/examples/workflow/fan_out/fan_out_workflow.rb +202 -0
- data/examples/workflow/retry/README.adoc +248 -0
- data/examples/workflow/retry/retry_workflow.rb +195 -0
- data/examples/workflow/simple_linear/README.adoc +267 -0
- data/examples/workflow/simple_linear/simple_linear_workflow.rb +175 -0
- data/examples/workflow/simplified/README.adoc +329 -0
- data/examples/workflow/simplified/simplified_workflow.rb +222 -0
- data/exe/fractor +10 -0
- data/lib/fractor/cli.rb +288 -0
- data/lib/fractor/configuration.rb +307 -0
- data/lib/fractor/continuous_server.rb +60 -65
- data/lib/fractor/error_formatter.rb +72 -0
- data/lib/fractor/error_report_generator.rb +152 -0
- data/lib/fractor/error_reporter.rb +244 -0
- data/lib/fractor/error_statistics.rb +147 -0
- data/lib/fractor/execution_tracer.rb +162 -0
- data/lib/fractor/logger.rb +230 -0
- data/lib/fractor/main_loop_handler.rb +406 -0
- data/lib/fractor/main_loop_handler3.rb +135 -0
- data/lib/fractor/main_loop_handler4.rb +299 -0
- data/lib/fractor/performance_metrics_collector.rb +181 -0
- data/lib/fractor/performance_monitor.rb +215 -0
- data/lib/fractor/performance_report_generator.rb +202 -0
- data/lib/fractor/priority_work.rb +93 -0
- data/lib/fractor/priority_work_queue.rb +189 -0
- data/lib/fractor/result_aggregator.rb +32 -0
- data/lib/fractor/shutdown_handler.rb +168 -0
- data/lib/fractor/signal_handler.rb +80 -0
- data/lib/fractor/supervisor.rb +382 -269
- data/lib/fractor/supervisor_logger.rb +88 -0
- data/lib/fractor/version.rb +1 -1
- data/lib/fractor/work.rb +12 -0
- data/lib/fractor/work_distribution_manager.rb +151 -0
- data/lib/fractor/work_queue.rb +20 -0
- data/lib/fractor/work_result.rb +181 -9
- data/lib/fractor/worker.rb +73 -0
- data/lib/fractor/workflow/builder.rb +210 -0
- data/lib/fractor/workflow/chain_builder.rb +169 -0
- data/lib/fractor/workflow/circuit_breaker.rb +183 -0
- data/lib/fractor/workflow/circuit_breaker_orchestrator.rb +208 -0
- data/lib/fractor/workflow/circuit_breaker_registry.rb +112 -0
- data/lib/fractor/workflow/dead_letter_queue.rb +334 -0
- data/lib/fractor/workflow/execution_hooks.rb +39 -0
- data/lib/fractor/workflow/execution_strategy.rb +225 -0
- data/lib/fractor/workflow/execution_trace.rb +134 -0
- data/lib/fractor/workflow/helpers.rb +191 -0
- data/lib/fractor/workflow/job.rb +290 -0
- data/lib/fractor/workflow/job_dependency_validator.rb +120 -0
- data/lib/fractor/workflow/logger.rb +110 -0
- data/lib/fractor/workflow/pre_execution_context.rb +193 -0
- data/lib/fractor/workflow/retry_config.rb +156 -0
- data/lib/fractor/workflow/retry_orchestrator.rb +184 -0
- data/lib/fractor/workflow/retry_strategy.rb +93 -0
- data/lib/fractor/workflow/structured_logger.rb +30 -0
- data/lib/fractor/workflow/type_compatibility_validator.rb +222 -0
- data/lib/fractor/workflow/visualizer.rb +211 -0
- data/lib/fractor/workflow/workflow_context.rb +132 -0
- data/lib/fractor/workflow/workflow_executor.rb +669 -0
- data/lib/fractor/workflow/workflow_result.rb +55 -0
- data/lib/fractor/workflow/workflow_validator.rb +295 -0
- data/lib/fractor/workflow.rb +333 -0
- data/lib/fractor/wrapped_ractor.rb +66 -101
- data/lib/fractor/wrapped_ractor3.rb +161 -0
- data/lib/fractor/wrapped_ractor4.rb +242 -0
- data/lib/fractor.rb +92 -4
- metadata +179 -6
- data/tests/sample.rb.bak +0 -309
- data/tests/sample_working.rb.bak +0 -209
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Fractor
  class Workflow
    # Immutable summary of a single workflow run: which jobs finished,
    # which failed, how long the run took, and the final output value.
    class WorkflowResult
      attr_reader :workflow_name, :output
      attr_reader :completed_jobs, :failed_jobs
      attr_reader :execution_time, :success
      attr_reader :trace, :correlation_id

      # Build a result snapshot for a finished workflow run.
      #
      # @param workflow_name [String] The name of the workflow
      # @param output [Object] The workflow output
      # @param completed_jobs [Array<String>] List of completed job names
      # @param failed_jobs [Array<String>] List of failed job names
      # @param execution_time [Float] Execution time in seconds
      # @param success [Boolean] Whether the workflow succeeded
      # @param trace [ExecutionTrace, nil] Optional execution trace
      # @param correlation_id [String, nil] Optional correlation ID
      def initialize(workflow_name:, output:, completed_jobs:, failed_jobs:,
                     execution_time:, success:, trace: nil, correlation_id: nil)
        @success = success
        @workflow_name = workflow_name
        @output = output
        @completed_jobs = completed_jobs
        @failed_jobs = failed_jobs
        @execution_time = execution_time
        @trace = trace
        @correlation_id = correlation_id
      end

      # Whether the workflow run succeeded.
      #
      # @return [Boolean] true if successful
      def success?
        @success
      end

      # Whether the workflow run failed (the inverse of #success?).
      #
      # @return [Boolean] true if failed
      def failed?
        !success?
      end

      # Execution time expressed in milliseconds.
      #
      # @return [Float] Execution time in milliseconds, rounded to 2 places
      def execution_time_ms
        millis = @execution_time * 1000
        millis.round(2)
      end
    end
  end
end
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "set"

module Fractor
  class Workflow
    # Validates workflow structure and configuration.
    # Checks for cycles, missing dependencies, type compatibility, and proper entry/exit points.
    #
    # This validator integrates JobDependencyValidator and TypeCompatibilityValidator
    # to provide comprehensive validation with detailed error messages.
    class WorkflowValidator
      attr_reader :workflow_class

      # @param workflow_class [Class] the Workflow subclass whose DSL state is validated
      def initialize(workflow_class)
        @workflow_class = workflow_class
      end

      # Validate the workflow structure.
      # Raises appropriate errors if validation fails.
      #
      # Order matters: smart defaults and auto-wiring mutate the workflow
      # class before the structural checks run.
      def validate!
        validate_basic_structure!
        apply_smart_defaults!
        auto_wire_job_inputs!

        # Use new validators for better error messages
        validate_dependencies_with_new_validator!
        validate_type_compatibility!

        validate_entry_exit_points! unless continuous_mode?
        validate_job_workers!
        validate_input_mappings!
      end

      private

      # @return [Boolean] true when the workflow runs in :continuous mode
      def continuous_mode?
        @workflow_class.workflow_mode == :continuous
      end

      # Ensure at least one job has been defined via the DSL.
      def validate_basic_structure!
        return unless @workflow_class.jobs.empty?

        raise WorkflowError,
              "Workflow '#{@workflow_class.workflow_name}' has no jobs defined.\n\n" \
              "A workflow must define at least one job using the `job` DSL method:\n\n" \
              "  workflow '#{@workflow_class.workflow_name}' do\n" \
              "    job 'process' do\n" \
              "      runs_with MyWorker\n" \
              "      inputs_from_workflow\n" \
              "      outputs_to_workflow\n" \
              "    end\n" \
              "  end"
      end

      # Apply smart defaults for start/end jobs if not explicitly configured.
      # Only applies in pipeline mode; continuous workflows manage their own
      # entry/exit semantics.
      def apply_smart_defaults!
        return if continuous_mode?

        # Auto-detect start job if not specified: the unique job with no dependencies.
        unless @workflow_class.start_job_name
          start_jobs = @workflow_class.jobs.values.select do |job|
            job.dependencies.empty?
          end

          if start_jobs.size == 1
            @workflow_class.instance_variable_set(:@start_job_name,
                                                  start_jobs.first.name)
          end
        end

        # Auto-detect end jobs if not specified
        if @workflow_class.end_job_names.empty?
          # Find jobs with no dependents (leaf jobs)
          all_dependencies = @workflow_class.jobs.values.flat_map(&:dependencies).to_set
          end_job_candidates = @workflow_class.jobs.keys.reject do |job_name|
            all_dependencies.include?(job_name)
          end

          # Mark every leaf job as a successful terminal of the workflow.
          end_job_candidates.each do |job_name|
            job = @workflow_class.jobs[job_name]
            job.outputs_to_workflow
            job.terminates_workflow
            @workflow_class.end_job_names << { name: job_name,
                                               condition: :success }
          end
        end
      end

      # Auto-wire job inputs based on dependencies
      def auto_wire_job_inputs!
        @workflow_class.jobs.each_value(&:auto_wire_inputs!)
      end

      # Validate dependencies using JobDependencyValidator for better error messages.
      def validate_dependencies_with_new_validator!
        jobs = @workflow_class.jobs.values
        validator = JobDependencyValidator.new(jobs)

        begin
          validator.validate!
        rescue JobDependencyValidator::DependencyError => e
          # Convert to WorkflowError with additional context
          raise WorkflowError,
                "Workflow '#{@workflow_class.workflow_name}' has dependency issues:\n\n" \
                "#{e.message}\n\n" \
                "Fix: Ensure all job dependencies exist and there are no circular dependencies."
        end
      end

      # Validate type compatibility between connected jobs.
      def validate_type_compatibility!
        jobs = @workflow_class.jobs.values
        validator = TypeCompatibilityValidator.new(jobs)

        issues = validator.check_compatibility_between_jobs
        return if issues.empty?

        # Build detailed error message
        error_lines = ["Workflow '#{@workflow_class.workflow_name}' has type compatibility issues:\n"]

        issues.each do |issue|
          error_lines << "  Job '#{issue[:consumer]}' depends on '#{issue[:producer]}'"
          error_lines << "    Producer output type: #{issue[:producer_type]}"
          error_lines << "    Consumer input type: #{issue[:consumer_type]}"
          error_lines << "    Suggestion: #{issue[:suggestion]}"
          error_lines << ""
        end

        error_lines << "Fix: Ensure compatible types between connected jobs."

        raise WorkflowError, error_lines.join("\n")
      end

      # Pipeline-mode only: ensure start_with/end_with are declared, point at
      # real jobs, and that every job is reachable from the start job.
      def validate_entry_exit_points!
        # Pipeline mode requires start_with and end_with
        unless @workflow_class.start_job_name
          raise WorkflowError,
                "Pipeline workflow '#{@workflow_class.workflow_name}' must define start_with.\n\n" \
                "Add a start job to your workflow:\n\n" \
                "  workflow '#{@workflow_class.workflow_name}' do\n" \
                "    start_with 'process'  # Define the starting job\n" \
                "    job 'process' do\n" \
                "      runs_with MyWorker\n" \
                "      # ...\n" \
                "    end\n" \
                "  end"
        end

        if @workflow_class.end_job_names.empty?
          raise WorkflowError,
                "Pipeline workflow '#{@workflow_class.workflow_name}' must define at least one end_with.\n\n" \
                "Add an end job to your workflow:\n\n" \
                "  workflow '#{@workflow_class.workflow_name}' do\n" \
                "    # ...\n" \
                "    end_with 'finalize'  # Define the ending job\n" \
                "    job 'finalize' do\n" \
                "      runs_with FinalizeWorker\n" \
                "      outputs_to_workflow\n" \
                "      terminates_workflow\n" \
                "    end\n" \
                "  end"
        end

        # Verify start job exists
        unless @workflow_class.jobs.key?(@workflow_class.start_job_name)
          raise WorkflowError,
                "Start job '#{@workflow_class.start_job_name}' not defined in workflow.\n\n" \
                "Available jobs: #{@workflow_class.jobs.keys.join(', ')}\n\n" \
                "Fix: Define the missing job or correct the start_with name."
        end

        # Verify end jobs exist
        @workflow_class.end_job_names.each do |end_job_spec|
          job_name = end_job_spec[:name]
          unless @workflow_class.jobs.key?(job_name)
            raise WorkflowError,
                  "End job '#{job_name}' not defined in workflow.\n\n" \
                  "Available jobs: #{@workflow_class.jobs.keys.join(', ')}\n\n" \
                  "Fix: Define the missing job or correct the end_with name."
          end
        end

        # Verify all jobs are reachable from start
        validate_reachability!
      end

      # Every job must name a worker class, and that worker must declare both
      # its input_type and output_type so type checking can run.
      def validate_job_workers!
        @workflow_class.jobs.each do |name, job|
          unless job.worker_class
            raise WorkflowError,
                  "Job '#{name}' does not specify a worker class.\n\n" \
                  "Add a worker using runs_with:\n\n" \
                  "  job '#{name}' do\n" \
                  "    runs_with MyWorker  # Specify the worker class\n" \
                  "  end"
          end

          unless job.input_type
            raise WorkflowError,
                  "Job '#{name}' worker '#{job.worker_class}' does not declare input_type.\n\n" \
                  "Add input_type to your worker:\n\n" \
                  "  class #{job.worker_class} < Fractor::Worker\n" \
                  "    input_type MyInputClass\n" \
                  "    output_type MyOutputClass\n" \
                  "  end"
          end

          unless job.output_type
            raise WorkflowError,
                  "Job '#{name}' worker '#{job.worker_class}' does not declare output_type.\n\n" \
                  "Add output_type to your worker:\n\n" \
                  "  class #{job.worker_class} < Fractor::Worker\n" \
                  "    input_type MyInputClass\n" \
                  "    output_type MyOutputClass\n" \
                  "  end"
          end
        end
      end

      # After auto-wiring, every job must have input mappings and every
      # mapping source must be :workflow or the name of a defined job.
      def validate_input_mappings!
        @workflow_class.jobs.each do |name, job|
          # After auto-wiring, all jobs should have input mappings
          if job.input_mappings.empty?
            if job.dependencies.size > 1
              raise WorkflowError,
                    "Job '#{name}' has multiple dependencies (#{job.dependencies.join(', ')}). " \
                    "Please explicitly configure inputs using inputs_from_job or inputs_from_multiple"
            else
              raise WorkflowError,
                    "Job '#{name}' has no input mappings configured"
            end
          end

          # Validate source jobs exist in mappings
          job.input_mappings.each_key do |source|
            next if source == :workflow

            unless @workflow_class.jobs.key?(source)
              raise WorkflowError,
                    "Job '#{name}' maps inputs from '#{source}' which is not defined"
            end
          end
        end
      end

      # Ensure no job is orphaned: everything must be reachable from the
      # start job via dependency or fallback edges.
      def validate_reachability!
        start_job = @workflow_class.start_job_name
        reachable = compute_reachable_jobs(start_job)

        unreachable = @workflow_class.jobs.keys.to_set - reachable
        return if unreachable.empty?

        raise WorkflowError,
              "Unreachable jobs detected: #{unreachable.to_a.join(', ')}. " \
              "All jobs must be reachable from start_with job '#{start_job}'"
      end

      # Breadth-first search over the job graph starting at +start_job+.
      #
      # Edges followed: job A -> job B when B depends on A, and
      # job A -> A's fallback job when one is configured.
      #
      # The successor adjacency map is built once up front, so the search is
      # O(V + E) instead of rescanning every job for each visited node.
      #
      # @param start_job [String] name of the job to start from
      # @return [Set<String>] names reachable from start_job (including it)
      def compute_reachable_jobs(start_job)
        # Forward adjacency: job name -> jobs that can run after it.
        successors = Hash.new { |hash, key| hash[key] = [] }
        @workflow_class.jobs.each do |name, job|
          job.dependencies.each { |dep| successors[dep] << name }
          successors[name] << job.fallback_job if job.fallback_job
        end

        reachable = Set.new
        queue = [start_job]

        until queue.empty?
          current = queue.shift
          next if reachable.include?(current)

          reachable.add(current)

          successors[current].each do |next_job|
            queue << next_job unless reachable.include?(next_job)
          end
        end

        reachable
      end
    end
  end

  # Custom error classes
  class WorkflowError < StandardError; end
  class WorkflowCycleError < WorkflowError; end
  class WorkflowValidationError < WorkflowError; end
  class InputMismatchError < WorkflowError; end
  class OutputMismatchError < WorkflowError; end
  class WorkflowExecutionError < WorkflowError; end
end
|
|
@@ -0,0 +1,333 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require_relative "workflow/job"
|
|
4
|
+
require_relative "workflow/workflow_context"
|
|
5
|
+
require_relative "workflow/workflow_executor"
|
|
6
|
+
require_relative "workflow/workflow_validator"
|
|
7
|
+
require_relative "workflow/job_dependency_validator"
|
|
8
|
+
require_relative "workflow/type_compatibility_validator"
|
|
9
|
+
require_relative "workflow/execution_strategy"
|
|
10
|
+
require_relative "workflow/builder"
|
|
11
|
+
require_relative "workflow/chain_builder"
|
|
12
|
+
require_relative "workflow/helpers"
|
|
13
|
+
require_relative "workflow/logger"
|
|
14
|
+
require_relative "workflow/structured_logger"
|
|
15
|
+
require_relative "workflow/execution_trace"
|
|
16
|
+
require_relative "workflow/visualizer"
|
|
17
|
+
require_relative "workflow/dead_letter_queue"
|
|
18
|
+
require_relative "workflow/pre_execution_context"
|
|
19
|
+
require_relative "workflow/retry_orchestrator"
|
|
20
|
+
require_relative "workflow/circuit_breaker_orchestrator"
|
|
21
|
+
|
|
22
|
+
module Fractor
|
|
23
|
+
# Base class for defining workflows using a declarative DSL.
|
|
24
|
+
# Workflows coordinate multiple jobs with dependencies, type-safe data flow,
|
|
25
|
+
# and support both pipeline and continuous execution modes.
|
|
26
|
+
class Workflow
|
|
27
|
+
class << self
|
|
28
|
+
attr_reader :workflow_name, :workflow_mode, :jobs, :start_job_name,
|
|
29
|
+
:end_job_names, :input_model_class, :output_model_class,
|
|
30
|
+
:dlq_config
|
|
31
|
+
|
|
32
|
+
# Create a workflow class without inheritance.
# Builds an anonymous Workflow subclass and applies the DSL block to it.
#
# @param name [String] The workflow name
# @param mode [Symbol] :pipeline (default) or :continuous
# @yield Block containing job definitions using workflow DSL
# @return [Class] A new Workflow subclass
#
# @example
#   workflow = Fractor::Workflow.define("my-workflow") do
#     job "step1", Worker1
#     job "step2", Worker2, needs: "step1"
#     job "step3", Worker3, needs: "step2"
#   end
#   instance = workflow.new
#   result = instance.execute(input: data)
def define(name, mode: :pipeline, &block)
  subclass = Class.new(Workflow)
  subclass.workflow(name, mode: mode, &block)
  subclass
end
|
|
53
|
+
|
|
54
|
+
# Create a linear chain workflow for sequential processing.
# Returns a fluent builder; each .step appends a stage and .build
# produces the workflow class.
#
# @param name [String] The workflow name
# @return [ChainBuilder] A builder for constructing the chain
#
# @example
#   workflow = Fractor::Workflow.chain("pipeline")
#     .step("uppercase", UppercaseWorker)
#     .step("reverse", ReverseWorker)
#     .step("finalize", FinalizeWorker)
#     .build
def chain(name)
  builder = ChainBuilder.new(name)
  builder
end
|
|
69
|
+
|
|
70
|
+
# Define a workflow with the given name and optional mode.
#
# Resets all DSL state on the class, evaluates the definition block,
# then runs full structural validation.
#
# @param name [String] The workflow name
# @param mode [Symbol] :pipeline (default) or :continuous
# @yield Block containing job definitions
def workflow(name, mode: :pipeline, &block)
  @workflow_name = name
  @workflow_mode = mode
  @start_job_name = nil
  @end_job_names = []
  @jobs = {}
  @dlq_config = nil
  @input_model_class = nil
  @output_model_class = nil

  instance_eval(&block) unless block.nil?

  validate_workflow!
end
|
|
89
|
+
|
|
90
|
+
# Declare the workflow's input type.
# The class is checked first; an ArgumentError is raised for non-Class values.
#
# @param klass [Class] A Lutaml::Model::Serializable subclass
def input_type(klass)
  validate_model_class!(klass, "input_type")
  @input_model_class = klass
end
|
|
97
|
+
|
|
98
|
+
# Declare the workflow's output type.
# The class is checked first; an ArgumentError is raised for non-Class values.
#
# @param klass [Class] A Lutaml::Model::Serializable subclass
def output_type(klass)
  validate_model_class!(klass, "output_type")
  @output_model_class = klass
end
|
|
105
|
+
|
|
106
|
+
# Define the starting job for pipeline mode.
# The name is normalized to a String so symbols and strings are
# interchangeable in the DSL.
#
# @param job_name [String, Symbol] The name of the start job
def start_with(job_name)
  @start_job_name = String(job_name)
end
|
|
112
|
+
|
|
113
|
+
# Define an ending job for pipeline mode.
# Multiple end jobs may be registered, each with its own trigger condition.
#
# @param job_name [String, Symbol] The name of the end job
# @param on [Symbol] Condition: :success (default), :failure, :cancellation
def end_with(job_name, on: :success)
  entry = { name: job_name.to_s, condition: on }
  @end_job_names << entry
end
|
|
120
|
+
|
|
121
|
+
# Configure the Dead Letter Queue for failed work.
# Stores the settings on the class; the DLQ itself is built per instance.
#
# @param max_size [Integer] Maximum number of entries to retain
# @param persister [Object] Optional persistence strategy
# @param on_add [Proc] Optional callback when entry is added
def configure_dead_letter_queue(max_size: 1000, persister: nil, on_add: nil)
  @dlq_config = { max_size: max_size, persister: persister, on_add: on_add }
end
|
|
134
|
+
|
|
135
|
+
# Define a job in the workflow.
#
# Supports two equivalent syntaxes: a configuration block using the job
# DSL, and shorthand keyword arguments. Shorthand settings are applied
# first; a DSL block may then refine them.
#
# @param name [String, Symbol] The job name
# @param worker_class [Class] Optional worker class (shorthand syntax)
# @param needs [String, Symbol, Array] Optional dependencies (shorthand)
# @param inputs [Symbol, String, Hash] Optional input configuration (shorthand)
# @param outputs [Symbol] Optional :workflow to mark outputs (shorthand)
# @param workers [Integer] Optional parallel worker count (shorthand)
# @param condition [Proc] Optional conditional execution (shorthand)
# @yield Block containing job configuration (DSL syntax)
#
# @example DSL syntax (original)
#   job "process" do
#     runs_with ProcessWorker
#     needs "validate"
#   end
#
# @example Shorthand syntax (simplified)
#   job "process", ProcessWorker, needs: "validate"
#
# @example Shorthand with multiple options
#   job "process", ProcessWorker, needs: "validate", outputs: :workflow
def job(name, worker_class = nil, needs: nil, inputs: nil, outputs: nil,
        workers: nil, condition: nil, &block)
  job_name = name.to_s
  raise ArgumentError, "Job '#{job_name}' already defined" if @jobs.key?(job_name)

  new_job = Job.new(job_name, self)

  # Shorthand keyword configuration (applied before any DSL block).
  new_job.runs_with(worker_class) if worker_class

  if needs
    needs_list = needs.is_a?(Array) ? needs : [needs]
    new_job.needs(*needs_list)
  end

  case inputs
  when nil
    nil # no shorthand input wiring requested
  when :workflow, "workflow"
    new_job.inputs_from_workflow
  when Hash
    new_job.inputs_from_multiple(inputs)
  when String, Symbol
    new_job.inputs_from_job(inputs.to_s)
  end

  new_job.outputs_to_workflow if outputs == :workflow
  new_job.parallel_workers(workers) if workers
  new_job.if_condition(condition) if condition

  # DSL block configuration may refine or override the shorthand.
  new_job.instance_eval(&block) if block

  @jobs[job_name] = new_job
end
|
|
205
|
+
|
|
206
|
+
# Generate a Mermaid flowchart diagram of the workflow
|
|
207
|
+
#
|
|
208
|
+
# @return [String] Mermaid diagram syntax
|
|
209
|
+
def to_mermaid
|
|
210
|
+
Visualizer.new(self).to_mermaid
|
|
211
|
+
end
|
|
212
|
+
|
|
213
|
+
# Generate a DOT/Graphviz diagram of the workflow
|
|
214
|
+
#
|
|
215
|
+
# @return [String] DOT diagram syntax
|
|
216
|
+
def to_dot
|
|
217
|
+
Visualizer.new(self).to_dot
|
|
218
|
+
end
|
|
219
|
+
|
|
220
|
+
# Render this workflow as an ASCII art diagram.
#
# @return [String] ASCII art representation
def to_ascii
  visualizer = Visualizer.new(self)
  visualizer.to_ascii
end
|
|
226
|
+
|
|
227
|
+
# Print the workflow diagram to stdout via the Visualizer.
def print_diagram
  visualizer = Visualizer.new(self)
  visualizer.print
end
|
|
231
|
+
|
|
232
|
+
private
|
|
233
|
+
|
|
234
|
+
# Ensure the supplied value is a Class.
#
# Intentionally permissive: any Class is accepted — in production you may
# want stricter validation.
#
# @param klass [Object] The value to check
# @param method_name [String, Symbol] Name used in the error message
# @return [nil] When klass is a Class
# @raise [ArgumentError] When klass is not a Class
def validate_model_class!(klass, method_name)
  raise ArgumentError, "#{method_name} must be a Class" unless klass.is_a?(Class)
end
|
|
240
|
+
|
|
241
|
+
# Validate the workflow definition by delegating to WorkflowValidator.
def validate_workflow!
  WorkflowValidator.new(self).validate!
end
|
|
245
|
+
end
|
|
246
|
+
|
|
247
|
+
# Create a new workflow instance.
#
# @param input [Object, nil] Optional workflow input; #execute falls back to
#   this when called without an explicit input
# @raise [RuntimeError] When the class has no workflow definition
def initialize(input = nil)
  if self.class.workflow_name
    @workflow_input = input
    @dead_letter_queue = initialize_dead_letter_queue
  else
    raise "Workflow not defined. Use 'workflow \"name\" do ... end' in class definition"
  end
end
|
|
256
|
+
|
|
257
|
+
# Access the Dead Letter Queue for this workflow.
#
# Idiomatic attr_reader replaces the hand-written getter (same behavior:
# returns the instance variable, which is nil when no DLQ is configured).
#
# @return [DeadLetterQueue, nil] The DLQ instance or nil if not configured
attr_reader :dead_letter_queue
|
|
263
|
+
|
|
264
|
+
# Execute the workflow with the given input.
#
# @param input [Lutaml::Model::Serializable, nil] Workflow input; falls back
#   to the input supplied to #initialize when omitted
# @param correlation_id [String] Optional correlation ID for tracking
# @param logger [Logger] Optional logger instance
# @param trace [Boolean] Whether to generate an execution trace
# @yield [WorkflowExecutor] Optional block for registering hooks before the run
# @return [WorkflowResult] The execution result
def execute(input: nil, correlation_id: nil, logger: nil, trace: false,
            &block)
  # Explicit argument wins over the input captured at construction time.
  workflow_input = input || @workflow_input
  validate_input!(workflow_input)

  executor_options = {
    correlation_id: correlation_id,
    logger: logger,
    trace: trace,
    dead_letter_queue: @dead_letter_queue,
  }
  executor = WorkflowExecutor.new(self, workflow_input, **executor_options)

  # Give the caller a chance to register hooks before execution starts.
  block.call(executor) if block

  executor.execute
end
|
|
292
|
+
|
|
293
|
+
# Run the workflow in continuous mode with a work queue.
#
# Not yet implemented: currently only validates the mode, then raises.
#
# @param work_queue [WorkQueue] The queue to receive workflow inputs
#   (unused until continuous mode is implemented)
# @raise [RuntimeError] When the workflow is not configured for continuous mode
# @raise [NotImplementedError] Always, once the mode check passes
def run_continuous(work_queue:)
  if self.class.workflow_mode != :continuous
    raise "Workflow '#{self.class.workflow_name}' is not configured for continuous mode"
  end

  raise NotImplementedError, "Continuous mode coming soon"
end
|
|
304
|
+
|
|
305
|
+
private
|
|
306
|
+
|
|
307
|
+
# Build the Dead Letter Queue from the class-level DLQ configuration.
#
# @return [DeadLetterQueue, nil] A configured DLQ, or nil when the class
#   has no dlq_config
def initialize_dead_letter_queue
  config = self.class.dlq_config
  return unless config

  DeadLetterQueue.new(
    max_size: config[:max_size],
    persister: config[:persister],
  ).tap do |dlq|
    # Register the add-callback only when one was configured.
    callback = config[:on_add]
    dlq.on_add(&callback) if callback
  end
end
|
|
321
|
+
|
|
322
|
+
# Check the workflow input against the class-level expected input type.
#
# No-op when the class declares no input_model_class.
#
# @param input [Object] The candidate workflow input
# @return [nil]
# @raise [TypeError] When input is not an instance of the expected type
def validate_input!(input)
  expected_type = self.class.input_model_class
  return unless expected_type
  return if input.is_a?(expected_type)

  raise TypeError,
        "Workflow '#{self.class.workflow_name}' expects input of type " \
        "#{expected_type}, got #{input.class}"
end
|
|
332
|
+
end
|
|
333
|
+
end
|