fractor 0.1.6 → 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (172)
  1. checksums.yaml +4 -4
  2. data/.rubocop_todo.yml +227 -102
  3. data/README.adoc +113 -1940
  4. data/docs/.lycheeignore +16 -0
  5. data/docs/Gemfile +24 -0
  6. data/docs/README.md +157 -0
  7. data/docs/_config.yml +151 -0
  8. data/docs/_features/error-handling.adoc +1192 -0
  9. data/docs/_features/index.adoc +80 -0
  10. data/docs/_features/monitoring.adoc +589 -0
  11. data/docs/_features/signal-handling.adoc +202 -0
  12. data/docs/_features/workflows.adoc +1235 -0
  13. data/docs/_guides/continuous-mode.adoc +736 -0
  14. data/docs/_guides/cookbook.adoc +1133 -0
  15. data/docs/_guides/index.adoc +55 -0
  16. data/docs/_guides/pipeline-mode.adoc +730 -0
  17. data/docs/_guides/troubleshooting.adoc +358 -0
  18. data/docs/_pages/architecture.adoc +1390 -0
  19. data/docs/_pages/core-concepts.adoc +1392 -0
  20. data/docs/_pages/design-principles.adoc +862 -0
  21. data/docs/_pages/getting-started.adoc +290 -0
  22. data/docs/_pages/installation.adoc +143 -0
  23. data/docs/_reference/api.adoc +1080 -0
  24. data/docs/_reference/error-reporting.adoc +670 -0
  25. data/docs/_reference/examples.adoc +181 -0
  26. data/docs/_reference/index.adoc +96 -0
  27. data/docs/_reference/troubleshooting.adoc +862 -0
  28. data/docs/_tutorials/complex-workflows.adoc +1022 -0
  29. data/docs/_tutorials/data-processing-pipeline.adoc +740 -0
  30. data/docs/_tutorials/first-application.adoc +384 -0
  31. data/docs/_tutorials/index.adoc +48 -0
  32. data/docs/_tutorials/long-running-services.adoc +931 -0
  33. data/docs/assets/images/favicon-16.png +0 -0
  34. data/docs/assets/images/favicon-32.png +0 -0
  35. data/docs/assets/images/favicon-48.png +0 -0
  36. data/docs/assets/images/favicon.ico +0 -0
  37. data/docs/assets/images/favicon.png +0 -0
  38. data/docs/assets/images/favicon.svg +45 -0
  39. data/docs/assets/images/fractor-icon.svg +49 -0
  40. data/docs/assets/images/fractor-logo.svg +61 -0
  41. data/docs/index.adoc +131 -0
  42. data/docs/lychee.toml +39 -0
  43. data/examples/api_aggregator/README.adoc +627 -0
  44. data/examples/api_aggregator/api_aggregator.rb +376 -0
  45. data/examples/auto_detection/README.adoc +407 -29
  46. data/examples/continuous_chat_common/message_protocol.rb +1 -1
  47. data/examples/error_reporting.rb +207 -0
  48. data/examples/file_processor/README.adoc +170 -0
  49. data/examples/file_processor/file_processor.rb +615 -0
  50. data/examples/file_processor/sample_files/invalid.csv +1 -0
  51. data/examples/file_processor/sample_files/orders.xml +24 -0
  52. data/examples/file_processor/sample_files/products.json +23 -0
  53. data/examples/file_processor/sample_files/users.csv +6 -0
  54. data/examples/hierarchical_hasher/README.adoc +629 -41
  55. data/examples/image_processor/README.adoc +610 -0
  56. data/examples/image_processor/image_processor.rb +349 -0
  57. data/examples/image_processor/processed_images/sample_10_processed.jpg.json +12 -0
  58. data/examples/image_processor/processed_images/sample_1_processed.jpg.json +12 -0
  59. data/examples/image_processor/processed_images/sample_2_processed.jpg.json +12 -0
  60. data/examples/image_processor/processed_images/sample_3_processed.jpg.json +12 -0
  61. data/examples/image_processor/processed_images/sample_4_processed.jpg.json +12 -0
  62. data/examples/image_processor/processed_images/sample_5_processed.jpg.json +12 -0
  63. data/examples/image_processor/processed_images/sample_6_processed.jpg.json +12 -0
  64. data/examples/image_processor/processed_images/sample_7_processed.jpg.json +12 -0
  65. data/examples/image_processor/processed_images/sample_8_processed.jpg.json +12 -0
  66. data/examples/image_processor/processed_images/sample_9_processed.jpg.json +12 -0
  67. data/examples/image_processor/test_images/sample_1.png +1 -0
  68. data/examples/image_processor/test_images/sample_10.png +1 -0
  69. data/examples/image_processor/test_images/sample_2.png +1 -0
  70. data/examples/image_processor/test_images/sample_3.png +1 -0
  71. data/examples/image_processor/test_images/sample_4.png +1 -0
  72. data/examples/image_processor/test_images/sample_5.png +1 -0
  73. data/examples/image_processor/test_images/sample_6.png +1 -0
  74. data/examples/image_processor/test_images/sample_7.png +1 -0
  75. data/examples/image_processor/test_images/sample_8.png +1 -0
  76. data/examples/image_processor/test_images/sample_9.png +1 -0
  77. data/examples/log_analyzer/README.adoc +662 -0
  78. data/examples/log_analyzer/log_analyzer.rb +579 -0
  79. data/examples/log_analyzer/sample_logs/apache.log +20 -0
  80. data/examples/log_analyzer/sample_logs/json.log +15 -0
  81. data/examples/log_analyzer/sample_logs/nginx.log +15 -0
  82. data/examples/log_analyzer/sample_logs/rails.log +29 -0
  83. data/examples/multi_work_type/README.adoc +576 -26
  84. data/examples/performance_monitoring.rb +120 -0
  85. data/examples/pipeline_processing/README.adoc +740 -26
  86. data/examples/pipeline_processing/pipeline_processing.rb +2 -2
  87. data/examples/priority_work_example.rb +155 -0
  88. data/examples/producer_subscriber/README.adoc +889 -46
  89. data/examples/scatter_gather/README.adoc +829 -27
  90. data/examples/simple/README.adoc +347 -0
  91. data/examples/specialized_workers/README.adoc +622 -26
  92. data/examples/specialized_workers/specialized_workers.rb +44 -8
  93. data/examples/stream_processor/README.adoc +206 -0
  94. data/examples/stream_processor/stream_processor.rb +284 -0
  95. data/examples/web_scraper/README.adoc +625 -0
  96. data/examples/web_scraper/web_scraper.rb +285 -0
  97. data/examples/workflow/README.adoc +406 -0
  98. data/examples/workflow/circuit_breaker/README.adoc +360 -0
  99. data/examples/workflow/circuit_breaker/circuit_breaker_workflow.rb +225 -0
  100. data/examples/workflow/conditional/README.adoc +483 -0
  101. data/examples/workflow/conditional/conditional_workflow.rb +215 -0
  102. data/examples/workflow/dead_letter_queue/README.adoc +374 -0
  103. data/examples/workflow/dead_letter_queue/dead_letter_queue_workflow.rb +217 -0
  104. data/examples/workflow/fan_out/README.adoc +381 -0
  105. data/examples/workflow/fan_out/fan_out_workflow.rb +202 -0
  106. data/examples/workflow/retry/README.adoc +248 -0
  107. data/examples/workflow/retry/retry_workflow.rb +195 -0
  108. data/examples/workflow/simple_linear/README.adoc +267 -0
  109. data/examples/workflow/simple_linear/simple_linear_workflow.rb +175 -0
  110. data/examples/workflow/simplified/README.adoc +329 -0
  111. data/examples/workflow/simplified/simplified_workflow.rb +222 -0
  112. data/exe/fractor +10 -0
  113. data/lib/fractor/cli.rb +288 -0
  114. data/lib/fractor/configuration.rb +307 -0
  115. data/lib/fractor/continuous_server.rb +60 -65
  116. data/lib/fractor/error_formatter.rb +72 -0
  117. data/lib/fractor/error_report_generator.rb +152 -0
  118. data/lib/fractor/error_reporter.rb +244 -0
  119. data/lib/fractor/error_statistics.rb +147 -0
  120. data/lib/fractor/execution_tracer.rb +162 -0
  121. data/lib/fractor/logger.rb +230 -0
  122. data/lib/fractor/main_loop_handler.rb +406 -0
  123. data/lib/fractor/main_loop_handler3.rb +135 -0
  124. data/lib/fractor/main_loop_handler4.rb +299 -0
  125. data/lib/fractor/performance_metrics_collector.rb +181 -0
  126. data/lib/fractor/performance_monitor.rb +215 -0
  127. data/lib/fractor/performance_report_generator.rb +202 -0
  128. data/lib/fractor/priority_work.rb +93 -0
  129. data/lib/fractor/priority_work_queue.rb +189 -0
  130. data/lib/fractor/result_aggregator.rb +32 -0
  131. data/lib/fractor/shutdown_handler.rb +168 -0
  132. data/lib/fractor/signal_handler.rb +80 -0
  133. data/lib/fractor/supervisor.rb +382 -269
  134. data/lib/fractor/supervisor_logger.rb +88 -0
  135. data/lib/fractor/version.rb +1 -1
  136. data/lib/fractor/work.rb +12 -0
  137. data/lib/fractor/work_distribution_manager.rb +151 -0
  138. data/lib/fractor/work_queue.rb +20 -0
  139. data/lib/fractor/work_result.rb +181 -9
  140. data/lib/fractor/worker.rb +73 -0
  141. data/lib/fractor/workflow/builder.rb +210 -0
  142. data/lib/fractor/workflow/chain_builder.rb +169 -0
  143. data/lib/fractor/workflow/circuit_breaker.rb +183 -0
  144. data/lib/fractor/workflow/circuit_breaker_orchestrator.rb +208 -0
  145. data/lib/fractor/workflow/circuit_breaker_registry.rb +112 -0
  146. data/lib/fractor/workflow/dead_letter_queue.rb +334 -0
  147. data/lib/fractor/workflow/execution_hooks.rb +39 -0
  148. data/lib/fractor/workflow/execution_strategy.rb +225 -0
  149. data/lib/fractor/workflow/execution_trace.rb +134 -0
  150. data/lib/fractor/workflow/helpers.rb +191 -0
  151. data/lib/fractor/workflow/job.rb +290 -0
  152. data/lib/fractor/workflow/job_dependency_validator.rb +120 -0
  153. data/lib/fractor/workflow/logger.rb +110 -0
  154. data/lib/fractor/workflow/pre_execution_context.rb +193 -0
  155. data/lib/fractor/workflow/retry_config.rb +156 -0
  156. data/lib/fractor/workflow/retry_orchestrator.rb +184 -0
  157. data/lib/fractor/workflow/retry_strategy.rb +93 -0
  158. data/lib/fractor/workflow/structured_logger.rb +30 -0
  159. data/lib/fractor/workflow/type_compatibility_validator.rb +222 -0
  160. data/lib/fractor/workflow/visualizer.rb +211 -0
  161. data/lib/fractor/workflow/workflow_context.rb +132 -0
  162. data/lib/fractor/workflow/workflow_executor.rb +669 -0
  163. data/lib/fractor/workflow/workflow_result.rb +55 -0
  164. data/lib/fractor/workflow/workflow_validator.rb +295 -0
  165. data/lib/fractor/workflow.rb +333 -0
  166. data/lib/fractor/wrapped_ractor.rb +66 -101
  167. data/lib/fractor/wrapped_ractor3.rb +161 -0
  168. data/lib/fractor/wrapped_ractor4.rb +242 -0
  169. data/lib/fractor.rb +92 -4
  170. metadata +179 -6
  171. data/tests/sample.rb.bak +0 -309
  172. data/tests/sample_working.rb.bak +0 -209
@@ -0,0 +1,374 @@
1
+ = Dead Letter Queue Workflow Example
2
+ :toc:
3
+ :toclevels: 3
4
+
5
+ == Purpose
6
+
7
+ This example demonstrates how to use Dead Letter Queue (DLQ) functionality in Fractor workflows to capture and manage permanently failed work items. The DLQ provides a safety net for work that cannot be successfully processed even after retry attempts.
8
+
9
+ == What is a Dead Letter Queue?
10
+
11
+ A Dead Letter Queue is a holding area for work items that have failed permanently and cannot be processed successfully. Instead of losing failed work or letting it crash the system, the DLQ:
12
+
13
+ * Captures failed work items with full context
14
+ * Preserves error information and retry history
15
+ * Enables manual inspection and retry
16
+ * Provides persistence options for durability
17
+ * Supports custom notification handlers
18
+
19
+ == Use Cases
20
+
21
+ * *Error Analysis*: Inspect failed work to identify patterns and root causes
22
+ * *Manual Recovery*: Review and manually retry failed items after fixing issues
23
+ * *Alerting*: Trigger notifications when work fails permanently
24
+ * *Audit Trail*: Maintain a record of all failures with context
25
+ * *Batch Retry*: Retry multiple failed items after deploying fixes
26
+
27
+ == Features Demonstrated
28
+
29
+ === Basic Dead Letter Queue
30
+
31
+ The simplest configuration captures failed work automatically:
32
+
33
+ [source,ruby]
34
+ ----
35
+ class BasicDLQWorkflow
36
+ include Fractor::Workflow
37
+
38
+ workflow_name "basic_dlq_workflow"
39
+
40
+ # Configure Dead Letter Queue
41
+ configure_dead_letter_queue max_size: 100
42
+
43
+ job :unreliable_task,
44
+ worker: UnreliableWorker,
45
+ input: ->(ctx) { ctx.workflow_input },
46
+ retry: {
47
+ max_attempts: 3,
48
+ backoff: :exponential,
49
+ initial_delay: 0.1,
50
+ }
51
+
52
+ end_job :unreliable_task
53
+ end
54
+ ----
55
+
56
+ When retries are exhausted, the work is automatically added to the DLQ with:
57
+
58
+ * The failed work item
59
+ * The error that caused the failure
60
+ * Workflow context (inputs, completed/failed jobs)
61
+ * Retry metadata (attempts, total time, all errors)
62
+
63
+ === Custom Notification Handlers
64
+
65
+ Add callbacks to be notified when work is added to the DLQ:
66
+
67
+ [source,ruby]
68
+ ----
69
+ class DLQWithHandlersWorkflow
70
+ include Fractor::Workflow
71
+
72
+ workflow_name "dlq_with_handlers_workflow"
73
+
74
+ configure_dead_letter_queue(
75
+ max_size: 50,
76
+ on_add: lambda { |entry|
77
+ # Send alert, log to monitoring system, etc.
78
+ puts "⚠️ Work added to DLQ:"
79
+ puts " Error: #{entry.error.class.name}: #{entry.error.message}"
80
+ puts " Timestamp: #{entry.timestamp}"
81
+ puts " Metadata: #{entry.metadata.inspect}"
82
+ }
83
+ )
84
+
85
+ job :risky_task,
86
+ worker: UnreliableWorker,
87
+ input: ->(ctx) { ctx.workflow_input },
88
+ retry: {
89
+ max_attempts: 2,
90
+ backoff: :linear,
91
+ initial_delay: 0.1,
92
+ }
93
+
94
+ end_job :risky_task
95
+ end
96
+ ----
97
+
98
+ === File Persistence
99
+
100
+ Persist DLQ entries to disk for durability across restarts:
101
+
102
+ [source,ruby]
103
+ ----
104
+ class DLQWithPersistenceWorkflow
105
+ include Fractor::Workflow
106
+
107
+ workflow_name "dlq_with_persistence_workflow"
108
+
109
+ configure_dead_letter_queue(
110
+ max_size: 200,
111
+ persister: Fractor::Workflow::DeadLetterQueue::FilePersister.new(
112
+ directory: "tmp/dlq"
113
+ )
114
+ )
115
+
116
+ job :persistent_task,
117
+ worker: UnreliableWorker,
118
+ input: ->(ctx) { ctx.workflow_input },
119
+ retry: {
120
+ max_attempts: 3,
121
+ backoff: :exponential,
122
+ initial_delay: 0.1,
123
+ }
124
+
125
+ end_job :persistent_task
126
+ end
127
+ ----
128
+
129
+ Each failed work item is saved as a JSON file with all context and metadata.
130
+
131
+ == Querying the Dead Letter Queue
132
+
133
+ The DLQ provides multiple methods to query and filter entries:
134
+
135
+ === Get All Entries
136
+
137
+ [source,ruby]
138
+ ----
139
+ dlq = workflow.dead_letter_queue
140
+ all_entries = dlq.all
141
+ puts "Total entries: #{all_entries.size}"
142
+ ----
143
+
144
+ === Filter by Error Class
145
+
146
+ [source,ruby]
147
+ ----
148
+ standard_errors = dlq.by_error_class(StandardError)
149
+ timeout_errors = dlq.by_error_class(Timeout::Error)
150
+ ----
151
+
152
+ === Filter by Time Range
153
+
154
+ [source,ruby]
155
+ ----
156
+ # Get recent failures (last hour)
157
+ recent = dlq.by_time_range(Time.now - 3600, Time.now)
158
+
159
+ # Get yesterday's failures
160
+ yesterday_start = Time.now - 86400
161
+ yesterday_end = Time.now
162
+ yesterday_failures = dlq.by_time_range(yesterday_start, yesterday_end)
163
+ ----
164
+
165
+ === Custom Filtering
166
+
167
+ [source,ruby]
168
+ ----
169
+ # Find entries with specific context
170
+ query_test_entries = dlq.filter do |entry|
171
+ entry.context[:message]&.include?("Query test")
172
+ end
173
+
174
+ # Find entries for specific job
175
+ job_entries = dlq.filter do |entry|
176
+ entry.metadata[:job_name] == "unreliable_task"
177
+ end
178
+ ----
179
+
180
+ == Retrying Failed Work
181
+
182
+ === Retry Single Entry
183
+
184
+ [source,ruby]
185
+ ----
186
+ entry = dlq.all.first
187
+
188
+ dlq.retry_entry(entry) do |work, error, context|
189
+ # Custom retry logic
190
+ # Return result or raise to fail again
191
+ MyWorker.perform(work)
192
+ end
193
+ ----
194
+
195
+ === Retry All Failed Work
196
+
197
+ [source,ruby]
198
+ ----
199
+ dlq.retry_all do |work, error, context|
200
+ # Attempt to reprocess each failed item
201
+ begin
202
+ MyWorker.perform(work)
203
+ rescue StandardError => e
204
+ # Log but don't fail the batch retry
205
+ puts "Retry failed: #{e.message}"
206
+ nil
207
+ end
208
+ end
209
+ ----
210
+
211
+ == DLQ Statistics
212
+
213
+ Get aggregate information about the DLQ:
214
+
215
+ [source,ruby]
216
+ ----
217
+ stats = dlq.stats
218
+
219
+ puts "Total entries: #{stats[:total]}"
220
+ puts "Oldest entry: #{stats[:oldest_timestamp]}"
221
+ puts "Newest entry: #{stats[:newest_timestamp]}"
222
+ puts "Error types: #{stats[:error_types].inspect}"
223
+ puts "Jobs: #{stats[:jobs].inspect}"
224
+ ----
225
+
226
+ == Entry Structure
227
+
228
+ Each DLQ entry contains:
229
+
230
+ === Core Information
231
+
232
+ * `work`: The failed Work object with original payload
233
+ * `error`: The exception that caused the failure
234
+ * `timestamp`: When the entry was added to DLQ
235
+ * `context`: Workflow context (inputs, job states)
236
+ * `metadata`: Additional information
237
+
238
+ === Metadata Fields
239
+
240
+ When added by the workflow executor, entries include:
241
+
242
+ [source,ruby]
243
+ ----
244
+ {
245
+ job_name: "task_name",
246
+ worker_class: "WorkerClass",
247
+ correlation_id: "uuid",
248
+ workflow_name: "workflow_name",
249
+ retry_attempts: 3,
250
+ total_retry_time: 5.2,
251
+ all_errors: ["Error 1", "Error 2", "Error 3"]
252
+ }
253
+ ----
254
+
255
+ == Configuration Options
256
+
257
+ [options="header"]
258
+ |===
259
+ | Option | Type | Default | Description
260
+ | `max_size` | Integer | 1000 | Maximum DLQ entries to retain
261
+ | `persister` | Object | nil | Optional persistence strategy
262
+ | `on_add` | Proc | nil | Callback when entry is added
263
+ |===
264
+
265
+ == Best Practices
266
+
267
+ === Size Management
268
+
269
+ Configure `max_size` based on your error rate and retention needs:
270
+
271
+ [source,ruby]
272
+ ----
273
+ # High-volume system
274
+ configure_dead_letter_queue max_size: 10000
275
+
276
+ # Low-volume system
277
+ configure_dead_letter_queue max_size: 100
278
+ ----
279
+
280
+ === Monitoring
281
+
282
+ Set up monitoring for DLQ growth:
283
+
284
+ [source,ruby]
285
+ ----
286
+ configure_dead_letter_queue(
287
+ on_add: lambda { |entry|
288
+ # Send to monitoring system
289
+ StatsD.increment("dlq.entries")
290
+ StatsD.gauge("dlq.size", workflow.dead_letter_queue.size)
291
+
292
+ # Alert if DLQ grows too large
293
+ if workflow.dead_letter_queue.size > 500
294
+ AlertService.send("DLQ size exceeds threshold")
295
+ end
296
+ }
297
+ )
298
+ ----
299
+
300
+ === Regular Cleanup
301
+
302
+ Implement regular DLQ review and cleanup:
303
+
304
+ [source,ruby]
305
+ ----
306
+ # Review old entries
307
+ old_entries = dlq.by_time_range(Time.now - (7 * 86_400), Time.now)
308
+
309
+ # Remove resolved entries
310
+ old_entries.each do |entry|
311
+ if issue_resolved?(entry)
312
+ dlq.remove(entry)
313
+ end
314
+ end
315
+ ----
316
+
317
+ === Persistence Strategy
318
+
319
+ Choose persistence based on requirements:
320
+
321
+ * *Memory-only*: Fast, suitable for development and low-stakes scenarios
322
+ * *File-based*: Durable, good for single-server deployments
323
+ * *Redis/Database*: Centralized, required for multi-server deployments
324
+
325
+ == Integration with Retry Logic
326
+
327
+ The DLQ works seamlessly with retry configuration:
328
+
329
+ [source,ruby]
330
+ ----
331
+ job :task,
332
+ worker: Worker,
333
+ retry: {
334
+ max_attempts: 3, # Try 3 times
335
+ backoff: :exponential, # With exponential backoff
336
+ initial_delay: 1.0,
337
+ }
338
+ ----
339
+
340
+ Flow:
341
+
342
+ 1. Job fails → First retry attempt
343
+ 2. Retry fails → Second retry attempt
344
+ 3. Retry fails → Third retry attempt
345
+ 4. All retries exhausted → **Added to DLQ**
346
+
347
+ == Running the Examples
348
+
349
+ [source,bash]
350
+ ----
351
+ # Run all DLQ examples
352
+ ruby examples/workflow/dead_letter_queue/dead_letter_queue_workflow.rb
353
+
354
+ # Example output:
355
+ # ================================================================================
356
+ # Dead Letter Queue Workflow Examples
357
+ # ================================================================================
358
+ #
359
+ # --- Example 1: Basic Dead Letter Queue ---
360
+ # Running workflow with failing work that exhausts retries...
361
+ #
362
+ # ✓ Workflow failed as expected: Job 'unreliable_task' failed: ...
363
+ #
364
+ # Dead Letter Queue Status:
365
+ # Entries: 1
366
+ # Stats: {:total=>1, :oldest_timestamp=>..., :error_types=>...}
367
+ # ...
368
+ ----
369
+
370
+ == See Also
371
+
372
+ * link:../retry/README.adoc[Retry Mechanism]
373
+ * link:../circuit_breaker/README.adoc[Circuit Breaker]
374
+ * link:../../../docs/workflows.adoc[Workflow Documentation]
@@ -0,0 +1,217 @@
1
+ # frozen_string_literal: true
2
+
3
+ require_relative "../../../lib/fractor"
4
+
5
+ # Work item for DLQ workflow
6
+ class DLQWork < Fractor::Work
7
+ attr_reader :action, :message
8
+
9
+ def initialize(action:, message:)
10
+ @action = action
11
+ @message = message
12
+ super({ action:, message: })
13
+ end
14
+ end
15
+
16
+ # Worker that intentionally fails for specific inputs
17
+ class UnreliableWorker < Fractor::Worker
18
+ input_type DLQWork
19
+ output_type Hash
20
+
21
+ def process(work)
22
+ input = work.input
23
+
24
+ # Simulate different failure scenarios
25
+ case input[:action]
26
+ when "fail_always"
27
+ raise StandardError, "Permanent failure: #{input[:message]}"
28
+ when "fail_random"
29
+ raise StandardError, "Random failure" if rand < 0.7
30
+ { status: "success", data: input[:message] }
31
+ when "timeout"
32
+ sleep 10 # Simulate timeout
33
+ { status: "success", data: input[:message] }
34
+ else
35
+ { status: "success", data: input[:message] }
36
+ end
37
+ end
38
+ end
39
+
40
+ # Worker that always succeeds
41
+ class ReliableWorker < Fractor::Worker
42
+ input_type Hash
43
+ output_type Hash
44
+
45
+ def process(work)
46
+ input = work.input
47
+ { status: "processed", message: input[:message] }
48
+ end
49
+ end
50
+
51
+ # Example 1: Basic Dead Letter Queue with automatic capture
52
+ class BasicDLQWorkflow < Fractor::Workflow
53
+ workflow "basic_dlq_workflow" do
54
+ start_with "unreliable_task"
55
+ configure_dead_letter_queue max_size: 1000
56
+
57
+ job "unreliable_task" do
58
+ runs_with UnreliableWorker
59
+ inputs_from_workflow
60
+
61
+ # Retry up to 3 times with exponential backoff
62
+ retry_on_error max_attempts: 3,
63
+ backoff: :exponential,
64
+ initial_delay: 0.1,
65
+ max_delay: 1
66
+
67
+ # Add error handler for logging
68
+ on_error do |error, context|
69
+ puts "Error in unreliable_task: #{error.message}"
70
+ end
71
+
72
+ outputs_to_workflow
73
+ terminates_workflow
74
+ end
75
+ end
76
+ end
77
+
78
+ # Example 2: DLQ with custom error handlers
79
+ class DLQWithHandlersWorkflow < Fractor::Workflow
80
+ workflow "dlq_with_handlers_workflow" do
81
+ start_with "risky_task"
82
+ configure_dead_letter_queue max_size: 1000
83
+
84
+ job "risky_task" do
85
+ runs_with UnreliableWorker
86
+ inputs_from_workflow
87
+
88
+ # Retry up to 2 times with linear backoff
89
+ retry_on_error max_attempts: 2,
90
+ backoff: :linear,
91
+ initial_delay: 0.1
92
+
93
+ # Add error handler for logging
94
+ on_error do |error, _context|
95
+ puts "\n⚠️ Work added to DLQ:"
96
+ puts " Error: #{error.class.name}: #{error.message}"
97
+ puts " Timestamp: #{Time.now}"
98
+ end
99
+
100
+ outputs_to_workflow
101
+ terminates_workflow
102
+ end
103
+ end
104
+ end
105
+
106
+ # Example 3: DLQ with file persistence
107
+ class DLQWithPersistenceWorkflow < Fractor::Workflow
108
+ workflow "dlq_with_persistence_workflow" do
109
+ start_with "persistent_task"
110
+ configure_dead_letter_queue max_size: 1000
111
+
112
+ job "persistent_task" do
113
+ runs_with UnreliableWorker
114
+ inputs_from_workflow
115
+
116
+ # Retry up to 3 times with exponential backoff
117
+ retry_on_error max_attempts: 3,
118
+ backoff: :exponential,
119
+ initial_delay: 0.1
120
+
121
+ # Add error handler for persistence simulation
122
+ on_error do |error, _context|
123
+ # Simulate file persistence
124
+ require "fileutils"
125
+ FileUtils.mkdir_p("tmp/dlq")
126
+ entry = {
127
+ error: error.class.name,
128
+ message: error.message,
129
+ timestamp: Time.now.to_s,
130
+ }
131
+ File.write("tmp/dlq/entry_#{Time.now.to_i}.json", entry.to_json)
132
+ puts "DLQ entry persisted to tmp/dlq/"
133
+ end
134
+
135
+ outputs_to_workflow
136
+ terminates_workflow
137
+ end
138
+ end
139
+ end
140
+
141
+ # Demonstration runners
142
+ if __FILE__ == $PROGRAM_NAME
143
+ require "json"
144
+
145
+ puts "=" * 80
146
+ puts "Dead Letter Queue Workflow Examples"
147
+ puts "=" * 80
148
+
149
+ # Example 1: Basic DLQ
150
+ puts "\n--- Example 1: Basic Dead Letter Queue ---"
151
+ puts "Running workflow with failing work that exhausts retries..."
152
+
153
+ workflow1 = BasicDLQWorkflow.new
154
+ work1 = DLQWork.new(action: "fail_always", message: "Test 1")
155
+ begin
156
+ result1 = workflow1.execute(work1)
157
+ puts "Workflow completed (should not reach here)"
158
+ rescue Fractor::Workflow::WorkflowExecutionError => e
159
+ puts "\n✓ Workflow failed as expected: #{e.message}"
160
+ end
161
+
162
+ # Example 2: DLQ with handlers
163
+ puts "\n\n--- Example 2: DLQ with Custom Handlers ---"
164
+ puts "Running workflow with custom notification handlers..."
165
+
166
+ workflow2 = DLQWithHandlersWorkflow.new
167
+ work2 = DLQWork.new(action: "fail_always", message: "Test 2")
168
+ begin
169
+ result2 = workflow2.execute(work2)
170
+ rescue Fractor::Workflow::WorkflowExecutionError => e
171
+ puts "\n✓ Workflow failed, handler was triggered above"
172
+ end
173
+
174
+ # Example 3: DLQ with persistence
175
+ puts "\n\n--- Example 3: DLQ with File Persistence ---"
176
+ puts "Running workflow with file-persisted DLQ..."
177
+
178
+ require "fileutils"
179
+ FileUtils.mkdir_p("tmp/dlq")
180
+
181
+ workflow3 = DLQWithPersistenceWorkflow.new
182
+ work3 = DLQWork.new(action: "fail_always", message: "Test 3")
183
+ begin
184
+ result3 = workflow3.execute(work3)
185
+ rescue Fractor::Workflow::WorkflowExecutionError => e
186
+ puts "\n✓ Workflow failed, entry persisted to disk"
187
+
188
+ # Check if file was created
189
+ if Dir.exist?("tmp/dlq")
190
+ files = Dir.glob("tmp/dlq/*.json")
191
+ puts "DLQ Size: #{files.size}"
192
+ if files.any?
193
+ puts "First file: #{files.first}"
194
+ content = JSON.parse(File.read(files.first))
195
+ puts "Content: #{content.inspect}"
196
+ end
197
+ end
198
+ end
199
+
200
+ # Example 4: Successful execution
201
+ puts "\n\n--- Example 4: Successful Execution ---"
202
+ puts "Running workflow with successful work..."
203
+
204
+ workflow4 = BasicDLQWorkflow.new
205
+ work4 = DLQWork.new(action: "success", message: "Success Test")
206
+ begin
207
+ result4 = workflow4.execute(work4)
208
+ puts "\n✓ Workflow completed successfully!"
209
+ puts "Result: #{result4.output.inspect}"
210
+ rescue Fractor::Workflow::WorkflowExecutionError => e
211
+ puts "Workflow failed: #{e.message}"
212
+ end
213
+
214
+ puts "\n" + "=" * 80
215
+ puts "Dead Letter Queue examples complete!"
216
+ puts "=" * 80
217
+ end