ruby_reactor 0.2.0 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. checksums.yaml +4 -4
  2. data/README.md +132 -0
  3. data/Rakefile +2 -2
  4. data/documentation/data_pipelines.md +90 -84
  5. data/documentation/testing.md +812 -0
  6. data/lib/ruby_reactor/configuration.rb +1 -1
  7. data/lib/ruby_reactor/context.rb +13 -5
  8. data/lib/ruby_reactor/context_serializer.rb +70 -4
  9. data/lib/ruby_reactor/dsl/map_builder.rb +6 -2
  10. data/lib/ruby_reactor/dsl/reactor.rb +3 -2
  11. data/lib/ruby_reactor/error/step_failure_error.rb +5 -2
  12. data/lib/ruby_reactor/executor/result_handler.rb +9 -2
  13. data/lib/ruby_reactor/executor/retry_manager.rb +26 -8
  14. data/lib/ruby_reactor/executor/step_executor.rb +24 -99
  15. data/lib/ruby_reactor/executor.rb +3 -13
  16. data/lib/ruby_reactor/map/collector.rb +72 -33
  17. data/lib/ruby_reactor/map/dispatcher.rb +162 -0
  18. data/lib/ruby_reactor/map/element_executor.rb +103 -114
  19. data/lib/ruby_reactor/map/execution.rb +18 -4
  20. data/lib/ruby_reactor/map/helpers.rb +4 -3
  21. data/lib/ruby_reactor/map/result_enumerator.rb +105 -0
  22. data/lib/ruby_reactor/reactor.rb +174 -16
  23. data/lib/ruby_reactor/rspec/helpers.rb +17 -0
  24. data/lib/ruby_reactor/rspec/matchers.rb +256 -0
  25. data/lib/ruby_reactor/rspec/step_executor_patch.rb +85 -0
  26. data/lib/ruby_reactor/rspec/test_subject.rb +625 -0
  27. data/lib/ruby_reactor/rspec.rb +18 -0
  28. data/lib/ruby_reactor/{async_router.rb → sidekiq_adapter.rb} +15 -10
  29. data/lib/ruby_reactor/sidekiq_workers/worker.rb +1 -3
  30. data/lib/ruby_reactor/step/compose_step.rb +0 -1
  31. data/lib/ruby_reactor/step/map_step.rb +52 -27
  32. data/lib/ruby_reactor/storage/redis_adapter.rb +59 -0
  33. data/lib/ruby_reactor/template/dynamic_source.rb +32 -0
  34. data/lib/ruby_reactor/version.rb +1 -1
  35. data/lib/ruby_reactor/web/api.rb +32 -24
  36. data/lib/ruby_reactor.rb +70 -10
  37. metadata +12 -3
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module RubyReactor
4
- class AsyncRouter
4
+ class SidekiqAdapter
5
5
  def self.perform_async(serialized_context, reactor_class_name = nil, intermediate_results: {})
6
6
  job_id = SidekiqWorkers::Worker.perform_async(serialized_context, reactor_class_name)
7
7
  context = ContextSerializer.deserialize(serialized_context)
@@ -19,8 +19,8 @@ module RubyReactor
19
19
  # rubocop:disable Metrics/ParameterLists
20
20
  def self.perform_map_element_async(map_id:, element_id:, index:, serialized_inputs:, reactor_class_info:,
21
21
  strict_ordering:, parent_context_id:, parent_reactor_class_name:, step_name:,
22
- batch_size: nil, serialized_context: nil)
23
- RubyReactor::SidekiqWorkers::MapElementWorker.perform_async(
22
+ batch_size: nil, serialized_context: nil, fail_fast: nil)
23
+ job_id = RubyReactor::SidekiqWorkers::MapElementWorker.perform_async(
24
24
  {
25
25
  "map_id" => map_id,
26
26
  "element_id" => element_id,
@@ -32,9 +32,11 @@ module RubyReactor
32
32
  "parent_reactor_class_name" => parent_reactor_class_name,
33
33
  "step_name" => step_name,
34
34
  "batch_size" => batch_size,
35
- "serialized_context" => serialized_context
35
+ "serialized_context" => serialized_context,
36
+ "fail_fast" => fail_fast
36
37
  }
37
38
  )
39
+ RubyReactor::AsyncResult.new(job_id: job_id)
38
40
  end
39
41
 
40
42
  def self.perform_map_element_in(delay, map_id:, element_id:, index:, serialized_inputs:, reactor_class_info:,
@@ -62,7 +64,7 @@ module RubyReactor
62
64
  # rubocop:disable Metrics/ParameterLists
63
65
  def self.perform_map_collection_async(parent_context_id:, map_id:, parent_reactor_class_name:, step_name:,
64
66
  strict_ordering:, timeout:)
65
- RubyReactor::SidekiqWorkers::MapCollectorWorker.perform_async(
67
+ job_id = RubyReactor::SidekiqWorkers::MapCollectorWorker.perform_async(
66
68
  {
67
69
  "parent_context_id" => parent_context_id,
68
70
  "map_id" => map_id,
@@ -72,13 +74,15 @@ module RubyReactor
72
74
  "timeout" => timeout
73
75
  }
74
76
  )
77
+ RubyReactor::AsyncResult.new(job_id: job_id)
75
78
  end
76
- # rubocop:enable Metrics/ParameterLists
77
79
 
80
+ # rubocop:enable Metrics/ParameterLists
78
81
  # rubocop:disable Metrics/ParameterLists
79
82
  def self.perform_map_execution_async(map_id:, serialized_inputs:, reactor_class_info:, strict_ordering:,
80
- parent_context_id:, parent_reactor_class_name:, step_name:)
81
- RubyReactor::SidekiqWorkers::MapExecutionWorker.perform_async(
83
+ parent_context_id:, parent_reactor_class_name:, step_name:, fail_fast: nil)
84
+ # rubocop:enable Metrics/ParameterLists
85
+ job_id = RubyReactor::SidekiqWorkers::MapExecutionWorker.perform_async(
82
86
  {
83
87
  "map_id" => map_id,
84
88
  "serialized_inputs" => serialized_inputs,
@@ -86,10 +90,11 @@ module RubyReactor
86
90
  "strict_ordering" => strict_ordering,
87
91
  "parent_context_id" => parent_context_id,
88
92
  "parent_reactor_class_name" => parent_reactor_class_name,
89
- "step_name" => step_name
93
+ "step_name" => step_name,
94
+ "fail_fast" => fail_fast
90
95
  }
91
96
  )
97
+ RubyReactor::AsyncResult.new(job_id: job_id)
92
98
  end
93
- # rubocop:enable Metrics/ParameterLists
94
99
  end
95
100
  end
@@ -39,9 +39,7 @@ module RubyReactor
39
39
  executor = Executor.new(context.reactor_class, {}, context)
40
40
  executor.resume_execution
41
41
  executor.save_context
42
-
43
- # Return the executor (which now has the result stored in it)
44
- executor
42
+ executor.result
45
43
  end
46
44
 
47
45
  private
@@ -84,7 +84,6 @@ module RubyReactor
84
84
  def link_contexts(child_context, parent_context)
85
85
  child_context.parent_context = parent_context
86
86
  child_context.root_context = parent_context.root_context || parent_context
87
- child_context.test_mode = parent_context.test_mode
88
87
  child_context.inline_async_execution = parent_context.inline_async_execution
89
88
  end
90
89
 
@@ -28,6 +28,9 @@ module RubyReactor
28
28
  inputs = {}
29
29
 
30
30
  mappings.each do |mapped_input_name, source|
31
+ # Handle serialized template objects (Hashes from Sidekiq)
32
+ source = ContextSerializer.deserialize_value(source) if source.is_a?(Hash) && source["_type"]
33
+
31
34
  value = if source.is_a?(RubyReactor::Template::Element)
32
35
  # Handle element reference
33
36
  # For now assuming element() refers to the current map's element
@@ -54,7 +57,9 @@ module RubyReactor
54
57
  private
55
58
 
56
59
  def should_run_async?(arguments, context)
57
- arguments[:async] && !context.inline_async_execution
60
+ return false if context.inline_async_execution
61
+
62
+ arguments[:async]
58
63
  end
59
64
 
60
65
  def run_inline(arguments, context)
@@ -115,27 +120,21 @@ module RubyReactor
115
120
  def link_contexts(child_context, parent_context)
116
121
  child_context.parent_context = parent_context
117
122
  child_context.root_context = parent_context.root_context || parent_context
118
- child_context.test_mode = parent_context.test_mode
119
123
  child_context.inline_async_execution = parent_context.inline_async_execution
120
124
  end
121
125
 
122
- def process_results(results, collect_block, fail_fast = true)
126
+ def process_results(results, collect_block, _fail_fast = true)
123
127
  if collect_block
124
128
  begin
125
129
  # Collect block receives Result objects when fail_fast is false, values when true
126
- RubyReactor::Success(collect_block.call(results))
130
+ return RubyReactor::Success(collect_block.call(results))
127
131
  rescue StandardError => e
128
- RubyReactor::Failure(e)
132
+ return RubyReactor::Failure(e)
129
133
  end
130
- elsif fail_fast
131
- # Default behavior when no collect block
132
- # Current behavior: results are already values
133
- RubyReactor::Success(results)
134
- else
135
- # New behavior: extract successful values only
136
- successes = results.select(&:success?).map(&:value)
137
- RubyReactor::Success(successes)
138
134
  end
135
+
136
+ # Simplified: both branches returned Success(results)
137
+ RubyReactor::Success(results)
139
138
  end
140
139
 
141
140
  def extract_path(value, path)
@@ -157,18 +156,9 @@ module RubyReactor
157
156
 
158
157
  reactor_class_info = build_reactor_class_info(arguments[:mapped_reactor_class], context, step_name)
159
158
 
160
- job_id = if arguments[:batch_size]
161
- storage = RubyReactor.configuration.storage_adapter
162
- storage.set_last_queued_index(map_id, arguments[:batch_size] - 1, context.reactor_class.name)
163
- queue_fan_out(
164
- map_id: map_id, arguments: arguments, context: context,
165
- reactor_class_info: reactor_class_info, step_name: step_name,
166
- limit: arguments[:batch_size]
167
- )
168
- else
169
- queue_single_worker(map_id: map_id, arguments: arguments, context: context,
170
- reactor_class_info: reactor_class_info, step_name: step_name)
171
- end
159
+ initialize_map_metadata(map_id, arguments, context, reactor_class_info)
160
+
161
+ job_id = dispatch_async_map(map_id, arguments, context, reactor_class_info, step_name)
172
162
 
173
163
  # Store reference in composed_contexts so the UI knows where to find elements
174
164
  context.composed_contexts[step_name.to_s] = {
@@ -178,7 +168,42 @@ module RubyReactor
178
168
  element_reactor_class: arguments[:mapped_reactor_class].name
179
169
  }
180
170
 
181
- RubyReactor::AsyncResult.new(job_id: job_id, intermediate_results: context.intermediate_results)
171
+ RubyReactor::AsyncResult.new(
172
+ job_id: job_id,
173
+ intermediate_results: context.intermediate_results,
174
+ execution_id: context.context_id
175
+ )
176
+ end
177
+
178
+ def initialize_map_metadata(map_id, arguments, context, reactor_class_info)
179
+ storage = RubyReactor.configuration.storage_adapter
180
+ storage.initialize_map_operation(
181
+ map_id, arguments[:source].size, context.reactor_class.name,
182
+ strict_ordering: arguments[:strict_ordering], reactor_class_info: reactor_class_info
183
+ )
184
+ end
185
+
186
+ def dispatch_async_map(map_id, arguments, context, reactor_class_info, step_name)
187
+ if arguments[:batch_size]
188
+ # Use new Dispatcher with Backpressure
189
+ RubyReactor::Map::Dispatcher.perform(
190
+ map_id: map_id,
191
+ parent_context_id: context.context_id,
192
+ parent_reactor_class_name: context.reactor_class.name,
193
+ source: arguments[:source],
194
+ batch_size: arguments[:batch_size],
195
+ step_name: step_name,
196
+ argument_mappings: arguments[:argument_mappings],
197
+ strict_ordering: arguments[:strict_ordering],
198
+ mapped_reactor_class: arguments[:mapped_reactor_class],
199
+ fail_fast: arguments[:fail_fast].nil? || arguments[:fail_fast]
200
+ )
201
+ queue_collector(map_id, context, step_name, arguments[:strict_ordering])
202
+ "map:#{map_id}"
203
+ else
204
+ queue_single_worker(map_id: map_id, arguments: arguments, context: context,
205
+ reactor_class_info: reactor_class_info, step_name: step_name)
206
+ end
182
207
  end
183
208
 
184
209
  def prepare_async_execution(context, map_id, count)
@@ -252,7 +277,7 @@ module RubyReactor
252
277
  map_id: map_id, serialized_inputs: serialized_inputs,
253
278
  reactor_class_info: reactor_class_info, strict_ordering: arguments[:strict_ordering],
254
279
  parent_context_id: context.context_id, parent_reactor_class_name: context.reactor_class.name,
255
- step_name: step_name.to_s
280
+ step_name: step_name.to_s, fail_fast: arguments[:fail_fast].nil? || arguments[:fail_fast]
256
281
  )
257
282
  end
258
283
  end
@@ -230,6 +230,61 @@ module RubyReactor
230
230
  @redis.get(key)
231
231
  end
232
232
 
233
+ def set_map_offset(map_id, offset, reactor_class_name)
234
+ key = map_offset_key(map_id, reactor_class_name)
235
+ @redis.set(key, offset, ex: 86_400)
236
+ end
237
+
238
+ def set_map_offset_if_not_exists(map_id, offset, reactor_class_name)
239
+ key = map_offset_key(map_id, reactor_class_name)
240
+ @redis.set(key, offset, nx: true, ex: 86_400)
241
+ end
242
+
243
+ def retrieve_map_offset(map_id, reactor_class_name)
244
+ key = map_offset_key(map_id, reactor_class_name)
245
+ @redis.get(key)
246
+ end
247
+
248
+ def increment_map_offset(map_id, increment, reactor_class_name)
249
+ key = map_offset_key(map_id, reactor_class_name)
250
+ @redis.incrby(key, increment)
251
+ end
252
+
253
+ def retrieve_map_results_batch(map_id, reactor_class_name, offset:, limit:, strict_ordering: true)
254
+ key = map_results_key(map_id, reactor_class_name)
255
+
256
+ if strict_ordering
257
+ # For Hash based results (indexed), we can use HMGET if we know the keys.
258
+ # Since we use 0-based index keys, we can generate the keys for the batch.
259
+ fields = (offset...(offset + limit)).map(&:to_s)
260
+ results = @redis.hmget(key, *fields)
261
+
262
+ # HMGET returns nil for missing fields, compact them?
263
+ # Or should we respect the holes?
264
+ # Map results are usually dense.
265
+ results.compact.map { |r| JSON.parse(r) }
266
+ else
267
+ # For List based results
268
+ # LRANGE uses inclusive ending index
269
+ end_index = offset + limit - 1
270
+ results = @redis.lrange(key, offset, end_index)
271
+ results.map { |r| JSON.parse(r) }
272
+ end
273
+ end
274
+
275
+ def count_map_results(map_id, reactor_class_name)
276
+ key = map_results_key(map_id, reactor_class_name)
277
+ type = @redis.type(key)
278
+
279
+ if type == "hash"
280
+ @redis.hlen(key)
281
+ elsif type == "list"
282
+ @redis.llen(key)
283
+ else
284
+ 0
285
+ end
286
+ end
287
+
233
288
  private
234
289
 
235
290
  def fetch_and_filter_reactors(keys)
@@ -267,6 +322,10 @@ module RubyReactor
267
322
  "reactor:#{reactor_class_name}:map:#{map_id}:last_queued_index"
268
323
  end
269
324
 
325
+ def map_offset_key(map_id, reactor_class_name)
326
+ "reactor:#{reactor_class_name}:map:#{map_id}:offset"
327
+ end
328
+
270
329
  def correlation_id_key(correlation_id, reactor_class_name)
271
330
  "reactor:#{reactor_class_name}:correlation:#{correlation_id}"
272
331
  end
@@ -0,0 +1,32 @@
1
+ # frozen_string_literal: true
2
+
3
+ module RubyReactor
4
+ module Template
5
+ class DynamicSource < Base
6
+ attr_reader :block, :argument_mappings
7
+
8
+ def initialize(argument_mappings, &block)
9
+ super()
10
+ @block = block
11
+ @argument_mappings = argument_mappings
12
+ end
13
+
14
+ def resolve(context)
15
+ args = {}
16
+ @argument_mappings.each do |name, source|
17
+ # Handle serialized template objects if necessary, similar to MapStep logic
18
+ # But here we assume source is a Template object that responds to resolve
19
+ next if source.is_a?(RubyReactor::Template::Element)
20
+
21
+ args[name] = if source.respond_to?(:resolve)
22
+ source.resolve(context)
23
+ else
24
+ source
25
+ end
26
+ end
27
+
28
+ @block.call(args, context)
29
+ end
30
+ end
31
+ end
32
+ end
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module RubyReactor
4
- VERSION = "0.2.0"
4
+ VERSION = "0.3.1"
5
5
  end
@@ -24,39 +24,44 @@ module RubyReactor
24
24
  r.on String do |reactor_id|
25
25
  # GET /api/reactors/:id
26
26
  r.get do
27
- data = RubyReactor::Configuration.instance.storage_adapter.find_context_by_id(reactor_id)
28
- return { error: "Reactor not found" } unless data
27
+ raw_data = RubyReactor::Configuration.instance.storage_adapter.find_context_by_id(reactor_id)
28
+ return { error: "Reactor not found" } unless raw_data
29
29
 
30
- reactor_class = data["reactor_class"] ? Object.const_get(data["reactor_class"]) : nil
30
+ # Clean data for API usage
31
+ data = ContextSerializer.deserialize_value(raw_data)
32
+
33
+ reactor_class = data[:reactor_class] ? Object.const_get(data[:reactor_class].to_s) : nil
31
34
  structure = {}
32
35
 
33
36
  structure = self.class.build_structure(reactor_class) if reactor_class.respond_to?(:steps)
34
37
 
35
- {
36
- id: data["context_id"],
37
- class: data["reactor_class"],
38
- status: if %w[failed paused completed running].include?(data["status"])
39
- data["status"]
40
- elsif data["cancelled"]
38
+ response_data = {
39
+ id: data[:context_id],
40
+ class: data[:reactor_class].to_s,
41
+ status: if %w[failed paused completed running].include?(data[:status].to_s)
42
+ data[:status].to_s
43
+ elsif data[:cancelled]
41
44
  "cancelled"
42
45
  else
43
- (data["current_step"] ? "running" : "completed")
46
+ (data[:current_step] ? "running" : "completed")
44
47
  end,
45
- current_step: data["current_step"],
46
- retry_count: data["retry_count"] || 0,
47
- undo_stack: data["undo_stack"] || [],
48
- step_attempts: data.dig("retry_context", "step_attempts") || {},
49
- created_at: data["started_at"],
50
- inputs: data["inputs"],
51
- intermediate_results: data["intermediate_results"],
48
+ current_step: data[:current_step].to_s,
49
+ retry_count: data[:retry_count] || 0,
50
+ undo_stack: data[:undo_stack] || [],
51
+ step_attempts: data.dig(:retry_context, :step_attempts) || {},
52
+ created_at: data[:started_at],
53
+ inputs: data[:inputs],
54
+ intermediate_results: data[:intermediate_results],
52
55
  structure: structure,
53
- steps: data["execution_trace"] || [],
56
+ steps: data[:execution_trace] || [],
54
57
  composed_contexts: self.class.hydrate_composed_contexts(
55
- data["composed_contexts"] || {},
56
- data["reactor_class"]
58
+ data[:composed_contexts] || {},
59
+ data[:reactor_class]&.to_s
57
60
  ),
58
- error: data["failure_reason"]
61
+ error: data[:failure_reason]
59
62
  }
63
+
64
+ ContextSerializer.simplify_for_api(response_data)
60
65
  end
61
66
 
62
67
  # POST /api/reactors/:id/retry
@@ -159,7 +164,8 @@ module RubyReactor
159
164
  return {} unless composed_contexts.is_a?(Hash)
160
165
 
161
166
  composed_contexts.transform_values do |value|
162
- if ["map_ref", :map_ref].include?(value["type"])
167
+ type = value[:type] || value["type"]
168
+ if ["map_ref", :map_ref].include?(type)
163
169
  hydrate_map_ref(value, reactor_class_name)
164
170
  else
165
171
  value
@@ -169,10 +175,12 @@ module RubyReactor
169
175
 
170
176
  def self.hydrate_map_ref(ref_data, reactor_class_name)
171
177
  storage = RubyReactor.configuration.storage_adapter
172
- map_id = ref_data["map_id"]
178
+ map_id = ref_data[:map_id] || ref_data["map_id"]
173
179
 
174
180
  # Use the specific element reactor class if available, otherwise fallback to parent
175
- target_reactor_class = ref_data["element_reactor_class"] || reactor_class_name
181
+ target_reactor_class = ref_data[:element_reactor_class] ||
182
+ ref_data["element_reactor_class"] ||
183
+ reactor_class_name
176
184
 
177
185
  # 1. Check for specific failure (O(1))
178
186
  # Stored by ResultHandler when a map element fails
data/lib/ruby_reactor.rb CHANGED
@@ -20,7 +20,7 @@ rescue LoadError
20
20
  end
21
21
 
22
22
  loader = Zeitwerk::Loader.for_gem
23
- loader.inflector.inflect("api" => "API")
23
+ loader.inflector.inflect("api" => "API", "rspec" => "RSpec")
24
24
  loader.setup
25
25
 
26
26
  module RubyReactor
@@ -39,20 +39,42 @@ module RubyReactor
39
39
  def failure?
40
40
  false
41
41
  end
42
+
43
+ def to_h
44
+ { success: true, value: @value }
45
+ end
42
46
  end
43
47
 
44
48
  class Failure
45
49
  attr_reader :error, :retryable, :step_name, :inputs, :backtrace, :reactor_name, :step_arguments, :exception_class,
46
50
  :file_path, :line_number, :code_snippet, :validation_errors
47
51
 
48
- # rubocop:disable Metrics/ParameterLists
52
+ # rubocop:disable Metrics/ParameterLists, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
49
53
  def initialize(error, retryable: nil, step_name: nil, inputs: {}, backtrace: nil, redact_inputs: [],
50
54
  reactor_name: nil, step_arguments: {}, exception_class: nil,
51
55
  file_path: nil, line_number: nil, code_snippet: nil, invalid_payload: false, validation_errors: nil)
52
- # rubocop:enable Metrics/ParameterLists
56
+ # rubocop:enable Metrics/ParameterLists, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
53
57
  @error = error
58
+
59
+ # Handle case where error is a serialized hash (e.g. from async failure propagation)
60
+ if @error.is_a?(Hash)
61
+ attributes = extract_attributes_from_hash(@error)
62
+ @error = attributes[:error]
63
+ retryable = attributes[:retryable] if retryable.nil?
64
+ step_name ||= attributes[:step_name]
65
+ reactor_name ||= attributes[:reactor_name]
66
+ inputs = attributes[:inputs] if inputs.empty?
67
+ step_arguments = attributes[:step_arguments] if step_arguments.empty?
68
+ raw_backtrace ||= attributes[:backtrace] || backtrace
69
+ exception_class ||= attributes[:exception_class]
70
+ file_path ||= attributes[:file_path]
71
+ line_number ||= attributes[:line_number]
72
+ code_snippet ||= attributes[:code_snippet]
73
+ validation_errors ||= attributes[:validation_errors]
74
+ end
75
+
54
76
  @retryable = if retryable.nil?
55
- error.respond_to?(:retryable?) ? error.retryable? : true
77
+ @error.respond_to?(:retryable?) ? @error.retryable? : true
56
78
  else
57
79
  retryable
58
80
  end
@@ -60,10 +82,10 @@ module RubyReactor
60
82
  @reactor_name = reactor_name
61
83
  @inputs = inputs
62
84
  @step_arguments = step_arguments
63
- raw_backtrace = backtrace || (error.respond_to?(:backtrace) ? error.backtrace : caller)
85
+ raw_backtrace ||= backtrace || (@error.respond_to?(:backtrace) ? @error.backtrace : caller)
64
86
  @backtrace = filter_backtrace(raw_backtrace)
65
87
  @redact_inputs = redact_inputs
66
- @exception_class = exception_class || (error.is_a?(Exception) ? error.class.name : nil)
88
+ @exception_class = exception_class || (@error.is_a?(Exception) ? @error.class.name : nil)
67
89
  @file_path = file_path
68
90
  @line_number = line_number
69
91
  @code_snippet = code_snippet
@@ -99,6 +121,28 @@ module RubyReactor
99
121
  msg.join("\n")
100
122
  end
101
123
 
124
+ def to_s
125
+ message
126
+ end
127
+
128
+ def to_h
129
+ {
130
+ success: false,
131
+ error: error_message,
132
+ step_name: @step_name,
133
+ inputs: @inputs,
134
+ redact_inputs: @redact_inputs,
135
+ reactor_name: @reactor_name,
136
+ step_arguments: @step_arguments,
137
+ exception_class: @exception_class,
138
+ file_path: @file_path,
139
+ line_number: @line_number,
140
+ code_snippet: @code_snippet,
141
+ validation_errors: @validation_errors,
142
+ backtrace: @backtrace
143
+ }
144
+ end
145
+
102
146
  private
103
147
 
104
148
  def build_header
@@ -147,10 +191,6 @@ module RubyReactor
147
191
  msg << backtrace.take(10).map { |line| " #{line}" }.join("\n")
148
192
  end
149
193
 
150
- def to_s
151
- message
152
- end
153
-
154
194
  def filter_backtrace(backtrace)
155
195
  return backtrace if ENV["RUBY_REACTOR_DEBUG"] == "true"
156
196
  return backtrace if backtrace.nil? || backtrace.empty?
@@ -177,6 +217,26 @@ module RubyReactor
177
217
  def error_message
178
218
  @error.respond_to?(:message) ? @error.message : @error.to_s
179
219
  end
220
+
221
+ def extract_attributes_from_hash(error_hash)
222
+ # Ensure indifferent access
223
+ err = ->(k) { error_hash[k.to_s] || error_hash[k.to_sym] }
224
+
225
+ {
226
+ error: err[:message] || err[:error] || error_hash,
227
+ retryable: err[:retryable],
228
+ step_name: err[:step_name],
229
+ reactor_name: err[:reactor_name],
230
+ inputs: err[:inputs] || {},
231
+ step_arguments: err[:step_arguments] || {},
232
+ backtrace: err[:backtrace],
233
+ exception_class: err[:exception_class],
234
+ file_path: err[:file_path],
235
+ line_number: err[:line_number],
236
+ code_snippet: err[:code_snippet],
237
+ validation_errors: err[:validation_errors]
238
+ }
239
+ end
180
240
  end
181
241
 
182
242
  # Async result for background job execution
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ruby_reactor
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.0
4
+ version: 0.3.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Artur
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2026-01-07 00:00:00.000000000 Z
11
+ date: 2026-01-22 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: dry-validation
@@ -108,6 +108,7 @@ files:
108
108
  - documentation/images/payment_workflow.png
109
109
  - documentation/interrupts.md
110
110
  - documentation/retry_configuration.md
111
+ - documentation/testing.md
111
112
  - gui/.gitignore
112
113
  - gui/README.md
113
114
  - gui/eslint.config.js
@@ -139,7 +140,6 @@ files:
139
140
  - gui/vite.config.ts
140
141
  - gui/vitest.config.ts
141
142
  - lib/ruby_reactor.rb
142
- - lib/ruby_reactor/async_router.rb
143
143
  - lib/ruby_reactor/configuration.rb
144
144
  - lib/ruby_reactor/context.rb
145
145
  - lib/ruby_reactor/context_serializer.rb
@@ -171,14 +171,22 @@ files:
171
171
  - lib/ruby_reactor/executor/step_executor.rb
172
172
  - lib/ruby_reactor/interrupt_result.rb
173
173
  - lib/ruby_reactor/map/collector.rb
174
+ - lib/ruby_reactor/map/dispatcher.rb
174
175
  - lib/ruby_reactor/map/element_executor.rb
175
176
  - lib/ruby_reactor/map/execution.rb
176
177
  - lib/ruby_reactor/map/helpers.rb
178
+ - lib/ruby_reactor/map/result_enumerator.rb
177
179
  - lib/ruby_reactor/max_retries_exhausted_failure.rb
178
180
  - lib/ruby_reactor/reactor.rb
179
181
  - lib/ruby_reactor/registry.rb
180
182
  - lib/ruby_reactor/retry_context.rb
181
183
  - lib/ruby_reactor/retry_queued_result.rb
184
+ - lib/ruby_reactor/rspec.rb
185
+ - lib/ruby_reactor/rspec/helpers.rb
186
+ - lib/ruby_reactor/rspec/matchers.rb
187
+ - lib/ruby_reactor/rspec/step_executor_patch.rb
188
+ - lib/ruby_reactor/rspec/test_subject.rb
189
+ - lib/ruby_reactor/sidekiq_adapter.rb
182
190
  - lib/ruby_reactor/sidekiq_workers/map_collector_worker.rb
183
191
  - lib/ruby_reactor/sidekiq_workers/map_element_worker.rb
184
192
  - lib/ruby_reactor/sidekiq_workers/map_execution_worker.rb
@@ -190,6 +198,7 @@ files:
190
198
  - lib/ruby_reactor/storage/configuration.rb
191
199
  - lib/ruby_reactor/storage/redis_adapter.rb
192
200
  - lib/ruby_reactor/template/base.rb
201
+ - lib/ruby_reactor/template/dynamic_source.rb
193
202
  - lib/ruby_reactor/template/element.rb
194
203
  - lib/ruby_reactor/template/input.rb
195
204
  - lib/ruby_reactor/template/result.rb