ruby_reactor 0.1.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82) hide show
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +10 -2
  3. data/README.md +177 -3
  4. data/Rakefile +25 -0
  5. data/documentation/data_pipelines.md +90 -84
  6. data/documentation/images/failed_order_processing.png +0 -0
  7. data/documentation/images/payment_workflow.png +0 -0
  8. data/documentation/interrupts.md +161 -0
  9. data/gui/.gitignore +24 -0
  10. data/gui/README.md +73 -0
  11. data/gui/eslint.config.js +23 -0
  12. data/gui/index.html +13 -0
  13. data/gui/package-lock.json +5925 -0
  14. data/gui/package.json +46 -0
  15. data/gui/postcss.config.js +6 -0
  16. data/gui/public/vite.svg +1 -0
  17. data/gui/src/App.css +42 -0
  18. data/gui/src/App.tsx +51 -0
  19. data/gui/src/assets/react.svg +1 -0
  20. data/gui/src/components/DagVisualizer.tsx +424 -0
  21. data/gui/src/components/Dashboard.tsx +163 -0
  22. data/gui/src/components/ErrorBoundary.tsx +47 -0
  23. data/gui/src/components/ReactorDetail.tsx +135 -0
  24. data/gui/src/components/StepInspector.tsx +492 -0
  25. data/gui/src/components/__tests__/DagVisualizer.test.tsx +140 -0
  26. data/gui/src/components/__tests__/ReactorDetail.test.tsx +111 -0
  27. data/gui/src/components/__tests__/StepInspector.test.tsx +408 -0
  28. data/gui/src/globals.d.ts +7 -0
  29. data/gui/src/index.css +14 -0
  30. data/gui/src/lib/utils.ts +13 -0
  31. data/gui/src/main.tsx +14 -0
  32. data/gui/src/test/setup.ts +11 -0
  33. data/gui/tailwind.config.js +11 -0
  34. data/gui/tsconfig.app.json +28 -0
  35. data/gui/tsconfig.json +7 -0
  36. data/gui/tsconfig.node.json +26 -0
  37. data/gui/vite.config.ts +8 -0
  38. data/gui/vitest.config.ts +13 -0
  39. data/lib/ruby_reactor/async_router.rb +12 -8
  40. data/lib/ruby_reactor/context.rb +35 -9
  41. data/lib/ruby_reactor/context_serializer.rb +15 -0
  42. data/lib/ruby_reactor/dependency_graph.rb +2 -0
  43. data/lib/ruby_reactor/dsl/compose_builder.rb +8 -0
  44. data/lib/ruby_reactor/dsl/interrupt_builder.rb +48 -0
  45. data/lib/ruby_reactor/dsl/interrupt_step_config.rb +21 -0
  46. data/lib/ruby_reactor/dsl/map_builder.rb +14 -2
  47. data/lib/ruby_reactor/dsl/reactor.rb +12 -0
  48. data/lib/ruby_reactor/dsl/step_builder.rb +4 -0
  49. data/lib/ruby_reactor/executor/compensation_manager.rb +60 -27
  50. data/lib/ruby_reactor/executor/graph_manager.rb +2 -0
  51. data/lib/ruby_reactor/executor/result_handler.rb +118 -39
  52. data/lib/ruby_reactor/executor/retry_manager.rb +12 -1
  53. data/lib/ruby_reactor/executor/step_executor.rb +38 -4
  54. data/lib/ruby_reactor/executor.rb +86 -13
  55. data/lib/ruby_reactor/interrupt_result.rb +20 -0
  56. data/lib/ruby_reactor/map/collector.rb +71 -35
  57. data/lib/ruby_reactor/map/dispatcher.rb +162 -0
  58. data/lib/ruby_reactor/map/element_executor.rb +62 -56
  59. data/lib/ruby_reactor/map/execution.rb +44 -4
  60. data/lib/ruby_reactor/map/helpers.rb +44 -6
  61. data/lib/ruby_reactor/map/result_enumerator.rb +105 -0
  62. data/lib/ruby_reactor/reactor.rb +187 -1
  63. data/lib/ruby_reactor/registry.rb +25 -0
  64. data/lib/ruby_reactor/sidekiq_workers/worker.rb +1 -1
  65. data/lib/ruby_reactor/step/compose_step.rb +22 -6
  66. data/lib/ruby_reactor/step/map_step.rb +78 -19
  67. data/lib/ruby_reactor/storage/adapter.rb +32 -0
  68. data/lib/ruby_reactor/storage/redis_adapter.rb +213 -11
  69. data/lib/ruby_reactor/template/dynamic_source.rb +32 -0
  70. data/lib/ruby_reactor/utils/code_extractor.rb +31 -0
  71. data/lib/ruby_reactor/version.rb +1 -1
  72. data/lib/ruby_reactor/web/api.rb +206 -0
  73. data/lib/ruby_reactor/web/application.rb +53 -0
  74. data/lib/ruby_reactor/web/config.ru +5 -0
  75. data/lib/ruby_reactor/web/public/assets/index-VdeLgH9k.js +19 -0
  76. data/lib/ruby_reactor/web/public/assets/index-_z-6BvuM.css +1 -0
  77. data/lib/ruby_reactor/web/public/index.html +14 -0
  78. data/lib/ruby_reactor/web/public/vite.svg +1 -0
  79. data/lib/ruby_reactor.rb +94 -28
  80. data/llms-full.txt +66 -0
  81. data/llms.txt +7 -0
  82. metadata +66 -2
@@ -0,0 +1,162 @@
1
+ # frozen_string_literal: true
2
+
3
+ module RubyReactor
4
+ module Map
5
+ class Dispatcher
6
+ extend Helpers
7
+
8
+ def self.perform(arguments)
9
+ arguments = arguments.transform_keys(&:to_sym)
10
+ parent_reactor_class_name = arguments[:parent_reactor_class_name]
11
+
12
+ storage = RubyReactor.configuration.storage_adapter
13
+
14
+ # Load parent context to resolve source
15
+ parent_context = load_parent_context_from_storage(
16
+ arguments[:parent_context_id],
17
+ parent_reactor_class_name,
18
+ storage
19
+ )
20
+
21
+ # Initialize metadata if first run
22
+ initialize_map_metadata(arguments, storage) unless arguments[:continuation]
23
+
24
+ # Resolve Source
25
+ # We need to resolve the source to know what we are iterating.
26
+ # Strict "Array Only" rule means we expect an Array-like object or we handle the
27
+ # "Query Builder" result if user used it.
28
+ source = resolve_source(arguments, parent_context)
29
+
30
+ # Dispatch next batch
31
+ dispatch_batch(source, arguments, parent_context, storage)
32
+ end
33
+
34
+ def self.initialize_map_metadata(arguments, storage)
35
+ map_id = arguments[:map_id]
36
+ reactor_class_name = arguments[:parent_reactor_class_name]
37
+
38
+ # Reset or set initial offset. Use NX to act as a mutex/guard against duplicate initialization.
39
+ storage.set_map_offset_if_not_exists(map_id, 0, reactor_class_name)
40
+ end
41
+
42
+ def self.resolve_source(arguments, context)
43
+ # Arguments has :source which is a Template::Input or similar.
44
+ # We need to resolve it against the context.
45
+ source_template = arguments[:source]
46
+
47
+ # Fallback: look up from step config if missing (e.g. called from ElementExecutor)
48
+ if source_template.nil? && context
49
+ step_name = arguments[:step_name]
50
+ step_config = context.reactor_class.steps[step_name.to_sym]
51
+ source_template = step_config.arguments[:source][:source]
52
+ end
53
+
54
+ # If source is packaged in arguments as a value (deserialized)
55
+ return source_template if source_template.is_a?(Array)
56
+
57
+ # Resolve template
58
+ return source_template.resolve(context) if source_template.respond_to?(:resolve)
59
+
60
+ source_template
61
+ end
62
+
63
+ def self.dispatch_batch(source, arguments, parent_context, storage)
64
+ map_id = arguments[:map_id]
65
+ reactor_class_name = arguments[:parent_reactor_class_name]
66
+
67
+ # Fail Fast Check
68
+ if arguments[:fail_fast]
69
+ failed_context_id = storage.retrieve_map_failed_context_id(map_id, reactor_class_name)
70
+ return if failed_context_id
71
+ end
72
+
73
+ batch_size = arguments[:batch_size] || source.size # Default to all if no batch_size (async=true only)
74
+
75
+ # Atomically reserve a batch
76
+ new_offset = storage.increment_map_offset(map_id, batch_size, reactor_class_name)
77
+ current_offset = new_offset - batch_size
78
+
79
+ batch_elements = if source.is_a?(Array)
80
+ source.slice(current_offset, batch_size) || []
81
+ elsif source.respond_to?(:offset) && source.respond_to?(:limit)
82
+ # Optimized for ActiveRecord and similar query builders
83
+ source.offset(current_offset).limit(batch_size).to_a
84
+ else
85
+ # Fallback for generic Enumerable
86
+ # This is inefficient for huge sets if not Array, but compliant
87
+ source.drop(current_offset).take(batch_size)
88
+ end
89
+
90
+ return if batch_elements.empty?
91
+
92
+ # Queue Jobs
93
+ queue_options = {
94
+ map_id: map_id,
95
+ arguments: arguments,
96
+ context: parent_context,
97
+ reactor_class_info: resolve_reactor_class_info(arguments, parent_context),
98
+ step_name: arguments[:step_name]
99
+ }
100
+
101
+ batch_elements.each_with_index do |element, i|
102
+ absolute_index = current_offset + i
103
+ queue_element_job(element, absolute_index, queue_options)
104
+ end
105
+ end
106
+
107
+ def self.queue_element_job(element, index, options)
108
+ arguments = options[:arguments]
109
+ context = options[:context]
110
+
111
+ # Resolve mappings
112
+ mappings_template = arguments[:argument_mappings]
113
+
114
+ # Fallback: look up from step config if missing (e.g. called from ElementExecutor)
115
+ if mappings_template.nil? && context
116
+ step_name = options[:step_name] || arguments[:step_name]
117
+ step_config = context.reactor_class.steps[step_name.to_sym]
118
+ mappings_template = step_config.arguments[:argument_mappings]
119
+ end
120
+
121
+ mappings = if mappings_template.respond_to?(:resolve)
122
+ mappings_template.resolve(context)
123
+ else
124
+ mappings_template || {}
125
+ end
126
+
127
+ # Normalize the fallback structure: mappings can arrive wrapped as :source -> Template::Value(Hash)
128
+ if mappings.key?(:source) && mappings[:source].respond_to?(:value) && mappings[:source].value.is_a?(Hash)
129
+ mappings = mappings[:source].value
130
+ end
131
+
132
+ mapped_inputs = build_element_inputs(mappings, context, element)
133
+ serialized_inputs = ContextSerializer.serialize_value(mapped_inputs)
134
+
135
+ RubyReactor.configuration.async_router.perform_map_element_async(
136
+ map_id: options[:map_id],
137
+ element_id: "#{options[:map_id]}:#{index}",
138
+ index: index,
139
+ serialized_inputs: serialized_inputs,
140
+ reactor_class_info: options[:reactor_class_info],
141
+ strict_ordering: arguments[:strict_ordering],
142
+ parent_context_id: context.context_id,
143
+ parent_reactor_class_name: context.reactor_class.name,
144
+ step_name: options[:step_name].to_s,
145
+ batch_size: arguments[:batch_size], # Passed to worker so it knows to trigger next batch?
146
+ fail_fast: arguments[:fail_fast]
147
+ )
148
+ end
149
+
150
+ def self.resolve_reactor_class_info(arguments, context)
151
+ mapped_reactor_class = arguments[:mapped_reactor_class]
152
+ step_name = arguments[:step_name]
153
+
154
+ if mapped_reactor_class.respond_to?(:name)
155
+ { "type" => "class", "name" => mapped_reactor_class.name }
156
+ else
157
+ { "type" => "inline", "parent" => context.reactor_class.name, "step" => step_name.to_s }
158
+ end
159
+ end
160
+ end
161
+ end
162
+ end
@@ -25,6 +25,11 @@ module RubyReactor
25
25
  context = ContextSerializer.deserialize(serialized_context)
26
26
  context.map_metadata = arguments
27
27
  reactor_class = context.reactor_class
28
+
29
+ # Ensure inputs are present (fallback to serialized_inputs if missing from context)
30
+ if context.inputs.empty? && serialized_inputs
31
+ context.inputs = ContextSerializer.deserialize_value(serialized_inputs)
32
+ end
28
33
  else
29
34
  # Deserialize inputs
30
35
  inputs = ContextSerializer.deserialize_value(serialized_inputs)
@@ -34,9 +39,33 @@ module RubyReactor
34
39
 
35
40
  # Create context
36
41
  context = Context.new(inputs, reactor_class)
42
+ context.parent_context_id = parent_context_id
37
43
  context.map_metadata = arguments
38
44
  end
45
+
39
46
  storage = RubyReactor.configuration.storage_adapter
47
+ storage.store_map_element_context_id(map_id, context.context_id, parent_reactor_class_name)
48
+
49
+ # Fail Fast Check
50
+ if arguments[:fail_fast]
51
+ failed_context_id = storage.retrieve_map_failed_context_id(map_id, parent_reactor_class_name)
52
+ if failed_context_id
53
+ # Decrement counter as we are skipping execution
54
+ new_count = storage.decrement_map_counter(map_id, parent_reactor_class_name)
55
+ return unless new_count.zero?
56
+
57
+ # Trigger collection if we are the last one (skipped or otherwise)
58
+ RubyReactor.configuration.async_router.perform_map_collection_async(
59
+ parent_context_id: parent_context_id,
60
+ map_id: map_id,
61
+ parent_reactor_class_name: parent_reactor_class_name,
62
+ step_name: step_name,
63
+ strict_ordering: strict_ordering,
64
+ timeout: 3600
65
+ )
66
+ return
67
+ end
68
+ end
40
69
 
41
70
  # Execute
42
71
  executor = Executor.new(reactor_class, {}, context)
@@ -50,27 +79,35 @@ module RubyReactor
50
79
  result = executor.result
51
80
 
52
81
  if result.is_a?(RetryQueuedResult)
53
- queue_next_batch(arguments) if batch_size
82
+ trigger_next_batch_if_needed(arguments, index, batch_size)
54
83
  return
55
84
  end
56
85
 
57
86
  # Store result
58
87
 
59
- # Store result
60
-
61
88
  if result.success?
62
- storage.store_map_result(map_id, index, result.value, parent_reactor_class_name,
89
+ storage.store_map_result(map_id, index,
90
+ ContextSerializer.serialize_value(result.value),
91
+ parent_reactor_class_name,
63
92
  strict_ordering: strict_ordering)
64
93
  else
94
+ # Trigger Compensation Logic
95
+ executor.undo_all
96
+
65
97
  # Store error
66
98
  storage.store_map_result(map_id, index, { _error: result.error }, parent_reactor_class_name,
67
99
  strict_ordering: strict_ordering)
100
+
101
+ if arguments[:fail_fast]
102
+ storage.store_map_failed_context_id(map_id, context.context_id, parent_reactor_class_name)
103
+ end
68
104
  end
69
105
 
70
106
  # Decrement counter
71
107
  new_count = storage.decrement_map_counter(map_id, parent_reactor_class_name)
72
108
 
73
- queue_next_batch(arguments) if batch_size
109
+ # Trigger next batch if it's the last element of the current batch
110
+ trigger_next_batch_if_needed(arguments, index, batch_size)
74
111
 
75
112
  return unless new_count.zero?
76
113
 
@@ -85,23 +122,6 @@ module RubyReactor
85
122
  )
86
123
  end
87
124
 
88
- def self.queue_next_batch(arguments)
89
- storage = RubyReactor.configuration.storage_adapter
90
- map_id = arguments[:map_id]
91
- reactor_class_name = arguments[:parent_reactor_class_name]
92
-
93
- next_index = storage.increment_last_queued_index(map_id, reactor_class_name)
94
- total_count = storage.retrieve_map_metadata(map_id, reactor_class_name)["count"]
95
-
96
- return unless next_index < total_count
97
-
98
- parent_context = load_parent_context(arguments, reactor_class_name, storage)
99
- element = resolve_next_element(arguments, parent_context, next_index)
100
- serialized_inputs = build_serialized_inputs(arguments, parent_context, element)
101
-
102
- queue_element_job(arguments, map_id, next_index, serialized_inputs, reactor_class_name)
103
- end
104
-
105
125
  def self.load_parent_context(arguments, reactor_class_name, storage)
106
126
  parent_context_data = storage.retrieve_context(arguments[:parent_context_id], reactor_class_name)
107
127
  parent_reactor_class = Object.const_get(reactor_class_name)
@@ -113,42 +133,28 @@ module RubyReactor
113
133
  parent_context
114
134
  end
115
135
 
116
- def self.resolve_next_element(arguments, parent_context, next_index)
117
- parent_reactor_class = parent_context.reactor_class
118
- step_config = parent_reactor_class.steps[arguments[:step_name].to_sym]
119
-
120
- source_template = step_config.arguments[:source][:source]
121
- source = source_template.resolve(parent_context)
122
- source[next_index]
136
+ # Legacy helpers resolve_next_element, build_serialized_inputs, queue_element_job
137
+ # are REMOVED as they are no longer used for self-queuing.
138
+
139
+ # Input building is no longer needed in this worker: `perform` receives
140
+ # `serialized_inputs` directly from the Dispatcher, so the element-input
141
+ # helpers were removed together with the self-queuing logic noted above.
142
+ # `resolve_reactor_class` is still used by `perform` and is kept.
143
+ # NOTE(review): `build_element_inputs` appears to be provided by Helpers
144
+ # (mixed in elsewhere) — confirm it is still reachable before removing
145
+ # any remaining references from this module.
146
+ #
147
+
148
+ def self.trigger_next_batch_if_needed(arguments, index, batch_size)
149
+ return unless batch_size && ((index + 1) % batch_size).zero?
150
+
151
+ # Trigger Dispatcher for next batch
152
+ next_batch_args = arguments.dup
153
+ next_batch_args[:continuation] = true
154
+ RubyReactor::Map::Dispatcher.perform(next_batch_args)
123
155
  end
124
156
 
125
- def self.build_serialized_inputs(arguments, parent_context, element)
126
- parent_reactor_class = parent_context.reactor_class
127
- step_config = parent_reactor_class.steps[arguments[:step_name].to_sym]
128
-
129
- mappings_template = step_config.arguments[:argument_mappings][:source]
130
- mappings = mappings_template.resolve(parent_context) || {}
131
-
132
- mapped_inputs = build_element_inputs(mappings, parent_context, element)
133
- ContextSerializer.serialize_value(mapped_inputs)
134
- end
135
-
136
- def self.queue_element_job(arguments, map_id, next_index, serialized_inputs, reactor_class_name)
137
- RubyReactor.configuration.async_router.perform_map_element_async(
138
- map_id: map_id,
139
- element_id: "#{map_id}:#{next_index}",
140
- index: next_index,
141
- serialized_inputs: serialized_inputs,
142
- reactor_class_info: arguments[:reactor_class_info],
143
- strict_ordering: arguments[:strict_ordering],
144
- parent_context_id: arguments[:parent_context_id],
145
- parent_reactor_class_name: reactor_class_name,
146
- step_name: arguments[:step_name],
147
- batch_size: arguments[:batch_size]
148
- )
149
- end
150
- private_class_method :queue_next_batch, :load_parent_context,
151
- :resolve_next_element, :build_serialized_inputs, :queue_element_job
157
+ private_class_method :load_parent_context, :trigger_next_batch_if_needed
152
158
  end
153
159
  end
154
160
  end
@@ -21,7 +21,8 @@ module RubyReactor
21
21
  storage_options: {
22
22
  map_id: arguments[:map_id], storage: storage,
23
23
  parent_reactor_class_name: arguments[:parent_reactor_class_name],
24
- strict_ordering: arguments[:strict_ordering]
24
+ strict_ordering: arguments[:strict_ordering],
25
+ fail_fast: arguments[:fail_fast]
25
26
  }
26
27
  )
27
28
 
@@ -31,19 +32,58 @@ module RubyReactor
31
32
 
32
33
  def self.execute_all_elements(source:, mappings:, reactor_class:, parent_context:, storage_options:)
33
34
  source.map.with_index do |element, index|
35
+ if storage_options[:fail_fast]
36
+ failed_context_id = storage_options[:storage].retrieve_map_failed_context_id(
37
+ storage_options[:map_id], storage_options[:parent_reactor_class_name]
38
+ )
39
+ next if failed_context_id
40
+ end
34
41
  element_inputs = build_element_inputs(mappings, parent_context, element)
35
- result = reactor_class.run(element_inputs)
42
+
43
+ # Manually create and link context to ensure parent_context_id is set
44
+ child_context = RubyReactor::Context.new(element_inputs, reactor_class)
45
+ link_contexts(child_context, parent_context)
46
+
47
+ # Ensure we store the element context linkage
48
+ storage_options[:storage].store_map_element_context_id(
49
+ storage_options[:map_id], child_context.context_id, storage_options[:parent_reactor_class_name]
50
+ )
51
+
52
+ # Set map metadata for failure handling
53
+ metadata = {
54
+ map_id: storage_options[:map_id],
55
+ parent_reactor_class_name: storage_options[:parent_reactor_class_name],
56
+ index: index
57
+ }
58
+ child_context.map_metadata = metadata
59
+
60
+ executor = RubyReactor::Executor.new(reactor_class, {}, child_context)
61
+ executor.execute
62
+ result = executor.result
36
63
 
37
64
  store_result(result, index, storage_options)
38
65
 
66
+ if result.failure? && storage_options[:fail_fast]
67
+ storage_options[:storage].store_map_failed_context_id(
68
+ storage_options[:map_id], child_context.context_id, storage_options[:parent_reactor_class_name]
69
+ )
70
+ end
71
+
39
72
  result
40
- end
73
+ end.compact
74
+ end
75
+
76
+ def self.link_contexts(child_context, parent_context)
77
+ child_context.parent_context = parent_context
78
+ child_context.root_context = parent_context.root_context || parent_context
79
+ child_context.test_mode = parent_context.test_mode
80
+ child_context.inline_async_execution = parent_context.inline_async_execution
41
81
  end
42
82
 
43
83
  def self.store_result(result, index, options)
44
84
  value = result.success? ? result.value : { _error: result.error }
45
85
  options[:storage].store_map_result(
46
- options[:map_id], index, value, options[:parent_reactor_class_name],
86
+ options[:map_id], index, ContextSerializer.serialize_value(value), options[:parent_reactor_class_name],
47
87
  strict_ordering: options[:strict_ordering]
48
88
  )
49
89
  end
@@ -7,7 +7,11 @@ module RubyReactor
7
7
  # Resolves the reactor class from reactor_class_info
8
8
  def resolve_reactor_class(info)
9
9
  if info["type"] == "class"
10
- Object.const_get(info["name"])
10
+ begin
11
+ Object.const_get(info["name"])
12
+ rescue NameError
13
+ RubyReactor::Registry.find(info["name"])
14
+ end
11
15
  elsif info["type"] == "inline"
12
16
  parent_class = Object.const_get(info["parent"])
13
17
  step_config = parent_class.steps[info["step"].to_sym]
@@ -49,12 +53,46 @@ module RubyReactor
49
53
 
50
54
  # Resumes parent reactor execution after map completion
51
55
  def resume_parent_execution(parent_context, step_name, final_result, storage)
52
- value = final_result.success? ? final_result.value : final_result
53
- parent_context.set_result(step_name.to_sym, value)
54
- parent_context.current_step = nil
55
-
56
56
  executor = RubyReactor::Executor.new(parent_context.reactor_class, {}, parent_context)
57
- executor.resume_execution
57
+ step_name_sym = step_name.to_sym
58
+
59
+ if final_result.failure?
60
+ parent_context.current_step = step_name_sym
61
+
62
+ error = RubyReactor::Error::StepFailureError.new(
63
+ final_result.error,
64
+ step: step_name_sym,
65
+ context: parent_context,
66
+ original_error: final_result.error.is_a?(Exception) ? final_result.error : nil
67
+ )
68
+
69
+ # Pass backtrace if available
70
+ if final_result.respond_to?(:backtrace) && final_result.backtrace
71
+ error.set_backtrace(final_result.backtrace)
72
+ elsif final_result.error.respond_to?(:backtrace)
73
+ error.set_backtrace(final_result.error.backtrace)
74
+ end
75
+
76
+ failure_response = executor.result_handler.handle_execution_error(error)
77
+ # Manually update context status since we're not running executor loop
78
+ executor.send(:update_context_status, failure_response)
79
+ else
80
+ parent_context.set_result(step_name_sym, final_result.value)
81
+
82
+ # Manually update execution trace to reflect completion
83
+ # This is necessary because resume_execution continues from the NEXT step
84
+ # and the async step (which returned AsyncResult) needs to be marked as done with the actual value
85
+ parent_context.execution_trace << {
86
+ type: :result,
87
+ step: step_name_sym,
88
+ timestamp: Time.now,
89
+ value: final_result.value,
90
+ status: :success
91
+ }
92
+
93
+ parent_context.current_step = nil
94
+ executor.resume_execution
95
+ end
58
96
 
59
97
  storage.store_context(
60
98
  parent_context.context_id,
@@ -0,0 +1,105 @@
1
+ # frozen_string_literal: true
2
+
3
+ module RubyReactor
4
+ module Map
5
+ class ResultEnumerator
6
+ include Enumerable
7
+
8
+ DEFAULT_BATCH_SIZE = 1000
9
+
10
+ attr_reader :map_id, :reactor_class_name, :strict_ordering, :batch_size
11
+
12
+ def initialize(map_id, reactor_class_name, strict_ordering: true, batch_size: DEFAULT_BATCH_SIZE)
13
+ @map_id = map_id
14
+ @reactor_class_name = reactor_class_name
15
+ @strict_ordering = strict_ordering
16
+ @batch_size = batch_size
17
+ @storage = RubyReactor.configuration.storage_adapter
18
+ end
19
+
20
+ def each
21
+ return enum_for(:each) unless block_given?
22
+
23
+ if @strict_ordering
24
+ count.times do |i|
25
+ yield self[i]
26
+ end
27
+ else
28
+ offset = 0
29
+ loop do
30
+ results = @storage.retrieve_map_results_batch(
31
+ @map_id,
32
+ @reactor_class_name,
33
+ offset: offset,
34
+ limit: @batch_size,
35
+ strict_ordering: @strict_ordering
36
+ )
37
+
38
+ break if results.empty?
39
+
40
+ results.each { |result| yield wrap_result(result) }
41
+
42
+ offset += results.size
43
+ break if results.size < @batch_size
44
+ end
45
+ end
46
+ end
47
+
48
+ def count
49
+ @count ||= @storage.count_map_results(@map_id, @reactor_class_name)
50
+ end
51
+ alias size count
52
+ alias length count
53
+
54
+ def empty?
55
+ count.zero?
56
+ end
57
+
58
+ def any?
59
+ !empty?
60
+ end
61
+
62
+ def [](index)
63
+ return nil if index < 0 || index >= count
64
+
65
+ results = @storage.retrieve_map_results_batch(
66
+ @map_id,
67
+ @reactor_class_name,
68
+ offset: index,
69
+ limit: 1,
70
+ strict_ordering: @strict_ordering
71
+ )
72
+
73
+ return nil if results.empty?
74
+
75
+ wrap_result(results.first)
76
+ end
77
+
78
+ def first
79
+ self[0]
80
+ end
81
+
82
+ def last
83
+ self[count - 1]
84
+ end
85
+
86
+ def successes
87
+ lazy.select { |result| result.is_a?(RubyReactor::Success) }.map(&:value)
88
+ end
89
+
90
+ def failures
91
+ lazy.select { |result| result.is_a?(RubyReactor::Failure) }.map(&:error)
92
+ end
93
+
94
+ private
95
+
96
+ def wrap_result(result)
97
+ if result.is_a?(Hash) && result.key?("_error")
98
+ RubyReactor::Failure.new(result["_error"])
99
+ else
100
+ RubyReactor::Success.new(ContextSerializer.deserialize_value(result))
101
+ end
102
+ end
103
+ end
104
+ end
105
+ end