ruby_reactor 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/.rubocop.yml +98 -0
- data/CODE_OF_CONDUCT.md +84 -0
- data/README.md +570 -0
- data/Rakefile +12 -0
- data/documentation/DAG.md +457 -0
- data/documentation/README.md +123 -0
- data/documentation/async_reactors.md +369 -0
- data/documentation/composition.md +199 -0
- data/documentation/core_concepts.md +662 -0
- data/documentation/data_pipelines.md +224 -0
- data/documentation/examples/inventory_management.md +749 -0
- data/documentation/examples/order_processing.md +365 -0
- data/documentation/examples/payment_processing.md +654 -0
- data/documentation/getting_started.md +224 -0
- data/documentation/retry_configuration.md +357 -0
- data/lib/ruby_reactor/async_router.rb +91 -0
- data/lib/ruby_reactor/configuration.rb +41 -0
- data/lib/ruby_reactor/context.rb +169 -0
- data/lib/ruby_reactor/context_serializer.rb +164 -0
- data/lib/ruby_reactor/dependency_graph.rb +126 -0
- data/lib/ruby_reactor/dsl/compose_builder.rb +86 -0
- data/lib/ruby_reactor/dsl/map_builder.rb +112 -0
- data/lib/ruby_reactor/dsl/reactor.rb +151 -0
- data/lib/ruby_reactor/dsl/step_builder.rb +177 -0
- data/lib/ruby_reactor/dsl/template_helpers.rb +36 -0
- data/lib/ruby_reactor/dsl/validation_helpers.rb +35 -0
- data/lib/ruby_reactor/error/base.rb +16 -0
- data/lib/ruby_reactor/error/compensation_error.rb +8 -0
- data/lib/ruby_reactor/error/context_too_large_error.rb +11 -0
- data/lib/ruby_reactor/error/dependency_error.rb +8 -0
- data/lib/ruby_reactor/error/deserialization_error.rb +11 -0
- data/lib/ruby_reactor/error/input_validation_error.rb +29 -0
- data/lib/ruby_reactor/error/schema_version_error.rb +11 -0
- data/lib/ruby_reactor/error/step_failure_error.rb +18 -0
- data/lib/ruby_reactor/error/undo_error.rb +8 -0
- data/lib/ruby_reactor/error/validation_error.rb +8 -0
- data/lib/ruby_reactor/executor/compensation_manager.rb +79 -0
- data/lib/ruby_reactor/executor/graph_manager.rb +41 -0
- data/lib/ruby_reactor/executor/input_validator.rb +39 -0
- data/lib/ruby_reactor/executor/result_handler.rb +103 -0
- data/lib/ruby_reactor/executor/retry_manager.rb +156 -0
- data/lib/ruby_reactor/executor/step_executor.rb +319 -0
- data/lib/ruby_reactor/executor.rb +123 -0
- data/lib/ruby_reactor/map/collector.rb +65 -0
- data/lib/ruby_reactor/map/element_executor.rb +154 -0
- data/lib/ruby_reactor/map/execution.rb +60 -0
- data/lib/ruby_reactor/map/helpers.rb +67 -0
- data/lib/ruby_reactor/max_retries_exhausted_failure.rb +19 -0
- data/lib/ruby_reactor/reactor.rb +75 -0
- data/lib/ruby_reactor/retry_context.rb +92 -0
- data/lib/ruby_reactor/retry_queued_result.rb +26 -0
- data/lib/ruby_reactor/sidekiq_workers/map_collector_worker.rb +13 -0
- data/lib/ruby_reactor/sidekiq_workers/map_element_worker.rb +13 -0
- data/lib/ruby_reactor/sidekiq_workers/map_execution_worker.rb +15 -0
- data/lib/ruby_reactor/sidekiq_workers/worker.rb +55 -0
- data/lib/ruby_reactor/step/compose_step.rb +107 -0
- data/lib/ruby_reactor/step/map_step.rb +234 -0
- data/lib/ruby_reactor/step.rb +33 -0
- data/lib/ruby_reactor/storage/adapter.rb +51 -0
- data/lib/ruby_reactor/storage/configuration.rb +15 -0
- data/lib/ruby_reactor/storage/redis_adapter.rb +140 -0
- data/lib/ruby_reactor/template/base.rb +15 -0
- data/lib/ruby_reactor/template/element.rb +25 -0
- data/lib/ruby_reactor/template/input.rb +48 -0
- data/lib/ruby_reactor/template/result.rb +48 -0
- data/lib/ruby_reactor/template/value.rb +22 -0
- data/lib/ruby_reactor/validation/base.rb +26 -0
- data/lib/ruby_reactor/validation/input_validator.rb +62 -0
- data/lib/ruby_reactor/validation/schema_builder.rb +17 -0
- data/lib/ruby_reactor/version.rb +5 -0
- data/lib/ruby_reactor.rb +159 -0
- data/sig/ruby_reactor.rbs +4 -0
- metadata +178 -0
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "sidekiq"

module RubyReactor
  module SidekiqWorkers
    # Thin Sidekiq adapter: dequeues a serialized map-execution payload and
    # hands it straight to RubyReactor::Map::Execution, which contains the
    # actual single-worker map logic.
    class MapExecutionWorker
      include ::Sidekiq::Worker

      # @param arguments [Hash] serialized payload built by the async router
      def perform(arguments)
        ::RubyReactor::Map::Execution.perform(arguments)
      end
    end
  end
end
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "sidekiq"

module RubyReactor
  module SidekiqWorkers
    # Sidekiq worker for executing RubyReactor reactors asynchronously
    # with non-blocking retry capabilities
    class Worker
      include ::Sidekiq::Worker

      # Enable Sidekiq retries for infrastructure failures only
      sidekiq_options retry: RubyReactor.configuration.sidekiq_retry_count, dead: false,
                      queue: RubyReactor.configuration.sidekiq_queue

      sidekiq_retries_exhausted do |msg, exception|
        # Handle infrastructure failures (network, Redis, etc.).
        # Fix: this hook previously discarded the failure silently while the
        # log_infrastructure_failure helper was dead code. Sidekiq calls this
        # block with `self` bound to the worker class, so the helper is now a
        # class method and can be invoked here.
        log_infrastructure_failure(msg, exception)
      end

      # Resumes a previously persisted reactor run.
      #
      # @param serialized_context [Object] payload accepted by ContextSerializer.deserialize
      # @param reactor_class_name [String, nil] fallback class name used when the
      #   deserialized context could not resolve its reactor class
      # @return [Executor] the executor, which holds the run's result
      def perform(serialized_context, reactor_class_name = nil)
        context = ContextSerializer.deserialize(serialized_context)

        # If reactor_class_name is provided, use it to get the reactor class
        # This handles cases where the class can't be found via const_get
        if reactor_class_name && context.reactor_class.nil?
          begin
            context.reactor_class = Object.const_get(reactor_class_name)
          rescue NameError
            # If not found, try to find it in the current namespace
            # This is a fallback for test environments
            context.reactor_class = reactor_class_name.constantize if reactor_class_name.respond_to?(:constantize)
          end
        end

        # Mark that we're executing inline to prevent nested async calls
        context.inline_async_execution = true

        # Resume execution from the failed step; restore the undo stack so
        # compensation still covers steps completed before the suspension.
        executor = Executor.new(context.reactor_class, {}, context)
        executor.compensation_manager.undo_stack.concat(context.undo_stack)
        executor.resume_execution

        # Return the executor (which now has the result stored in it)
        executor
      end

      # Logs an infrastructure-level failure. Defined as a (private) class
      # method because it is invoked from the sidekiq_retries_exhausted hook.
      def self.log_infrastructure_failure(msg, exception)
        ::Sidekiq.logger.error("RubyReactor infrastructure failure: #{exception.message}")
        ::Sidekiq.logger.error("Job details: #{msg.inspect}")
      end
      private_class_method :log_infrastructure_failure
    end
  end
end
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
# frozen_string_literal: true

module RubyReactor
  module Step
    # Runs another reactor (a "composed" reactor) as a single step of the
    # parent reactor. The child's Context is persisted on the parent's
    # +composed_contexts+ hash keyed by step name, so the child run can be
    # resumed across an async suspension instead of restarting from scratch.
    class ComposeStep
      include RubyReactor::Step

      attr_reader :composed_reactor_class, :argument_mappings

      # @param composed_reactor_class [Class] reactor to run inside this step
      # @param argument_mappings [Hash] child input name => template source
      def initialize(composed_reactor_class, argument_mappings = {})
        @composed_reactor_class = composed_reactor_class
        @argument_mappings = argument_mappings
      end

      # Executes (or resumes) the composed reactor for the current step.
      #
      # @param arguments [Hash] expects :composed_reactor_class and :argument_mappings
      # @param context [RubyReactor::Context] parent context
      # @return [Object] Success/Failure, or an AsyncResult/RetryQueuedResult
      #   passed through unchanged
      def self.run(arguments, context)
        step_name = context.current_step
        composed_data = context.composed_contexts[step_name]
        child_context = prepare_child_context(arguments, context, composed_data)

        # Store the child context in composed_contexts BEFORE execution
        # so a suspension mid-run still leaves the child resumable.
        store_child_context(context, step_name, child_context)

        # Execute the composed reactor
        result = execute_child_reactor(arguments[:composed_reactor_class], child_context, composed_data)

        # Update the stored context (the child context object was mutated in
        # place during execution)
        store_child_context(context, step_name, child_context)

        handle_execution_result(result)
      end

      def self.compensate(_reason, _arguments, _context)
        # TODO: Implement proper compensation for composed reactors
        # This requires tracking the execution state of the composed reactor
        # and being able to trigger compensation on its completed steps.
        # For now, we assume the composed reactor handles its own compensation
        # or that compensation is not needed for composed steps.

        RubyReactor.Success()
      end

      class << self
        private

        # Resolves each argument mapping against the parent context and
        # returns the inputs hash for the child reactor.
        def build_composed_inputs(mappings, context)
          inputs = {}

          mappings.each do |composed_input_name, source|
            value = source.resolve(context)
            inputs[composed_input_name] = value
          end

          inputs
        end

        # Reuses a previously stored child context (resume case) or builds a
        # fresh one from the argument mappings, then links it to the parent.
        def prepare_child_context(arguments, context, composed_data)
          child_context = composed_data ? composed_data[:context] : nil

          unless child_context
            composed_inputs = build_composed_inputs(arguments[:argument_mappings] || {}, context)
            child_context = RubyReactor::Context.new(composed_inputs, arguments[:composed_reactor_class])
          end

          link_contexts(child_context, context)
          child_context
        end

        # Propagates parent linkage and execution-mode flags onto the child.
        def link_contexts(child_context, parent_context)
          child_context.parent_context = parent_context
          child_context.root_context = parent_context.root_context || parent_context
          child_context.test_mode = parent_context.test_mode
          child_context.inline_async_execution = parent_context.inline_async_execution
        end

        # Records the child context on the parent, keyed by step name.
        def store_child_context(context, step_name, child_context)
          context.composed_contexts[step_name] = {
            name: step_name,
            type: :composed,
            context: child_context
          }
        end

        # Runs the child reactor. Resumes when a stored context with a
        # current step exists; otherwise starts from the beginning.
        def execute_child_reactor(composed_reactor, child_context, composed_data)
          executor = RubyReactor::Executor.new(composed_reactor, {}, child_context)

          if composed_data && child_context.current_step
            executor.resume_execution
          else
            executor.execute
          end

          executor.result
        end

        # Async/retry sentinels pass through untouched; otherwise the child's
        # terminal result is re-wrapped as this step's Success/Failure.
        def handle_execution_result(result)
          return result if result.is_a?(RubyReactor::AsyncResult) || result.is_a?(RubyReactor::RetryQueuedResult)

          if result.success?
            RubyReactor.Success(result.value)
          else
            RubyReactor.Failure(result.error)
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
# frozen_string_literal: true

module RubyReactor
  module Step
    # Step that maps a child reactor over every element of an enumerable
    # source. Runs inline (synchronously, one child executor per element) or
    # asynchronously via the configured async router, either fanned out one
    # job per element or as a single background worker.
    class MapStep
      include RubyReactor::Step

      # Entry point invoked by the executor for a map step.
      #
      # @param arguments [Hash] expects :source (enumerable), :mapped_reactor_class,
      #   and optionally :argument_mappings, :async, :fail_fast, :collect_block,
      #   :batch_size, :strict_ordering
      # @param context [RubyReactor::Context]
      # @return [Object] Success/Failure inline, or AsyncResult when queued
      def self.run(arguments, context)
        return RubyReactor::Failure("Map source cannot be nil") if arguments[:source].nil?

        # Initialize map state in context if not present
        context.map_operations ||= {}

        if should_run_async?(arguments, context)
          run_async(arguments, context, context.current_step)
        else
          run_inline(arguments, context)
        end
      end

      def self.compensate(_reason, _arguments, _context)
        # TODO: Implement compensation for map steps
        RubyReactor.Success()
      end

      class << self
        # Resolves argument mappings for a single element: Template::Element
        # sources read from the element itself, everything else resolves
        # against the parent context.
        def build_mapped_inputs(mappings, context, element)
          inputs = {}

          mappings.each do |mapped_input_name, source|
            value = if source.is_a?(RubyReactor::Template::Element)
                      # Handle element reference
                      # For now assuming element() refers to the current map's element
                      # In nested maps, we might need to check the name, but for now simple case
                      resolve_element(source, element)
                    else
                      source.resolve(context)
                    end
            inputs[mapped_input_name] = value
          end

          inputs
        end

        # Returns either the whole element or the sub-value addressed by the
        # template's path.
        def resolve_element(template_element, current_element)
          # If path is provided, extract it
          if template_element.path
            extract_path(current_element, template_element.path)
          else
            current_element
          end
        end

        private

        # Async only when requested AND we are not already inside an
        # inline-async (worker) execution, which would otherwise nest jobs.
        def should_run_async?(arguments, context)
          arguments[:async] && !context.inline_async_execution
        end

        # Synchronous path: execute every element, then post-process.
        def run_inline(arguments, context)
          results = execute_inline_map(arguments, context)
          return results if results.is_a?(RubyReactor::Failure)

          process_results(results, arguments[:collect_block], arguments[:fail_fast])
        end

        # Iterates the source sequentially. fail_fast defaults to true when
        # not explicitly set to false.
        def execute_inline_map(arguments, context)
          results = []
          fail_fast = arguments[:fail_fast].nil? || arguments[:fail_fast]

          arguments[:source].each do |element|
            result = execute_single_element(element, arguments, context)

            if fail_fast && result.failure?
              return result # Stop immediately on first failure
            end

            # When fail_fast is false, store Result objects; when true, store values
            results << (fail_fast ? result.value : result)
          end

          results
        end

        # Builds a linked child context and runs one child executor for the
        # given element; returns the executor's result object.
        def execute_single_element(element, arguments, context)
          mapped_inputs = build_mapped_inputs(arguments[:argument_mappings] || {}, context, element)
          child_context = RubyReactor::Context.new(mapped_inputs, arguments[:mapped_reactor_class])

          link_contexts(child_context, context)

          executor = RubyReactor::Executor.new(arguments[:mapped_reactor_class], {}, child_context)
          executor.execute
          executor.result
        end

        # Propagates parent linkage and execution-mode flags onto the child.
        def link_contexts(child_context, parent_context)
          child_context.parent_context = parent_context
          child_context.root_context = parent_context.root_context || parent_context
          child_context.test_mode = parent_context.test_mode
          child_context.inline_async_execution = parent_context.inline_async_execution
        end

        # Shapes the final step result from the per-element results.
        def process_results(results, collect_block, fail_fast = true)
          if collect_block
            begin
              # Collect block receives Result objects when fail_fast is false, values when true
              RubyReactor::Success(collect_block.call(results))
            rescue StandardError => e
              RubyReactor::Failure(e)
            end
          elsif fail_fast
            # Default behavior when no collect block
            # Current behavior: results are already values
            RubyReactor::Success(results)
          else
            # New behavior: extract successful values only
            successes = results.select(&:success?).map(&:value)
            RubyReactor::Success(successes)
          end
        end

        # Digs into a value by symbol key, dotted string path, key array, or
        # method name. Returns nil when no branch matches.
        def extract_path(value, path)
          if path.is_a?(Symbol) && value.respond_to?(:[])
            value[path]
          elsif path.is_a?(String)
            path.split(".").reduce(value) { |v, key| v&.send(:[], key) }
          elsif path.is_a?(Array)
            path.reduce(value) { |v, key| v&.send(:[], key) }
          elsif value.respond_to?(path)
            value.send(path)
          end
        end

        # Async path: persists context + counters, then either fans out one
        # job per element (when :batch_size is given, only the first batch is
        # queued up-front) or queues a single execution worker.
        def run_async(arguments, context, step_name)
          map_id = "#{context.context_id}:#{step_name}"
          context.map_operations[step_name.to_s] = map_id
          prepare_async_execution(context, map_id, arguments[:source].count)

          reactor_class_info = build_reactor_class_info(arguments[:mapped_reactor_class], context, step_name)

          job_id = if arguments[:batch_size]
                     storage = RubyReactor.configuration.storage_adapter
                     storage.set_last_queued_index(map_id, arguments[:batch_size] - 1, context.reactor_class.name)
                     queue_fan_out(
                       map_id: map_id, arguments: arguments, context: context,
                       reactor_class_info: reactor_class_info, step_name: step_name,
                       limit: arguments[:batch_size]
                     )
                   else
                     queue_single_worker(map_id: map_id, arguments: arguments, context: context,
                                         reactor_class_info: reactor_class_info, step_name: step_name)
                   end

          RubyReactor::AsyncResult.new(job_id: job_id, intermediate_results: context.intermediate_results)
        end

        # Persists the parent context and seeds the remaining-elements counter.
        def prepare_async_execution(context, map_id, count)
          storage = RubyReactor.configuration.storage_adapter
          serialized_context = ContextSerializer.serialize(context)
          storage.store_context(context.context_id, serialized_context, context.reactor_class.name)
          storage.set_map_counter(map_id, count, context.reactor_class.name)
        end

        # Describes how workers should re-resolve the mapped reactor: by
        # class name, or (for inline/anonymous reactors) via parent + step.
        def build_reactor_class_info(mapped_reactor_class, context, step_name)
          if mapped_reactor_class.respond_to?(:name)
            { "type" => "class", "name" => mapped_reactor_class.name }
          else
            { "type" => "inline", "parent" => context.reactor_class.name, "step" => step_name.to_s }
          end
        end

        # Queues up to +limit+ element jobs plus the collector job; returns
        # the first element job's id.
        # rubocop:disable Metrics/ParameterLists
        def queue_fan_out(map_id:, arguments:, context:, reactor_class_info:, step_name:, limit: nil)
          # rubocop:enable Metrics/ParameterLists
          storage = RubyReactor.configuration.storage_adapter
          storage.initialize_map_operation(
            map_id, arguments[:source].count, context.reactor_class.name,
            strict_ordering: arguments[:strict_ordering], reactor_class_info: reactor_class_info
          )

          limit ||= arguments[:source].count
          first_job_id = nil
          arguments[:source].each_with_index do |element, index|
            break if index >= limit

            job_id = queue_map_element(
              map_id: map_id, element: element, index: index, arguments: arguments,
              context: context, reactor_class_info: reactor_class_info, step_name: step_name
            )
            first_job_id ||= job_id
          end

          queue_collector(map_id, context, step_name, arguments[:strict_ordering])
          first_job_id
        end

        # Serializes one element's inputs and enqueues its worker job.
        # rubocop:disable Metrics/ParameterLists
        def queue_map_element(map_id:, element:, index:, arguments:, context:, reactor_class_info:, step_name:)
          mapped_inputs = build_mapped_inputs(arguments[:argument_mappings] || {}, context, element)
          serialized_inputs = ContextSerializer.serialize_value(mapped_inputs)

          RubyReactor.configuration.async_router.perform_map_element_async(
            map_id: map_id, element_id: "#{map_id}:#{index}", index: index,
            serialized_inputs: serialized_inputs, reactor_class_info: reactor_class_info,
            strict_ordering: arguments[:strict_ordering], parent_context_id: context.context_id,
            parent_reactor_class_name: context.reactor_class.name, step_name: step_name.to_s,
            batch_size: arguments[:batch_size]
          )
        end
        # rubocop:enable Metrics/ParameterLists

        # Enqueues the collector job that aggregates element results once all
        # of them have completed. Timeout is a fixed 1 hour.
        def queue_collector(map_id, context, step_name, strict_ordering)
          RubyReactor.configuration.async_router.perform_map_collection_async(
            parent_context_id: context.context_id, map_id: map_id,
            parent_reactor_class_name: context.reactor_class.name, step_name: step_name.to_s,
            strict_ordering: strict_ordering, timeout: 3600
          )
        end

        # Non-fan-out async mode: the whole source + mappings are serialized
        # and a single execution worker processes the map.
        def queue_single_worker(map_id:, arguments:, context:, reactor_class_info:, step_name:)
          inputs = { source: arguments[:source], mappings: arguments[:argument_mappings] || {} }
          serialized_inputs = ContextSerializer.serialize_value(inputs)

          RubyReactor.configuration.async_router.perform_map_execution_async(
            map_id: map_id, serialized_inputs: serialized_inputs,
            reactor_class_info: reactor_class_info, strict_ordering: arguments[:strict_ordering],
            parent_context_id: context.context_id, parent_reactor_class_name: context.reactor_class.name,
            step_name: step_name.to_s
          )
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
# frozen_string_literal: true

module RubyReactor
  # Mixin that turns a class into a reactor step. Including it installs the
  # class-level step API: the Success/Failure result helpers plus the three
  # overridable lifecycle hooks (+run+, +compensate+, +undo+).
  module Step
    # Ruby hook — lift ClassMethods onto the including class.
    def self.included(base)
      base.extend(ClassMethods)
    end

    # The class-level step interface.
    module ClassMethods
      # rubocop:disable Naming/MethodName
      # Sugar so a step body can write +Success(value)+.
      def Success(value = nil)
        RubyReactor::Success(value)
      end

      # Sugar so a step body can write +Failure(error)+.
      def Failure(error = nil)
        RubyReactor::Failure(error)
      end
      # rubocop:enable Naming/MethodName

      # Abstract: every concrete step must supply its own body.
      def run(arguments, context)
        raise NotImplementedError, "#{self} must implement .run method"
      end

      def compensate(_reason, _arguments, _context)
        RubyReactor::Success() # Default: accept failure and continue rollback
      end

      def undo(_result, _arguments, _context)
        RubyReactor::Success() # Default: no-op undo
      end
    end
  end
end
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# frozen_string_literal: true

module RubyReactor
  module Storage
    # Abstract storage interface used by async reactor/map execution.
    # Concrete adapters (e.g. RedisAdapter) must implement every method;
    # each method here raises NotImplementedError.
    #
    # Fixes over the previous version:
    # - initialize_map_operation now takes the positional
    #   +reactor_class_name+ that both RedisAdapter and the MapStep call
    #   site use (previously the base signature would raise ArgumentError
    #   before reaching NotImplementedError). It defaults to nil to stay
    #   backward-compatible with two-positional callers.
    # - Added the abstract stubs RedisAdapter implements and MapStep calls:
    #   retrieve_map_metadata, set_last_queued_index,
    #   increment_last_queued_index.
    class Adapter
      # Persist a serialized context under its id.
      def store_context(context_id, serialized_context, reactor_class_name)
        raise NotImplementedError
      end

      # Fetch a previously stored context, or nil when absent.
      def retrieve_context(context_id, reactor_class_name)
        raise NotImplementedError
      end

      # Record one element's result for a map operation.
      def store_map_result(map_id, index, serialized_result, reactor_class_name, strict_ordering: true)
        raise NotImplementedError
      end

      # Return all stored element results for a map operation.
      def retrieve_map_results(map_id, reactor_class_name, strict_ordering: true)
        raise NotImplementedError
      end

      # Set the remaining-elements counter for a map operation.
      def set_map_counter(map_id, count, reactor_class_name)
        raise NotImplementedError
      end

      # Seed counter and metadata for a new map operation.
      def initialize_map_operation(map_id, count, reactor_class_name = nil, reactor_class_info:, strict_ordering: true)
        raise NotImplementedError
      end

      # Fetch a map operation's metadata, or nil when absent.
      def retrieve_map_metadata(map_id, reactor_class_name)
        raise NotImplementedError
      end

      def increment_map_counter(map_id, reactor_class_name)
        raise NotImplementedError
      end

      def decrement_map_counter(map_id, reactor_class_name)
        raise NotImplementedError
      end

      # Track the highest element index queued so far (batched fan-out).
      def set_last_queued_index(map_id, index, reactor_class_name)
        raise NotImplementedError
      end

      def increment_last_queued_index(map_id, reactor_class_name)
        raise NotImplementedError
      end

      # Pub/sub primitives used for completion signalling.
      def subscribe(channel, &block)
        raise NotImplementedError
      end

      def publish(channel, message)
        raise NotImplementedError
      end

      # Apply a TTL (in seconds) to a stored key.
      def expire(key, seconds)
        raise NotImplementedError
      end
    end
  end
end
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# frozen_string_literal: true

module RubyReactor
  module Storage
    # Storage-layer settings: which adapter to use and how to reach Redis.
    # Every attribute is writable so applications can override the defaults
    # during configuration.
    class Configuration
      attr_accessor :adapter, :redis_url, :redis_options

      # Defaults: the Redis adapter pointed at a local Redis (db 0) with no
      # extra client options.
      def initialize
        self.adapter = :redis
        self.redis_url = "redis://localhost:6379/0"
        self.redis_options = {}
      end
    end
  end
end
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "redis"
require "json"

module RubyReactor
  module Storage
    # Redis-backed implementation of Storage::Adapter. Keys are namespaced
    # per reactor class; most keys get a 24h TTL.
    #
    # NOTE(review): store_context / retrieve_context / metadata use the
    # JSON.SET / JSON.GET commands, which require the RedisJSON module on the
    # server — confirm deployment targets provide it.
    class RedisAdapter < Adapter
      # @param redis_config [Hash] options forwarded to Redis.new
      def initialize(redis_config)
        super()
        @redis = Redis.new(redis_config)
      end

      def store_context(context_id, serialized_context, reactor_class_name)
        key = context_key(context_id, reactor_class_name)
        # Use JSON.SET for efficient storage and retrieval
        @redis.call("JSON.SET", key, ".", serialized_context)
        @redis.expire(key, 86_400) # 24h TTL
      end

      # @return [Object, nil] parsed context, or nil when the key is absent
      def retrieve_context(context_id, reactor_class_name)
        key = context_key(context_id, reactor_class_name)
        json = @redis.call("JSON.GET", key)
        return nil unless json

        JSON.parse(json)
      end

      # Stores one element result. Strict ordering uses a hash keyed by
      # element index; loose ordering appends to a list.
      def store_map_result(map_id, index, serialized_result, reactor_class_name, strict_ordering: true)
        key = map_results_key(map_id, reactor_class_name)

        if strict_ordering
          # Use Hash for strict ordering by index
          # HSET key index serialized_result
          @redis.hset(key, index.to_s, serialized_result.to_json)
        else
          # Loose ordering: just push to list
          @redis.rpush(key, serialized_result.to_json)
        end

        @redis.expire(key, 86_400)
      end

      # Returns all element results; strict ordering sorts by numeric index,
      # loose ordering returns list insertion order.
      def retrieve_map_results(map_id, reactor_class_name, strict_ordering: true)
        key = map_results_key(map_id, reactor_class_name)

        if strict_ordering
          results = @redis.hgetall(key)
          # Sort by index (key)
          results.keys.sort_by(&:to_i).map { |k| JSON.parse(results[k]) }
        else
          results = @redis.lrange(key, 0, -1)
          results.map { |r| JSON.parse(r) }
        end
      end

      # Sets the remaining-elements counter for a map operation.
      def set_map_counter(map_id, count, reactor_class_name)
        key = map_counter_key(map_id, reactor_class_name)
        @redis.set(key, count)
        @redis.expire(key, 86_400)
      end

      # Seeds counter and metadata (count, ordering mode, reactor class info,
      # creation timestamp) for a new map operation.
      def initialize_map_operation(map_id, count, parent_reactor_class_name, reactor_class_info:, strict_ordering: true)
        # Ensure counter is set
        set_map_counter(map_id, count, parent_reactor_class_name)

        # Store metadata
        key = "reactor:#{parent_reactor_class_name}:map:#{map_id}:metadata"
        metadata = {
          count: count,
          strict_ordering: strict_ordering,
          reactor_class_info: reactor_class_info,
          created_at: Time.now.to_i
        }
        @redis.call("JSON.SET", key, ".", metadata.to_json)
        @redis.expire(key, 86_400)
      end

      # @return [Hash, nil] parsed metadata, or nil when absent
      def retrieve_map_metadata(map_id, reactor_class_name)
        key = "reactor:#{reactor_class_name}:map:#{map_id}:metadata"
        json = @redis.call("JSON.GET", key)
        return nil unless json

        JSON.parse(json)
      end

      def increment_map_counter(map_id, reactor_class_name)
        key = map_counter_key(map_id, reactor_class_name)
        @redis.incr(key)
        @redis.expire(key, 86_400)
      end

      # NOTE(review): unlike increment, decrement does not refresh the TTL —
      # confirm that is intentional (the key inherits the TTL set earlier).
      def decrement_map_counter(map_id, reactor_class_name)
        key = map_counter_key(map_id, reactor_class_name)
        @redis.decr(key)
      end

      # Tracks the highest element index queued so far (batched fan-out).
      def set_last_queued_index(map_id, index, reactor_class_name)
        key = map_last_queued_index_key(map_id, reactor_class_name)
        @redis.set(key, index)
        @redis.expire(key, 86_400)
      end

      def increment_last_queued_index(map_id, reactor_class_name)
        key = map_last_queued_index_key(map_id, reactor_class_name)
        @redis.incr(key)
      end

      # Blocking pub/sub subscription; yields messages to the block.
      def subscribe(channel, &block)
        @redis.subscribe(channel, &block)
      end

      def publish(channel, message)
        @redis.publish(channel, message)
      end

      # Applies a TTL (seconds) to an arbitrary key.
      def expire(key, seconds)
        @redis.expire(key, seconds)
      end

      private

      # Key-builder helpers: one namespace per reactor class.
      def context_key(context_id, reactor_class_name)
        "reactor:#{reactor_class_name}:context:#{context_id}"
      end

      def map_results_key(map_id, reactor_class_name)
        "reactor:#{reactor_class_name}:map:#{map_id}:results"
      end

      def map_counter_key(map_id, reactor_class_name)
        "reactor:#{reactor_class_name}:map:#{map_id}:counter"
      end

      def map_last_queued_index_key(map_id, reactor_class_name)
        "reactor:#{reactor_class_name}:map:#{map_id}:last_queued_index"
      end
    end
  end
end
|