ruby_reactor 0.3.0 → 0.3.1
- checksums.yaml +4 -4
- data/README.md +31 -4
- data/documentation/testing.md +812 -0
- data/lib/ruby_reactor/configuration.rb +1 -1
- data/lib/ruby_reactor/context.rb +13 -5
- data/lib/ruby_reactor/context_serializer.rb +55 -4
- data/lib/ruby_reactor/dsl/reactor.rb +3 -2
- data/lib/ruby_reactor/error/step_failure_error.rb +5 -2
- data/lib/ruby_reactor/executor/result_handler.rb +8 -2
- data/lib/ruby_reactor/executor/retry_manager.rb +15 -7
- data/lib/ruby_reactor/executor/step_executor.rb +24 -99
- data/lib/ruby_reactor/executor.rb +3 -13
- data/lib/ruby_reactor/map/collector.rb +16 -15
- data/lib/ruby_reactor/map/element_executor.rb +90 -104
- data/lib/ruby_reactor/map/execution.rb +2 -1
- data/lib/ruby_reactor/map/helpers.rb +2 -1
- data/lib/ruby_reactor/map/result_enumerator.rb +1 -1
- data/lib/ruby_reactor/reactor.rb +174 -16
- data/lib/ruby_reactor/rspec/helpers.rb +17 -0
- data/lib/ruby_reactor/rspec/matchers.rb +256 -0
- data/lib/ruby_reactor/rspec/step_executor_patch.rb +85 -0
- data/lib/ruby_reactor/rspec/test_subject.rb +625 -0
- data/lib/ruby_reactor/rspec.rb +18 -0
- data/lib/ruby_reactor/{async_router.rb → sidekiq_adapter.rb} +10 -5
- data/lib/ruby_reactor/sidekiq_workers/worker.rb +1 -3
- data/lib/ruby_reactor/step/compose_step.rb +0 -1
- data/lib/ruby_reactor/step/map_step.rb +11 -18
- data/lib/ruby_reactor/version.rb +1 -1
- data/lib/ruby_reactor/web/api.rb +32 -24
- data/lib/ruby_reactor.rb +70 -10
- metadata +9 -3

```diff
@@ -5,151 +5,137 @@ module RubyReactor
     class ElementExecutor
       extend Helpers
 
-      # rubocop:disable Metrics/MethodLength
       def self.perform(arguments)
         arguments = arguments.transform_keys(&:to_sym)
-        map_id = arguments[:map_id]
-        _element_id = arguments[:element_id]
-        index = arguments[:index]
-        serialized_inputs = arguments[:serialized_inputs]
-        reactor_class_info = arguments[:reactor_class_info]
-        strict_ordering = arguments[:strict_ordering]
-        parent_context_id = arguments[:parent_context_id]
-        parent_reactor_class_name = arguments[:parent_reactor_class_name]
-        step_name = arguments[:step_name]
-        batch_size = arguments[:batch_size]
-        # rubocop:enable Metrics/MethodLength
-        serialized_context = arguments[:serialized_context]
 
-
-
+        context = hydrate_or_create_context(arguments)
+        storage = RubyReactor.configuration.storage_adapter
+        storage.store_map_element_context_id(arguments[:map_id], context.context_id,
+                                             arguments[:parent_reactor_class_name])
+
+        return if check_fail_fast?(arguments, storage)
+
+        executor = Executor.new(context.reactor_class, {}, context)
+        arguments[:serialized_context] ? executor.resume_execution : executor.execute
+
+        handle_result(executor.result, arguments, context, storage, executor)
+        finalize_execution(arguments, storage)
+      end
+
+      def self.load_parent_context(arguments, reactor_class_name, storage)
+        parent_context_data = storage.retrieve_context(arguments[:parent_context_id], reactor_class_name)
+        parent_reactor_class = Object.const_get(reactor_class_name)
+        parent_context = Context.new(
+          ContextSerializer.deserialize_value(parent_context_data["inputs"]),
+          parent_reactor_class
+        )
+        parent_context.context_id = arguments[:parent_context_id]
+        parent_context
+      end
+
+      # Legacy helpers resolved_next_element, build_serialized_inputs, queue_element_job
+      # are REMOVED as they are no longer used for self-queuing.
+
+      # Basic helper to build inputs for the CURRENT element (still needed for perform)
+      # Wait, perform uses `serialized_inputs` passed to it.
+      # We don't need `build_element_inputs` here?
+      # `perform` uses `params[:serialized_inputs]`.
+      # So we can remove input building helpers too?
+      # Let's check if they are used elsewhere.
+      # `resolve_reactor_class` is used in `perform`.
+      # `build_element_inputs` is likely in Helpers or mixed in?
+
+      # rubocop:disable Style/IdenticalConditionalBranches
+      def self.hydrate_or_create_context(arguments)
+        if arguments[:serialized_context]
+          context = ContextSerializer.deserialize(arguments[:serialized_context])
           context.map_metadata = arguments
-        reactor_class = context.reactor_class
 
-
-
-        context.inputs = ContextSerializer.deserialize_value(serialized_inputs)
+          if context.inputs.empty? && arguments[:serialized_inputs]
+            context.inputs = ContextSerializer.deserialize_value(arguments[:serialized_inputs])
           end
+          context
         else
-
-
-
-          # Resolve reactor class
-          reactor_class = resolve_reactor_class(reactor_class_info)
+          inputs = ContextSerializer.deserialize_value(arguments[:serialized_inputs])
+          reactor_class = resolve_reactor_class(arguments[:reactor_class_info])
 
-          # Create context
           context = Context.new(inputs, reactor_class)
-          context.parent_context_id = parent_context_id
+          context.parent_context_id = arguments[:parent_context_id]
           context.map_metadata = arguments
+          context
         end
+      end
+      # rubocop:enable Style/IdenticalConditionalBranches
 
-
-
-
-        # Fail Fast Check
-        if arguments[:fail_fast]
-          failed_context_id = storage.retrieve_map_failed_context_id(map_id, parent_reactor_class_name)
-          if failed_context_id
-            # Decrement counter as we are skipping execution
-            new_count = storage.decrement_map_counter(map_id, parent_reactor_class_name)
-            return unless new_count.zero?
+      def self.check_fail_fast?(arguments, storage)
+        return false unless arguments[:fail_fast]
 
-
-
-              parent_context_id: parent_context_id,
-              map_id: map_id,
-              parent_reactor_class_name: parent_reactor_class_name,
-              step_name: step_name,
-              strict_ordering: strict_ordering,
-              timeout: 3600
-            )
-            return
-          end
-        end
-
-        # Execute
-        executor = Executor.new(reactor_class, {}, context)
+        map_id = arguments[:map_id]
+        parent_reactor_class_name = arguments[:parent_reactor_class_name]
 
-
-
-        else
-          executor.execute
-        end
+        failed_context_id = storage.retrieve_map_failed_context_id(map_id, parent_reactor_class_name)
+        return false unless failed_context_id
 
-
+        # Skip execution
+        finalize_execution(arguments, storage)
+        true
+      end
 
-
-
-          return
-        end
+      def self.handle_result(result, arguments, context, storage, executor)
+        return if result.is_a?(RetryQueuedResult)
 
-
+        map_id = arguments[:map_id]
+        index = arguments[:index]
+        parent_class = arguments[:parent_reactor_class_name] # Using short name for variable
 
         if result.success?
-          storage.store_map_result(map_id, index,
-
-                                   parent_reactor_class_name,
-                                   strict_ordering: strict_ordering)
+          storage.store_map_result(map_id, index, ContextSerializer.serialize_value(result.value),
+                                   parent_class, strict_ordering: arguments[:strict_ordering])
         else
-          # Trigger Compensation Logic
           executor.undo_all
-
-
-          storage.store_map_result(map_id, index, { _error: result.error }, parent_reactor_class_name,
-                                   strict_ordering: strict_ordering)
+          storage.store_map_result(map_id, index, { _error: result.error }, parent_class,
+                                   strict_ordering: arguments[:strict_ordering])
 
           if arguments[:fail_fast]
-            storage.store_map_failed_context_id(map_id, context.context_id,
+            storage.store_map_failed_context_id(map_id, context.context_id, parent_class)
+            # FAST FAIL: Trigger Collector immediately to cancel/fail the map execution
+            RubyReactor.configuration.async_router.perform_map_collection_async(
+              parent_context_id: arguments[:parent_context_id],
+              map_id: map_id,
+              parent_reactor_class_name: parent_class,
+              step_name: arguments[:step_name],
+              strict_ordering: arguments[:strict_ordering],
+              timeout: 3600
+            )
           end
         end
+      end
 
-
-
+      def self.finalize_execution(arguments, storage)
+        map_id = arguments[:map_id]
+        parent_class = arguments[:parent_reactor_class_name]
 
-
-        trigger_next_batch_if_needed(arguments, index, batch_size)
+        new_count = storage.decrement_map_counter(map_id, parent_class)
+        trigger_next_batch_if_needed(arguments, arguments[:index], arguments[:batch_size])
 
         return unless new_count.zero?
 
-        # Trigger collection
         RubyReactor.configuration.async_router.perform_map_collection_async(
-          parent_context_id: parent_context_id,
+          parent_context_id: arguments[:parent_context_id],
           map_id: map_id,
-          parent_reactor_class_name:
-          step_name: step_name,
-          strict_ordering: strict_ordering,
+          parent_reactor_class_name: parent_class,
+          step_name: arguments[:step_name],
+          strict_ordering: arguments[:strict_ordering],
           timeout: 3600
         )
       end
 
-      def self.load_parent_context(arguments, reactor_class_name, storage)
-        parent_context_data = storage.retrieve_context(arguments[:parent_context_id], reactor_class_name)
-        parent_reactor_class = Object.const_get(reactor_class_name)
-        parent_context = Context.new(
-          ContextSerializer.deserialize_value(parent_context_data["inputs"]),
-          parent_reactor_class
-        )
-        parent_context.context_id = arguments[:parent_context_id]
-        parent_context
-      end
-
-      # Legacy helpers resolved_next_element, build_serialized_inputs, queue_element_job
-      # are REMOVED as they are no longer used for self-queuing.
-
-      # Basic helper to build inputs for the CURRENT element (still needed for perform)
-      # Wait, perform uses `serialized_inputs` passed to it.
-      # We don't need `build_element_inputs` here?
-      # `perform` uses `params[:serialized_inputs]`.
-      # So we can remove input building helpers too?
-      # Let's check if they are used elsewhere.
-      # `resolve_reactor_class` is used in `perform`.
-      # `build_element_inputs` is likely in Helpers or mixed in?
-
       def self.trigger_next_batch_if_needed(arguments, index, batch_size)
         return unless batch_size && ((index + 1) % batch_size).zero?
 
         # Trigger Dispatcher for next batch
         next_batch_args = arguments.dup
+        # Ensure we don't carry over temporary execution flags if any
        next_batch_args[:continuation] = true
         RubyReactor::Map::Dispatcher.perform(next_batch_args)
       end
```

```diff
@@ -31,6 +31,7 @@ module RubyReactor
       end
 
       def self.execute_all_elements(source:, mappings:, reactor_class:, parent_context:, storage_options:)
+        # rubocop:disable Metrics/BlockLength
         source.map.with_index do |element, index|
           if storage_options[:fail_fast]
             failed_context_id = storage_options[:storage].retrieve_map_failed_context_id(
@@ -71,12 +72,12 @@ module RubyReactor
 
           result
         end.compact
+        # rubocop:enable Metrics/BlockLength
       end
 
       def self.link_contexts(child_context, parent_context)
         child_context.parent_context = parent_context
         child_context.root_context = parent_context.root_context || parent_context
-        child_context.test_mode = parent_context.test_mode
         child_context.inline_async_execution = parent_context.inline_async_execution
       end
 
```

```diff
@@ -63,7 +63,8 @@ module RubyReactor
           final_result.error,
           step: step_name_sym,
           context: parent_context,
-          original_error: final_result.error.is_a?(Exception) ? final_result.error : nil
+          original_error: final_result.error.is_a?(Exception) ? final_result.error : nil,
+          exception_class: final_result.respond_to?(:exception_class) ? final_result.exception_class : nil
         )
 
         # Pass backtrace if available
```

data/lib/ruby_reactor/reactor.rb CHANGED

```diff
@@ -1,6 +1,7 @@
 # frozen_string_literal: true
 
 module RubyReactor
+  # rubocop:disable Metrics/ClassLength
   class Reactor
     include RubyReactor::Dsl::Reactor
 
@@ -64,34 +65,71 @@ module RubyReactor
     def initialize(context = {})
       @context = context
       @result = :unexecuted
-
-      @
+
+      if @context.is_a?(Context)
+        @execution_trace = @context.execution_trace || []
+        @undo_trace = @execution_trace.select { |e| e[:type] == :undo }
+        @result = reconstruct_result
+      else
+        @undo_trace = []
+        @execution_trace = []
+      end
     end
 
+    # rubocop:disable Metrics/MethodLength
     def run(inputs = {})
-
-
-
-
-
+      # For all reactors, initialize context first to capture execution ID
+      @context = @context.is_a?(Context) ? @context : Context.new(inputs, self.class)
+
+      # Validate inputs
+      validation_result = self.class.validate_inputs(inputs)
+      if validation_result.failure?
+        @result = validation_result
+        @context.status = "failed"
+        @context.failure_reason = {
+          message: validation_result.error.message,
+          validation_errors: validation_result.error.field_errors
+        }
+        save_context
+        return validation_result
+      end
+
+      if self.class.async? && !@context.inline_async_execution
+        # For async reactors, queue a job for the whole reactor
+        @context.status = :running
+        save_context
+
+        serialized_context = ContextSerializer.serialize(@context)
+        @result = configuration.async_router.perform_async(serialized_context, self.class.name,
+                                                           intermediate_results: @context.intermediate_results)
+
+        # Even if it's an AsyncResult, it might have finished inline (e.g. Sidekiq::Testing.inline!)
+        # Check storage to see if it's already finished or paused (interrupted).
+        begin
+          reloaded = self.class.find(@context.context_id)
+          if reloaded.finished? || reloaded.context.status.to_s == "paused"
+            @context = reloaded.context
+            @result = reloaded.result
+            @execution_trace = reloaded.execution_trace
+            @undo_trace = reloaded.undo_trace
+            return @result
+          end
+        rescue StandardError
+          # Ignore if not found or other errors during reload check
+        end
+
       else
         # For sync reactors (potentially with async steps), execute normally
         context = @context.is_a?(Context) ? @context : nil
         executor = Executor.new(self.class, inputs, context)
         @result = executor.execute
-
         @context = executor.context
-
-        # Merge traces
-        @undo_trace = executor.undo_trace
         @execution_trace = executor.execution_trace
-
-        # If execution returned an AsyncResult (from step-level async), return it
-        return @result if @result.is_a?(RubyReactor::AsyncResult)
-
-        @result
+        @undo_trace = executor.undo_trace
       end
+      @result
     end
+    # rubocop:enable Metrics/MethodLength, Metrics/AbcSize
 
     def continue(payload:, step_name:, idempotency_key: nil)
       _ = idempotency_key
@@ -178,6 +216,125 @@ module RubyReactor
       raise Error::DependencyError, "Dependency graph contains cycles"
     end
 
+    def reconstruct_result
+      case @context.status.to_s
+      when "completed" then reconstruct_success_result
+      when "failed" then reconstruct_failure_result
+      when "paused" then reconstruct_paused_result
+      else :unexecuted
+      end
+    end
+
+    def reconstruct_success_result
+      rs = self.class.respond_to?(:returns) ? self.class.returns : nil
+      val = if rs
+              @context.intermediate_results[rs.to_sym] || @context.intermediate_results[rs.to_s]
+            else
+              find_last_step_result
+            end
+      Success.new(val)
+    end
+
+    def find_last_step_result
+      last_run = @execution_trace.reverse.find { |e| e[:type] == :run || e["type"] == "run" }
+      return unless last_run
+
+      step_name = last_run[:step] || last_run["step"]
+      @context.intermediate_results[step_name.to_sym] || @context.intermediate_results[step_name.to_s]
+    end
+
+    def reconstruct_failure_result
+      reason = @context.failure_reason || {}
+      return reason if reason.is_a?(RubyReactor::Failure)
+
+      # Use string keys preferred, fallback to symbol
+      r = ->(k) { reason[k.to_s] || reason[k.to_sym] }
+
+      Failure.new(
+        r[:message],
+        step_name: r[:step_name],
+        inputs: r[:inputs] || {},
+        backtrace: r[:backtrace],
+        reactor_name: r[:reactor_name],
+        step_arguments: r[:step_arguments] || {},
+        exception_class: r[:exception_class],
+        file_path: r[:file_path],
+        line_number: r[:line_number],
+        code_snippet: r[:code_snippet],
+        validation_errors: r[:validation_errors],
+        retryable: r[:retryable],
+        invalid_payload: r[:invalid_payload]
+      )
+    end
+
+    def reconstruct_paused_result
+      InterruptResult.new(
+        execution_id: @context.context_id,
+        intermediate_results: @context.intermediate_results
+      )
+    end
+
+    def initialize_and_validate_run?(inputs)
+      # For all reactors, initialize context first to capture execution ID
+      @context = @context.is_a?(Context) ? @context : Context.new(inputs, self.class)
+
+      validation_result = self.class.validate_inputs(inputs)
+      if validation_result.failure?
+        handle_validation_failure(validation_result)
+        return false
+      end
+      true
+    end
+
+    def handle_validation_failure(result)
+      @result = result
+      @context.status = "failed"
+      @context.failure_reason = {
+        message: result.error.message,
+        validation_errors: result.error.field_errors
+      }
+      save_context
+    end
+
+    def perform_async_run
+      @context.status = :running
+      save_context
+
+      serialized_context = ContextSerializer.serialize(@context)
+      @result = configuration.async_router.perform_async(serialized_context, self.class.name,
+                                                         intermediate_results: @context.intermediate_results)
+
+      check_for_inline_completion
+    end
+
+    def check_for_inline_completion
+      # Even if it's an AsyncResult, it might have finished inline (e.g. Sidekiq::Testing.inline!)
+      # Check storage to see if it's already finished or paused (interrupted).
+      reloaded = self.class.find(@context.context_id)
+      if reloaded.finished? || reloaded.context.status.to_s == "paused"
+        update_state_from_reloaded(reloaded)
+        @result
+      end
+    rescue StandardError
+      # Ignore if not found or other errors during reload check
+    end
+
+    def update_state_from_reloaded(reloaded)
+      @context = reloaded.context
+      @result = reloaded.result
+      @execution_trace = reloaded.execution_trace
+      @undo_trace = reloaded.undo_trace
+    end
+
+    def perform_sync_run(inputs)
+      context = @context.is_a?(Context) ? @context : nil
+      executor = Executor.new(self.class, inputs, context)
+      @result = executor.execute
+      @context = executor.context
+      @execution_trace = executor.execution_trace
+      @undo_trace = executor.undo_trace
+    end
+
     def validate_continue_step!(step_name)
       return if step_name.to_s == @context.current_step.to_s
 
@@ -258,4 +415,5 @@ module RubyReactor
       storage.store_context(@context.context_id, serialized_context, reactor_class_name)
     end
   end
+  # rubocop:enable Metrics/ClassLength
 end
```
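The reworked `Reactor#run` above validates inputs, queues async reactors through the configured `async_router`, and can reconstruct a `Success`, `Failure`, or `InterruptResult` from a persisted context. A minimal caller-side sketch follows, assuming a hypothetical `MyReactor` class and input hash; only methods visible in the diff (`run`, `find`, `finished?`, `context`, `result`) are used, and their exact return shapes are inferred rather than documented here.

```ruby
# Illustrative sketch only — MyReactor and user_id are hypothetical.
reactor = MyReactor.new
result  = reactor.run(user_id: 1)

if result.respond_to?(:success?) && result.success?
  # Sync (or inline-async) completion: the value is drawn from intermediate results
  puts result.value
else
  # Async reactors can be reloaded later by execution ID; the result is
  # reconstructed from the stored context status (completed/failed/paused).
  reloaded = MyReactor.find(reactor.context.context_id)
  puts reloaded.result.inspect if reloaded.finished?
end
```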
```diff
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module RubyReactor
+  module RSpec
+    module Helpers
+      def test_reactor(reactor_class, inputs, context: {}, async: nil, process_jobs: true)
+        TestSubject.new(
+          reactor_class: reactor_class,
+          inputs: inputs,
+          context: context,
+          async: async,
+          process_jobs: process_jobs
+        )
+      end
+    end
+  end
+end
```
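The new `RubyReactor::RSpec::Helpers#test_reactor` shown above wraps a reactor class and its inputs in a `TestSubject` (added in `data/lib/ruby_reactor/rspec/test_subject.rb`). A minimal usage sketch, assuming `ruby_reactor/rspec` is the require entry point (per the new `rspec.rb`) and using a hypothetical `MyReactor`:

```ruby
# spec/my_reactor_spec.rb — illustrative only; MyReactor and its inputs are hypothetical.
require "ruby_reactor/rspec" # assumed entry point, matching the new lib/ruby_reactor/rspec.rb

RSpec.describe "MyReactor via RubyReactor test helpers" do
  include RubyReactor::RSpec::Helpers

  it "builds a TestSubject around the reactor" do
    reactor_subject = test_reactor(MyReactor, { user_id: 1 }, process_jobs: true)
    expect(reactor_subject).to be_a(RubyReactor::RSpec::TestSubject)
  end
end
```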