shikibu 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +487 -0
- data/lib/shikibu/activity.rb +135 -0
- data/lib/shikibu/app.rb +299 -0
- data/lib/shikibu/channels.rb +360 -0
- data/lib/shikibu/constants.rb +70 -0
- data/lib/shikibu/context.rb +208 -0
- data/lib/shikibu/errors.rb +137 -0
- data/lib/shikibu/integrations/active_job.rb +95 -0
- data/lib/shikibu/integrations/sidekiq.rb +104 -0
- data/lib/shikibu/locking.rb +110 -0
- data/lib/shikibu/middleware/rack_app.rb +197 -0
- data/lib/shikibu/notify/notify_base.rb +67 -0
- data/lib/shikibu/notify/pg_notify.rb +217 -0
- data/lib/shikibu/notify/wake_event.rb +56 -0
- data/lib/shikibu/outbox/relayer.rb +227 -0
- data/lib/shikibu/replay.rb +361 -0
- data/lib/shikibu/retry_policy.rb +81 -0
- data/lib/shikibu/storage/migrations.rb +179 -0
- data/lib/shikibu/storage/sequel_storage.rb +883 -0
- data/lib/shikibu/version.rb +5 -0
- data/lib/shikibu/worker.rb +389 -0
- data/lib/shikibu/workflow.rb +398 -0
- data/lib/shikibu.rb +152 -0
- data/schema/LICENSE +21 -0
- data/schema/README.md +57 -0
- data/schema/db/migrations/mysql/20251217000000_initial_schema.sql +284 -0
- data/schema/db/migrations/postgresql/20251217000000_initial_schema.sql +284 -0
- data/schema/db/migrations/sqlite/20251217000000_initial_schema.sql +284 -0
- data/schema/docs/column-values.md +91 -0
- metadata +231 -0
data/lib/shikibu/constants.rb
@@ -0,0 +1,70 @@
# frozen_string_literal: true

module Shikibu
  # Workflow instance status values (shared with Python/Go)
  module Status
    RUNNING = 'running'
    COMPLETED = 'completed'
    FAILED = 'failed'
    WAITING_FOR_EVENT = 'waiting_for_event'
    WAITING_FOR_TIMER = 'waiting_for_timer'
    WAITING_FOR_MESSAGE = 'waiting_for_message'
    COMPENSATING = 'compensating'
    CANCELLED = 'cancelled'
    RECURRED = 'recurred'

    ALL = [
      RUNNING,
      COMPLETED,
      FAILED,
      WAITING_FOR_EVENT,
      WAITING_FOR_TIMER,
      WAITING_FOR_MESSAGE,
      COMPENSATING,
      CANCELLED,
      RECURRED
    ].freeze
  end

  # History event types (shared with Python/Go)
  module EventType
    ACTIVITY_COMPLETED = 'ActivityCompleted'
    ACTIVITY_FAILED = 'ActivityFailed'
    EVENT_RECEIVED = 'EventReceived'
    TIMER_EXPIRED = 'TimerExpired'
    MESSAGE_TIMEOUT = 'MessageTimeout'
    CHANNEL_MESSAGE_RECEIVED = 'ChannelMessageReceived'
    COMPENSATION_EXECUTED = 'CompensationExecuted'
    COMPENSATION_FAILED = 'CompensationFailed'
    WORKFLOW_FAILED = 'WorkflowFailed'
    WORKFLOW_CANCELLED = 'WorkflowCancelled'
  end

  # Channel subscription modes
  module ChannelMode
    BROADCAST = 'broadcast'
    COMPETING = 'competing'
    DIRECT = 'direct'

    ALL = [BROADCAST, COMPETING, DIRECT].freeze
  end

  # Data type indicators for storage
  module DataType
    JSON = 'json'
    BINARY = 'binary'
  end

  # Outbox event status
  module OutboxStatus
    PENDING = 'pending'
    PROCESSING = 'processing'
    PUBLISHED = 'published'
    FAILED = 'failed'
    INVALID = 'invalid'
    EXPIRED = 'expired'
  end

  # Framework identifier for cross-language support
  FRAMEWORK = 'ruby'
end
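Since every value above is a plain string shared with the Python/Go runtimes, the frozen ALL arrays can double as validators for data read back from storage. A minimal sketch; the known_status? helper is hypothetical and not part of the gem:

require 'shikibu'

# Hypothetical guard: reject status strings the shared runtimes would not recognise.
def known_status?(value)
  Shikibu::Status::ALL.include?(value)
end

known_status?(Shikibu::Status::WAITING_FOR_TIMER) # => true
known_status?('paused')                           # => false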
data/lib/shikibu/context.rb
@@ -0,0 +1,208 @@
# frozen_string_literal: true

module Shikibu
  # Context object passed to workflow execution
  # Provides access to workflow state, history, and operations
  class WorkflowContext
    attr_reader :instance_id, :workflow_name, :worker_id, :storage, :hooks, :last_activity_id
    attr_accessor :current_activity_id

    def initialize(
      instance_id:,
      workflow_name:,
      worker_id:,
      storage:,
      hooks: nil,
      history_cache: {},
      replaying: false
    )
      @instance_id = instance_id
      @workflow_name = workflow_name
      @worker_id = worker_id
      @storage = storage
      @hooks = hooks
      @history_cache = history_cache
      @replaying = replaying
      @activity_counters = Hash.new(0)
      @compensations = []
      @direct_subscriptions = Set.new
      @last_activity_id = nil
    end

    # Record the last completed activity ID (for on_failure to reference)
    def record_last_activity_id(activity_id)
      @last_activity_id = activity_id
    end

    # Check if currently replaying from history
    def replaying?
      @replaying
    end

    # Generate a unique activity ID for deterministic replay
    # @param name [String] Activity name
    # @return [String] Activity ID in format "name:counter"
    def generate_activity_id(name)
      @activity_counters[name] += 1
      "#{name}:#{@activity_counters[name]}"
    end

    # Get cached result for an activity (during replay)
    # @param activity_id [String] Activity ID
    # @return [Object, nil] Cached result or nil
    def get_cached_result(activity_id)
      return nil unless @history_cache.key?(activity_id)

      @history_cache[activity_id]
    end

    # Check if activity has cached result
    # @param activity_id [String] Activity ID
    # @return [Boolean]
    def cached_result?(activity_id)
      @history_cache.key?(activity_id)
    end

    # Cache an activity result
    # @param activity_id [String] Activity ID
    # @param result [Object] Result to cache
    def cache_result(activity_id, result)
      @history_cache[activity_id] = result
    end

    # Register a compensation action (Romancy/Edda compatible)
    # @param activity_id [String] Activity ID that this compensation is for
    # @param compensation_name [String] Compensation function name (from registry)
    # @param args [Hash] Arguments for compensation
    def register_compensation(activity_id:, compensation_name:, args:)
      @compensations << {
        activity_id: activity_id,
        compensation_name: compensation_name,
        args: args
      }

      # Also persist to storage
      @storage.push_compensation(
        instance_id: @instance_id,
        activity_id: activity_id,
        activity_name: compensation_name,
        args: args
      )
    end

    # Get registered compensations (LIFO order)
    def compensations
      @compensations.reverse
    end

    # Access database session (within the same transaction)
    def session
      @storage.db
    end

    # Execute a block within a transaction
    def transaction(&)
      @storage.transaction(&)
    end

    # Check if currently in a transaction
    def in_transaction?
      @storage.in_transaction?
    end

    # Subscribe to a channel
    # @param channel [String] Channel name
    # @param mode [Symbol, String] Subscription mode (:broadcast, :competing, :direct)
    # @return [Channels::Subscription]
    def subscribe(channel, mode: :broadcast)
      mode_str = mode.to_s
      @direct_subscriptions.add(channel) if mode_str == ChannelMode::DIRECT

      Channels.subscribe(self, channel, mode: mode)
    end

    # Unsubscribe from a channel
    # @param channel [String] Channel name
    def unsubscribe(channel)
      @direct_subscriptions.delete(channel)
      Channels.unsubscribe(self, channel)
    end

    # Get list of direct subscriptions
    def direct_subscriptions
      @direct_subscriptions.to_a
    end

    # List all active subscriptions
    # @return [Array<Channels::Subscription>]
    def subscriptions
      Channels.subscriptions(self)
    end

    # Restart workflow with new input (tail recursion pattern)
    # @param new_input [Hash] New input for the workflow
    def recur(**new_input)
      raise RecurSignal, new_input
    end

    # Wait for a timer to expire
    # @param seconds [Numeric] Seconds to wait
    # @param timer_id [String, nil] Optional timer ID
    def sleep(seconds, timer_id: nil)
      sleep_until(Time.now + seconds, timer_id: timer_id)
    end

    # Wait until a specific time
    # @param until_time [Time] Time to wait until
    # @param timer_id [String, nil] Optional timer ID
    def sleep_until(until_time, timer_id: nil)
      timer_id ||= generate_activity_id('timer')

      # Check if already expired in history
      return get_cached_result(timer_id) if replaying? && cached_result?(timer_id)

      # Register timer and suspend
      raise WaitForTimerSignal.new(
        timer_id: timer_id,
        expires_at: until_time,
        activity_id: timer_id
      )
    end

    # Wait for a message on a channel
    # @param channel [String] Channel name
    # @param timeout [Numeric, nil] Timeout in seconds
    # @param mode [Symbol, String] Subscription mode
    # @return [Channels::Message]
    def receive(channel, timeout: nil, mode: :broadcast)
      Channels.receive(self, channel, timeout: timeout, mode: mode)
    end

    # Try to receive without blocking (returns nil if no message)
    # @param channel [String] Channel name
    # @param mode [Symbol, String] Subscription mode
    # @return [Channels::Message, nil]
    def try_receive(channel, mode: :broadcast)
      Channels.try_receive(self, channel, mode: mode)
    end

    # Publish a message to a channel
    # @param channel [String] Channel name
    # @param data [Object] Message data
    # @param metadata [Hash, nil] Optional metadata
    # @return [String] Message ID
    def publish(channel, data, metadata: nil)
      Channels.publish(self, channel, data, metadata: metadata)
    end

    # Send a message directly to another workflow instance
    # @param target_instance_id [String] Target workflow instance ID
    # @param channel [String] Channel name
    # @param data [Object] Message data
    # @param metadata [Hash, nil] Optional metadata
    # @return [String] Message ID
    def send_to(target_instance_id, channel, data, metadata: nil)
      Channels.send_to(self, target_instance_id, channel, data, metadata: metadata)
    end
  end
end
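Taken together, generate_activity_id, the history cache, and the suspension signals are what make replay deterministic: every durable step gets a stable "name:counter" ID, a replayed step returns its cached result instead of re-executing, and anything that must wait raises a signal rather than blocking the worker. A rough sketch of a workflow body driving this context — the Shikibu::Workflow base class comes from workflow.rb, but its exact DSL, the charge_payment call, and the 'refund_payment' compensation are assumptions for illustration only:

class OrderSaga < Shikibu::Workflow
  def execute(ctx, order_id:, amount:)
    # Hypothetical activity call; real invocation goes through activity.rb.
    charge_id = charge_payment(order_id: order_id, amount: amount)

    # Undo the charge if a later step fails; compensations run in LIFO order.
    ctx.register_compensation(
      activity_id: ctx.last_activity_id,
      compensation_name: 'refund_payment',
      args: { charge_id: charge_id }
    )

    # Durable timer: raises WaitForTimerSignal so the worker can resume us later.
    ctx.sleep(60)

    # Durable messaging: suspend until a fulfilment service replies (assuming the
    # message object exposes #data).
    reply = ctx.receive('fulfilment-events', timeout: 3600, mode: :direct)
    ctx.publish('order-completed', { order_id: order_id, reply: reply&.data })
  end
end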
data/lib/shikibu/errors.rb
@@ -0,0 +1,137 @@
# frozen_string_literal: true

module Shikibu
  # Base error class for all Shikibu errors
  class Error < StandardError; end

  # Raised when a workflow is cancelled
  class WorkflowCancelledError < Error
    attr_reader :instance_id

    def initialize(instance_id, message = nil)
      @instance_id = instance_id
      super(message || "Workflow #{instance_id} was cancelled")
    end
  end

  # Raised when an error should not be retried (terminal failure)
  class TerminalError < Error
    attr_reader :cause

    def initialize(message, cause: nil)
      @cause = cause
      super(message)
    end
  end

  # Raised when all retry attempts have been exhausted
  class RetryExhaustedError < Error
    attr_reader :attempts, :last_error

    def initialize(message, attempts:, last_error: nil)
      @attempts = attempts
      @last_error = last_error
      super(message)
    end
  end

  # Raised when waiting for an event times out
  class EventTimeoutError < Error
    attr_reader :event_type, :timeout

    def initialize(event_type, timeout)
      @event_type = event_type
      @timeout = timeout
      super("Timeout waiting for event '#{event_type}' after #{timeout} seconds")
    end
  end

  # Raised when a channel message receive times out
  class MessageTimeoutError < Error
    attr_reader :channel, :timeout

    def initialize(channel, timeout)
      @channel = channel
      @timeout = timeout
      super("Timeout waiting for message on channel '#{channel}' after #{timeout} seconds")
    end
  end

  # Raised when subscribing with a different mode than the channel's established mode
  class ChannelModeConflictError < Error
    attr_reader :channel, :existing_mode, :requested_mode

    def initialize(channel, existing_mode, requested_mode)
      @channel = channel
      @existing_mode = existing_mode
      @requested_mode = requested_mode
      super("Channel '#{channel}' is already configured as '#{existing_mode}' mode. " \
            "Cannot subscribe with '#{requested_mode}' mode.")
    end
  end

  # Raised when a lock cannot be acquired
  class LockNotAcquiredError < Error
    attr_reader :instance_id

    def initialize(instance_id)
      @instance_id = instance_id
      super("Could not acquire lock for workflow #{instance_id}")
    end
  end

  # Raised when a workflow is not found
  class WorkflowNotFoundError < Error
    attr_reader :instance_id

    def initialize(instance_id)
      @instance_id = instance_id
      super("Workflow #{instance_id} not found")
    end
  end

  # Raised when a workflow definition is not registered
  class WorkflowNotRegisteredError < Error
    attr_reader :workflow_name

    def initialize(workflow_name)
      @workflow_name = workflow_name
      super("Workflow '#{workflow_name}' is not registered")
    end
  end

  # Internal signal for workflow suspension (waiting for timer)
  class WaitForTimerSignal < Error
    attr_reader :timer_id, :expires_at, :activity_id

    def initialize(timer_id:, expires_at:, activity_id: nil)
      @timer_id = timer_id
      @expires_at = expires_at
      @activity_id = activity_id
      super("Waiting for timer #{timer_id}")
    end
  end

  # Internal signal for workflow suspension (waiting for channel message)
  class WaitForChannelSignal < Error
    attr_reader :channel, :mode, :timeout_at, :activity_id

    def initialize(channel:, mode:, timeout_at: nil, activity_id: nil)
      @channel = channel
      @mode = mode
      @timeout_at = timeout_at
      @activity_id = activity_id
      super("Waiting for message on channel #{channel}")
    end
  end

  # Internal signal for workflow recurrence (tail recursion)
  class RecurSignal < Error
    attr_reader :new_input

    def initialize(new_input)
      @new_input = new_input
      super('Workflow recurrence requested')
    end
  end
end
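The hierarchy separates three concerns: user-facing failures (terminal vs. retryable), lookup/registration problems, and the internal suspension signals the worker rescues for control flow. A hedged sketch of activity code leaning on the first group — PaymentGateway and its exception classes are hypothetical, and the assumption that the retry machinery stops on TerminalError is taken from the class comment rather than from worker.rb:

# Sketch: mapping external failures onto the error hierarchy.
def charge_activity(order_id:, amount:)
  raise Shikibu::TerminalError, 'amount must be positive' unless amount.positive?

  PaymentGateway.charge!(order_id, amount)
rescue PaymentGateway::CardDeclined => e
  # Pointless to retry: wrap as terminal so the retry policy can give up at once.
  raise Shikibu::TerminalError.new('card declined', cause: e)
rescue PaymentGateway::Timeout
  # Transient: re-raise as-is and let the retry policy decide on backoff.
  raise
end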
data/lib/shikibu/integrations/active_job.rb
@@ -0,0 +1,95 @@
# frozen_string_literal: true

require 'active_job' if defined?(ActiveJob)

module Shikibu
  module Integrations
    # ActiveJob integration for Rails
    #
    # @example config/initializers/shikibu.rb
    #   Shikibu.configure do |config|
    #     config.database_url = ENV['DATABASE_URL']
    #     config.auto_migrate = Rails.env.development?
    #   end
    #
    # @example Using the workflow job
    #   class ProcessOrderJob < Shikibu::Integrations::WorkflowJob
    #     self.workflow_class = OrderSaga
    #   end
    #
    #   ProcessOrderJob.perform_later(order_id: '123', amount: 99.99)
    #
    # @example Or inline
    #   Shikibu::Integrations::WorkflowJob.perform_later(
    #     workflow_class: 'OrderSaga',
    #     order_id: '123'
    #   )
    #
    module ActiveJob
      # Base job for running workflows
      class WorkflowJob < ::ActiveJob::Base
        queue_as :shikibu

        class << self
          attr_accessor :workflow_class
        end

        # Run a workflow
        # @param workflow_class [String, nil] Workflow class name (if not set on class)
        # @param input [Hash] Input parameters
        def perform(workflow_class: nil, **input)
          klass = resolve_workflow_class(workflow_class)
          Shikibu.run(klass, **input)
        end

        private

        def resolve_workflow_class(name)
          return self.class.workflow_class if self.class.workflow_class

          raise ArgumentError, 'workflow_class not specified' unless name

          Object.const_get(name)
        end
      end

      # Job for resuming workflows
      class ResumeJob < ::ActiveJob::Base
        queue_as :shikibu

        def perform(instance_id)
          Shikibu.app.resume_workflow(instance_id)
        rescue LockNotAcquiredError
          # Another worker got it, retry later
          self.class.set(wait: 1.second).perform_later(instance_id)
        end
      end

      # Job for processing expired timers
      class TimerJob < ::ActiveJob::Base
        queue_as :shikibu

        def perform(instance_id, timer_id)
          storage = Shikibu.app.storage

          # Record timer expiration
          storage.append_history(
            instance_id: instance_id,
            activity_id: timer_id,
            event_type: EventType::TIMER_EXPIRED,
            event_data: { timer_id: timer_id, expired_at: Time.now.iso8601 }
          )

          # Remove timer subscription
          storage.remove_timer(instance_id: instance_id, timer_id: timer_id)

          # Resume workflow
          Shikibu.app.resume_workflow(instance_id)
        end
      end
    end

    # Convenience alias
    WorkflowJob = ActiveJob::WorkflowJob
  end
end
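TimerJob and ResumeJob expect something to enqueue them; normally that would be the worker or a scheduler, but an application can arm them itself. A sketch using standard ActiveJob scheduling only — the helper method names are mine, and instance_id/timer_id/expires_at are assumed to come from the timer record written when the workflow raised WaitForTimerSignal:

# Sketch: schedule the timer job for the moment the workflow asked to wake up.
def arm_timer(instance_id, timer_id, expires_at)
  Shikibu::Integrations::ActiveJob::TimerJob
    .set(wait_until: expires_at)
    .perform_later(instance_id, timer_id)
end

# After recording an out-of-band event, nudge the instance to continue.
def nudge_workflow(instance_id)
  Shikibu::Integrations::ActiveJob::ResumeJob.perform_later(instance_id)
end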
data/lib/shikibu/integrations/sidekiq.rb
@@ -0,0 +1,104 @@
# frozen_string_literal: true

begin
  require 'sidekiq'
rescue LoadError
  # Sidekiq not available
end

module Shikibu
  module Integrations
    # Sidekiq integration for background workflow processing
    #
    # @example config/initializers/shikibu.rb
    #   Shikibu.configure do |config|
    #     config.database_url = ENV['DATABASE_URL']
    #   end
    #
    # @example Running a workflow
    #   Shikibu::Integrations::Sidekiq::WorkflowWorker.perform_async(
    #     'OrderSaga',
    #     { 'order_id' => '123', 'amount' => 99.99 }
    #   )
    #
    # @example Custom worker class
    #   class ProcessOrderWorker
    #     include Sidekiq::Job
    #     sidekiq_options queue: 'workflows'
    #
    #     def perform(order_id, amount)
    #       Shikibu.run(OrderSaga, order_id: order_id, amount: amount)
    #     end
    #   end
    #
    module Sidekiq
      if defined?(::Sidekiq)
        # Worker for running workflows
        class WorkflowWorker
          include ::Sidekiq::Job

          sidekiq_options queue: 'shikibu', retry: 3

          def perform(workflow_class_name, input = {})
            klass = Object.const_get(workflow_class_name)
            symbolized_input = input.transform_keys(&:to_sym)
            Shikibu.run(klass, **symbolized_input)
          end
        end

        # Worker for resuming workflows
        class ResumeWorker
          include ::Sidekiq::Job

          sidekiq_options queue: 'shikibu', retry: 5

          def perform(instance_id)
            Shikibu.app.resume_workflow(instance_id)
          rescue LockNotAcquiredError
            # Retry with backoff
            self.class.perform_in(1 + rand, instance_id)
          end
        end

        # Worker for processing expired timers
        class TimerWorker
          include ::Sidekiq::Job

          sidekiq_options queue: 'shikibu', retry: 3

          def perform(instance_id, timer_id)
            storage = Shikibu.app.storage

            storage.append_history(
              instance_id: instance_id,
              activity_id: timer_id,
              event_type: EventType::TIMER_EXPIRED,
              event_data: { timer_id: timer_id, expired_at: Time.now.iso8601 }
            )

            storage.remove_timer(instance_id: instance_id, timer_id: timer_id)
            storage.update_instance_status(instance_id, Status::RUNNING)

            ResumeWorker.perform_async(instance_id)
          end
        end

        # Scheduler for checking timers (run with sidekiq-scheduler or similar)
        class TimerScheduler
          include ::Sidekiq::Job

          sidekiq_options queue: 'shikibu_scheduler', retry: false

          def perform
            storage = Shikibu.app.storage
            expired = storage.find_expired_timers(limit: 100)

            expired.each do |timer|
              TimerWorker.perform_async(timer[:instance_id], timer[:timer_id])
            end
          end
        end
      end
    end
  end
end
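TimerScheduler is meant to be fired periodically by an external scheduler such as sidekiq-scheduler, as its comment notes. For setups without one, a crude self-rescheduling sweep also works; this sketch is an assumption, not part of the gem, and only uses the perform_async/perform_in API already exercised above (the 15-second interval and TimerSweepWorker name are mine):

# Sketch of a self-rescheduling sweep for setups without sidekiq-scheduler.
class TimerSweepWorker
  include Sidekiq::Job
  sidekiq_options queue: 'shikibu_scheduler', retry: false

  def perform
    Shikibu::Integrations::Sidekiq::TimerScheduler.perform_async
  ensure
    # Re-arm the sweep even if the enqueue above raised.
    self.class.perform_in(15)
  end
end

TimerSweepWorker.perform_async # enqueue once, e.g. from an initializer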