shikibu 0.1.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in the public registry.
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +487 -0
- data/lib/shikibu/activity.rb +135 -0
- data/lib/shikibu/app.rb +299 -0
- data/lib/shikibu/channels.rb +360 -0
- data/lib/shikibu/constants.rb +70 -0
- data/lib/shikibu/context.rb +208 -0
- data/lib/shikibu/errors.rb +137 -0
- data/lib/shikibu/integrations/active_job.rb +95 -0
- data/lib/shikibu/integrations/sidekiq.rb +104 -0
- data/lib/shikibu/locking.rb +110 -0
- data/lib/shikibu/middleware/rack_app.rb +197 -0
- data/lib/shikibu/notify/notify_base.rb +67 -0
- data/lib/shikibu/notify/pg_notify.rb +217 -0
- data/lib/shikibu/notify/wake_event.rb +56 -0
- data/lib/shikibu/outbox/relayer.rb +227 -0
- data/lib/shikibu/replay.rb +361 -0
- data/lib/shikibu/retry_policy.rb +81 -0
- data/lib/shikibu/storage/migrations.rb +179 -0
- data/lib/shikibu/storage/sequel_storage.rb +883 -0
- data/lib/shikibu/version.rb +5 -0
- data/lib/shikibu/worker.rb +389 -0
- data/lib/shikibu/workflow.rb +398 -0
- data/lib/shikibu.rb +152 -0
- data/schema/LICENSE +21 -0
- data/schema/README.md +57 -0
- data/schema/db/migrations/mysql/20251217000000_initial_schema.sql +284 -0
- data/schema/db/migrations/postgresql/20251217000000_initial_schema.sql +284 -0
- data/schema/db/migrations/sqlite/20251217000000_initial_schema.sql +284 -0
- data/schema/docs/column-values.md +91 -0
- metadata +231 -0
data/lib/shikibu/app.rb
ADDED
@@ -0,0 +1,299 @@
+# frozen_string_literal: true
+
+require 'securerandom'
+
+module Shikibu
+  # Main application class for Shikibu
+  # Manages configuration, lifecycle, and workflow execution
+  #
+  # @example
+  #   app = Shikibu::App.new(
+  #     database_url: 'sqlite://shikibu.db',
+  #     service_name: 'my-service',
+  #     auto_migrate: true
+  #   )
+  #
+  #   app.register OrderWorkflow
+  #   app.start
+  #
+  #   instance_id = app.start_workflow(OrderWorkflow, order_id: '123', amount: 99.99)
+  #   result = app.get_result(instance_id)
+  #
+  #   app.shutdown
+  #
+  class App
+    attr_reader :storage, :worker_id, :service_name, :hooks, :message_retention_days,
+                :outbox_enabled, :broker_url, :outbox_poll_interval, :outbox_max_retries, :outbox_max_age_hours
+
+    # Create a new Shikibu application
+    # @param database_url [String] Database connection URL
+    # @param service_name [String] Service name for worker identification
+    # @param auto_migrate [Boolean] Whether to auto-apply migrations
+    # @param hooks [Object] Optional hooks for observability
+    # @param use_listen_notify [Boolean, nil] Enable PostgreSQL LISTEN/NOTIFY (nil=auto)
+    # @param message_retention_days [Integer] Days to retain channel messages (default: 7)
+    # @param outbox_enabled [Boolean] Enable outbox relayer (default: false)
+    # @param broker_url [String, nil] Message broker URL for outbox events
+    # @param outbox_poll_interval [Float] Outbox polling interval in seconds (default: 1.0)
+    # @param outbox_max_retries [Integer] Max retries for outbox events (default: 3)
+    # @param outbox_max_age_hours [Float, nil] Max age for outbox events in hours (default: nil)
+    def initialize(database_url:, service_name: 'shikibu', auto_migrate: false, hooks: nil,
+                   use_listen_notify: nil, message_retention_days: 7,
+                   outbox_enabled: false, broker_url: nil, outbox_poll_interval: 1.0,
+                   outbox_max_retries: 3, outbox_max_age_hours: nil)
+      @database_url = database_url
+      @service_name = service_name
+      @auto_migrate = auto_migrate
+      @hooks = hooks
+      @use_listen_notify = use_listen_notify
+      @message_retention_days = message_retention_days
+      @outbox_enabled = outbox_enabled
+      @broker_url = broker_url
+      @outbox_poll_interval = outbox_poll_interval
+      @outbox_max_retries = outbox_max_retries
+      @outbox_max_age_hours = outbox_max_age_hours
+      @worker_id = Locking.generate_worker_id(service_name)
+      @registered_workflows = {}
+      @running = false
+      @worker = nil
+      @notify_listener = nil
+
+      # Validate outbox configuration
+      raise ArgumentError, 'broker_url is required when outbox_enabled is true' if @outbox_enabled && @broker_url.nil?
+
+      # Initialize storage
+      @storage = Storage::SequelStorage.new(database_url, auto_migrate: auto_migrate)
+
+      # Initialize notify listener for PostgreSQL
+      setup_notify_listener if should_enable_listen_notify?
+
+      # Initialize replay engine
+      @replay_engine = ReplayEngine.new(
+        storage: @storage,
+        worker_id: @worker_id,
+        hooks: @hooks
+      )
+    end
+
+    # Check if outbox relayer is enabled
+    def outbox_enabled?
+      @outbox_enabled
+    end
+
+    # Register a workflow class
+    # @param workflow_class [Class] Workflow class to register
+    def register(workflow_class)
+      name = workflow_class.workflow_name
+      @registered_workflows[name] = workflow_class
+      Shikibu.register_workflow(workflow_class)
+    end
+
+    # Start the background worker
+    def start
+      return if @running
+
+      @running = true
+
+      # Start notify listener before worker
+      @notify_listener&.start
+
+      @worker = Worker.new(self)
+
+      # Register notification handlers
+      register_notify_handlers if @notify_listener
+
+      @worker.start
+    end
+
+    # Gracefully shutdown the application
+    def shutdown
+      return unless @running
+
+      @running = false
+      @worker&.stop
+      @notify_listener&.stop
+      @storage.close
+    end
+
+    # Access the worker
+    attr_reader :worker
+
+    # Check if app is running
+    def running?
+      @running
+    end
+
+    # Start a new workflow
+    # @param workflow_class [Class] Workflow class
+    # @param instance_id [String, nil] Optional custom instance ID
+    # @param input [Hash] Input parameters
+    # @return [String] Instance ID
+    def start_workflow(workflow_class, instance_id: nil, **input)
+      instance_id ||= SecureRandom.uuid
+
+      @replay_engine.start_workflow(workflow_class, instance_id: instance_id, **input)
+
+      instance_id
+    rescue WaitForTimerSignal, WaitForChannelSignal
+      # Workflow suspended, return instance_id
+      instance_id
+    end
+
+    # Resume a workflow
+    # @param instance_id [String] Instance ID
+    def resume_workflow(instance_id)
+      @replay_engine.resume_workflow(instance_id)
+    rescue WaitForTimerSignal, WaitForChannelSignal
+      # Workflow still suspended
+      nil
+    end
+
+    # Get workflow result
+    # @param instance_id [String] Instance ID
+    # @return [Hash] Status and output
+    def get_result(instance_id)
+      instance = @storage.get_instance(instance_id)
+      raise WorkflowNotFoundError, instance_id unless instance
+
+      {
+        status: instance[:status],
+        output: instance[:output_data],
+        error: instance[:status] == Status::FAILED ? 'Workflow failed' : nil
+      }
+    end
+
+    # Get workflow status
+    # @param instance_id [String] Instance ID
+    # @return [String] Status
+    def get_status(instance_id)
+      instance = @storage.get_instance(instance_id)
+      raise WorkflowNotFoundError, instance_id unless instance
+
+      instance[:status]
+    end
+
+    # Cancel a workflow
+    # @param instance_id [String] Instance ID
+    # @param reason [String, nil] Cancellation reason
+    def cancel_workflow(instance_id, reason: nil) # rubocop:disable Lint/UnusedMethodArgument
+      instance = @storage.get_instance(instance_id)
+      raise WorkflowNotFoundError, instance_id unless instance
+
+      # Only cancel if not already terminal
+      terminal_statuses = [Status::COMPLETED, Status::FAILED, Status::CANCELLED]
+      return false if terminal_statuses.include?(instance[:status])
+
+      @storage.update_instance_status(instance_id, Status::CANCELLED)
+      true
+    end
+
+    # List workflow instances
+    # @param limit [Integer] Maximum number of results
+    # @param offset [Integer] Offset for pagination
+    # @param status [String, nil] Filter by status
+    # @param workflow_name [String, nil] Filter by workflow name
+    # @return [Array<Hash>] List of instances
+    def list_workflows(limit: 100, offset: 0, status: nil, workflow_name: nil)
+      @storage.list_instances(
+        limit: limit,
+        offset: offset,
+        status_filter: status,
+        workflow_name: workflow_name
+      )
+    end
+
+    # Send an event to waiting workflows
+    # @param event_type [String] Event type (channel name)
+    # @param data [Object] Event data
+    # @param metadata [Hash, nil] Optional metadata
+    # @param target_instance_id [String, nil] If set, deliver only to this specific instance (Point-to-Point)
+    def send_event(event_type, data, metadata: nil, target_instance_id: nil)
+      full_metadata = (metadata || {}).merge(
+        source_service: @service_name,
+        published_at: Time.now.iso8601
+      )
+
+      # Publish message to channel for persistence
+      message_id = @storage.publish_message(
+        channel: event_type,
+        data: data,
+        metadata: full_metadata
+      )
+
+      # Deliver to subscribers
+      if target_instance_id
+        # Point-to-Point: deliver only to specific instance
+        @storage.deliver_channel_message(
+          instance_id: target_instance_id,
+          channel: event_type,
+          message_id: message_id,
+          data: data,
+          metadata: full_metadata,
+          worker_id: @worker_id
+        )
+      else
+        # Broadcast: wake all waiting subscribers (Worker will handle delivery)
+        @notify_listener&.notify(Notify::Channel::CHANNEL_MESSAGE, { channel: event_type })
+      end
+
+      # Call hook
+      hooks&.on_event_sent&.call(event_type, @service_name, data)
+    end
+
+    # Get registered workflows
+    def registered_workflows
+      @registered_workflows.dup
+    end
+
+    # Access the notify listener
+    attr_reader :notify_listener
+
+    private
+
+    def should_enable_listen_notify?
+      return @use_listen_notify unless @use_listen_notify.nil?
+
+      # Auto-detect: enable for PostgreSQL
+      @database_url.start_with?('postgres')
+    end
+
+    def setup_notify_listener
+      @notify_listener = Notify.create_listener(@database_url)
+      @storage.notify_enabled = true
+    end
+
+    def register_notify_handlers
+      # Handle workflow_resumable notifications
+      @notify_listener.subscribe(Notify::Channel::WORKFLOW_RESUMABLE) do |_payload|
+        handle_workflow_resumable_notify
+      end
+
+      # Handle channel_message notifications
+      @notify_listener.subscribe(Notify::Channel::CHANNEL_MESSAGE) do |_payload|
+        handle_channel_message_notify
+      end
+
+      # Handle outbox_pending notifications (only if outbox is enabled)
+      return unless @outbox_enabled
+
+      @notify_listener.subscribe(Notify::Channel::OUTBOX_PENDING) do |_payload|
+        handle_outbox_pending_notify
+      end
+    end
+
+    def handle_workflow_resumable_notify
+      # Wake the resumption loop
+      @worker&.resume_wake_event&.signal
+    end
+
+    def handle_channel_message_notify
+      # Wake the message delivery loop
+      @worker&.message_wake_event&.signal
+    end
+
+    def handle_outbox_pending_notify
+      # Wake the outbox relayer
+      @worker&.outbox_wake_event&.signal
+    end
+  end
+end
data/lib/shikibu/channels.rb
ADDED
@@ -0,0 +1,360 @@
+# frozen_string_literal: true
+
+module Shikibu
+  # Channel-based messaging for workflow communication
+  # Supports three modes:
+  # - broadcast: All subscribers receive all messages
+  # - competing: Each message goes to exactly one subscriber
+  # - direct: Point-to-point messaging to specific instance
+  #
+  # @example Broadcast (pub/sub)
+  #   # Workflow A
+  #   subscribe 'notifications', mode: :broadcast
+  #   message = receive 'notifications', timeout: 60
+  #
+  #   # Workflow B
+  #   publish 'notifications', { type: 'alert', text: 'Hello!' }
+  #
+  # @example Competing consumers (work queue)
+  #   # Worker workflows (multiple instances)
+  #   subscribe 'tasks', mode: :competing
+  #   task = receive 'tasks'
+  #   process(task)
+  #
+  #   # Producer
+  #   publish 'tasks', { job_id: 123, action: 'process' }
+  #
+  # @example Direct messaging (point-to-point)
+  #   # Parent workflow
+  #   child_id = start_workflow(ChildWorkflow, parent_id: instance_id)
+  #   subscribe 'results', mode: :direct
+  #   result = receive 'results', timeout: 300
+  #
+  #   # Child workflow
+  #   send_to parent_id, 'results', { status: 'done', data: result }
+  #
+  module Channels
+    # Message wrapper with metadata
+    class Message
+      attr_reader :id, :channel, :data, :metadata, :published_at
+
+      def initialize(id:, channel:, data:, metadata: nil, published_at: nil)
+        @id = id
+        @channel = channel
+        @data = data
+        @metadata = metadata || {}
+        @published_at = published_at
+      end
+
+      def [](key)
+        @data[key]
+      end
+
+      def to_h
+        {
+          id: @id,
+          channel: @channel,
+          data: @data,
+          metadata: @metadata,
+          published_at: @published_at
+        }
+      end
+    end
+
+    # Subscription info
+    class Subscription
+      attr_reader :channel, :mode, :subscribed_at
+
+      def initialize(channel:, mode:, subscribed_at: nil)
+        @channel = channel
+        @mode = mode.to_s
+        @subscribed_at = subscribed_at || Time.now
+      end
+
+      def broadcast?
+        @mode == ChannelMode::BROADCAST
+      end
+
+      def competing?
+        @mode == ChannelMode::COMPETING
+      end
+
+      def direct?
+        @mode == ChannelMode::DIRECT
+      end
+    end
+
+    module_function
+
+    # Subscribe to a channel
+    # @param ctx [WorkflowContext] Workflow context
+    # @param channel [String] Channel name
+    # @param mode [Symbol, String] Subscription mode (:broadcast, :competing, :direct)
+    # @raise [ChannelModeConflictError] If channel is already configured with a different mode
+    def subscribe(ctx, channel, mode: :broadcast)
+      mode_str = mode.to_s
+      validate_mode!(mode_str)
+
+      # For direct mode, use instance-specific channel
+      actual_channel = if mode_str == ChannelMode::DIRECT
+                         "#{channel}:#{ctx.instance_id}"
+                       else
+                         channel
+                       end
+
+      # Determine actual mode (direct maps to competing)
+      actual_mode = mode_str == ChannelMode::DIRECT ? ChannelMode::COMPETING : mode_str
+
+      # Check for mode conflict
+      existing_mode = ctx.storage.get_channel_mode(actual_channel)
+      if existing_mode && existing_mode != actual_mode
+        raise ChannelModeConflictError.new(channel, existing_mode, mode_str)
+      end
+
+      ctx.storage.subscribe_to_channel(
+        instance_id: ctx.instance_id,
+        channel: actual_channel,
+        mode: actual_mode
+      )
+
+      Subscription.new(channel: actual_channel, mode: mode_str)
+    end
+
+    # Unsubscribe from a channel
+    # @param ctx [WorkflowContext] Workflow context
+    # @param channel [String] Channel name
+    def unsubscribe(ctx, channel)
+      ctx.storage.unsubscribe_from_channel(
+        instance_id: ctx.instance_id,
+        channel: channel
+      )
+    end
+
+    # Receive a message from a channel (blocks workflow until message arrives)
+    # @param ctx [WorkflowContext] Workflow context
+    # @param channel [String] Channel name
+    # @param timeout [Numeric, nil] Timeout in seconds (nil = no timeout)
+    # @param mode [Symbol, String] Subscription mode
+    # @return [Message] Received message
+    # @raise [MessageTimeoutError] If timeout expires
+    def receive(ctx, channel, timeout: nil, mode: :broadcast)
+      mode_str = mode.to_s
+      activity_id = ctx.generate_activity_id("receive:#{channel}")
+
+      # For direct mode, use instance-specific channel
+      actual_channel = if mode_str == ChannelMode::DIRECT
+                         "#{channel}:#{ctx.instance_id}"
+                       else
+                         channel
+                       end
+
+      # Check cache during replay
+      if ctx.replaying? && ctx.cached_result?(activity_id)
+        cached = ctx.get_cached_result(activity_id)
+        return build_message_from_cached(cached) if cached[:event_type] == EventType::CHANNEL_MESSAGE_RECEIVED
+
+        # Timeout was recorded
+        raise MessageTimeoutError.new(channel, timeout)
+      end
+
+      # Try to get pending message immediately
+      message = try_receive_immediate(ctx, actual_channel, mode_str)
+      if message
+        # Call hook
+        ctx.hooks&.on_event_received&.call(ctx.instance_id, actual_channel, message.data)
+
+        record_message_received(ctx, activity_id, actual_channel, message)
+        return message
+      end
+
+      # No message available, suspend workflow
+      timeout_at = timeout ? Time.now + timeout : nil
+
+      raise WaitForChannelSignal.new(
+        channel: actual_channel,
+        mode: mode_str == ChannelMode::DIRECT ? ChannelMode::COMPETING : mode_str,
+        timeout_at: timeout_at,
+        activity_id: activity_id
+      )
+    end
+
+    # Publish a message to a channel
+    # @param ctx [WorkflowContext] Workflow context
+    # @param channel [String] Channel name
+    # @param data [Object] Message data
+    # @param metadata [Hash, nil] Optional metadata
+    # @return [String] Message ID
+    def publish(ctx, channel, data, metadata: nil)
+      full_metadata = (metadata || {}).merge(
+        source_instance_id: ctx.instance_id,
+        source_workflow: ctx.workflow_name,
+        published_at: Time.now.iso8601
+      )
+
+      result = ctx.storage.publish_message(
+        channel: channel,
+        data: data,
+        metadata: full_metadata
+      )
+
+      # Call hook
+      ctx.hooks&.on_event_sent&.call(channel, ctx.workflow_name, data)
+
+      result
+    end
+
+    # Send a message directly to a specific workflow instance
+    # @param ctx [WorkflowContext] Workflow context
+    # @param target_instance_id [String] Target workflow instance ID
+    # @param channel [String] Channel name
+    # @param data [Object] Message data
+    # @param metadata [Hash, nil] Optional metadata
+    # @return [String] Message ID
+    def send_to(ctx, target_instance_id, channel, data, metadata: nil)
+      direct_channel = "#{channel}:#{target_instance_id}"
+
+      full_metadata = (metadata || {}).merge(
+        source_instance_id: ctx.instance_id,
+        source_workflow: ctx.workflow_name,
+        target_instance_id: target_instance_id,
+        published_at: Time.now.iso8601
+      )
+
+      ctx.storage.publish_message(
+        channel: direct_channel,
+        data: data,
+        metadata: full_metadata
+      )
+    end
+
+    # Receive without blocking (returns nil if no message)
+    # @param ctx [WorkflowContext] Workflow context
+    # @param channel [String] Channel name
+    # @param mode [Symbol, String] Subscription mode
+    # @return [Message, nil] Message or nil
+    def try_receive(ctx, channel, mode: :broadcast)
+      mode_str = mode.to_s
+      actual_channel = if mode_str == ChannelMode::DIRECT
+                         "#{channel}:#{ctx.instance_id}"
+                       else
+                         channel
+                       end
+
+      try_receive_immediate(ctx, actual_channel, mode_str)
+    end
+
+    # List active subscriptions for an instance
+    # @param ctx [WorkflowContext] Workflow context
+    # @return [Array<Subscription>]
+    def subscriptions(ctx)
+      subs = ctx.storage.db[:channel_subscriptions]
+             .where(instance_id: ctx.instance_id)
+             .all
+
+      subs.map do |row|
+        Subscription.new(
+          channel: row[:channel],
+          mode: row[:mode],
+          subscribed_at: row[:subscribed_at]
+        )
+      end
+    end
+
+    class << self
+      private
+
+      def validate_mode!(mode)
+        return if ChannelMode::ALL.include?(mode)
+
+        raise ArgumentError, "Invalid channel mode: #{mode}. Must be one of: #{ChannelMode::ALL.join(', ')}"
+      end
+
+      def try_receive_immediate(ctx, channel, mode)
+        sub = ctx.storage.get_subscription(instance_id: ctx.instance_id, channel: channel)
+
+        # Get cursor for broadcast mode
+        cursor_id = sub&.dig(:cursor_message_id)
+
+        row = ctx.storage.get_next_message(
+          channel: channel,
+          mode: mode == ChannelMode::DIRECT ? ChannelMode::COMPETING : mode,
+          instance_id: ctx.instance_id,
+          cursor_id: cursor_id
+        )
+
+        return nil unless row
+
+        # For competing mode, try to claim
+        if mode == ChannelMode::COMPETING
+          claimed = ctx.storage.claim_message(
+            message_id: row[:message_id],
+            instance_id: ctx.instance_id
+          )
+          return nil unless claimed
+        end
+
+        # Update cursor for broadcast mode
+        update_broadcast_cursor(ctx, channel, row[:id]) if mode == ChannelMode::BROADCAST
+
+        build_message(row)
+      end
+
+      def update_broadcast_cursor(ctx, channel, message_id)
+        ctx.storage.db[:channel_subscriptions]
+           .where(instance_id: ctx.instance_id, channel: channel)
+           .update(cursor_message_id: message_id)
+      end
+
+      def build_message(row)
+        data = if row[:data_type] == DataType::BINARY
+                 row[:data_binary]
+               else
+                 raw_data = row[:data]
+                 raw_data.is_a?(String) ? JSON.parse(raw_data, symbolize_names: true) : raw_data
+               end
+
+        metadata = row[:metadata]
+        metadata = JSON.parse(metadata, symbolize_names: true) if metadata.is_a?(String)
+
+        Message.new(
+          id: row[:message_id],
+          channel: row[:channel],
+          data: data,
+          metadata: metadata,
+          published_at: row[:published_at]
+        )
+      end
+
+      def build_message_from_cached(cached)
+        # The cached structure is { event_type: ..., data: { message_id:, channel:, data:, metadata: } }
+        msg_data = cached[:data] || {}
+        Message.new(
+          id: msg_data[:message_id] || SecureRandom.uuid,
+          channel: msg_data[:channel] || 'unknown',
+          data: msg_data[:data],
+          metadata: msg_data[:metadata] || {},
+          published_at: msg_data[:published_at]
+        )
+      end
+
+      def record_message_received(ctx, activity_id, channel, message)
+        ctx.storage.append_history(
+          instance_id: ctx.instance_id,
+          activity_id: activity_id,
+          event_type: EventType::CHANNEL_MESSAGE_RECEIVED,
+          event_data: {
+            channel: channel,
+            message_id: message.id,
+            data: message.data,
+            metadata: message.metadata
+          }
+        )
+
+        ctx.cache_result(activity_id, {
+          event_type: EventType::CHANNEL_MESSAGE_RECEIVED,
+          data: message.to_h
+        })
+      end
+    end
+  end
+end
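
As a point of reference, an illustrative sketch of direct (point-to-point) messaging using the module functions above. Here ctx, child_ctx, and parent_id are placeholders; inside real workflow code these calls would presumably go through the workflow DSL (subscribe / receive / send_to) shown in the @example comments rather than being invoked with an explicit context.

    # Parent workflow body: ctx is this instance's WorkflowContext.
    Shikibu::Channels.subscribe(ctx, 'results', mode: :direct)

    # Direct mode maps onto the instance-specific channel "results:<instance_id>"
    # with competing delivery. If no message is pending, receive raises
    # WaitForChannelSignal so the engine can suspend the workflow; on timeout it
    # raises MessageTimeoutError.
    result = Shikibu::Channels.receive(ctx, 'results', timeout: 300, mode: :direct)

    # Child workflow body: address the parent instance by id.
    Shikibu::Channels.send_to(child_ctx, parent_id, 'results', { status: 'done' })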