shikibu 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +487 -0
- data/lib/shikibu/activity.rb +135 -0
- data/lib/shikibu/app.rb +299 -0
- data/lib/shikibu/channels.rb +360 -0
- data/lib/shikibu/constants.rb +70 -0
- data/lib/shikibu/context.rb +208 -0
- data/lib/shikibu/errors.rb +137 -0
- data/lib/shikibu/integrations/active_job.rb +95 -0
- data/lib/shikibu/integrations/sidekiq.rb +104 -0
- data/lib/shikibu/locking.rb +110 -0
- data/lib/shikibu/middleware/rack_app.rb +197 -0
- data/lib/shikibu/notify/notify_base.rb +67 -0
- data/lib/shikibu/notify/pg_notify.rb +217 -0
- data/lib/shikibu/notify/wake_event.rb +56 -0
- data/lib/shikibu/outbox/relayer.rb +227 -0
- data/lib/shikibu/replay.rb +361 -0
- data/lib/shikibu/retry_policy.rb +81 -0
- data/lib/shikibu/storage/migrations.rb +179 -0
- data/lib/shikibu/storage/sequel_storage.rb +883 -0
- data/lib/shikibu/version.rb +5 -0
- data/lib/shikibu/worker.rb +389 -0
- data/lib/shikibu/workflow.rb +398 -0
- data/lib/shikibu.rb +152 -0
- data/schema/LICENSE +21 -0
- data/schema/README.md +57 -0
- data/schema/db/migrations/mysql/20251217000000_initial_schema.sql +284 -0
- data/schema/db/migrations/postgresql/20251217000000_initial_schema.sql +284 -0
- data/schema/db/migrations/sqlite/20251217000000_initial_schema.sql +284 -0
- data/schema/docs/column-values.md +91 -0
- metadata +231 -0
@@ -0,0 +1,398 @@
# frozen_string_literal: true

require 'digest'

module Shikibu
  # Base class for workflow definitions
  # Subclass this to create durable workflows
  #
  # @example
  #   # Register compensation at startup
  #   Shikibu.register_compensation(:refund_payment) do |_ctx, order_id:|
  #     PaymentService.refund(order_id)
  #   end
  #
  #   class OrderWorkflow < Shikibu::Workflow
  #     workflow_name 'order_processing'
  #     event_handler true
  #
  #     def execute(order_id:, amount:)
  #       result = activity :process_payment do
  #         PaymentService.charge(order_id, amount)
  #       end
  #
  #       on_failure :refund_payment, order_id: order_id
  #
  #       { status: 'completed', payment: result }
  #     end
  #   end
  #
  class Workflow
    class << self
      # Set the workflow name
      # @param name [String, nil] The workflow name (defaults to class name)
      def workflow_name(name = nil)
        if name
          @workflow_name = name.to_s
        else
          @workflow_name || name.to_s.gsub(/([a-z])([A-Z])/, '\1_\2').downcase
        end
      end

      # Set whether this workflow handles events
      # @param value [Boolean]
      def event_handler(value = nil)
        if value.nil?
          @event_handler || false
        else
          @event_handler = value
        end
      end

      # Set the lock timeout for this workflow
      # @param seconds [Integer]
      def lock_timeout(seconds = nil)
        if seconds
          @lock_timeout = seconds
        else
          @lock_timeout || 300
        end
      end

      # Get the source hash for this workflow
      def source_hash
        @source_hash ||= begin
          source = source_code
          Digest::SHA256.hexdigest(source)[0, 16]
        end
      end

      # Get the source code of this workflow
      def source_code
        # Try to get source from file
        file, = instance_method(:execute).source_location
        return '' unless file && File.exist?(file)

        File.read(file)
      rescue StandardError
        ''
      end

      # Register this workflow with Shikibu
      def register!
        Shikibu.register_workflow(self)
      end

      # Create and start a new workflow instance
      # @param app [App] The Shikibu app
      # @param input [Hash] Input parameters
      # @param instance_id [String, nil] Optional custom instance ID
      # @return [String] The instance ID
      def start(app, instance_id: nil, **input)
        app.start_workflow(self, instance_id: instance_id, **input)
      end

      # Run workflow using global Shikibu.app
      # @example
      #   OrderSaga.run(order_id: '123')
      def run(**input)
        Shikibu.run(self, **input)
      end
    end

    attr_reader :ctx, :input

    # Initialize with context (internal) or input (for Shikibu.run with instance)
    def initialize(context_or_input = nil, **input)
      if context_or_input.is_a?(WorkflowContext)
        @ctx = context_or_input
        @input = {}
      else
        # Called as OrderSaga.new(order_id: '123')
        @ctx = nil
        @input = context_or_input.is_a?(Hash) ? context_or_input : input
      end
      @pending_compensations = []
    end

    # Set context (called by ReplayEngine)
    def context=(context)
      @ctx = context
    end

    # Override this method to implement workflow logic
    # @param input [Hash] Input parameters (keyword arguments)
    # @return [Object] Workflow result
    def execute(**input)
      raise NotImplementedError, 'Subclasses must implement #execute'
    end

    # Execute an activity with automatic retry and history tracking
    # @param name [Symbol, String] Activity name
    # @param retry_policy [RetryPolicy] Retry policy
    # @param block [Proc] Activity logic
    # @return [Object] Activity result
    def activity(name, retry_policy: nil, &)
      activity_id = ctx.generate_activity_id(name.to_s)
      ctx.current_activity_id = activity_id

      # Check for cached result during replay
      if ctx.replaying? && ctx.cached_result?(activity_id)
        cached = ctx.get_cached_result(activity_id)
        handle_cached_result(activity_id, cached)
        ctx.record_last_activity_id(activity_id)
        return cached[:result] if cached[:event_type] == EventType::ACTIVITY_COMPLETED

        # Re-raise cached error
        raise reconstruct_error(cached)
      end

      # Execute the activity
      result = execute_activity(activity_id, name.to_s, retry_policy || RetryPolicy.default, &)
      ctx.record_last_activity_id(activity_id)
      result
    ensure
      ctx.current_activity_id = nil
    end

    # Register a compensation action to run on failure (Romancy/Edda compatible)
    #
    # @example
    #   # First register the compensation function
    #   Shikibu.register_compensation(:refund_payment) do |ctx, payment_id:|
    #     PaymentService.refund(payment_id)
    #   end
    #
    #   # Then use it in workflow
    #   result = activity :charge_payment do
    #     PaymentService.charge(order_id, amount)
    #   end
    #   on_failure :refund_payment, payment_id: result[:id]
    #
    # @param name [Symbol, String] Compensation function name (registered via Shikibu.register_compensation)
    # @param args [Hash] Arguments to pass to the compensation function
    def on_failure(name, **args)
      # Use last_activity_id since current_activity_id is cleared after activity completes
      activity_id = ctx.last_activity_id
      compensation_name = name.to_s

      # Get the compensation function from registry
      compensation_fn = Shikibu.get_compensation(compensation_name)
      raise ArgumentError, "Compensation '#{compensation_name}' not registered" if compensation_fn.nil?

      @pending_compensations << {
        activity_id: activity_id,
        compensation_name: compensation_name,
        args: args,
        block: proc { compensation_fn.call(ctx, **args) }
      }

      ctx.register_compensation(
        activity_id: activity_id,
        compensation_name: compensation_name,
        args: args
      )
    end

    # Convenience methods delegated to context

    def sleep(seconds, timer_id: nil)
      ctx.sleep(seconds, timer_id: timer_id)
    end

    def sleep_until(until_time, timer_id: nil)
      ctx.sleep_until(until_time, timer_id: timer_id)
    end

    def subscribe(channel, mode: ChannelMode::BROADCAST)
      ctx.subscribe(channel, mode: mode)
    end

    def unsubscribe(channel)
      ctx.unsubscribe(channel)
    end

    def receive(channel, timeout: nil, mode: ChannelMode::BROADCAST)
      ctx.receive(channel, timeout: timeout, mode: mode)
    end

    def try_receive(channel, mode: ChannelMode::BROADCAST)
      ctx.try_receive(channel, mode: mode)
    end

    def publish(channel, data, metadata: nil)
      ctx.publish(channel, data, metadata: metadata)
    end

    def send_to(target_instance_id, channel, data)
      ctx.send_to(target_instance_id, channel, data)
    end

    def recur(**new_input)
      ctx.recur(**new_input)
    end

    def wait_event(event_type, timeout: nil)
      receive(event_type, timeout: timeout, mode: ChannelMode::BROADCAST)
    end

    def instance_id
      ctx.instance_id
    end

    # Execute registered compensations in reverse order (LIFO)
    # Called by ReplayEngine on workflow failure
    # Implements Romancy/Edda compatible idempotency checking
    def run_compensations
      # Get compensations from DB to have table IDs (for Romancy/Edda compatibility)
      db_compensations = ctx.storage.get_compensations(ctx.instance_id)

      # Get already executed compensation IDs from history (idempotency check)
      executed_ids = executed_compensation_ids

      # Build a lookup from activity_id to DB compensation record
      db_lookup = db_compensations.each_with_object({}) do |comp, hash|
        hash[comp[:activity_id]] = comp
      end

      # Execute compensations in reverse order (LIFO)
      @pending_compensations.reverse.each do |comp|
        db_comp = db_lookup[comp[:activity_id]]
        compensation_id = db_comp&.dig(:id)

        # Skip if already executed (idempotency)
        next if compensation_id && executed_ids.include?(compensation_id)

        execute_single_compensation(comp, compensation_id)
      end
    end

    private

    def executed_compensation_ids
      history = ctx.storage.get_history(ctx.instance_id)
      history
        .select { |e| e[:event_type] == EventType::COMPENSATION_EXECUTED }
        .map { |e| e[:data]&.dig(:compensation_id) }
        .compact
        .to_set
    end

    def execute_single_compensation(comp, compensation_id)
      comp[:block].call

      # Record successful compensation (Romancy/Edda compatible format)
      ctx.storage.append_history(
        instance_id: ctx.instance_id,
        activity_id: "compensation:#{compensation_id || comp[:activity_id]}",
        event_type: EventType::COMPENSATION_EXECUTED,
        event_data: {
          compensation_id: compensation_id,
          activity_id: comp[:activity_id],
          activity_name: comp[:compensation_name]
        }
      )
    rescue StandardError => e
      # Record failed compensation but continue with others
      ctx.storage.append_history(
        instance_id: ctx.instance_id,
        activity_id: "compensation:#{compensation_id || comp[:activity_id]}",
        event_type: EventType::COMPENSATION_FAILED,
        event_data: {
          compensation_id: compensation_id,
          activity_id: comp[:activity_id],
          activity_name: comp[:compensation_name],
          error_type: e.class.name,
          error_message: e.message
        }
      )
    end

    def execute_activity(activity_id, name, retry_policy, &block)
      attempt = 0
      started_at = Time.now
      last_error = nil

      loop do
        attempt += 1

        begin
          # Call hooks
          ctx.hooks&.on_activity_start&.call(ctx.instance_id, activity_id, name, attempt)

          result = block.call

          # Record successful result
          ctx.storage.append_history(
            instance_id: ctx.instance_id,
            activity_id: activity_id,
            event_type: EventType::ACTIVITY_COMPLETED,
            event_data: { result: result }
          )

          # Cache for replay
          ctx.cache_result(activity_id, {
            event_type: EventType::ACTIVITY_COMPLETED,
            result: result
          })

          # Call hooks
          ctx.hooks&.on_activity_complete&.call(ctx.instance_id, activity_id, name, result, false)

          return result
        rescue StandardError => e
          last_error = e

          # Check if retryable
          unless retry_policy.retryable?(e) && retry_policy.should_retry?(attempt, started_at)
            # Record failure
            ctx.storage.append_history(
              instance_id: ctx.instance_id,
              activity_id: activity_id,
              event_type: EventType::ACTIVITY_FAILED,
              event_data: {
                error_type: e.class.name,
                error_message: e.message,
                attempts: attempt
              }
            )

            # Cache for replay
            ctx.cache_result(activity_id, {
              event_type: EventType::ACTIVITY_FAILED,
              error_type: e.class.name,
              error_message: e.message
            })

            # Call hooks
            ctx.hooks&.on_activity_failed&.call(ctx.instance_id, activity_id, name, e, attempt)

            raise
          end

          # Call retry hook
          delay = retry_policy.delay_for(attempt)
          ctx.hooks&.on_activity_retry&.call(ctx.instance_id, activity_id, name, e, attempt, delay)

          # Wait before retry
          Kernel.sleep(delay)
        end
      end
    end

    def handle_cached_result(activity_id, cached)
      name = activity_id.split(':').first
      ctx.hooks&.on_activity_complete&.call(ctx.instance_id, activity_id, name, cached[:result], true)
    end

    def reconstruct_error(cached)
      error_class = begin
        Object.const_get(cached[:error_type])
      rescue StandardError
        StandardError
      end

      error_class.new(cached[:error_message])
    end
  end
end
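Taken together, `Workflow` defines the gem's DSL: `workflow_name`/`event_handler`/`lock_timeout` at the class level, `activity` with retry and replay caching, `on_failure` for registered compensations, and channel helpers (`receive`, `publish`, etc.) delegated to the context. A minimal usage sketch, lifted from the `@example` doc comments above (`PaymentService` and the `order_processing` name are illustrative, not part of the gem):

```ruby
# Illustrative sketch based on the doc comments in workflow.rb above.
Shikibu.register_compensation(:refund_payment) do |_ctx, order_id:|
  PaymentService.refund(order_id)          # hypothetical service
end

class OrderWorkflow < Shikibu::Workflow
  workflow_name 'order_processing'
  event_handler true

  def execute(order_id:, amount:)
    result = activity :process_payment do
      PaymentService.charge(order_id, amount)
    end

    # Queued against the activity that just completed; only runs if the
    # workflow later fails, in reverse (LIFO) order per run_compensations.
    on_failure :refund_payment, order_id: order_id

    { status: 'completed', payment: result }
  end
end
```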
data/lib/shikibu.rb
ADDED
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require_relative 'shikibu/version'
|
|
4
|
+
require_relative 'shikibu/constants'
|
|
5
|
+
require_relative 'shikibu/errors'
|
|
6
|
+
require_relative 'shikibu/retry_policy'
|
|
7
|
+
|
|
8
|
+
# CloudEvents-native Durable Execution framework for Ruby.
|
|
9
|
+
module Shikibu
|
|
10
|
+
class << self
|
|
11
|
+
# Global app instance
|
|
12
|
+
attr_accessor :app
|
|
13
|
+
|
|
14
|
+
# Registry for workflow definitions
|
|
15
|
+
def workflow_registry
|
|
16
|
+
@workflow_registry ||= {}
|
|
17
|
+
end
|
|
18
|
+
|
|
19
|
+
# Register a workflow class
|
|
20
|
+
def register_workflow(workflow_class)
|
|
21
|
+
name = workflow_class.workflow_name
|
|
22
|
+
workflow_registry[name] = workflow_class
|
|
23
|
+
end
|
|
24
|
+
|
|
25
|
+
# Get a registered workflow by name
|
|
26
|
+
def get_workflow(name)
|
|
27
|
+
workflow_registry[name]
|
|
28
|
+
end
|
|
29
|
+
|
|
30
|
+
# Registry for compensation functions (Romancy/Edda compatible)
|
|
31
|
+
def compensation_registry
|
|
32
|
+
@compensation_registry ||= {}
|
|
33
|
+
end
|
|
34
|
+
|
|
35
|
+
# Register a compensation function
|
|
36
|
+
# @param name [Symbol, String] Compensation function name
|
|
37
|
+
# @param block [Proc] Compensation logic
|
|
38
|
+
# @example
|
|
39
|
+
# Shikibu.register_compensation(:refund_payment) do |ctx, payment_id:|
|
|
40
|
+
# PaymentService.refund(payment_id)
|
|
41
|
+
# end
|
|
42
|
+
def register_compensation(name, &block)
|
|
43
|
+
compensation_registry[name.to_s] = block
|
|
44
|
+
end
|
|
45
|
+
|
|
46
|
+
# Get a registered compensation function by name
|
|
47
|
+
# @param name [String] Compensation function name
|
|
48
|
+
# @return [Proc, nil] Compensation function or nil if not found
|
|
49
|
+
def get_compensation(name)
|
|
50
|
+
return nil if name.nil?
|
|
51
|
+
|
|
52
|
+
compensation_registry[name.to_s]
|
|
53
|
+
end
|
|
54
|
+
|
|
55
|
+
# Clear all registered workflows and compensations (mainly for testing)
|
|
56
|
+
def clear_registry!
|
|
57
|
+
@workflow_registry = {}
|
|
58
|
+
@compensation_registry = {}
|
|
59
|
+
@app = nil
|
|
60
|
+
end
|
|
61
|
+
|
|
62
|
+
# Configure the global app
|
|
63
|
+
def configure
|
|
64
|
+
config = Configuration.new
|
|
65
|
+
yield config
|
|
66
|
+
@app = App.new(
|
|
67
|
+
database_url: config.database_url,
|
|
68
|
+
service_name: config.service_name,
|
|
69
|
+
auto_migrate: config.auto_migrate,
|
|
70
|
+
hooks: config.hooks,
|
|
71
|
+
use_listen_notify: config.use_listen_notify
|
|
72
|
+
)
|
|
73
|
+
end
|
|
74
|
+
|
|
75
|
+
# Run a workflow
|
|
76
|
+
def run(workflow, **input)
|
|
77
|
+
ensure_app!
|
|
78
|
+
|
|
79
|
+
if workflow.is_a?(Class)
|
|
80
|
+
app.register(workflow) unless workflow_registry.key?(workflow.workflow_name)
|
|
81
|
+
app.start_workflow(workflow, **input)
|
|
82
|
+
else
|
|
83
|
+
# Instance passed - extract class and input
|
|
84
|
+
workflow_class = workflow.class
|
|
85
|
+
app.register(workflow_class) unless workflow_registry.key?(workflow_class.workflow_name)
|
|
86
|
+
app.start_workflow(workflow_class, **workflow.input)
|
|
87
|
+
end
|
|
88
|
+
end
|
|
89
|
+
|
|
90
|
+
# Get workflow status
|
|
91
|
+
def status(instance_id)
|
|
92
|
+
ensure_app!
|
|
93
|
+
app.get_status(instance_id)
|
|
94
|
+
end
|
|
95
|
+
|
|
96
|
+
# Get workflow result
|
|
97
|
+
def result(instance_id)
|
|
98
|
+
ensure_app!
|
|
99
|
+
app.get_result(instance_id)
|
|
100
|
+
end
|
|
101
|
+
|
|
102
|
+
# Send event to workflows
|
|
103
|
+
def send_event(event_type, data, metadata: nil)
|
|
104
|
+
ensure_app!
|
|
105
|
+
app.send_event(event_type, data, metadata: metadata)
|
|
106
|
+
end
|
|
107
|
+
|
|
108
|
+
private
|
|
109
|
+
|
|
110
|
+
def ensure_app!
|
|
111
|
+
raise Error, 'Shikibu not configured. Call Shikibu.configure first.' unless @app
|
|
112
|
+
end
|
|
113
|
+
end
|
|
114
|
+
|
|
115
|
+
# Configuration object
|
|
116
|
+
class Configuration
|
|
117
|
+
attr_accessor :database_url, :service_name, :auto_migrate, :hooks, :use_listen_notify
|
|
118
|
+
|
|
119
|
+
def initialize
|
|
120
|
+
@database_url = 'sqlite://shikibu.db'
|
|
121
|
+
@service_name = 'shikibu'
|
|
122
|
+
@auto_migrate = false
|
|
123
|
+
@hooks = nil
|
|
124
|
+
@use_listen_notify = nil # nil = auto-detect, true/false = manual
|
|
125
|
+
end
|
|
126
|
+
end
|
|
127
|
+
end
|
|
128
|
+
|
|
129
|
+
# Core requires (needed before App is loaded)
|
|
130
|
+
require_relative 'shikibu/notify/notify_base'
|
|
131
|
+
require_relative 'shikibu/notify/wake_event'
|
|
132
|
+
require_relative 'shikibu/locking'
|
|
133
|
+
require_relative 'shikibu/channels'
|
|
134
|
+
require_relative 'shikibu/context'
|
|
135
|
+
require_relative 'shikibu/workflow'
|
|
136
|
+
require_relative 'shikibu/activity'
|
|
137
|
+
require_relative 'shikibu/storage/migrations'
|
|
138
|
+
require_relative 'shikibu/storage/sequel_storage'
|
|
139
|
+
require_relative 'shikibu/outbox/relayer'
|
|
140
|
+
require_relative 'shikibu/replay'
|
|
141
|
+
require_relative 'shikibu/worker'
|
|
142
|
+
require_relative 'shikibu/app'
|
|
143
|
+
require_relative 'shikibu/middleware/rack_app'
|
|
144
|
+
|
|
145
|
+
# Optional integrations (autoload)
|
|
146
|
+
module Shikibu
|
|
147
|
+
# Framework integrations (Sidekiq, ActiveJob).
|
|
148
|
+
module Integrations
|
|
149
|
+
autoload :ActiveJob, 'shikibu/integrations/active_job'
|
|
150
|
+
autoload :Sidekiq, 'shikibu/integrations/sidekiq'
|
|
151
|
+
end
|
|
152
|
+
end
|
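This entry file exposes the public surface: `Shikibu.configure`, `run`, `status`, `result`, and `send_event`. A minimal configuration sketch, assuming the `Configuration` defaults shown above; the database URL is a placeholder, and the return value of `Shikibu.run` is assumed to be the instance ID (as suggested by `Workflow.start`'s docs):

```ruby
require 'shikibu'

Shikibu.configure do |config|
  config.database_url = 'sqlite://shikibu.db'  # the default; swap in your own URL
  config.service_name = 'orders'
  config.auto_migrate = true                   # defaults to false
  # config.use_listen_notify = true            # nil (default) auto-detects
end

# Uses the OrderWorkflow sketch shown earlier.
instance_id = Shikibu.run(OrderWorkflow, order_id: '123', amount: 4200)
Shikibu.status(instance_id)
Shikibu.result(instance_id)
```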
data/schema/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Yasushi Itoh

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
data/schema/README.md
ADDED
@@ -0,0 +1,57 @@
# Durax Schema

Shared database schema for [Durax](https://github.com/durax-io) - a multi-language durable execution framework.

- [edda](https://github.com/i2y/edda) (Python)
- [romancy](https://github.com/i2y/romancy) (Go)

## Quick Start

### 1. Install dbmate

```bash
# macOS
brew install dbmate

# Linux
curl -fsSL https://github.com/amacneil/dbmate/releases/latest/download/dbmate-linux-amd64 -o dbmate
chmod +x dbmate && sudo mv dbmate /usr/local/bin/
```

### 2. Run Migration

```bash
# PostgreSQL
DATABASE_URL="postgresql://user:pass@localhost:5432/dbname?sslmode=disable" \
  dbmate -d ./db/migrations/postgresql up

# MySQL
DATABASE_URL="mysql://user:pass@localhost:3306/dbname" \
  dbmate -d ./db/migrations/mysql up

# SQLite
DATABASE_URL="sqlite:./mydb.sqlite" \
  dbmate -d ./db/migrations/sqlite up
```

## Integration as Submodule

```bash
git submodule add https://github.com/durax-io/schema.git schema
git submodule update --init
```

## Other Commands

```bash
dbmate status # Check migration status
dbmate down   # Rollback latest migration
```

## Documentation

- [Column Values Reference](docs/column-values.md) - Standard values for database columns across all implementations

## License

MIT