kaal 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +340 -0
- data/Rakefile +6 -0
- data/app/models/kaal/cron_definition.rb +71 -0
- data/app/models/kaal/cron_dispatch.rb +50 -0
- data/app/models/kaal/cron_lock.rb +38 -0
- data/config/locales/en.yml +46 -0
- data/lib/generators/kaal/install/install_generator.rb +67 -0
- data/lib/generators/kaal/install/templates/create_kaal_definitions.rb.tt +21 -0
- data/lib/generators/kaal/install/templates/create_kaal_dispatches.rb.tt +20 -0
- data/lib/generators/kaal/install/templates/create_kaal_locks.rb.tt +17 -0
- data/lib/generators/kaal/install/templates/kaal.rb.tt +31 -0
- data/lib/generators/kaal/install/templates/scheduler.yml.tt +22 -0
- data/lib/kaal/backend/adapter.rb +147 -0
- data/lib/kaal/backend/dispatch_logging.rb +79 -0
- data/lib/kaal/backend/memory_adapter.rb +99 -0
- data/lib/kaal/backend/mysql_adapter.rb +170 -0
- data/lib/kaal/backend/postgres_adapter.rb +134 -0
- data/lib/kaal/backend/redis_adapter.rb +145 -0
- data/lib/kaal/backend/sqlite_adapter.rb +116 -0
- data/lib/kaal/configuration.rb +231 -0
- data/lib/kaal/coordinator.rb +437 -0
- data/lib/kaal/cron_humanizer.rb +182 -0
- data/lib/kaal/cron_utils.rb +233 -0
- data/lib/kaal/definition/database_engine.rb +45 -0
- data/lib/kaal/definition/memory_engine.rb +61 -0
- data/lib/kaal/definition/redis_engine.rb +93 -0
- data/lib/kaal/definition/registry.rb +46 -0
- data/lib/kaal/dispatch/database_engine.rb +94 -0
- data/lib/kaal/dispatch/memory_engine.rb +99 -0
- data/lib/kaal/dispatch/redis_engine.rb +103 -0
- data/lib/kaal/dispatch/registry.rb +62 -0
- data/lib/kaal/idempotency_key_generator.rb +26 -0
- data/lib/kaal/railtie.rb +183 -0
- data/lib/kaal/rake_tasks.rb +184 -0
- data/lib/kaal/register_conflict_support.rb +54 -0
- data/lib/kaal/registry.rb +242 -0
- data/lib/kaal/scheduler_config_error.rb +6 -0
- data/lib/kaal/scheduler_file_loader.rb +316 -0
- data/lib/kaal/scheduler_hash_transform.rb +40 -0
- data/lib/kaal/scheduler_placeholder_support.rb +80 -0
- data/lib/kaal/version.rb +10 -0
- data/lib/kaal.rb +571 -0
- data/lib/tasks/kaal_tasks.rake +10 -0
- metadata +142 -0
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Copyright Codevedas Inc. 2025-present
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

module Kaal
  ##
  # Mutex-guarded registry that maps unique string keys to cron job entries.
  # Every entry bundles the key, its cron expression, and the enqueue callback
  # invoked when the cron fires.
  #
  # @example Register and look up a job
  #   registry = Kaal::Registry.new
  #   registry.add(key: "reports:daily", cron: "0 9 * * *", enqueue: ->(fire_time:, idempotency_key:) { })
  #   registry.find("reports:daily").cron # => "0 9 * * *"
  class Registry
    include Enumerable

    ##
    # Value object describing one registered cron job.
    # rubocop:disable Style/RedundantStructKeywordInit
    Entry = Struct.new(:key, :cron, :enqueue, keyword_init: true)
    # rubocop:enable Style/RedundantStructKeywordInit

    ##
    # Build an empty registry guarded by its own mutex.
    def initialize
      @store = {}
      @lock = Mutex.new
    end

    ##
    # Register a new cron job under a unique key.
    #
    # Stored entries are frozen so callers cannot mutate them and corrupt
    # the internal key->entry mapping.
    #
    # @param key [String] unique identifier for the cron task
    # @param cron [String] cron expression (e.g., "0 9 * * *", "@daily")
    # @param enqueue [Proc, Lambda] callable invoked when the cron fires
    # @return [Entry] the registered entry (frozen)
    # @raise [ArgumentError] if key or cron is blank, or enqueue is not callable
    # @raise [RegistryError] if the key is already registered
    def add(key:, cron:, enqueue:)
      validate_entry(key, cron, enqueue)

      @lock.synchronize do
        raise RegistryError, "Key '#{key}' is already registered" if @store.key?(key)

        @store[key] = build_entry(key, cron, enqueue)
      end
    end

    ##
    # Insert or replace a cron job entry atomically.
    #
    # Unlike {#add}, an existing key is silently overwritten.
    #
    # @param key [String] unique identifier for the cron task
    # @param cron [String] cron expression
    # @param enqueue [Proc, Lambda] callable invoked when the cron fires
    # @return [Entry] the stored entry (frozen)
    # @raise [ArgumentError] if key or cron is blank, or enqueue is not callable
    def upsert(key:, cron:, enqueue:)
      validate_entry(key, cron, enqueue)

      @lock.synchronize { @store[key] = build_entry(key, cron, enqueue) }
    end

    ##
    # Unregister a cron job.
    #
    # @param key [String] the key to unregister
    # @return [Entry, nil] the removed entry, or nil when the key was absent
    def remove(key)
      @lock.synchronize { @store.delete(key) }
    end

    ##
    # Look up a registered entry.
    #
    # @param key [String] the key to look up
    # @return [Entry, nil] the entry, or nil when not registered
    def find(key)
      @lock.synchronize { @store[key] }
    end

    ##
    # Snapshot of every registered entry.
    #
    # @return [Array<Entry>] a fresh array of the current entries
    def all
      @lock.synchronize { @store.values.dup }
    end

    ##
    # Number of registered entries.
    #
    # @return [Integer]
    def size
      @lock.synchronize { @store.size }
    end

    ##
    # Alias for {#size}.
    #
    # @return [Integer]
    alias count size

    ##
    # Whether a key is currently registered.
    #
    # @param key [String] the key to check
    # @return [Boolean]
    def registered?(key)
      @lock.synchronize { @store.key?(key) }
    end

    ##
    # Remove every entry.
    #
    # @return [Integer] how many entries were removed
    def clear
      @lock.synchronize do
        removed = @store.size
        @store.clear
        removed
      end
    end

    ##
    # Iterate over all entries.
    #
    # A snapshot is taken under the lock and yielded outside it, so the
    # block may safely call back into the registry without deadlocking.
    #
    # @yield [entry] each registered entry
    # @yieldparam entry [Entry]
    # @return [Enumerator] when no block is given
    def each(&block)
      return enum_for(:each) unless block

      snapshot = @lock.synchronize { @store.values.dup }
      snapshot.each(&block)
    end

    ##
    # Hash representation of every entry.
    #
    # @return [Array<Hash>] one hash per entry (:key, :cron, :enqueue)
    def to_a
      @lock.synchronize { @store.values.map(&:to_h) }
    end

    ##
    # Human-readable summary of the registry.
    #
    # @return [String]
    def inspect
      @lock.synchronize do
        "#<Kaal::Registry size=#{@store.size} keys=[#{@store.keys.map(&:inspect).join(', ')}]>"
      end
    end

    private

    ##
    # Construct a frozen entry; callers must hold the lock before storing it.
    def build_entry(key, cron, enqueue)
      Entry.new(key: key, cron: cron, enqueue: enqueue).freeze
    end

    ##
    # Reject blank keys/crons and non-callable enqueue callbacks.
    #
    # @raise [ArgumentError] when any parameter is invalid
    def validate_entry(key, cron, enqueue)
      raise ArgumentError, 'key cannot be empty' if key.to_s.strip.empty?
      raise ArgumentError, 'cron cannot be empty' if cron.to_s.strip.empty?
      raise ArgumentError, 'enqueue must be callable' unless enqueue.respond_to?(:call)
    end
  end

  ##
  # Error raised when registry operations fail.
  class RegistryError < StandardError; end
end
|
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'erb'
require 'pathname'
require 'yaml'
require 'active_support/core_ext/hash/deep_merge'
require 'active_support/core_ext/object/deep_dup'
require 'active_support/core_ext/string/inflections'
require_relative 'scheduler_hash_transform'
require_relative 'scheduler_placeholder_support'

module Kaal
  # Loads scheduler definitions from config/scheduler.yml and registers them.
  #
  # The YAML file is rendered through ERB, jobs are gathered from the
  # 'defaults' section plus the current Rails environment's section, validated,
  # then each job is upserted into the definition registry and the in-memory
  # registry. If any job fails to apply, every previously applied job is
  # rolled back before the error is re-raised.
  class SchedulerFileLoader
    include SchedulerHashTransform
    include SchedulerPlaceholderSupport

    # Matches `{{ token }}` placeholders inside args/kwargs string values;
    # the first capture group is the token name.
    PLACEHOLDER_PATTERN = /\{\{\s*([a-zA-Z0-9_.]+)\s*\}\}/
    # Tokens that may appear in placeholders, mapped to resolver callables
    # that receive the fire-time context hash.
    ALLOWED_PLACEHOLDERS = {
      'fire_time.iso8601' => ->(ctx) { ctx.fetch(:fire_time).iso8601 },
      'fire_time.unix' => ->(ctx) { ctx.fetch(:fire_time).to_i },
      'idempotency_key' => ->(ctx) { ctx.fetch(:idempotency_key) },
      'key' => ->(ctx) { ctx.fetch(:key) }
    }.freeze

    # @param configuration [Object] exposes scheduler_config_path,
    #   scheduler_missing_file_policy, and scheduler_conflict_policy
    # @param definition_registry [Object] persistence layer for definitions
    #   (find_definition / upsert_definition / remove_definition)
    # @param registry [Kaal::Registry] in-memory cron registry
    # @param logger [Logger, nil] optional logger for warnings/errors
    # @param rails_context [Object] anything exposing #env and #root
    #   (defaults to Rails; injectable for tests)
    def initialize(configuration:, definition_registry:, registry:, logger:, rails_context: Rails)
      @configuration = configuration
      @definition_registry = definition_registry
      @registry = registry
      @logger = logger
      @rails_env = rails_context.env.to_s
      @rails_root = rails_context.root
      @placeholder_resolvers = ALLOWED_PLACEHOLDERS
    end

    # Load, validate, and apply all jobs from the scheduler file.
    #
    # @return [Array<Hash>] normalized job hashes that were applied
    # @raise [SchedulerConfigError] on parsing/validation failures; any jobs
    #   applied before the failure are rolled back before re-raising
    def load
      applied_job_contexts = []
      path = scheduler_file_path
      return handle_missing_file(path) unless File.exist?(path)

      payload = parse_yaml(path)
      jobs = extract_jobs(payload)
      validate_unique_keys(jobs)
      normalized_jobs = jobs.map { |job_payload| normalize_job(job_payload) }
      applied_jobs = []
      normalized_jobs.each do |job|
        applied_job_context = apply_job(**job)
        # nil means the job was skipped due to a conflict policy decision.
        next unless applied_job_context

        applied_jobs << job
        applied_job_contexts << applied_job_context
      end

      applied_jobs
    rescue StandardError
      rollback_applied_jobs(applied_job_contexts)
      raise
    end

    private

    # Resolve the configured scheduler file path; relative paths are joined
    # onto the Rails root.
    def scheduler_file_path
      configured_path = @configuration.scheduler_config_path.to_s.strip
      raise SchedulerConfigError, 'scheduler_config_path cannot be blank' if configured_path.empty?

      path = Pathname.new(configured_path)
      path.absolute? ? path.to_s : @rails_root.join(path).to_s
    end

    # Raise or warn-and-skip depending on scheduler_missing_file_policy.
    #
    # @return [Array] empty job list when the policy tolerates a missing file
    def handle_missing_file(path)
      message = "Scheduler file not found at #{path}"
      raise SchedulerConfigError, message if @configuration.scheduler_missing_file_policy == :error

      @logger&.warn(message)
      []
    end

    # Render ERB, then parse YAML; the document root must be a mapping.
    def parse_yaml(path)
      rendered = render_yaml_erb(path)
      parsed = YAML.safe_load(rendered) || {}
      raise SchedulerConfigError, "Expected scheduler YAML root to be a mapping in #{path}" unless parsed.is_a?(Hash)

      stringify_keys(parsed)
    rescue Psych::Exception => e
      raise SchedulerConfigError, "Failed to parse scheduler YAML at #{path}: #{e.message}"
    end

    # Evaluate the file's ERB; SyntaxError is rescued explicitly because it
    # is not a StandardError.
    def render_yaml_erb(path)
      ERB.new(File.read(path), trim_mode: '-').result
    rescue StandardError, SyntaxError => e
      raise SchedulerConfigError, "Failed to evaluate scheduler ERB at #{path}: #{e.message}"
    end

    # Collect 'defaults.jobs' plus '<rails_env>.jobs'; both must be arrays
    # when present.
    def extract_jobs(payload)
      defaults = fetch_hash(payload, 'defaults')
      env_payload = fetch_hash(payload, @rails_env)
      default_jobs = defaults.fetch('jobs', [])
      env_jobs = env_payload.fetch('jobs', [])
      raise SchedulerConfigError, "Expected 'defaults.jobs' to be an array" unless default_jobs.is_a?(Array)
      raise SchedulerConfigError, "Expected '#{@rails_env}.jobs' to be an array" unless env_jobs.is_a?(Array)

      default_jobs + env_jobs
    end

    # Fetch a top-level section; a missing section is treated as empty, a
    # present non-mapping section is a configuration error.
    def fetch_hash(payload, key)
      # Guard with key? instead of fetch/rescue: exceptions are not control flow.
      return {} unless payload.key?(key)

      section = payload[key]
      raise SchedulerConfigError, "Expected '#{key}' section to be a mapping" unless section.is_a?(Hash)

      section
    end

    # Reject duplicate non-blank job keys across defaults + env jobs.
    def validate_unique_keys(jobs)
      keys = jobs.map do |job_payload|
        raise SchedulerConfigError, "Each jobs entry must be a mapping, got #{job_payload.class}" unless job_payload.is_a?(Hash)

        stringify_keys(job_payload)['key'].to_s.strip
      end
      duplicates = keys.tally.select { |key, occurrences| !key.empty? && occurrences > 1 }.keys
      return if duplicates.empty?

      raise SchedulerConfigError, "Duplicate job keys in scheduler file: #{duplicates.join(', ')}"
    end

    # Validate a raw job payload and normalize it into keyword arguments
    # for {#apply_job}.
    def normalize_job(job_payload)
      payload = stringify_keys(job_payload)
      key = payload.fetch('key', '').to_s.strip
      raise SchedulerConfigError, 'Job key cannot be blank' if key.empty?

      cron = extract_required_string(payload, field: 'cron', error_prefix: "Job cron cannot be blank for key '#{key}'")
      job_class_name = extract_required_string(
        payload, field: 'job_class', error_prefix: "Job class cannot be blank for key '#{key}'"
      )
      validate_cron(key:, cron:)
      options = extract_job_options(payload, key:)

      {
        key:,
        cron:,
        job_class_name:,
        **options
      }
    end

    # Fetch a required, non-blank string field from the payload.
    def extract_required_string(payload, field:, error_prefix:)
      value = payload.fetch(field, '').to_s.strip

      raise SchedulerConfigError, error_prefix if value.empty?

      value
    end

    # Ensure the cron expression is accepted by Kaal's parser.
    def validate_cron(key:, cron:)
      return if Kaal.valid?(cron)

      raise SchedulerConfigError, "Invalid cron expression '#{cron}' for key '#{key}'"
    end

    # Extract and validate the optional job fields (metadata, args, kwargs,
    # queue, enabled), applying defaults for absent values.
    def extract_job_options(payload, key:)
      metadata, args, kwargs, queue, enabled_value = payload.values_at('metadata', 'args', 'kwargs', 'queue', 'enabled')
      args ||= []
      kwargs ||= {}
      enabled = true
      if payload.key?('enabled')
        raise SchedulerConfigError, "enabled must be a boolean for key '#{key}'" unless enabled_value.is_a?(TrueClass) || enabled_value.is_a?(FalseClass)

        enabled = enabled_value
      end

      raise SchedulerConfigError, "metadata must be a mapping for key '#{key}'" if metadata && !metadata.is_a?(Hash)

      validate_job_option_types(key:, args:, kwargs:, queue:)

      validate_placeholders(args, key:)
      validate_placeholders(kwargs, key:)

      # deep_dup so later callers cannot mutate the parsed YAML structures.
      { queue: queue, args: args.deep_dup, kwargs: kwargs.deep_dup, enabled: enabled, metadata: metadata ? metadata.deep_dup : {} }
    end

    # Type-check args/kwargs/queue before any placeholder processing.
    def validate_job_option_types(key:, args:, kwargs:, queue:)
      raise SchedulerConfigError, "args must be an array for key '#{key}'" unless args.is_a?(Array)
      raise SchedulerConfigError, "kwargs must be a mapping for key '#{key}'" unless kwargs.is_a?(Hash)
      raise SchedulerConfigError, "queue must be a string for key '#{key}'" if queue && !queue.is_a?(String)
      return if kwargs.keys.all? { |kwargs_key| kwargs_key.is_a?(String) || kwargs_key.is_a?(Symbol) }

      raise SchedulerConfigError, "kwargs keys must be strings or symbols for key '#{key}'"
    end

    # Persist one job's definition and register its enqueue callback.
    #
    # @return [Hash, nil] a rollback context, or nil when the job was skipped
    #   by the conflict policy
    def apply_job(key:, cron:, job_class_name:, queue:, args:, kwargs:, enabled:, metadata:)
      # Capture prior state up front so a failure can restore it exactly.
      existing_definition = @definition_registry.find_definition(key)
      existing_registry_entry = @registry.find(key)
      return if skip_due_to_conflict?(key:, existing_definition:)

      callback = build_callback(
        key: key,
        job_class_name: job_class_name,
        queue: queue,
        args_template: args,
        kwargs_template: kwargs
      )
      normalized_metadata = stringify_keys(metadata.deep_dup)
      persisted_metadata = normalized_metadata.deep_merge(
        'execution' => {
          'target' => 'active_job',
          'job_class' => job_class_name,
          'queue' => queue,
          'args' => args,
          'kwargs' => kwargs
        }
      )

      @definition_registry.upsert_definition(
        key: key,
        cron: cron,
        enabled: enabled,
        source: 'file',
        metadata: persisted_metadata
      )

      begin
        @registry.upsert(key: key, cron: cron, enqueue: callback)
      rescue StandardError
        # The definition was persisted but registration failed: undo both.
        rollback_applied_job(key:, existing_definition:, existing_registry_entry:)
        raise
      end

      { key: key, existing_definition: existing_definition, existing_registry_entry: existing_registry_entry }
    end

    # Undo every applied job, newest first.
    def rollback_applied_jobs(applied_job_contexts = [])
      applied_job_contexts.reverse_each do |applied_job_context|
        rollback_applied_job(**applied_job_context)
      end
    end

    # Restore a single job's definition and registry entry to their
    # pre-application state. Rollback errors are logged, never raised.
    def rollback_applied_job(key:, existing_definition:, existing_registry_entry:)
      if existing_definition
        definition_attributes = existing_definition.slice(:key, :cron, :enabled, :source, :metadata)
        @definition_registry.upsert_definition(**definition_attributes)
      else
        @definition_registry.remove_definition(key)
      end

      @registry.remove(key) if @registry.registered?(key)

      return unless existing_registry_entry

      @registry.upsert(
        key: existing_registry_entry.key,
        cron: existing_registry_entry.cron,
        enqueue: existing_registry_entry.enqueue
      )
    rescue StandardError => e
      @logger&.error("Failed to rollback scheduler file application for #{key}: #{e.message}")
    end

    # Decide whether a file-sourced job must yield to an existing definition
    # from a different source, per scheduler_conflict_policy.
    #
    # @return [Boolean] true when the file job should be skipped
    def skip_due_to_conflict?(key:, existing_definition:)
      existing_source = existing_definition&.[](:source)
      return false unless existing_source && existing_source.to_s != 'file'

      policy = @configuration.scheduler_conflict_policy
      case policy
      when :error
        raise SchedulerConfigError, "Scheduler key conflict for '#{key}' with existing source '#{existing_source}'"
      when :code_wins
        @logger&.warn("Skipping scheduler file job '#{key}' because scheduler_conflict_policy is :code_wins")
        true
      when :file_wins
        false
      else
        raise SchedulerConfigError, "Unsupported scheduler_conflict_policy '#{policy}'"
      end
    end

    # Build the enqueue lambda: it resolves placeholders at fire time and
    # enqueues the ActiveJob (optionally on an explicit queue).
    def build_callback(key:, job_class_name:, queue:, args_template:, kwargs_template:)
      # Resolve the class eagerly so misconfiguration fails at load time.
      job_class = resolve_job_class(job_class_name:, key:)
      lambda do |fire_time:, idempotency_key:|
        context = {
          fire_time: fire_time,
          idempotency_key: idempotency_key,
          key: key
        }
        resolved_args = resolve_placeholders(args_template.deep_dup, context)
        raw_kwargs = resolve_placeholders(kwargs_template.deep_dup, context) || {}
        raise SchedulerConfigError, "kwargs for scheduler job '#{key}' must be a mapping, got #{raw_kwargs.class}" unless raw_kwargs.is_a?(Hash)

        # Idiomatic iteration replaces the previous manual while/index loop.
        raw_kwargs.each_key do |kwargs_key|
          next if kwargs_key.is_a?(String) || kwargs_key.is_a?(Symbol)

          raise SchedulerConfigError,
                "Invalid keyword argument key #{kwargs_key.inspect} (#{kwargs_key.class}) for scheduler job '#{key}'"
        end

        resolved_kwargs = raw_kwargs.transform_keys(&:to_sym)

        target = queue ? job_class.set(queue: queue) : job_class
        target.perform_later(*resolved_args, **resolved_kwargs)
      end
    end

    # Constantize the configured job class and require it to be an ActiveJob.
    def resolve_job_class(job_class_name:, key:)
      job_class = job_class_name.safe_constantize
      raise SchedulerConfigError, "Unknown job_class '#{job_class_name}' for key '#{key}'" unless job_class
      raise SchedulerConfigError, "job_class '#{job_class_name}' must inherit from ActiveJob::Base for key '#{key}'" unless job_class <= ActiveJob::Base

      job_class
    end
  end
end
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Kaal
  # Shared deep hash key transformation helpers for scheduler payloads.
  #
  # Walks arbitrarily nested hash/array structures and rewrites every hash
  # key with a caller-supplied transform, leaving all values untouched.
  module SchedulerHashTransform
    # Transform that converts any key to its String form.
    TO_STRING = :to_s.to_proc
    # Transform that converts any key to a Symbol via its String form.
    TO_SYMBOL = ->(key) { key.to_s.to_sym }

    private

    # Recursively convert all hash keys (at any depth) to Strings.
    def stringify_keys(object)
      deep_transform(object, key_transform: TO_STRING)
    end

    # Recursively convert all hash keys (at any depth) to Symbols.
    def symbolize_keys_deep(object)
      deep_transform(object, key_transform: TO_SYMBOL)
    end

    # Dispatch on the container type; scalars pass through unchanged.
    def deep_transform(object, key_transform:)
      if object.is_a?(Hash)
        deep_transform_hash(object, key_transform:)
      elsif object.is_a?(Array)
        deep_transform_array(object, key_transform:)
      else
        object
      end
    end

    # Build a new hash with transformed keys and recursively transformed values.
    def deep_transform_hash(object, key_transform:)
      transformed = {}
      object.each do |original_key, value|
        transformed[key_transform.call(original_key)] = deep_transform(value, key_transform:)
      end
      transformed
    end

    # Recurse into each element of the array.
    def deep_transform_array(object, key_transform:)
      object.map { |element| deep_transform(element, key_transform:) }
    end
  end
end
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Kaal
  # Placeholder parsing/resolution for scheduler args and kwargs.
  #
  # Host classes are expected to define PLACEHOLDER_PATTERN (a Regexp whose
  # first capture group is the token name) and to assign
  # @placeholder_resolvers (a Hash mapping token => callable that receives
  # the resolution context hash).
  module SchedulerPlaceholderSupport
    private

    # Recursively validate placeholder usage inside strings/arrays/hashes.
    # Non-collection, non-string values are ignored.
    #
    # @raise [SchedulerConfigError] for malformed placeholders, unknown
    #   tokens, or placeholders appearing in hash keys
    def validate_placeholders(input, key:)
      case input
      when String
        validate_placeholder_syntax(input, key:)
        input.scan(self.class::PLACEHOLDER_PATTERN).flatten.each do |token|
          next if @placeholder_resolvers.key?(token)

          raise SchedulerConfigError, "Unknown placeholder '{{#{token}}}' for key '#{key}'"
        end
      when Array
        input.each { |item| validate_placeholders(item, key:) }
      when Hash
        input.each_pair do |hash_key, child|
          validate_placeholder_key(hash_key, key:)
          validate_placeholders(child, key:)
        end
      end
    end

    # Recursively substitute placeholders throughout a template structure.
    # Hash keys are intentionally not transformed (see validate_placeholder_key).
    def resolve_placeholders(template, context)
      case template
      when String
        replace_placeholders(template, context)
      when Array
        template.map { |item| resolve_placeholders(item, context) }
      when Hash
        template.transform_values { |child| resolve_placeholders(child, context) }
      else
        template
      end
    end

    # Replace placeholder tokens inside a single string.
    #
    # A string that is exactly one placeholder returns the resolver's raw
    # value (preserving its type, e.g. an Integer); otherwise every
    # occurrence is interpolated into the string via #to_s.
    def replace_placeholders(text, context)
      # Fix: reuse the memoized anchored regexp rather than rebuilding a
      # fresh Regexp on every invocation (this method runs at each cron fire).
      if (match = text.match(placeholder_token_anchors))
        return @placeholder_resolvers.fetch(match[1]).call(context)
      end

      text.gsub(self.class::PLACEHOLDER_PATTERN) do
        token = Regexp.last_match(1)
        @placeholder_resolvers.fetch(token).call(context).to_s
      end
    end

    # Hash keys may not contain placeholders at all.
    #
    # @raise [SchedulerConfigError] when a string key contains a placeholder
    def validate_placeholder_key(hash_key, key:)
      return unless hash_key.is_a?(String)

      validate_placeholder_syntax(hash_key, key:)

      token = hash_key.scan(self.class::PLACEHOLDER_PATTERN).flatten.first
      return unless token

      raise SchedulerConfigError, "Placeholders are not supported in hash keys (got '{{#{token}}}' under '#{key}')"
    end

    # Reject `{{ ... }}` spans whose interior does not match the allowed
    # token syntax (e.g. '{{bad token}}').
    def validate_placeholder_syntax(input, key:)
      raw_placeholders = input.scan(/\{\{.*?\}\}/)
      raw_placeholders.each do |raw_placeholder|
        next if raw_placeholder.match?(placeholder_token_anchors)

        raise SchedulerConfigError, "Malformed placeholder '#{raw_placeholder}' for key '#{key}'"
      end
    end

    # The host's PLACEHOLDER_PATTERN anchored to the full string, memoized
    # per instance.
    def placeholder_token_anchors
      @placeholder_token_anchors ||= begin
        pattern = self.class::PLACEHOLDER_PATTERN
        Regexp.new("\\A#{pattern.source}\\z", pattern.options)
      end
    end
  end
end
|
data/lib/kaal/version.rb
ADDED