kaal 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/LICENSE +1 -1
- data/README.md +81 -286
- data/Rakefile +4 -2
- data/config/kaal.rb +15 -0
- data/config/scheduler.yml +12 -0
- data/exe/kaal +12 -0
- data/lib/kaal/backend/adapter.rb +0 -1
- data/lib/kaal/backend/dispatch_attempt_logger.rb +33 -0
- data/lib/kaal/backend/dispatch_logging.rb +36 -23
- data/lib/kaal/backend/dispatch_registry_accessor.rb +43 -0
- data/lib/kaal/backend/memory_adapter.rb +7 -5
- data/lib/kaal/backend/redis_adapter.rb +6 -6
- data/lib/kaal/cli.rb +230 -0
- data/lib/kaal/{configuration.rb → config/configuration.rb} +0 -1
- data/lib/{tasks/kaal_tasks.rake → kaal/config/scheduler_config_error.rb} +4 -4
- data/lib/kaal/config/scheduler_time_zone_resolver.rb +50 -0
- data/lib/kaal/config.rb +19 -0
- data/lib/kaal/{coordinator.rb → core/coordinator.rb} +42 -62
- data/lib/kaal/core/enabled_entry_enumerator.rb +51 -0
- data/lib/kaal/core/occurrence_finder.rb +38 -0
- data/lib/kaal/core.rb +18 -0
- data/lib/kaal/definition/memory_engine.rb +15 -17
- data/lib/kaal/definition/persistence_helpers.rb +31 -0
- data/lib/kaal/definition/redis_engine.rb +13 -5
- data/lib/kaal/definition/registry.rb +28 -1
- data/lib/kaal/definitions/registration_service.rb +62 -0
- data/lib/kaal/definitions/registry_accessor.rb +33 -0
- data/lib/kaal/dispatch/memory_engine.rb +3 -4
- data/lib/kaal/dispatch/redis_engine.rb +2 -3
- data/lib/kaal/dispatch/registry.rb +0 -1
- data/lib/kaal/register_conflict_support.rb +4 -0
- data/lib/kaal/registry.rb +0 -1
- data/lib/kaal/runtime/runtime_context.rb +41 -0
- data/lib/kaal/runtime/scheduler_boot_loader.rb +52 -0
- data/lib/kaal/runtime/signal_handler_chain.rb +42 -0
- data/lib/kaal/runtime/signal_handler_installer.rb +39 -0
- data/lib/kaal/runtime.rb +20 -0
- data/lib/kaal/scheduler_file/hash_transform.rb +22 -0
- data/lib/kaal/scheduler_file/helper_bundle.rb +28 -0
- data/lib/kaal/scheduler_file/job_applier.rb +242 -0
- data/lib/kaal/scheduler_file/job_normalizer.rb +90 -0
- data/lib/kaal/scheduler_file/loader.rb +152 -0
- data/lib/kaal/scheduler_file/payload_loader.rb +95 -0
- data/lib/kaal/{scheduler_placeholder_support.rb → scheduler_file/placeholder_support.rb} +4 -0
- data/lib/kaal/scheduler_file.rb +18 -0
- data/lib/kaal/support/hash_tools.rb +93 -0
- data/lib/kaal/{cron_humanizer.rb → utils/cron_humanizer.rb} +19 -1
- data/lib/kaal/{cron_utils.rb → utils/cron_utils.rb} +0 -1
- data/lib/kaal/{idempotency_key_generator.rb → utils/idempotency_key_generator.rb} +7 -2
- data/lib/kaal/utils.rb +18 -0
- data/lib/kaal/version.rb +1 -2
- data/lib/kaal.rb +77 -397
- metadata +64 -44
- data/app/models/kaal/cron_definition.rb +0 -71
- data/app/models/kaal/cron_dispatch.rb +0 -50
- data/app/models/kaal/cron_lock.rb +0 -38
- data/lib/generators/kaal/install/install_generator.rb +0 -67
- data/lib/generators/kaal/install/templates/create_kaal_definitions.rb.tt +0 -21
- data/lib/generators/kaal/install/templates/create_kaal_dispatches.rb.tt +0 -20
- data/lib/generators/kaal/install/templates/create_kaal_locks.rb.tt +0 -17
- data/lib/generators/kaal/install/templates/kaal.rb.tt +0 -31
- data/lib/generators/kaal/install/templates/scheduler.yml.tt +0 -22
- data/lib/kaal/backend/mysql_adapter.rb +0 -170
- data/lib/kaal/backend/postgres_adapter.rb +0 -134
- data/lib/kaal/backend/sqlite_adapter.rb +0 -116
- data/lib/kaal/definition/database_engine.rb +0 -45
- data/lib/kaal/dispatch/database_engine.rb +0 -94
- data/lib/kaal/railtie.rb +0 -183
- data/lib/kaal/rake_tasks.rb +0 -184
- data/lib/kaal/scheduler_config_error.rb +0 -6
- data/lib/kaal/scheduler_file_loader.rb +0 -316
- data/lib/kaal/scheduler_hash_transform.rb +0 -40
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Copyright Codevedas Inc. 2025-present
|
|
4
|
+
#
|
|
5
|
+
# This source code is licensed under the MIT license found in the
|
|
6
|
+
# LICENSE file in the root directory of this source tree.
|
|
7
|
+
require 'kaal/support/hash_tools'
|
|
8
|
+
|
|
9
|
+
module Kaal
  class SchedulerFileLoader
    # Applies normalized scheduler jobs to the definition registry and the
    # dispatch registry, rolling partially applied state back on failure.
    class JobApplier
      include Kaal::Support::HashTools

      # @param configuration        source of scheduler_conflict_policy
      # @param definition_registry  persistence for cron definitions
      # @param registry             in-process dispatch registry
      # @param logger               optional logger (may be nil)
      # @param helper_bundle        provides stringify_keys / resolve_placeholders
      def initialize(configuration:, definition_registry:, registry:, logger:, helper_bundle:)
        @configuration = configuration
        @definition_registry = definition_registry
        @registry = registry
        @logger = logger
        @helper_bundle = helper_bundle
      end

      # Applies one normalized job. Returns a rollback-context hash
      # ({key:, existing_definition:, existing_registry_entry:}), or nil when
      # the job was skipped because of a source conflict.
      def apply(job)
        key = job.fetch(:key)
        cron = job.fetch(:cron)
        job_class_name = job.fetch(:job_class_name)
        queue = job.fetch(:queue)
        existing_definition = @definition_registry.find_definition(key)
        existing_registry_entry = @registry.find(key)
        return nil if conflict?(key:, existing_definition:)

        # Resolve the job class once and reuse it for both the enqueue
        # callback and the persisted metadata (previously callback_for
        # re-resolved the class a second time).
        job_class = resolved_job_class(job_class_name:, key:, queue:)
        callback = build_callback(
          {
            key: key,
            queue: queue,
            args: job.fetch(:args),
            kwargs: job.fetch(:kwargs)
          },
          job_class
        )
        persisted_metadata = persisted_metadata(job, job_class)

        @definition_registry.upsert_definition(
          key: key,
          cron: cron,
          enabled: job.fetch(:enabled),
          source: 'file',
          metadata: persisted_metadata
        )

        begin
          @registry.upsert(key: key, cron: cron, enqueue: callback)
        rescue StandardError
          # Undo the definition upsert so a failed registry write leaves no trace.
          rollback_job(key:, existing_definition:, existing_registry_entry:)
          raise
        end

        { key: key, existing_definition: existing_definition, existing_registry_entry: existing_registry_entry }
      end

      # Rolls back previously applied jobs in reverse application order.
      def rollback_jobs(applied_job_contexts)
        applied_job_contexts.reverse_each do |applied_job_context|
          rollback_job(**applied_job_context)
        end
      end

      # Builds the enqueue callback for a job, resolving the job class first.
      def callback_for(key:, job_class_name:, queue:, args_template:, kwargs_template:)
        job_class = resolved_job_class(job_class_name:, key:, queue:)
        build_callback(
          {
            key: key,
            queue: queue,
            args: args_template,
            kwargs: kwargs_template
          },
          job_class
        )
      end

      # Public wrapper around the private class-resolution logic.
      def resolved_job_class(job_class_name:, key:, queue: nil)
        resolve_job_class(job_class_name:, key:, queue:)
      end

      # Decides whether a file-sourced job conflicts with a definition from
      # another source, honoring the configured scheduler_conflict_policy:
      # :error raises, :code_wins skips the file job (returns true),
      # :file_wins lets the file job proceed (returns false).
      def conflict?(key:, existing_definition:)
        existing_source = existing_definition&.[](:source)
        return false unless existing_source && existing_source.to_s != 'file'

        policy = @configuration.scheduler_conflict_policy
        case policy
        when :error
          raise SchedulerConfigError, "Scheduler key conflict for '#{key}' with existing source '#{existing_source}'"
        when :code_wins
          @logger&.warn("Skipping scheduler file job '#{key}' because scheduler_conflict_policy is :code_wins")
          true
        when :file_wins
          false
        else
          raise SchedulerConfigError, "Unsupported scheduler_conflict_policy '#{policy}'"
        end
      end

      # Restores the pre-application state for one job. Rollback errors are
      # logged rather than raised so the original failure propagates.
      def rollback_job(key:, existing_definition:, existing_registry_entry:)
        if existing_definition
          @definition_registry.upsert_definition(
            **Definition::AttributeHelpers.definition_attributes(existing_definition), enabled: existing_definition[:enabled]
          )
        else
          @definition_registry.remove_definition(key)
        end

        @registry.remove(key) if @registry.registered?(key)

        return unless existing_registry_entry

        @registry.upsert(
          key: existing_registry_entry.key,
          cron: existing_registry_entry.cron,
          enqueue: existing_registry_entry.enqueue
        )
      rescue StandardError => e
        @logger&.error("Failed to rollback scheduler file application for #{key}: #{e.message}")
      end

      private

      # Merges execution details into a deep-copied, string-keyed copy of the
      # job's metadata so the stored definition is self-describing.
      def persisted_metadata(job, job_class)
        metadata, job_class_name, queue, args, kwargs =
          job.values_at(:metadata, :job_class_name, :queue, :args, :kwargs)
        normalized_metadata = @helper_bundle.stringify_keys(deep_dup(metadata || {}))
        Kaal::Support::HashTools.deep_merge(
          normalized_metadata,
          'execution' => {
            'target' => active_job_dispatch?(job_class, queue) ? 'active_job' : 'ruby',
            'job_class' => job_class_name,
            'queue' => queue,
            'args' => args,
            'kwargs' => kwargs
          }
        )
      end

      # Returns a lambda that, at fire time, resolves placeholders in the
      # args/kwargs templates and dispatches the job.
      def build_callback(job, job_class)
        key = job.fetch(:key)
        queue = job.fetch(:queue)
        args_template = job.fetch(:args)
        kwargs_template = job.fetch(:kwargs)

        lambda do |fire_time:, idempotency_key:|
          context = {
            fire_time: fire_time,
            idempotency_key: idempotency_key,
            key: key
          }
          resolved_args = @helper_bundle.resolve_placeholders(deep_dup(args_template), context)
          raw_kwargs = @helper_bundle.resolve_placeholders(deep_dup(kwargs_template), context) || {}
          raise SchedulerConfigError, "kwargs for scheduler job '#{key}' must be a mapping, got #{raw_kwargs.class}" unless raw_kwargs.is_a?(Hash)

          validate_keyword_keys(raw_kwargs, key)

          resolved_kwargs = raw_kwargs.transform_keys(&:to_sym)
          dispatch_job(job_class, queue, resolved_args, resolved_kwargs)
        end
      end

      # Rejects keyword keys that cannot be symbolized (anything other than
      # String or Symbol). Raises on the first offending key.
      def validate_keyword_keys(raw_kwargs, key)
        raw_kwargs.each_key do |kwargs_key|
          next if kwargs_key.is_a?(String) || kwargs_key.is_a?(Symbol)

          raise SchedulerConfigError,
                "Invalid keyword argument key #{kwargs_key.inspect} (#{kwargs_key.class}) for scheduler job '#{key}'"
        end

        nil
      end

      # Resolves the configured class name to a constant and validates that
      # it exposes a usable dispatch interface.
      def resolve_job_class(job_class_name:, key:, queue: nil)
        normalized_job_class_name = job_class_name.to_s.strip
        raise SchedulerConfigError, "Job class cannot be blank for key '#{key}'" if normalized_job_class_name.empty?

        error_message = "Unknown job_class #{normalized_job_class_name.inspect} for key '#{key}'"
        job_class = begin
          Kaal::Support::HashTools.constantize(normalized_job_class_name)
        rescue NameError
          nil
        end

        return validate_dispatch_interface(job_class, key, queue) if job_class

        raise_unknown_job_class(error_message)
      end

      # Enqueues via ActiveJob-style .set/.perform_later when available,
      # otherwise invokes .perform synchronously.
      def dispatch_job(job_class, queue, args, kwargs)
        job_class_name = job_class.name

        if queue && !job_class.respond_to?(:set)
          raise SchedulerConfigError,
                "job_class '#{job_class_name}' must respond to .set to use queue #{queue.inspect}"
        end

        if queue
          job_class.set(queue: queue).perform_later(*args, **kwargs)
        elsif job_class.respond_to?(:perform_later)
          job_class.perform_later(*args, **kwargs)
        elsif job_class.respond_to?(:perform)
          job_class.perform(*args, **kwargs)
        else
          raise SchedulerConfigError,
                "job_class '#{job_class_name}' must respond to .perform, .perform_later, or .set(...).perform_later"
        end
      end

      def raise_unknown_job_class(error_message)
        raise SchedulerConfigError, error_message
      end

      # Validates that the resolved class supports the dispatch style implied
      # by the presence (or absence) of an explicit queue.
      def validate_dispatch_interface(job_class, key, queue)
        queue_present = !queue.nil?
        supports_set = job_class.respond_to?(:set)
        supports_perform_later = job_class.respond_to?(:perform_later)
        supports_perform = job_class.respond_to?(:perform)

        return job_class if queue_present && supports_set
        return job_class if !queue_present && supports_perform_later
        return job_class if !queue_present && supports_perform

        raise SchedulerConfigError,
              "job_class '#{job_class.name}' for key '#{key}' must respond to .perform, .perform_later, or .set(...).perform_later"
      end

      # True when the class will be dispatched through an ActiveJob-style
      # interface rather than a plain .perform call.
      def active_job_dispatch?(job_class, queue)
        (queue && job_class.respond_to?(:set)) || job_class.respond_to?(:perform_later)
      end
    end
  end
end
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Copyright Codevedas Inc. 2025-present
|
|
4
|
+
#
|
|
5
|
+
# This source code is licensed under the MIT license found in the
|
|
6
|
+
# LICENSE file in the root directory of this source tree.
|
|
7
|
+
require 'kaal/support/hash_tools'
|
|
8
|
+
|
|
9
|
+
module Kaal
  class SchedulerFileLoader
    # Normalizes raw scheduler job payloads into application-ready,
    # symbol-keyed hashes, validating required fields along the way.
    class JobNormalizer
      include Kaal::Support::HashTools

      # @param hash_transform       responds to #stringify_keys
      # @param placeholder_support  responds to #validate_placeholders
      # @param cron_validator       callable returning truthy for valid cron
      def initialize(hash_transform:, placeholder_support:, cron_validator:)
        @hash_transform = hash_transform
        @placeholder_support = placeholder_support
        @cron_validator = cron_validator
      end

      # Validates +job_payload+ and returns the normalized job hash.
      # Raises SchedulerConfigError when any required field is blank or invalid.
      def call(job_payload)
        payload = @hash_transform.stringify_keys(job_payload)
        key = payload.fetch('key', '').to_s.strip
        raise SchedulerConfigError, 'Job key cannot be blank' if key.empty?

        cron = required_string(payload, field: 'cron', error_prefix: "Job cron cannot be blank for key '#{key}'")
        job_class_name = required_string(payload, field: 'job_class', error_prefix: "Job class cannot be blank for key '#{key}'")
        validate_cron(key:, cron:)

        { key: key, cron: cron, job_class_name: job_class_name }.merge(extract_job_options(payload, key:))
      end

      private

      # Fetches and strips a string field; raises +error_prefix+ as the full
      # error message when the field is missing or blank.
      def required_string(payload, field:, error_prefix:)
        stripped = payload.fetch(field, '').to_s.strip
        raise SchedulerConfigError, error_prefix if stripped.empty?

        stripped
      end

      def validate_cron(key:, cron:)
        return if @cron_validator.call(cron)

        raise SchedulerConfigError, "Invalid cron expression '#{cron}' for key '#{key}'"
      end

      # Extracts optional job attributes, applying defaults (args: [],
      # kwargs: {}, enabled: true, metadata: {}) and type checks. All mutable
      # values are deep-copied so the payload can be reused safely.
      def extract_job_options(payload, key:)
        metadata, args, kwargs, queue, enabled_value = payload.values_at('metadata', 'args', 'kwargs', 'queue', 'enabled')
        args ||= []
        kwargs ||= {}
        enabled = true
        if payload.key?('enabled')
          raise SchedulerConfigError, "enabled must be a boolean for key '#{key}'" unless [true, false].include?(enabled_value)

          enabled = enabled_value
        end

        raise SchedulerConfigError, "metadata must be a mapping for key '#{key}'" if metadata && !metadata.is_a?(Hash)

        validate_job_option_types(key:, args:, kwargs:, queue:)
        @placeholder_support.validate_placeholders(args, key:)
        @placeholder_support.validate_placeholders(kwargs, key:)

        {
          queue: queue,
          args: deep_dup(args),
          kwargs: deep_dup(kwargs),
          enabled: enabled,
          metadata: metadata ? deep_dup(metadata) : {}
        }
      end

      # Type checks for the optional attributes; raises on the first violation.
      def validate_job_option_types(key:, args:, kwargs:, queue:)
        raise SchedulerConfigError, "args must be an array for key '#{key}'" unless args.is_a?(Array)
        raise SchedulerConfigError, "kwargs must be a mapping for key '#{key}'" unless kwargs.is_a?(Hash)
        raise SchedulerConfigError, "queue must be a string for key '#{key}'" if queue && !queue.is_a?(String)
        return if kwargs.keys.all? { |kwargs_key| kwargs_key.is_a?(String) || kwargs_key.is_a?(Symbol) }

        raise SchedulerConfigError, "kwargs keys must be strings or symbols for key '#{key}'"
      end
    end
  end
end
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Copyright Codevedas Inc. 2025-present
|
|
4
|
+
#
|
|
5
|
+
# This source code is licensed under the MIT license found in the
|
|
6
|
+
# LICENSE file in the root directory of this source tree.
|
|
7
|
+
require 'kaal/runtime/runtime_context'
|
|
8
|
+
require 'kaal/scheduler_file/hash_transform'
|
|
9
|
+
require 'kaal/scheduler_file/placeholder_support'
|
|
10
|
+
require 'kaal/support/hash_tools'
|
|
11
|
+
require_relative 'helper_bundle'
|
|
12
|
+
require_relative 'payload_loader'
|
|
13
|
+
require_relative 'job_normalizer'
|
|
14
|
+
require_relative 'job_applier'
|
|
15
|
+
|
|
16
|
+
module Kaal
  # Loads scheduler definitions from config/scheduler.yml and registers them.
  #
  # Delegates to three collaborators built lazily below: PayloadLoader
  # (YAML/ERB parsing), JobNormalizer (validation/normalization) and
  # JobApplier (registry writes plus rollback).
  class SchedulerFileLoader
    include SchedulerHashTransform
    include SchedulerPlaceholderSupport
    include Kaal::Support::HashTools

    PLACEHOLDER_PATTERN = /\{\{\s*([a-zA-Z0-9_.]+)\s*\}\}/
    ALLOWED_PLACEHOLDERS = {
      'fire_time.iso8601' => ->(ctx) { ctx.fetch(:fire_time).iso8601 },
      'fire_time.unix' => ->(ctx) { ctx.fetch(:fire_time).to_i },
      'idempotency_key' => ->(ctx) { ctx.fetch(:idempotency_key) },
      'key' => ->(ctx) { ctx.fetch(:key) }
    }.freeze

    def initialize(
      configuration:,
      definition_registry:,
      registry:,
      logger:,
      runtime_context: RuntimeContext.default
    )
      @configuration = configuration
      @definition_registry = definition_registry
      @registry = registry
      @logger = logger
      @runtime_context = runtime_context
      @placeholder_resolvers = ALLOWED_PLACEHOLDERS
    end

    # Loads, normalizes, and applies every job from the scheduler file.
    # Returns the list of applied jobs. On any error, rolls back the jobs
    # applied so far (in reverse order) and re-raises.
    def load
      rollback_contexts = []
      path, payload = payload_loader.load
      return handle_missing_file(path) unless payload

      raw_jobs = extract_jobs(payload)
      validate_unique_keys(raw_jobs)
      applied = []
      raw_jobs.map { |raw_job| normalize_job(raw_job) }.each do |job|
        context = apply_job(job)
        next unless context

        applied << job
        rollback_contexts << context
      end

      applied
    rescue StandardError
      rollback_applied_jobs(rollback_contexts)
      raise
    end

    private

    def handle_missing_file(path)
      payload_loader.handle_missing_file(path)
    end

    def extract_jobs(payload)
      payload_loader.extract_jobs(payload)
    end

    def validate_unique_keys(jobs)
      payload_loader.validate_unique_keys(jobs)
    end

    def normalize_job(job_payload)
      job_normalizer.call(job_payload)
    end

    # NOTE(review): reaches into JobNormalizer's private API via send;
    # kept as-is for compatibility with existing internal callers.
    def extract_job_options(payload, key:)
      job_normalizer.send(:extract_job_options, payload, key:)
    end

    def apply_job(job)
      job_applier.apply(job)
    end

    def rollback_applied_jobs(applied_job_contexts = [])
      job_applier.rollback_jobs(applied_job_contexts)
    end

    def rollback_applied_job(key:, existing_definition:, existing_registry_entry:)
      job_applier.rollback_job(key:, existing_definition:, existing_registry_entry:)
    end

    def skip_due_to_conflict?(key:, existing_definition:)
      job_applier.conflict?(key:, existing_definition:)
    end

    def build_callback(key:, job_class_name:, queue:, args_template:, kwargs_template:)
      job_applier.callback_for(
        key: key,
        job_class_name: job_class_name,
        queue: queue,
        args_template: args_template,
        kwargs_template: kwargs_template
      )
    end

    def resolve_job_class(job_class_name:, key:, queue: nil)
      job_applier.resolved_job_class(job_class_name:, key:, queue:)
    end

    # --- lazily built collaborators -----------------------------------

    def payload_loader
      @payload_loader ||= PayloadLoader.new(
        configuration: @configuration,
        runtime_context: @runtime_context,
        logger: @logger,
        hash_transform: helper_bundle
      )
    end

    def job_normalizer
      @job_normalizer ||= JobNormalizer.new(
        hash_transform: helper_bundle,
        placeholder_support: helper_bundle,
        cron_validator: ->(cron) { Kaal.valid?(cron) }
      )
    end

    def job_applier
      @job_applier ||= JobApplier.new(
        configuration: @configuration,
        definition_registry: @definition_registry,
        registry: @registry,
        logger: @logger,
        helper_bundle: helper_bundle
      )
    end

    def helper_bundle
      @helper_bundle ||= HelperBundle.new(loader: self)
    end
  end
end
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Copyright Codevedas Inc. 2025-present
|
|
4
|
+
#
|
|
5
|
+
# This source code is licensed under the MIT license found in the
|
|
6
|
+
# LICENSE file in the root directory of this source tree.
|
|
7
|
+
require 'erb'
|
|
8
|
+
require 'yaml'
|
|
9
|
+
|
|
10
|
+
module Kaal
  class SchedulerFileLoader
    # Loads and validates scheduler YAML payloads from disk.
    class PayloadLoader
      # @param configuration    source of scheduler_config_path and the
      #                         scheduler_missing_file_policy
      # @param runtime_context  responds to #environment_name, #resolve_path
      # @param logger           optional logger (may be nil)
      # @param hash_transform   responds to #stringify_keys
      def initialize(configuration:, runtime_context:, logger:, hash_transform:)
        @configuration = configuration
        @runtime_context = runtime_context
        @logger = logger
        @hash_transform = hash_transform
      end

      # Returns [path, payload]; payload is nil when the file does not exist.
      def load
        path = scheduler_file_path
        payload = File.exist?(path) ? parse_yaml(path) : nil
        [path, payload]
      end

      # Either raises or warns about a missing scheduler file, depending on
      # the configured scheduler_missing_file_policy. Returns [] when warning.
      def handle_missing_file(path)
        message = "Scheduler file not found at #{path}"
        raise SchedulerConfigError, message if @configuration.scheduler_missing_file_policy == :error

        @logger&.warn(message)
        []
      end

      # Combines 'defaults.jobs' with the current environment's jobs list.
      def extract_jobs(payload)
        environment_name = @runtime_context.environment_name
        default_jobs = fetch_hash(payload, 'defaults').fetch('jobs', [])
        env_jobs = fetch_hash(payload, environment_name).fetch('jobs', [])
        raise SchedulerConfigError, "Expected 'defaults.jobs' to be an array" unless default_jobs.is_a?(Array)
        raise SchedulerConfigError, "Expected '#{environment_name}.jobs' to be an array" unless env_jobs.is_a?(Array)

        default_jobs + env_jobs
      end

      # Raises when two job entries share the same non-blank key.
      def validate_unique_keys(jobs)
        keys = jobs.map do |job_payload|
          raise SchedulerConfigError, "Each jobs entry must be a mapping, got #{job_payload.class}" unless job_payload.is_a?(Hash)

          @hash_transform.stringify_keys(job_payload)['key'].to_s.strip
        end
        duplicates = keys.tally.filter_map { |key, count| key if !key.empty? && count > 1 }
        return if duplicates.empty?

        raise SchedulerConfigError, "Duplicate job keys in scheduler file: #{duplicates.join(', ')}"
      end

      private

      def scheduler_file_path
        configured_path = @configuration.scheduler_config_path.to_s.strip
        raise SchedulerConfigError, 'scheduler_config_path cannot be blank' if configured_path.empty?

        @runtime_context.resolve_path(configured_path)
      end

      # Parses the ERB-rendered YAML, requiring a mapping at the root, and
      # normalizes all keys to strings.
      def parse_yaml(path)
        parsed = YAML.safe_load(render_yaml_erb(path)) || {}
        raise SchedulerConfigError, "Expected scheduler YAML root to be a mapping in #{path}" unless parsed.is_a?(Hash)

        @hash_transform.stringify_keys(parsed)
      rescue Psych::Exception => e
        raise SchedulerConfigError, "Failed to parse scheduler YAML at #{path}: #{e.message}"
      end

      # Renders the file through ERB; SyntaxError is rescued explicitly
      # because it is not a StandardError.
      def render_yaml_erb(path)
        ERB.new(File.read(path), trim_mode: '-').result
      rescue StandardError, SyntaxError => e
        raise SchedulerConfigError, "Failed to evaluate scheduler ERB at #{path}: #{e.message}"
      end

      # Fetches a top-level section, returning {} when absent and raising
      # when the section exists but is not a mapping.
      def fetch_hash(payload, key)
        section = payload.fetch(key)
        raise SchedulerConfigError, "Expected '#{key}' section to be a mapping" unless section.is_a?(Hash)

        section
      rescue KeyError
        {}
      end
    end
  end
end
|
|
@@ -1,5 +1,9 @@
|
|
|
1
1
|
# frozen_string_literal: true
|
|
2
2
|
|
|
3
|
+
# Copyright Codevedas Inc. 2025-present
|
|
4
|
+
#
|
|
5
|
+
# This source code is licensed under the MIT license found in the
|
|
6
|
+
# LICENSE file in the root directory of this source tree.
|
|
3
7
|
module Kaal
|
|
4
8
|
# Placeholder parsing/resolution for scheduler args and kwargs.
|
|
5
9
|
module SchedulerPlaceholderSupport
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Copyright Codevedas Inc. 2025-present
|
|
4
|
+
#
|
|
5
|
+
# This source code is licensed under the MIT license found in the
|
|
6
|
+
# LICENSE file in the root directory of this source tree.
|
|
7
|
+
require 'kaal/scheduler_file/loader'
|
|
8
|
+
require 'kaal/scheduler_file/hash_transform'
|
|
9
|
+
require 'kaal/scheduler_file/placeholder_support'
|
|
10
|
+
|
|
11
|
+
module Kaal
  # Scheduler file loading and payload helpers.
  #
  # Namespaced constants pointing at the top-level implementations, so
  # callers can reach them as Kaal::SchedulerFile::Loader etc.
  module SchedulerFile
    Loader             = ::Kaal::SchedulerFileLoader
    HashTransform      = ::Kaal::SchedulerHashTransform
    PlaceholderSupport = ::Kaal::SchedulerPlaceholderSupport
  end
end
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Copyright Codevedas Inc. 2025-present
|
|
4
|
+
#
|
|
5
|
+
# This source code is licensed under the MIT license found in the
|
|
6
|
+
# LICENSE file in the root directory of this source tree.
|
|
7
|
+
module Kaal
|
|
8
|
+
module Support
|
|
9
|
+
# Small deep-copy and key-normalization helpers used across config and scheduler loading.
|
|
10
|
+
module HashTools
|
|
11
|
+
module_function
|
|
12
|
+
|
|
13
|
+
def deep_dup(value)
|
|
14
|
+
case value
|
|
15
|
+
when Hash
|
|
16
|
+
value.each_with_object({}) do |(key, child), memo|
|
|
17
|
+
duplicated_pair = [deep_dup(key), deep_dup(child)]
|
|
18
|
+
memo[duplicated_pair[0]] = duplicated_pair[1]
|
|
19
|
+
end
|
|
20
|
+
when Array
|
|
21
|
+
value.map { |child| duplicate_child(child) }
|
|
22
|
+
else
|
|
23
|
+
duplicable?(value) ? value.dup : value
|
|
24
|
+
end
|
|
25
|
+
end
|
|
26
|
+
|
|
27
|
+
def stringify_keys(value)
|
|
28
|
+
transform_keys(value, &:to_s)
|
|
29
|
+
end
|
|
30
|
+
|
|
31
|
+
def symbolize_keys(value)
|
|
32
|
+
transform_keys(value) { |key| key.to_s.to_sym }
|
|
33
|
+
end
|
|
34
|
+
|
|
35
|
+
def deep_merge(left, right)
|
|
36
|
+
left.merge(right) do |_key, left_value, right_value|
|
|
37
|
+
if left_value.is_a?(Hash) && right_value.is_a?(Hash)
|
|
38
|
+
deep_merge(left_value, right_value)
|
|
39
|
+
else
|
|
40
|
+
deep_dup(right_value)
|
|
41
|
+
end
|
|
42
|
+
end
|
|
43
|
+
end
|
|
44
|
+
|
|
45
|
+
def constantize(name)
|
|
46
|
+
name.to_s.split('::').reject(&:empty?).reduce(Object) { |scope, part| scope.const_get(part) }
|
|
47
|
+
end
|
|
48
|
+
|
|
49
|
+
def duplicable?(value)
|
|
50
|
+
!value.is_a?(NilClass) &&
|
|
51
|
+
!value.is_a?(FalseClass) &&
|
|
52
|
+
!value.is_a?(TrueClass) &&
|
|
53
|
+
!value.is_a?(Symbol) &&
|
|
54
|
+
!value.is_a?(Numeric) &&
|
|
55
|
+
!value.is_a?(Method) &&
|
|
56
|
+
!value.is_a?(Proc)
|
|
57
|
+
end
|
|
58
|
+
|
|
59
|
+
def transform_keys(value, &)
|
|
60
|
+
case value
|
|
61
|
+
when Hash
|
|
62
|
+
transform_hash_keys(value, &)
|
|
63
|
+
when Array
|
|
64
|
+
transform_array_keys(value, &)
|
|
65
|
+
else
|
|
66
|
+
value
|
|
67
|
+
end
|
|
68
|
+
end
|
|
69
|
+
|
|
70
|
+
def duplicate_child(child)
|
|
71
|
+
deep_dup(child)
|
|
72
|
+
end
|
|
73
|
+
|
|
74
|
+
def transform_child_keys(child, &)
|
|
75
|
+
transform_keys(child, &)
|
|
76
|
+
end
|
|
77
|
+
|
|
78
|
+
def transform_hash_keys(value, &)
|
|
79
|
+
value.each_with_object({}) do |(key, child), memo|
|
|
80
|
+
transformed_pair = [yield(key), transform_child_keys(child, &)]
|
|
81
|
+
memo[transformed_pair[0]] = transformed_pair[1]
|
|
82
|
+
end
|
|
83
|
+
end
|
|
84
|
+
|
|
85
|
+
def transform_array_keys(value, &)
|
|
86
|
+
value.map { |child| transform_child_keys(child, &) }
|
|
87
|
+
end
|
|
88
|
+
|
|
89
|
+
private_class_method :duplicate_child, :transform_child_keys, :transform_hash_keys, :transform_array_keys
|
|
90
|
+
private_class_method :transform_keys
|
|
91
|
+
end
|
|
92
|
+
end
|
|
93
|
+
end
|