spurline-deploy 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/lib/spurline/adapters/base.rb +17 -0
- data/lib/spurline/adapters/claude.rb +208 -0
- data/lib/spurline/adapters/open_ai.rb +213 -0
- data/lib/spurline/adapters/registry.rb +33 -0
- data/lib/spurline/adapters/scheduler/base.rb +15 -0
- data/lib/spurline/adapters/scheduler/sync.rb +15 -0
- data/lib/spurline/adapters/stub_adapter.rb +54 -0
- data/lib/spurline/agent.rb +433 -0
- data/lib/spurline/audit/log.rb +156 -0
- data/lib/spurline/audit/secret_filter.rb +121 -0
- data/lib/spurline/base.rb +130 -0
- data/lib/spurline/cartographer/analyzer.rb +71 -0
- data/lib/spurline/cartographer/analyzers/ci_config.rb +171 -0
- data/lib/spurline/cartographer/analyzers/dotfiles.rb +134 -0
- data/lib/spurline/cartographer/analyzers/entry_points.rb +145 -0
- data/lib/spurline/cartographer/analyzers/file_signatures.rb +55 -0
- data/lib/spurline/cartographer/analyzers/manifests.rb +217 -0
- data/lib/spurline/cartographer/analyzers/security_scan.rb +223 -0
- data/lib/spurline/cartographer/repo_profile.rb +140 -0
- data/lib/spurline/cartographer/runner.rb +88 -0
- data/lib/spurline/cartographer.rb +6 -0
- data/lib/spurline/channels/base.rb +41 -0
- data/lib/spurline/channels/event.rb +136 -0
- data/lib/spurline/channels/github.rb +205 -0
- data/lib/spurline/channels/router.rb +103 -0
- data/lib/spurline/cli/check.rb +88 -0
- data/lib/spurline/cli/checks/adapter_resolution.rb +81 -0
- data/lib/spurline/cli/checks/agent_loadability.rb +41 -0
- data/lib/spurline/cli/checks/base.rb +35 -0
- data/lib/spurline/cli/checks/credentials.rb +43 -0
- data/lib/spurline/cli/checks/permissions.rb +22 -0
- data/lib/spurline/cli/checks/project_structure.rb +48 -0
- data/lib/spurline/cli/checks/session_store.rb +97 -0
- data/lib/spurline/cli/console.rb +73 -0
- data/lib/spurline/cli/credentials.rb +181 -0
- data/lib/spurline/cli/generators/agent.rb +123 -0
- data/lib/spurline/cli/generators/migration.rb +62 -0
- data/lib/spurline/cli/generators/project.rb +331 -0
- data/lib/spurline/cli/generators/tool.rb +98 -0
- data/lib/spurline/cli/router.rb +121 -0
- data/lib/spurline/configuration.rb +23 -0
- data/lib/spurline/dsl/guardrails.rb +108 -0
- data/lib/spurline/dsl/hooks.rb +51 -0
- data/lib/spurline/dsl/memory.rb +39 -0
- data/lib/spurline/dsl/model.rb +23 -0
- data/lib/spurline/dsl/persona.rb +74 -0
- data/lib/spurline/dsl/suspend_until.rb +53 -0
- data/lib/spurline/dsl/tools.rb +176 -0
- data/lib/spurline/errors.rb +109 -0
- data/lib/spurline/lifecycle/deterministic_runner.rb +207 -0
- data/lib/spurline/lifecycle/runner.rb +456 -0
- data/lib/spurline/lifecycle/states.rb +47 -0
- data/lib/spurline/lifecycle/suspension_boundary.rb +82 -0
- data/lib/spurline/memory/context_assembler.rb +100 -0
- data/lib/spurline/memory/embedder/base.rb +17 -0
- data/lib/spurline/memory/embedder/open_ai.rb +70 -0
- data/lib/spurline/memory/episode.rb +56 -0
- data/lib/spurline/memory/episodic_store.rb +147 -0
- data/lib/spurline/memory/long_term/base.rb +22 -0
- data/lib/spurline/memory/long_term/postgres.rb +106 -0
- data/lib/spurline/memory/manager.rb +147 -0
- data/lib/spurline/memory/short_term.rb +57 -0
- data/lib/spurline/orchestration/agent_spawner.rb +151 -0
- data/lib/spurline/orchestration/judge.rb +109 -0
- data/lib/spurline/orchestration/ledger/store/base.rb +28 -0
- data/lib/spurline/orchestration/ledger/store/memory.rb +50 -0
- data/lib/spurline/orchestration/ledger.rb +339 -0
- data/lib/spurline/orchestration/merge_queue.rb +133 -0
- data/lib/spurline/orchestration/permission_intersection.rb +151 -0
- data/lib/spurline/orchestration/task_envelope.rb +201 -0
- data/lib/spurline/persona/base.rb +42 -0
- data/lib/spurline/persona/registry.rb +42 -0
- data/lib/spurline/secrets/resolver.rb +65 -0
- data/lib/spurline/secrets/vault.rb +42 -0
- data/lib/spurline/security/content.rb +76 -0
- data/lib/spurline/security/context_pipeline.rb +58 -0
- data/lib/spurline/security/gates/base.rb +36 -0
- data/lib/spurline/security/gates/operator_config.rb +22 -0
- data/lib/spurline/security/gates/system_prompt.rb +23 -0
- data/lib/spurline/security/gates/tool_result.rb +23 -0
- data/lib/spurline/security/gates/user_input.rb +22 -0
- data/lib/spurline/security/injection_scanner.rb +109 -0
- data/lib/spurline/security/pii_filter.rb +104 -0
- data/lib/spurline/session/resumption.rb +36 -0
- data/lib/spurline/session/serializer.rb +169 -0
- data/lib/spurline/session/session.rb +154 -0
- data/lib/spurline/session/store/base.rb +27 -0
- data/lib/spurline/session/store/memory.rb +45 -0
- data/lib/spurline/session/store/postgres.rb +123 -0
- data/lib/spurline/session/store/sqlite.rb +139 -0
- data/lib/spurline/session/suspension.rb +93 -0
- data/lib/spurline/session/turn.rb +98 -0
- data/lib/spurline/spur.rb +213 -0
- data/lib/spurline/streaming/buffer.rb +77 -0
- data/lib/spurline/streaming/chunk.rb +62 -0
- data/lib/spurline/streaming/stream_enumerator.rb +29 -0
- data/lib/spurline/testing.rb +245 -0
- data/lib/spurline/toolkit.rb +110 -0
- data/lib/spurline/tools/base.rb +209 -0
- data/lib/spurline/tools/idempotency.rb +220 -0
- data/lib/spurline/tools/permissions.rb +44 -0
- data/lib/spurline/tools/registry.rb +43 -0
- data/lib/spurline/tools/runner.rb +255 -0
- data/lib/spurline/tools/scope.rb +309 -0
- data/lib/spurline/tools/toolkit_registry.rb +63 -0
- data/lib/spurline/version.rb +5 -0
- data/lib/spurline.rb +56 -0
- metadata +161 -0
|
# frozen_string_literal: true

require "securerandom"
require "time"

module Spurline
  module Orchestration
    # Workflow state machine for planner/worker/judge orchestration.
    #
    # Tracks the overall ledger state (STATES / VALID_TRANSITIONS), a set of
    # tasks each with its own lifecycle (TASK_STATES), and a dependency graph
    # between tasks. Every mutation ends with #persist!, so an optional store
    # (anything responding to +save_ledger+) can checkpoint the ledger after
    # each change. #to_h / .from_h provide a serialization round-trip.
    class Ledger
      # Overall workflow states.
      STATES = %i[planning executing merging complete error].freeze

      # Legal state transitions; :complete and :error are terminal
      # (note :merging may fall back to :executing for rework).
      VALID_TRANSITIONS = {
        planning: [:executing, :error],
        executing: [:merging, :error],
        merging: [:complete, :executing, :error],
        complete: [],
        error: [],
      }.freeze

      # Per-task lifecycle states.
      TASK_STATES = %i[pending assigned running complete failed].freeze

      attr_reader :id, :state, :plan, :tasks, :dependency_graph,
                  :merged_output, :metadata, :created_at

      # @param id [#to_s] ledger identifier (random UUID by default)
      # @param store [#save_ledger, nil] optional persistence backend
      def initialize(id: SecureRandom.uuid, store: nil)
        @id = id.to_s
        @state = :planning
        @plan = []
        @tasks = {}
        @dependency_graph = {}
        @merged_output = {}
        @metadata = {}
        @created_at = Time.now.utc
        @store = store
      end

      # Registers a new task; only legal while the ledger is :planning.
      # Accepts a TaskEnvelope or a Hash (converted via TaskEnvelope.from_h).
      #
      # @param envelope [TaskEnvelope]
      # @return [TaskEnvelope]
      # @raise [Spurline::LedgerError] on duplicate id or wrong ledger state
      def add_task(envelope)
        assert_state!(:planning, "tasks can only be added during planning")

        normalized = normalize_envelope(envelope)
        task_id = normalized.task_id
        raise Spurline::LedgerError, "task already exists: #{task_id}" if @tasks.key?(task_id)

        @tasks[task_id] = {
          envelope: normalized,
          state: :pending,
          worker_session_id: nil,
          output: nil,
          error: nil,
        }
        @dependency_graph[task_id] = []
        @plan << task_id
        persist!
        normalized
      end

      # Records that +task_id+ must wait for +depends_on+ to complete.
      # Both tasks must already exist; duplicate edges are ignored.
      # NOTE(review): only direct self-dependency is rejected — multi-step
      # cycles are not detected here; TODO confirm callers guard against them.
      def add_dependency(task_id, depends_on:)
        task_id = task_id.to_s
        depends_on = depends_on.to_s

        fetch_task!(task_id)
        fetch_task!(depends_on)

        if task_id == depends_on
          raise Spurline::LedgerError, "task cannot depend on itself: #{task_id}"
        end

        deps = (@dependency_graph[task_id] ||= [])
        deps << depends_on unless deps.include?(depends_on)
        persist!
        deps
      end

      # Moves a :pending task to :assigned and binds it to a worker session.
      # @raise [Spurline::LedgerError] if the session id is blank or the task
      #   is not :pending
      def assign_task(task_id, worker_session_id:)
        task = fetch_task!(task_id)
        ensure_task_state!(task_id, expected: :pending)

        if worker_session_id.to_s.strip.empty?
          raise Spurline::LedgerError, "worker_session_id is required"
        end

        task[:state] = :assigned
        task[:worker_session_id] = worker_session_id.to_s
        task[:error] = nil
        persist!
        task
      end

      # Moves an :assigned task to :running.
      def start_task(task_id)
        task = fetch_task!(task_id)
        ensure_task_state!(task_id, expected: :assigned)

        task[:state] = :running
        persist!
        task
      end

      # Marks a task :complete and stores a deep copy of its output.
      # Accepted from :running or directly from :assigned.
      def complete_task(task_id, output:)
        task = fetch_task!(task_id)
        ensure_task_state_in!(task_id, expected: %i[running assigned])

        task[:state] = :complete
        task[:output] = deep_copy(output)
        task[:error] = nil
        persist!
        task
      end

      # Marks a task :failed with a stringified error. Accepted from
      # :running or :assigned, mirroring #complete_task.
      def fail_task(task_id, error:)
        task = fetch_task!(task_id)
        ensure_task_state_in!(task_id, expected: %i[running assigned])

        task[:state] = :failed
        task[:error] = error.to_s
        persist!
        task
      end

      # @return [Symbol] current TASK_STATES entry for +task_id+
      def task_status(task_id)
        fetch_task!(task_id)[:state]
      end

      # True when every registered task is :complete (vacuously true when
      # no tasks exist).
      def all_tasks_complete?
        @tasks.values.all? { |task| task[:state] == :complete }
      end

      # @return [Hash{String=>Hash}] snapshots of tasks in state :complete
      def completed_tasks
        select_tasks_by_state(:complete)
      end

      # @return [Hash{String=>Hash}] snapshots of tasks in state :pending
      def pending_tasks
        select_tasks_by_state(:pending)
      end

      # pending tasks whose dependencies are all complete
      def unblocked_tasks
        pending_tasks.select do |task_id, _task|
          dependencies = @dependency_graph[task_id] || []
          dependencies.all? { |dep_id| task_status(dep_id) == :complete }
        end
      end

      # Advances the ledger state, validating against STATES and
      # VALID_TRANSITIONS.
      # @return [Symbol] the new state
      # @raise [Spurline::LedgerError] on an unknown or illegal transition
      def transition_to!(new_state)
        target = new_state.to_sym

        unless STATES.include?(target)
          raise Spurline::LedgerError, "invalid ledger state: #{new_state.inspect}"
        end

        allowed = VALID_TRANSITIONS.fetch(@state)
        unless allowed.include?(target)
          raise Spurline::LedgerError, "invalid transition #{@state} -> #{target}"
        end

        @state = target
        persist!
        @state
      end

      # Serializes the full ledger (deep-copied) for storage; inverse of
      # .from_h. Envelopes are serialized via TaskEnvelope#to_h.
      def to_h
        {
          id: id,
          state: state,
          plan: deep_copy(plan),
          tasks: serialized_tasks,
          dependency_graph: deep_copy(dependency_graph),
          merged_output: deep_copy(merged_output),
          metadata: deep_copy(metadata),
          created_at: created_at.utc.iso8601,
        }
      end

      # Rebuilds a Ledger from a #to_h-shaped hash (symbol or string keys).
      # Bypasses the normal state machine by writing instance variables
      # directly, so persisted terminal states can be restored verbatim.
      # @raise [Spurline::LedgerError] on invalid ledger/task states
      # @raise [KeyError] when :id is missing
      def self.from_h(data, store: nil)
        hash = data || {}
        ledger = new(id: fetch_key(hash, :id, required: true), store: store)

        state = (fetch_key(hash, :state) || :planning).to_sym
        unless STATES.include?(state)
          raise Spurline::LedgerError, "invalid ledger state: #{state.inspect}"
        end

        plan = Array(fetch_key(hash, :plan) || []).map(&:to_s)
        tasks = deserialize_tasks(fetch_key(hash, :tasks) || {})
        dependency_graph = deserialize_dependency_graph(fetch_key(hash, :dependency_graph) || {})

        ledger.instance_variable_set(:@state, state)
        ledger.instance_variable_set(:@plan, plan)
        ledger.instance_variable_set(:@tasks, tasks)
        ledger.instance_variable_set(:@dependency_graph, dependency_graph)
        ledger.instance_variable_set(:@merged_output, ledger.send(:deep_copy, fetch_key(hash, :merged_output) || {}))
        ledger.instance_variable_set(:@metadata, ledger.send(:deep_copy, fetch_key(hash, :metadata) || {}))
        ledger.instance_variable_set(:@created_at, parse_time(fetch_key(hash, :created_at)))

        ledger
      end

      private

      # Checkpoint hook: no-op when no store was supplied.
      def persist!
        @store&.save_ledger(self)
      end

      # Coerces the #add_task argument into a TaskEnvelope.
      def normalize_envelope(envelope)
        return envelope if envelope.is_a?(TaskEnvelope)

        if envelope.is_a?(Hash)
          return TaskEnvelope.from_h(envelope)
        end

        raise Spurline::LedgerError, "envelope must be a TaskEnvelope or Hash"
      end

      # Looks up a task record by id (stringified), raising on miss.
      def fetch_task!(task_id)
        id = task_id.to_s
        @tasks.fetch(id) do
          raise Spurline::LedgerError, "unknown task: #{id}"
        end
      end

      # Guard: task must be in exactly +expected+ state.
      def ensure_task_state!(task_id, expected:)
        actual = task_status(task_id)
        return if actual == expected

        raise Spurline::LedgerError, "task #{task_id} must be #{expected}, got #{actual}"
      end

      # Guard: task must be in one of the +expected+ states.
      def ensure_task_state_in!(task_id, expected:)
        actual = task_status(task_id)
        return if expected.include?(actual)

        raise Spurline::LedgerError, "task #{task_id} must be one of #{expected.inspect}, got #{actual}"
      end

      # Guard: ledger must be in +expected+ state.
      def assert_state!(expected, message)
        return if state == expected

        raise Spurline::LedgerError, message
      end

      # Builds {task_id => snapshot} for tasks in the given state.
      def select_tasks_by_state(target)
        @tasks.each_with_object({}) do |(task_id, task), selected|
          next unless task[:state] == target

          selected[task_id] = snapshot_task(task)
        end
      end

      # Read-only view of a task record; output is deep-copied so callers
      # cannot mutate ledger state through it (envelope is shared as-is).
      def snapshot_task(task)
        {
          envelope: task[:envelope],
          state: task[:state],
          worker_session_id: task[:worker_session_id],
          output: deep_copy(task[:output]),
          error: task[:error],
        }
      end

      # Like select_tasks_by_state but for all tasks, with envelopes
      # converted to hashes for persistence.
      def serialized_tasks
        @tasks.each_with_object({}) do |(task_id, task), serialized|
          serialized[task_id] = {
            envelope: task[:envelope].to_h,
            state: task[:state],
            worker_session_id: task[:worker_session_id],
            output: deep_copy(task[:output]),
            error: task[:error],
          }
        end
      end

      # Recursive structural copy of nested Hash/Array values; scalars
      # (including keys) are shared, not duplicated.
      def deep_copy(value)
        case value
        when Hash
          value.each_with_object({}) do |(key, item), copy|
            copy[key] = deep_copy(item)
          end
        when Array
          value.map { |item| deep_copy(item) }
        else
          value
        end
      end

      # Private class-level helpers used only by .from_h.
      class << self
        private

        # Accepts nil (now), Time-like objects, or ISO8601-ish strings.
        def parse_time(value)
          return Time.now.utc if value.nil?
          return value.utc if value.respond_to?(:utc)

          Time.parse(value.to_s).utc
        end

        # Rebuilds the task table from serialized data, validating each
        # task state and reviving envelopes via TaskEnvelope.from_h.
        def deserialize_tasks(raw_tasks)
          (raw_tasks || {}).each_with_object({}) do |(task_id, task_data), deserialized|
            task_hash = task_data || {}
            envelope_data = fetch_key(task_hash, :envelope, required: true) do
              raise Spurline::LedgerError, "task #{task_id} missing envelope"
            end

            envelope = envelope_data.is_a?(TaskEnvelope) ? envelope_data : TaskEnvelope.from_h(envelope_data)
            task_state = (fetch_key(task_hash, :state) || :pending).to_sym

            unless TASK_STATES.include?(task_state)
              raise Spurline::LedgerError, "invalid task state for #{task_id}: #{task_state.inspect}"
            end

            deserialized[task_id.to_s] = {
              envelope: envelope,
              state: task_state,
              worker_session_id: fetch_key(task_hash, :worker_session_id),
              output: fetch_key(task_hash, :output),
              error: fetch_key(task_hash, :error),
            }
          end
        end

        # Stringifies task ids and dependency lists.
        def deserialize_dependency_graph(raw_graph)
          (raw_graph || {}).each_with_object({}) do |(task_id, deps), graph|
            graph[task_id.to_s] = Array(deps).map(&:to_s)
          end
        end

        # Indifferent-access lookup: tries the symbol key, then its string
        # form. Returns nil for optional misses; for required misses, calls
        # the block if given, otherwise raises KeyError.
        def fetch_key(hash, key, required: false, &block)
          if hash.is_a?(Hash) && hash.key?(key)
            hash[key]
          elsif hash.is_a?(Hash) && hash.key?(key.to_s)
            hash[key.to_s]
          elsif required
            return block.call if block

            raise KeyError, "missing key: #{key}"
          end
        end
      end
    end
  end
end
# frozen_string_literal: true

module Spurline
  module Orchestration
    # Deterministic FIFO merge queue with explicit conflict handling strategies.
    #
    # Outputs are hashes; a "conflict" is a key that already exists in the
    # merged result with a different value. Strategies:
    #   :escalate   - conflicting entries are skipped entirely; success is
    #                 false when any conflict occurred.
    #   :file_level - non-conflicting keys of an entry still merge; the
    #                 conflicting keys keep their earlier values.
    #   :union      - last writer wins; conflicts are still reported.
    class MergeQueue
      STRATEGIES = %i[escalate file_level union].freeze

      ConflictReport = Struct.new(:task_id, :conflicting_task_id, :resource, :details, keyword_init: true)
      MergeResult = Struct.new(:success, :merged_output, :conflicts, keyword_init: true) do
        def success?
          success
        end
      end

      # @param strategy [Symbol] one of STRATEGIES
      # @raise [Spurline::ConfigurationError] for an unknown strategy
      def initialize(strategy: :escalate)
        @strategy = strategy.to_sym
        validate_strategy!(@strategy)
        @queue = []
      end

      # Appends a completed task's output (deep-copied) in FIFO order.
      # @raise [ArgumentError] unless +output+ is a Hash
      def enqueue(task_id:, output:)
        raise ArgumentError, "merge output must be a hash" unless output.is_a?(Hash)

        @queue << { task_id: task_id.to_s, output: deep_copy(output) }
      end

      # Drains the queue, merging each entry into a copy of +existing_output+
      # according to the configured strategy.
      # @return [MergeResult] merged hash plus any ConflictReports
      def process(existing_output: {})
        result = deep_copy(existing_output)
        # Tracks which task last wrote each key; pre-existing keys have no owner.
        owners = result.keys.to_h { |key| [key, nil] }
        conflicts = []

        while (item = @queue.shift)
          collisions = detect_conflicts(result, item)

          if @strategy == :escalate
            if collisions.empty?
              merge_entry!(result, owners, item)
            else
              # Drop the whole entry and surface the conflict to a human.
              conflicts.concat(build_conflict_reports(item, collisions, owners, strategy: :escalate))
            end
          elsif @strategy == :file_level
            conflicts.concat(build_conflict_reports(item, collisions, owners, strategy: :file_level))
            blocked = collisions.map { |collision| collision[:resource] }

            item[:output].each do |key, value|
              next if blocked.include?(key)

              result[key] = deep_copy(value)
              owners[key] = item[:task_id]
            end
          else # :union
            conflicts.concat(build_conflict_reports(item, collisions, owners, strategy: :union))
            merge_entry!(result, owners, item)
          end
        end

        MergeResult.new(
          success: @strategy != :escalate || conflicts.empty?,
          merged_output: result,
          conflicts: conflicts
        )
      end

      def size
        @queue.size
      end

      def empty?
        @queue.empty?
      end

      private

      # Conflict detection: hash-key overlap with different values.
      def detect_conflicts(existing, item)
        item[:output].map { |key, value|
          next if !existing.key?(key) || existing[key] == value

          {
            resource: key,
            existing_value: deep_copy(existing[key]),
            incoming_value: deep_copy(value),
          }
        }.compact
      end

      def validate_strategy!(strategy)
        unless STRATEGIES.include?(strategy)
          raise Spurline::ConfigurationError, "invalid merge strategy: #{strategy.inspect}"
        end
      end

      # Writes every key of the entry into the merged hash, recording ownership.
      def merge_entry!(merged, owners, item)
        item[:output].each do |key, value|
          merged[key] = deep_copy(value)
          owners[key] = item[:task_id]
        end
      end

      # One ConflictReport per colliding key, pointing at the prior owner.
      def build_conflict_reports(item, collisions, owners, strategy:)
        collisions.map do |collision|
          ConflictReport.new(
            task_id: item[:task_id],
            conflicting_task_id: owners[collision[:resource]],
            resource: collision[:resource],
            details: {
              strategy: strategy,
              existing_value: collision[:existing_value],
              incoming_value: collision[:incoming_value],
            }
          )
        end
      end

      # Recursive structural copy of nested Hash/Array values.
      def deep_copy(value)
        if value.is_a?(Hash)
          value.to_h { |key, item| [key, deep_copy(item)] }
        elsif value.is_a?(Array)
          value.map { |item| deep_copy(item) }
        else
          value
        end
      end
    end
  end
end
# frozen_string_literal: true

module Spurline
  module Orchestration
    # Intersects parent and child tool permissions so a spawned child agent
    # never holds broader access than its parent for any tool both define.
    module PermissionIntersection
      module_function

      # Computes effective parent->child permissions under the setuid rule.
      # Result is always <= parent when both define the same tool.
      def compute(parent_permissions, child_permissions)
        parent = normalize_permissions(parent_permissions)
        child = normalize_permissions(child_permissions)

        merged = {}
        (parent.keys | child.keys).each do |tool_name|
          from_parent = parent[tool_name]
          from_child = child[tool_name]

          merged[tool_name] =
            if from_parent && from_child
              intersect_tool(from_parent, from_child)
            else
              # Only one side defines this tool: pass that side through.
              deep_copy(from_parent || from_child)
            end
        end
        merged
      end

      # Validates that child permissions do not exceed parent permissions.
      # Raises PrivilegeEscalationError if a child broadens access.
      # Tools the parent does not define are not checked here.
      def validate_no_escalation!(parent, child)
        base = normalize_permissions(parent)

        normalize_permissions(child).each do |tool_name, child_tool|
          parent_tool = base[tool_name]
          next if parent_tool.nil?

          validate_denied!(tool_name, parent_tool, child_tool)
          validate_requires_confirmation!(tool_name, parent_tool, child_tool)
          validate_allowed_users!(tool_name, parent_tool, child_tool)
        end

        true
      end

      # Pairwise intersection: restrictive flags OR together; user lists
      # intersect when both sides restrict, otherwise whichever side does.
      def intersect_tool(parent_tool, child_tool)
        combined = {
          denied: truthy?(parent_tool[:denied]) || truthy?(child_tool[:denied]),
          requires_confirmation: truthy?(parent_tool[:requires_confirmation]) ||
            truthy?(child_tool[:requires_confirmation]),
        }

        lhs = normalize_users(parent_tool[:allowed_users])
        rhs = normalize_users(child_tool[:allowed_users])
        users =
          if lhs && rhs
            lhs & rhs
          else
            lhs || rhs
          end

        combined[:allowed_users] = users if users
        combined
      end
      private_class_method :intersect_tool

      def validate_denied!(tool_name, parent_tool, child_tool)
        if truthy?(parent_tool[:denied]) && !truthy?(child_tool[:denied])
          raise Spurline::PrivilegeEscalationError, "child tool #{tool_name} removes denied=true"
        end
      end
      private_class_method :validate_denied!

      def validate_requires_confirmation!(tool_name, parent_tool, child_tool)
        if truthy?(parent_tool[:requires_confirmation]) && !truthy?(child_tool[:requires_confirmation])
          raise Spurline::PrivilegeEscalationError, "child tool #{tool_name} removes requires_confirmation=true"
        end
      end
      private_class_method :validate_requires_confirmation!

      def validate_allowed_users!(tool_name, parent_tool, child_tool)
        granted = normalize_users(parent_tool[:allowed_users])
        return if granted.nil?

        requested = normalize_users(child_tool[:allowed_users])
        if requested.nil?
          raise Spurline::PrivilegeEscalationError,
                "child tool #{tool_name} omits allowed_users while parent restricts it"
        end

        rogue = requested - granted
        return if rogue.empty?

        raise Spurline::PrivilegeEscalationError,
              "child tool #{tool_name} adds users not allowed by parent: #{rogue.join(", ")}"
      end
      private_class_method :validate_allowed_users!

      # Symbolizes tool names and normalizes each tool's config hash.
      def normalize_permissions(permissions)
        (permissions || {}).to_h do |tool_name, config|
          [tool_name.to_sym, normalize_tool_config(config)]
        end
      end
      private_class_method :normalize_permissions

      # Symbolizes config keys; non-hash configs collapse to {}.
      def normalize_tool_config(config)
        return {} unless config.is_a?(Hash)

        config.to_h do |key, value|
          name = key.to_sym
          [name, name == :allowed_users ? normalize_users(value) : value]
        end
      end
      private_class_method :normalize_tool_config

      # nil means "unrestricted"; anything else becomes a unique string list.
      def normalize_users(users)
        Array(users).map(&:to_s).uniq unless users.nil?
      end
      private_class_method :normalize_users

      # Only the literal +true+ counts as set; truthy strings like "true" do not.
      def truthy?(value)
        value.equal?(true)
      end
      private_class_method :truthy?

      # Recursive structural copy of nested Hash/Array values.
      def deep_copy(value)
        if value.is_a?(Hash)
          value.to_h { |key, item| [key, deep_copy(item)] }
        elsif value.is_a?(Array)
          value.map { |item| deep_copy(item) }
        else
          value
        end
      end
      private_class_method :deep_copy
    end
  end
end