carson 1.0.0 → 2.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/copilot-instructions.md +1 -12
- data/.github/workflows/carson_policy.yml +1 -1
- data/API.md +50 -13
- data/MANUAL.md +140 -65
- data/README.md +140 -33
- data/RELEASE.md +350 -6
- data/SKILL.md +102 -0
- data/VERSION +1 -1
- data/carson.gemspec +3 -1
- data/{assets/hooks → hooks}/pre-commit +1 -1
- data/{assets/hooks → hooks}/pre-merge-commit +4 -0
- data/{assets/hooks → hooks}/pre-push +4 -0
- data/{assets/hooks → hooks}/prepare-commit-msg +4 -0
- data/icon.svg +651 -0
- data/lib/carson/adapters/agent.rb +15 -0
- data/lib/carson/adapters/claude.rb +45 -0
- data/lib/carson/adapters/codex.rb +45 -0
- data/lib/carson/adapters/prompt.rb +60 -0
- data/lib/carson/cli.rb +65 -20
- data/lib/carson/config.rb +100 -14
- data/lib/carson/policy/ruby/lint.rb +1 -1
- data/lib/carson/runtime/audit.rb +33 -10
- data/lib/carson/runtime/govern.rb +641 -0
- data/lib/carson/runtime/lint.rb +3 -3
- data/lib/carson/runtime/local.rb +51 -12
- data/lib/carson/runtime/review/gate_support.rb +14 -1
- data/lib/carson/runtime/review.rb +3 -3
- data/lib/carson/runtime.rb +10 -3
- data/lib/carson.rb +9 -0
- data/templates/.github/AGENTS.md +1 -0
- data/templates/.github/CLAUDE.md +1 -0
- data/templates/.github/carson-instructions.md +12 -0
- data/templates/.github/copilot-instructions.md +1 -12
- metadata +15 -5
|
@@ -0,0 +1,641 @@
|
|
|
1
|
+
# Carson govern — portfolio-level triage loop.
|
|
2
|
+
# Scans repos, lists open PRs, classifies each, takes the right action, reports.
|
|
3
|
+
require "fileutils"
require "json"
require "open3"
require "time"
|
|
6
|
+
|
|
7
|
+
module Carson
|
|
8
|
+
class Runtime
|
|
9
|
+
module Govern
|
|
10
|
+
GOVERN_REPORT_MD = "govern_latest.md".freeze
|
|
11
|
+
GOVERN_REPORT_JSON = "govern_latest.json".freeze
|
|
12
|
+
|
|
13
|
+
TRIAGE_READY = "ready".freeze
|
|
14
|
+
TRIAGE_CI_FAILING = "ci_failing".freeze
|
|
15
|
+
TRIAGE_REVIEW_BLOCKED = "review_blocked".freeze
|
|
16
|
+
TRIAGE_NEEDS_ATTENTION = "needs_attention".freeze
|
|
17
|
+
|
|
18
|
+
# Portfolio-level entry point. Scans configured repos (or current repo)
|
|
19
|
+
# and triages all open PRs. Returns EXIT_OK/EXIT_ERROR.
|
|
20
|
+
# Portfolio-level entry point. Runs a single triage cycle, or repeats a
# cycle every +loop_seconds+ when that is given. Returns EXIT_OK/EXIT_ERROR.
def govern!( dry_run: false, json_output: false, loop_seconds: nil )
  return govern_loop!( dry_run: dry_run, json_output: json_output, loop_seconds: loop_seconds ) if loop_seconds

  govern_cycle!( dry_run: dry_run, json_output: json_output )
end
|
|
27
|
+
|
|
28
|
+
# Runs one govern cycle: resolves the repo list (falling back to the
# current repo when none are configured), triages every open PR in each,
# persists the report, then emits either JSON or a human summary.
# Returns EXIT_OK, or EXIT_ERROR if anything raises.
def govern_cycle!( dry_run:, json_output: )
  print_header "Carson Govern"
  repos = governed_repo_paths
  if repos.empty?
    # No configured portfolio — govern the repo we are running in.
    puts_line "governing current repository: #{repo_root}"
    repos = [ repo_root ]
  else
    puts_line "governing #{repos.length} repo#{plural_suffix( count: repos.length )}"
  end

  portfolio_report = {
    cycle_at: Time.now.utc.iso8601,
    dry_run: dry_run,
    repos: []
  }

  repos.each do |repo_path|
    portfolio_report[ :repos ] << govern_repo!( repo_path: repo_path, dry_run: dry_run )
  end

  write_govern_report( report: portfolio_report )

  if json_output
    puts_line JSON.pretty_generate( portfolio_report )
  else
    print_govern_summary( report: portfolio_report )
  end

  EXIT_OK
rescue StandardError => e
  puts_line "ERROR: govern failed — #{e.message}"
  EXIT_ERROR
end
|
|
62
|
+
|
|
63
|
+
# Repeats govern cycles forever, sleeping +loop_seconds+ between them.
# A failed cycle is logged and the loop continues; Ctrl-C (Interrupt)
# stops cleanly and returns EXIT_OK.
def govern_loop!( dry_run:, json_output:, loop_seconds: )
  print_header "⧓ Carson Govern — loop mode (every #{loop_seconds}s)"
  cycles = 0
  loop do
    cycles += 1
    puts_line ""
    puts_line "── cycle #{cycles} at #{Time.now.utc.strftime( "%Y-%m-%d %H:%M:%S UTC" )} ──"
    begin
      govern_cycle!( dry_run: dry_run, json_output: json_output )
    rescue StandardError => e
      # One bad cycle must not kill the loop.
      puts_line "ERROR: cycle #{cycles} failed — #{e.message}"
    end
    puts_line "sleeping #{loop_seconds}s until next cycle…"
    sleep loop_seconds
  end
rescue Interrupt
  puts_line ""
  puts_line "⧓ govern loop stopped after #{cycles} cycle#{plural_suffix( count: cycles )}."
  EXIT_OK
end
|
|
83
|
+
|
|
84
|
+
# Standalone housekeep: sync + prune.
|
|
85
|
+
# Standalone housekeep: sync, then prune — prune only runs when sync
# succeeded; otherwise sync's status is returned unchanged.
def housekeep!
  print_header "Housekeep"
  sync_status = sync!
  return prune! if sync_status == EXIT_OK

  puts_line "housekeep: sync returned #{sync_status}; skipping prune."
  sync_status
end
|
|
94
|
+
|
|
95
|
+
private
|
|
96
|
+
|
|
97
|
+
# Resolves the list of repo paths to govern from config.
|
|
98
|
+
# Expands the configured govern repo paths, warning about and dropping
# any path that does not exist on disk.
def governed_repo_paths
  config.govern_repos.filter_map do |raw_path|
    expanded = File.expand_path( raw_path )
    if Dir.exist?( expanded )
      expanded
    else
      puts_line "WARN: governed repo path does not exist: #{expanded}"
      nil
    end
  end
end
|
|
108
|
+
|
|
109
|
+
# Governs a single repository: list open PRs, triage each.
|
|
110
|
+
# Governs a single repository: verifies the path, lists open PRs, and
# triages each. Returns a Hash report ({ repo:, prs:, error: }).
def govern_repo!( repo_path:, dry_run: )
  puts_line ""
  puts_line "--- #{repo_path} ---"
  report = { repo: repo_path, prs: [], error: nil }

  unless Dir.exist?( repo_path )
    report[ :error ] = "path does not exist"
    puts_line "ERROR: #{repo_path} does not exist"
    return report
  end

  prs = list_open_prs( repo_path: repo_path )
  if prs.nil?
    # nil means the gh call itself failed (vs. an empty list of PRs).
    report[ :error ] = "failed to list open PRs"
    puts_line "ERROR: failed to list open PRs for #{repo_path}"
    return report
  end

  if prs.empty?
    puts_line "no open PRs"
    return report
  end

  puts_line "open PRs: #{prs.length}"
  prs.each do |pr|
    report[ :prs ] << triage_pr!( pr: pr, repo_path: repo_path, dry_run: dry_run )
  end

  report
end
|
|
145
|
+
|
|
146
|
+
# Lists open PRs via gh CLI.
|
|
147
|
+
# Lists open PRs for the repo via the gh CLI. Returns the parsed JSON
# (Array of Hashes) on success, or nil when the command fails or emits
# invalid JSON.
def list_open_prs( repo_path: )
  json_fields = "number,title,headRefName,statusCheckRollup,reviewDecision,url,updatedAt"
  stdout_text, stderr_text, status = Open3.capture3(
    "gh", "pr", "list", "--state", "open",
    "--json", json_fields,
    chdir: repo_path
  )
  unless status.success?
    failure_message = stderr_text.to_s.strip
    puts_line "gh pr list failed: #{failure_message}" unless failure_message.empty?
    return nil
  end
  JSON.parse( stdout_text )
rescue JSON::ParserError => e
  puts_line "gh pr list returned invalid JSON: #{e.message}"
  nil
end
|
|
163
|
+
|
|
164
|
+
# Classifies a PR and takes appropriate action.
|
|
165
|
+
# Classifies one PR, decides the matching action, logs both, and —
# outside dry-run — executes the action. Returns the per-PR report Hash.
def triage_pr!( pr:, repo_path:, dry_run: )
  classification, detail = classify_pr( pr: pr, repo_path: repo_path )
  action = decide_action( classification: classification, dry_run: dry_run )

  pr_report = {
    number: pr[ "number" ],
    title: pr[ "title" ].to_s,
    branch: pr[ "headRefName" ].to_s,
    url: pr[ "url" ].to_s,
    classification: classification,
    action: action,
    detail: detail
  }

  puts_line "  PR ##{pr_report[ :number ]} (#{pr_report[ :branch ]}): #{classification} → #{action}"
  puts_line "    #{detail}" unless detail.to_s.empty?

  # Actions are only ever executed outside dry-run mode.
  execute_action!( action: action, pr: pr, repo_path: repo_path, dry_run: dry_run ) unless dry_run

  pr_report
end
|
|
195
|
+
|
|
196
|
+
TRIAGE_PENDING = "pending".freeze
|
|
197
|
+
|
|
198
|
+
# Classifies PR state by checking CI, review status, and audit readiness.
|
|
199
|
+
# Classifies a PR. Gate order matters: CI first, then an explicit
# reviewer block, then the audit gate, then the review gate.
# Returns [classification, detail].
def classify_pr( pr:, repo_path: )
  ci_status = check_ci_status( pr: pr )
  if ci_status == :pending && within_check_wait?( pr: pr )
    return [ TRIAGE_PENDING, "checks still settling (within check_wait window)" ]
  end
  return [ TRIAGE_CI_FAILING, "CI checks failing or pending" ] unless ci_status == :green

  if pr[ "reviewDecision" ].to_s.upcase == "CHANGES_REQUESTED"
    return [ TRIAGE_REVIEW_BLOCKED, "changes requested by reviewer" ]
  end

  audit_status, audit_detail = check_audit_status( pr: pr, repo_path: repo_path )
  return [ TRIAGE_NEEDS_ATTENTION, audit_detail ] unless audit_status == :pass

  review_status, review_detail = check_review_gate_status( pr: pr, repo_path: repo_path )
  return [ TRIAGE_REVIEW_BLOCKED, review_detail ] unless review_status == :pass

  [ TRIAGE_READY, "all gates pass" ]
end
|
|
220
|
+
|
|
221
|
+
# Checks CI status from PR's statusCheckRollup.
|
|
222
|
+
# Rolls a PR's statusCheckRollup up into :green, :red, or :pending.
# An empty rollup counts as green (no checks configured). Failure wins
# over pending.
def check_ci_status( pr: )
  checks = Array( pr[ "statusCheckRollup" ] )
  return :green if checks.empty?

  failing = checks.any? do |check|
    check_state_failing?( state: check[ "state" ].to_s ) ||
      check_conclusion_failing?( conclusion: check[ "conclusion" ].to_s )
  end
  return :red if failing

  pending = checks.any? { |check| check_state_pending?( state: check[ "state" ].to_s ) }
  pending ? :pending : :green
end
|
|
234
|
+
|
|
235
|
+
# True when a check's rollup state reports an outright failure.
def check_state_failing?( state: )
  %w[FAILURE ERROR].include?( state.upcase )
end
|
|
238
|
+
|
|
239
|
+
# True when a check-run conclusion means the check did not succeed.
def check_conclusion_failing?( conclusion: )
  %w[FAILURE CANCELLED TIMED_OUT ACTION_REQUIRED].include?( conclusion.upcase )
end
|
|
242
|
+
|
|
243
|
+
# True when a check's rollup state means it has not finished yet.
def check_state_pending?( state: )
  %w[PENDING QUEUED IN_PROGRESS WAITING REQUESTED].include?( state.upcase )
end
|
|
246
|
+
|
|
247
|
+
# Checks if the PR's branch is available locally and defers audit. Returns [:pass/:fail, detail].
|
|
248
|
+
# Probes whether the PR's head branch exists locally. The contract is
# [:pass/:fail, detail], but this implementation never returns :fail:
# a missing local branch skips the audit, and a present one defers the
# audit to the merge gate. Fix: the unused stdout/stderr captures are
# now explicitly discarded.
def check_audit_status( pr:, repo_path: )
  branch = pr[ "headRefName" ].to_s
  _stdout, _stderr, status = Open3.capture3(
    "git", "rev-parse", "--verify", "refs/heads/#{branch}",
    chdir: repo_path
  )
  return [ :pass, "branch not local; skipping audit" ] unless status.success?

  [ :pass, "audit deferred to merge gate" ]
end
|
|
260
|
+
|
|
261
|
+
# Checks review gate status. Returns [:pass/:fail, detail].
|
|
262
|
+
# Maps GitHub's reviewDecision onto the review gate.
# Returns [:pass/:fail, detail]. Unknown/blank decisions pass, since
# the repo may have no review policy configured. repo_path is unused
# today but kept for interface symmetry with the other gate checks.
def check_review_gate_status( pr:, repo_path: )
  decision = pr[ "reviewDecision" ].to_s.upcase
  return [ :pass, "approved" ] if decision == "APPROVED"
  return [ :fail, "changes requested" ] if decision == "CHANGES_REQUESTED"
  return [ :fail, "review required" ] if decision == "REVIEW_REQUIRED"

  [ :pass, "no review policy or approved" ]
end
|
|
275
|
+
|
|
276
|
+
# Maps classification to action.
|
|
277
|
+
# Maps a triage classification (plus dry-run mode) to the action name.
# Dry-run turns each mutating action into its "would_*" preview form.
def decide_action( classification:, dry_run: )
  case classification
  when TRIAGE_READY           then dry_run ? "would_merge" : "merge"
  when TRIAGE_CI_FAILING      then dry_run ? "would_dispatch_ci_fix" : "dispatch_ci_fix"
  when TRIAGE_REVIEW_BLOCKED  then dry_run ? "would_dispatch_review_fix" : "dispatch_review_fix"
  when TRIAGE_NEEDS_ATTENTION then "escalate"
  else "skip" # TRIAGE_PENDING and anything unrecognised
  end
end
|
|
293
|
+
|
|
294
|
+
# Executes the decided action on a PR.
|
|
295
|
+
# Executes the decided action on a PR. Unknown actions (including
# "skip" and the dry-run "would_*" previews) are no-ops. dry_run is
# accepted for interface symmetry; callers already skip this method
# entirely in dry-run mode.
def execute_action!( action:, pr:, repo_path:, dry_run: )
  case action
  when "merge"
    merge_if_ready!( pr: pr, repo_path: repo_path )
  when "dispatch_ci_fix"
    dispatch_agent!( pr: pr, repo_path: repo_path, objective: "fix_ci" )
  when "dispatch_review_fix"
    dispatch_agent!( pr: pr, repo_path: repo_path, objective: "address_review" )
  when "escalate"
    puts_line "  ESCALATE: PR ##{pr[ 'number' ]} needs human attention"
  end
end
|
|
307
|
+
|
|
308
|
+
# Merges a PR that has passed all gates.
|
|
309
|
+
# Merges a PR that passed all gates, honouring the configured merge
# authority flag and merge method (e.g. squash/merge/rebase — passed
# through to gh as "--#{method}"), deleting the branch, and housekeeping
# the repo on success. Fix: the unused stdout capture is now explicitly
# discarded.
def merge_if_ready!( pr:, repo_path: )
  unless config.govern_merge_authority
    puts_line "  merge authority disabled; skipping merge"
    return
  end

  method = config.govern_merge_method
  number = pr[ "number" ]
  _stdout, stderr_text, status = Open3.capture3(
    "gh", "pr", "merge", number.to_s,
    "--#{method}",
    "--delete-branch",
    chdir: repo_path
  )
  if status.success?
    puts_line "  merged PR ##{number} via #{method}"
    housekeep_repo!( repo_path: repo_path )
  else
    error_text = stderr_text.to_s.strip
    puts_line "  merge failed: #{error_text}"
  end
end
|
|
331
|
+
|
|
332
|
+
# Dispatches an agent to fix an issue on a PR.
|
|
333
|
+
# Dispatches an agent to fix an issue on a PR. The dispatch-state file
# deduplicates work: a "running" entry short-circuits re-dispatch.
# Fix: the original checked for "running" but only ever wrote
# "done"/"failed", so that guard could never trigger. The entry is now
# written as "running" BEFORE invoking the (synchronous, long-running)
# adapter — so a concurrent or crashed-mid-dispatch cycle cannot
# double-dispatch — and is updated with the real result afterwards.
# If the adapter raises, the entry is marked "failed" and the error
# propagates to the cycle-level rescue.
def dispatch_agent!( pr:, repo_path:, objective: )
  state = load_dispatch_state
  state_key = dispatch_state_key( pr: pr, repo_path: repo_path )

  existing = state[ state_key ]
  if existing && existing[ "status" ] == "running"
    puts_line "  agent already dispatched for #{objective}; skipping"
    return
  end

  provider = select_agent_provider
  unless provider
    puts_line "  no agent provider available; escalating"
    return
  end

  context = evidence( pr: pr, repo_path: repo_path, objective: objective )
  work_order = Adapters::Agent::WorkOrder.new(
    repo: repo_path,
    branch: pr[ "headRefName" ].to_s,
    pr_number: pr[ "number" ],
    objective: objective,
    context: context,
    acceptance_checks: nil
  )

  # Persist the in-flight marker before the blocking dispatch call.
  state[ state_key ] = {
    "objective" => objective,
    "provider" => provider,
    "dispatched_at" => Time.now.utc.iso8601,
    "status" => "running",
    "summary" => nil
  }
  save_dispatch_state( state: state )

  puts_line "  dispatching #{provider} agent for #{objective}"
  adapter = build_agent_adapter( provider: provider, repo_path: repo_path )
  begin
    result = adapter.dispatch( work_order: work_order )
  rescue StandardError => e
    state[ state_key ][ "status" ] = "failed"
    state[ state_key ][ "summary" ] = "dispatch raised: #{e.message}"
    save_dispatch_state( state: state )
    raise
  end

  state[ state_key ][ "status" ] = result.status == "done" ? "done" : "failed"
  state[ state_key ][ "summary" ] = result.summary
  save_dispatch_state( state: state )

  puts_line "  agent result: #{result.status} — #{result.summary.to_s[0, 120]}"
end
|
|
374
|
+
|
|
375
|
+
# Runs housekeep in the given repo after a successful merge.
|
|
376
|
+
# Runs housekeep in the given repo after a successful merge.
# Consistency fix: delegates repo scoping to scoped_runtime (which
# returns self for the current repo, or a fresh Runtime otherwise)
# instead of duplicating that branch inline.
def housekeep_repo!( repo_path: )
  scoped_runtime( repo_path: repo_path ).housekeep!
end
|
|
384
|
+
|
|
385
|
+
# Selects which agent provider to use based on config and availability.
|
|
386
|
+
# Picks the agent provider from config ("codex", "claude", or "auto"),
# returning its name only when the CLI binary is actually on PATH.
# "auto" prefers codex over claude. Returns nil when nothing usable is
# available or the configured value is unrecognised.
def select_agent_provider
  case config.govern_agent_provider
  when "codex"
    "codex" if command_available?( "codex" )
  when "claude"
    "claude" if command_available?( "claude" )
  when "auto"
    [ "codex", "claude" ].find { |name| command_available?( name ) }
  end
end
|
|
401
|
+
|
|
402
|
+
# True when +name+ resolves to an executable via `which`.
# NOTE(review): `which` is not guaranteed to exist on every platform —
# confirm, or switch to a pure-Ruby PATH scan if portability matters.
def command_available?( name )
  _stdout, _stderr, status = Open3.capture3( "which", name )
  status.success?
end
|
|
406
|
+
|
|
407
|
+
# Instantiates the adapter for a provider name. Raises for anything
# select_agent_provider would not have returned.
def build_agent_adapter( provider:, repo_path: )
  case provider
  when "codex"  then Adapters::Codex.new( repo_root: repo_path )
  when "claude" then Adapters::Claude.new( repo_root: repo_path )
  else raise "unknown agent provider: #{provider}"
  end
end
|
|
417
|
+
|
|
418
|
+
# Dispatch state persistence.
|
|
419
|
+
# Loads persisted dispatch state from disk. Returns {} when the file is
# missing or unparseable. Robustness fix: also returns {} when the file
# parses to something other than a JSON object (e.g. an array), since
# every caller indexes the result by string key.
def load_dispatch_state
  path = config.govern_dispatch_state_path
  return {} unless File.file?( path )

  parsed = JSON.parse( File.read( path ) )
  parsed.is_a?( Hash ) ? parsed : {}
rescue JSON::ParserError
  {}
end
|
|
427
|
+
|
|
428
|
+
# Persists dispatch state as pretty-printed JSON, creating the parent
# directory when needed.
def save_dispatch_state( state: )
  target = config.govern_dispatch_state_path
  FileUtils.mkdir_p( File.dirname( target ) )
  File.write( target, JSON.pretty_generate( state ) )
end
|
|
433
|
+
|
|
434
|
+
# Builds a state key like "repo_dir#123" from the repo's directory
# basename and the PR number. NOTE(review): basenames can collide across
# different parent directories — confirm governed repos always have
# unique directory names.
def dispatch_state_key( pr:, repo_path: )
  "#{File.basename( repo_path )}##{pr[ 'number' ]}"
end
|
|
438
|
+
|
|
439
|
+
# Evidence gathering — builds structured context Hash for agent work orders.
|
|
440
|
+
# Builds the structured context Hash for an agent work order: PR title,
# objective-specific evidence (CI logs or review findings), and any
# prior failed attempt. Degrades to a title-only Hash if gathering raises.
def evidence( pr:, repo_path:, objective: )
  ctx = { title: pr.fetch( "title", "" ) }
  extra =
    case objective
    when "fix_ci"         then ci_evidence( pr: pr, repo_path: repo_path )
    when "address_review" then review_evidence( pr: pr, repo_path: repo_path )
    else {}
    end
  ctx.merge!( extra )
  prior = prior_attempt( pr: pr, repo_path: repo_path )
  ctx[ :prior_attempt ] = prior if prior
  ctx
rescue StandardError => e
  puts_line "  evidence gathering failed: #{e.message}"
  { title: pr.fetch( "title", "" ) }
end
|
|
455
|
+
|
|
456
|
+
CI_LOG_LIMIT = 8_000
|
|
457
|
+
|
|
458
|
+
# Gathers CI failure evidence: finds the most recent failed run on the
# PR's branch, then captures its failing-step logs (tail-truncated).
# Returns {} when no failed run is found or listing fails; returns only
# the run URL when the log fetch fails. Never raises.
def ci_evidence( pr:, repo_path: )
  branch = pr[ "headRefName" ].to_s
  list_stdout, _list_stderr, list_status = Open3.capture3(
    "gh", "run", "list",
    "--branch", branch,
    "--status", "failure",
    "--limit", "1",
    "--json", "databaseId,url",
    chdir: repo_path
  )
  return {} unless list_status.success?

  runs = JSON.parse( list_stdout )
  return {} if runs.empty?

  failed_run = runs.first
  run_id = failed_run[ "databaseId" ].to_s
  run_url = failed_run[ "url" ].to_s

  log_stdout, _log_stderr, log_status = Open3.capture3(
    "gh", "run", "view", run_id, "--log-failed",
    chdir: repo_path
  )
  return { ci_run_url: run_url } unless log_status.success?

  { ci_logs: truncate_log( text: log_stdout ), ci_run_url: run_url }
rescue StandardError => e
  puts_line "  ci_evidence failed: #{e.message}"
  {}
end
|
|
487
|
+
|
|
488
|
+
# Returns the text unchanged when within +limit+ characters; otherwise
# keeps only the LAST +limit+ characters (CI failures tend to be at the
# tail of a log). nil-safe via to_s.
def truncate_log( text:, limit: CI_LOG_LIMIT )
  log = text.to_s
  log.length <= limit ? log : log[ -limit.. ]
end
|
|
493
|
+
|
|
494
|
+
# Gathers review evidence via a repo-scoped Runtime, reaching its
# private review helpers with send. Collects unresolved review-thread
# comments plus actionable top-level items into
# { review_findings: [ { kind:, url:, body: }, ... ] }.
# Returns {} (after logging) if anything raises.
def review_evidence( pr:, repo_path: )
  rt = scoped_runtime( repo_path: repo_path )
  owner, repo = rt.send( :repository_coordinates )
  details = rt.send( :pull_request_details, owner: owner, repo: repo, pr_number: pr[ "number" ] )
  pr_author = details.dig( :author, :login ).to_s

  findings = []
  rt.send( :unresolved_thread_entries, details: details ).each do |entry|
    findings << {
      kind: "unresolved_thread",
      url: entry[ :url ],
      body: thread_body( details: details, url: entry[ :url ] )
    }
  end
  rt.send( :actionable_top_level_items, details: details, pr_author: pr_author ).each do |entry|
    findings << {
      kind: entry[ :kind ],
      url: entry[ :url ],
      body: comment_body( details: details, url: entry[ :url ] )
    }
  end

  { review_findings: findings }
rescue StandardError => e
  puts_line "  review_evidence failed: #{e.message}"
  {}
end
|
|
518
|
+
|
|
519
|
+
# Returns self when targeting the current repo; otherwise a new Runtime
# rooted at repo_path, sharing this runtime's tool root and IO streams.
def scoped_runtime( repo_path: )
  return self if repo_path == self.repo_root

  Runtime.new( repo_root: repo_path, tool_root: tool_root, out: out, err: err )
end
|
|
523
|
+
|
|
524
|
+
# Summarises a prior FAILED dispatch for this PR, or nil when there was
# none ("running"/"done" attempts are not surfaced to the agent).
def prior_attempt( pr:, repo_path: )
  entry = load_dispatch_state[ dispatch_state_key( pr: pr, repo_path: repo_path ) ]
  return nil unless entry && entry[ "status" ] == "failed"

  { summary: entry[ "summary" ].to_s, dispatched_at: entry[ "dispatched_at" ].to_s }
end
|
|
532
|
+
|
|
533
|
+
# Returns the body of the review-thread comment whose URL matches, or
# "" when no thread comment has that URL.
def thread_body( details:, url: )
  Array( details[ :review_threads ] ).each do |thread|
    hit = thread[ :comments ].find { |comment| comment[ :url ] == url }
    return hit[ :body ].to_s if hit
  end
  ""
end
|
|
541
|
+
|
|
542
|
+
# Returns the body of the top-level comment or review whose URL matches,
# checking comments first, then reviews; "" when neither matches.
def comment_body( details:, url: )
  [ :comments, :reviews ].each do |collection|
    hit = Array( details[ collection ] ).find { |item| item[ :url ] == url }
    return hit[ :body ].to_s if hit
  end
  ""
end
|
|
551
|
+
|
|
552
|
+
# Check wait: returns true if the PR was updated within the configured wait window.
|
|
553
|
+
# True when the PR was updated more recently than the configured
# check-wait window — i.e. its checks may still be settling. A zero or
# negative window, a blank timestamp, or an unparseable one all count
# as "not waiting".
def within_check_wait?( pr: )
  window = config.govern_check_wait
  return false if window <= 0

  raw_updated_at = pr[ "updatedAt" ].to_s.strip
  return false if raw_updated_at.empty?

  ( Time.now.utc - Time.parse( raw_updated_at ).utc ) < window
rescue ArgumentError
  # Time.parse raises ArgumentError on malformed timestamps.
  false
end
|
|
565
|
+
|
|
566
|
+
# Report writing.
|
|
567
|
+
# Writes the cycle report to the report dir as both JSON and Markdown,
# logging each output path.
def write_govern_report( report: )
  target_dir = report_dir_path
  FileUtils.mkdir_p( target_dir )
  json_path = File.join( target_dir, GOVERN_REPORT_JSON )
  md_path = File.join( target_dir, GOVERN_REPORT_MD )
  File.write( json_path, JSON.pretty_generate( report ) )
  File.write( md_path, render_govern_markdown( report: report ) )
  puts_line "report_json: #{json_path}"
  puts_line "report_markdown: #{md_path}"
end
|
|
577
|
+
|
|
578
|
+
# Renders the portfolio report as Markdown: a header with cycle metadata,
# then a section per repo holding either an error note, a "no PRs" note,
# or one subsection per triaged PR.
def render_govern_markdown( report: )
  md = [
    "# Carson Govern Report",
    "",
    "**Cycle**: #{report[ :cycle_at ]}",
    "**Dry run**: #{report[ :dry_run ]}",
    ""
  ]

  Array( report[ :repos ] ).each do |repo_report|
    md << "## #{repo_report[ :repo ]}" << ""

    if repo_report[ :error ]
      md << "**Error**: #{repo_report[ :error ]}" << ""
      next
    end

    prs = Array( repo_report[ :prs ] )
    if prs.empty?
      md << "No open PRs." << ""
      next
    end

    prs.each do |pr|
      md << "### PR ##{pr[ :number ]} — #{pr[ :title ]}" << ""
      md << "- **Branch**: #{pr[ :branch ]}"
      md << "- **Classification**: #{pr[ :classification ]}"
      md << "- **Action**: #{pr[ :action ]}"
      md << "- **Detail**: #{pr[ :detail ]}" unless pr[ :detail ].to_s.empty?
      md << ""
    end
  end

  md.join( "\n" )
end
|
|
615
|
+
|
|
616
|
+
# Prints a one-line roll-up of the cycle: repo count, PR count, and a
# ready-vs-blocked split (anything not TRIAGE_READY counts as blocked).
def print_govern_summary( report: )
  puts_line ""
  repo_reports = Array( report[ :repos ] )
  all_prs = repo_reports.flat_map { |repo_report| Array( repo_report[ :prs ] ) }
  ready, blocked = all_prs.partition { |pr| pr[ :classification ] == TRIAGE_READY }
  puts_line "govern_summary: repos=#{repo_reports.length} prs=#{all_prs.length} ready=#{ready.length} blocked=#{blocked.length}"
end
|
|
637
|
+
end
|
|
638
|
+
|
|
639
|
+
include Govern
|
|
640
|
+
end
|
|
641
|
+
end
|
data/lib/carson/runtime/lint.rb
CHANGED
|
@@ -5,7 +5,7 @@ require "tmpdir"
|
|
|
5
5
|
module Carson
|
|
6
6
|
class Runtime
|
|
7
7
|
module Lint
|
|
8
|
-
# Prepares canonical lint policy files under
|
|
8
|
+
# Prepares canonical lint policy files under ~/.carson/lint from an explicit source.
|
|
9
9
|
def lint_setup!( source:, ref: "main", force: false )
|
|
10
10
|
print_header "Lint Setup"
|
|
11
11
|
source_text = source.to_s.strip
|
|
@@ -109,7 +109,7 @@ module Carson
|
|
|
109
109
|
def cache_workspace_root
|
|
110
110
|
home = ENV.fetch( "HOME", "" ).to_s.strip
|
|
111
111
|
if home.start_with?( "/" )
|
|
112
|
-
path = File.join( home, ".
|
|
112
|
+
path = File.join( home, ".carson", "cache" )
|
|
113
113
|
FileUtils.mkdir_p( path )
|
|
114
114
|
return path
|
|
115
115
|
end
|
|
@@ -122,7 +122,7 @@ module Carson
|
|
|
122
122
|
home = ENV.fetch( "HOME", "" ).to_s.strip
|
|
123
123
|
raise "HOME must be an absolute path for lint setup" unless home.start_with?( "/" )
|
|
124
124
|
|
|
125
|
-
File.join( home, "
|
|
125
|
+
File.join( home, ".carson", "lint" )
|
|
126
126
|
end
|
|
127
127
|
|
|
128
128
|
def copy_lint_coding_tree( source_coding_dir:, target_coding_dir:, force: )
|