carson 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.github/copilot-instructions.md +12 -0
- data/.github/pull_request_template.md +14 -0
- data/.github/workflows/carson_policy.yml +90 -0
- data/API.md +114 -0
- data/LICENSE +21 -0
- data/MANUAL.md +170 -0
- data/README.md +48 -0
- data/RELEASE.md +592 -0
- data/VERSION +1 -0
- data/assets/hooks/pre-commit +19 -0
- data/assets/hooks/pre-merge-commit +8 -0
- data/assets/hooks/pre-push +13 -0
- data/assets/hooks/prepare-commit-msg +8 -0
- data/carson.gemspec +37 -0
- data/exe/carson +13 -0
- data/lib/carson/adapters/git.rb +20 -0
- data/lib/carson/adapters/github.rb +20 -0
- data/lib/carson/cli.rb +189 -0
- data/lib/carson/config.rb +348 -0
- data/lib/carson/policy/ruby/lint.rb +61 -0
- data/lib/carson/runtime/audit.rb +793 -0
- data/lib/carson/runtime/lint.rb +177 -0
- data/lib/carson/runtime/local.rb +661 -0
- data/lib/carson/runtime/review/data_access.rb +253 -0
- data/lib/carson/runtime/review/gate_support.rb +224 -0
- data/lib/carson/runtime/review/query_text.rb +164 -0
- data/lib/carson/runtime/review/sweep_support.rb +252 -0
- data/lib/carson/runtime/review/utility.rb +63 -0
- data/lib/carson/runtime/review.rb +182 -0
- data/lib/carson/runtime.rb +182 -0
- data/lib/carson/version.rb +4 -0
- data/lib/carson.rb +6 -0
- data/templates/.github/copilot-instructions.md +12 -0
- data/templates/.github/pull_request_template.md +14 -0
- metadata +80 -0
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
module Carson
|
|
2
|
+
class Runtime
|
|
3
|
+
module Review
|
|
4
|
+
module SweepSupport
|
|
5
|
+
private
|
|
6
|
+
|
|
7
|
+
# Collects sweep findings for one pull request.
#
# Produces findings for (1) unresolved review threads and (2) risk-keyword
# hits in issue comments, reviews, and review-thread comments authored by
# anyone other than the PR author. Closed/merged PRs only surface activity
# that arrived after the close/merge baseline time (open PRs have no
# baseline, so all events are included). Returns an array of finding hashes
# deduplicated by URL, preserving first-occurrence order.
def sweep_findings_for_pull_request( details: )
  pr_author = details.dig( :author, :login ).to_s
  state = details.fetch( :state )
  # nil baseline for open PRs => include_sweep_event? admits everything.
  baseline_time = if [ "CLOSED", "MERGED" ].include?( state )
    parse_time_or_nil( text: details.fetch( :merged_at ) ) || parse_time_or_nil( text: details.fetch( :closed_at ) )
  end

  findings = []
  unresolved_thread_entries( details: details ).each do |entry|
    thread_time = parse_time_or_nil( text: entry.fetch( :created_at ) )
    next unless include_sweep_event?( event_time: thread_time, baseline_time: baseline_time )
    findings << build_sweep_finding(
      details: details,
      kind: "unresolved_thread",
      url: entry.fetch( :url ),
      author: entry.fetch( :author ),
      created_at: entry.fetch( :created_at ),
      reason: "unresolved review thread"
    )
  end

  # The three event sources share one scan; order matters for stable output.
  thread_comments = Array( details.fetch( :review_threads ) ).flat_map { |thread| thread.fetch( :comments ) }
  [
    [ Array( details.fetch( :comments ) ), "risk_issue_comment" ],
    [ Array( details.fetch( :reviews ) ), "risk_review" ],
    [ thread_comments, "risk_thread_comment" ]
  ].each do |events, kind|
    findings.concat(
      risk_keyword_findings(
        events: events,
        kind: kind,
        details: details,
        pr_author: pr_author,
        baseline_time: baseline_time
      )
    )
  end

  deduplicate_findings_by_url( items: findings )
end

# Shared risk-keyword scan over one event collection (comments, reviews, or
# thread comments). Skips events by the PR author, events with no keyword
# hits, and events outside the sweep baseline window.
def risk_keyword_findings( events:, kind:, details:, pr_author:, baseline_time: )
  events.each_with_object( [] ) do |event, result|
    next if event.fetch( :author ) == pr_author
    hits = matched_risk_keywords( text: event.fetch( :body ) )
    next if hits.empty?
    event_time = parse_time_or_nil( text: event.fetch( :created_at ) )
    next unless include_sweep_event?( event_time: event_time, baseline_time: baseline_time )
    result << build_sweep_finding(
      details: details,
      kind: kind,
      url: event.fetch( :url ),
      author: event.fetch( :author ),
      created_at: event.fetch( :created_at ),
      reason: "risk keywords: #{hits.join( ', ' )}"
    )
  end
end
|
|
77
|
+
|
|
78
|
+
# Inclusion guard for late-event sweep checks.
# With no baseline (open PR) every event counts; with a baseline, events
# lacking a parseable timestamp are excluded, otherwise only events strictly
# after the baseline are included.
def include_sweep_event?( event_time:, baseline_time: )
  case
  when baseline_time.nil? then true
  when event_time.nil? then false
  else event_time > baseline_time
  end
end
|
|
84
|
+
|
|
85
|
+
# Formats one sweep finding record, prefixing the event fields with the
# pull-request context (number, title, URL, state) so each finding is
# self-describing in reports. created_at is stringified for stable output.
def build_sweep_finding( details:, kind:, url:, author:, created_at:, reason: )
  pr_context = {
    pr_number: details.fetch( :number ),
    pr_title: details.fetch( :title ),
    pr_url: details.fetch( :url ),
    pr_state: details.fetch( :state )
  }
  pr_context.merge(
    kind: kind,
    url: url,
    author: author,
    created_at: created_at.to_s,
    reason: reason
  )
end
|
|
99
|
+
|
|
100
|
+
# Upserts one rolling tracking issue that captures latest sweep findings.
#
# Flow: ensure the tracking label exists, look up the issue by configured
# title, then:
#   - no findings  -> close the issue if it is open and return early
#   - no issue yet -> create it, re-fetch, return :created / :create_unknown
#   - issue exists -> reopen if closed, edit title/body/label, return
#                     :reopened_updated or :updated with the refreshed issue
#
# @param owner [String] repository owner login
# @param repo [String] repository name
# @param findings [Array<Hash>] sweep findings (may be empty)
# @return [Hash] { action:, issue: } describing what was done
# @raise [RuntimeError] when the gh issue-create call fails
def upsert_review_sweep_tracking_issue( owner:, repo:, findings: )
  slug = "#{owner}/#{repo}"
  ensure_review_sweep_label( repo_slug: slug )
  issue = find_review_sweep_issue( repo_slug: slug )
  if findings.empty?
    # A clear sweep closes any previously opened tracking issue.
    return close_review_sweep_issue_if_open( repo_slug: slug, issue: issue )
  end
  body = render_review_sweep_issue_body( findings: findings )
  if issue.nil?
    stdout_text, stderr_text, success, = gh_run(
      "issue", "create",
      "--repo", slug,
      "--title", config.review_tracking_issue_title,
      "--body", body,
      "--label", config.review_tracking_issue_label
    )
    raise gh_error_text( stdout_text: stdout_text, stderr_text: stderr_text, fallback: "unable to create review sweep tracking issue" ) unless success
    # Re-fetch so the returned issue carries number/state/url; gh create
    # output is not parsed directly.
    issue = find_review_sweep_issue( repo_slug: slug )
    return issue.nil? ? { action: "create_unknown", issue: nil } : { action: "created", issue: issue }
  end

  if issue.fetch( :state ) == "CLOSED"
    gh_system!( "issue", "reopen", issue.fetch( :number ).to_s, "--repo", slug )
  end
  gh_system!(
    "issue", "edit", issue.fetch( :number ).to_s,
    "--repo", slug,
    "--title", config.review_tracking_issue_title,
    "--body", body,
    "--add-label", config.review_tracking_issue_label
  )
  # Fall back to the pre-edit issue record if the refresh lookup misses.
  updated_issue = find_review_sweep_issue( repo_slug: slug )
  { action: issue.fetch( :state ) == "CLOSED" ? "reopened_updated" : "updated", issue: updated_issue || issue }
end
|
|
135
|
+
|
|
136
|
+
# Creates/updates sweep tracking label so issue upsert can apply a stable filter tag.
# `--force` makes the call idempotent: it updates description/color when the
# label already exists instead of failing.
#
# @param repo_slug [String] "owner/repo"
# @raise [RuntimeError] when the gh label command fails (via gh_system!)
def ensure_review_sweep_label( repo_slug: )
  gh_system!(
    "label", "create", config.review_tracking_issue_label,
    "--repo", repo_slug,
    "--description", "Carson review sweep tracking",
    "--color", "B60205",
    "--force"
  )
end
|
|
146
|
+
|
|
147
|
+
# Finds rolling tracking issue by exact configured title.
#
# Lists up to 100 issues in all states and matches on exact title equality.
# NOTE(review): repositories with more than 100 issues could miss the
# tracking issue here since gh returns at most `--limit` entries — consider
# `--search` by title if that becomes a problem; confirm against usage.
#
# @param repo_slug [String] "owner/repo"
# @return [Hash, nil] { number:, title:, state:, url: } or nil when absent
# @raise [RuntimeError] when the gh listing fails
# @raise [JSON::ParserError] when gh returns malformed JSON
def find_review_sweep_issue( repo_slug: )
  stdout_text, stderr_text, success, = gh_run( "issue", "list", "--repo", repo_slug, "--state", "all", "--limit", "100", "--json", "number,title,state,url,labels" )
  raise gh_error_text( stdout_text: stdout_text, stderr_text: stderr_text, fallback: "unable to list issues for review sweep" ) unless success
  issues = Array( JSON.parse( stdout_text ) )
  node = issues.find { |entry| entry[ "title" ].to_s == config.review_tracking_issue_title }
  return nil if node.nil?
  # Normalise to symbol keys and upcased state for downstream comparisons.
  {
    number: node[ "number" ],
    title: node[ "title" ].to_s,
    state: node[ "state" ].to_s.upcase,
    url: node[ "url" ].to_s
  }
end
|
|
161
|
+
|
|
162
|
+
# When sweep is clear, close prior tracking issue and add one clear audit comment.
# No-op when there is no tracking issue or it is already closed.
#
# @param repo_slug [String] "owner/repo"
# @param issue [Hash, nil] issue record from find_review_sweep_issue
# @return [Hash] { action: "none"|"closed", issue: }
# @raise [RuntimeError] when a gh comment/close command fails
def close_review_sweep_issue_if_open( repo_slug:, issue: )
  return { action: "none", issue: nil } if issue.nil?
  return { action: "none", issue: issue } unless issue.fetch( :state ) == "OPEN"
  # Leave an audit trail explaining why the issue was closed.
  clear_message = "Clear: no actionable late review activity detected at #{Time.now.utc.iso8601}."
  gh_system!( "issue", "comment", issue.fetch( :number ).to_s, "--repo", repo_slug, "--body", clear_message )
  gh_system!( "issue", "close", issue.fetch( :number ).to_s, "--repo", repo_slug )
  # Refresh so callers see the CLOSED state; fall back to the stale record.
  closed_issue = find_review_sweep_issue( repo_slug: repo_slug )
  { action: "closed", issue: closed_issue || issue }
end
|
|
172
|
+
|
|
173
|
+
# Markdown body used by rolling sweep issue so latest findings are always in
# one place. Header lines carry generation time and sweep configuration;
# the findings section lists one bullet group per finding, or "- none".
def render_review_sweep_issue_body( findings: )
  header = [
    "# Carson review sweep findings",
    "",
    "- Generated at: #{Time.now.utc.iso8601}",
    "- Window days: #{config.review_sweep_window_days}",
    "- States: #{config.review_sweep_states.join( ', ' )}",
    "- Finding count: #{findings.count}",
    "",
    "## Findings"
  ]
  detail =
    if findings.empty?
      [ "- none" ]
    else
      findings.flat_map do |item|
        [
          "- PR ##{item.fetch( :pr_number )} (#{item.fetch( :pr_state )}) #{item.fetch( :kind )}: #{item.fetch( :reason )}",
          "  - URL: #{item.fetch( :url )}",
          "  - Author: #{item.fetch( :author )}",
          "  - Created at: #{item.fetch( :created_at )}"
        ]
      end
    end
  ( header + detail ).join( "\n" )
end
|
|
196
|
+
|
|
197
|
+
# Writes sweep artefacts for CI logs and local troubleshooting.
# Delegates to the shared write_report helper (JSON + Markdown pair) and
# echoes both output paths. Report writing is best-effort: any failure is
# logged as a SKIP line rather than aborting the sweep itself.
#
# @param report [Hash] the sweep report structure
def write_review_sweep_report( report: )
  markdown_path, json_path = write_report(
    report: report,
    markdown_name: REVIEW_SWEEP_REPORT_MD,
    json_name: REVIEW_SWEEP_REPORT_JSON,
    renderer: method( :render_review_sweep_markdown )
  )
  puts_line "review_sweep_report_markdown: #{markdown_path}"
  puts_line "review_sweep_report_json: #{json_path}"
rescue StandardError => e
  # Deliberate best-effort: report output must never fail the sweep command.
  puts_line "review_sweep_report_write: SKIP (#{e.message})"
end
|
|
210
|
+
|
|
211
|
+
# Human-readable scheduled sweep report.
# Renders a fixed header from the report hash, optional tracking-issue
# lines, then a findings section mirroring the tracking-issue body format.
def render_review_sweep_markdown( report: )
  lines = [
    "# Carson Review Sweep Report",
    "",
    "- Generated at: #{report.fetch( :generated_at )}",
    "- Status: #{report.fetch( :status )}",
    "- Window days: #{report.fetch( :window_days )}",
    "- States: #{Array( report.fetch( :states ) ).join( ', ' )}",
    "- Cutoff time: #{report.fetch( :cutoff_time )}",
    "- Candidate count: #{report.fetch( :candidate_count )}",
    "- Finding count: #{report.fetch( :finding_count )}"
  ]
  tracking_issue = report[ :tracking_issue ]
  if tracking_issue.is_a?( Hash )
    lines << "- Tracking issue action: #{tracking_issue.fetch( :action )}"
    lines << "- Tracking issue URL: #{tracking_issue.dig( :issue, :url )}" if tracking_issue[ :issue ].is_a?( Hash )
  end
  lines << ""
  lines << "## Findings"
  findings = report.fetch( :findings )
  if findings.empty?
    lines << "- none"
  else
    findings.each do |item|
      lines.concat(
        [
          "- PR ##{item.fetch( :pr_number )} (#{item.fetch( :pr_state )}) #{item.fetch( :kind )}: #{item.fetch( :reason )}",
          "  - URL: #{item.fetch( :url )}",
          "  - Author: #{item.fetch( :author )}",
          "  - Created at: #{item.fetch( :created_at )}"
        ]
      )
    end
  end
  lines.join( "\n" )
end
|
|
244
|
+
|
|
245
|
+
# Sweep state mapping treats merged PRs as closed for state-based inclusion
# filtering: anything that is not OPEN (case-insensitive) maps to "closed".
def sweep_state_for( pr_state: )
  normalized = pr_state.to_s.upcase
  return "open" if normalized == "OPEN"
  "closed"
end
|
|
249
|
+
end
|
|
250
|
+
end
|
|
251
|
+
end
|
|
252
|
+
end
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
module Carson
|
|
2
|
+
class Runtime
|
|
3
|
+
module Review
|
|
4
|
+
module Utility
|
|
5
|
+
private
|
|
6
|
+
|
|
7
|
+
# Returns matching risk keywords using case-insensitive whole-word matching.
# Each configured keyword is escaped before being wrapped in word
# boundaries, so keywords containing regex metacharacters match literally.
def matched_risk_keywords( text: )
  haystack = text.to_s
  config.review_risk_keywords.select do |keyword|
    pattern = /\b#{Regexp.escape( keyword )}\b/i
    pattern.match?( haystack )
  end
end
|
|
14
|
+
|
|
15
|
+
# Disposition records always start with configured prefix.
# Leading whitespace is ignored before the prefix check.
def disposition_prefixed?( text: )
  stripped = text.to_s.lstrip
  stripped.start_with?( config.review_disposition_prefix )
end
|
|
19
|
+
|
|
20
|
+
# Extracts first matching disposition token from configured acknowledgement
# body. Tokens are tried in DISPOSITION_TOKENS order with case-insensitive
# whole-word matching; returns nil when none match.
def disposition_token( text: )
  body = text.to_s
  DISPOSITION_TOKENS.find do |token|
    body.match?( /\b#{token}\b/i )
  end
end
|
|
24
|
+
|
|
25
|
+
# GitHub URL extraction for mapping disposition acknowledgements to finding
# URLs. Matches run until whitespace, ')' or ']'; trailing punctuation is
# stripped and duplicates removed while preserving first-seen order.
def extract_github_urls( text: )
  matches = text.to_s.scan( %r{https://github\.com/[^\s\)\]]+} )
  cleaned = matches.map do |value|
    value.sub( /[.,;:]+$/, "" )
  end
  cleaned.uniq
end
|
|
29
|
+
|
|
30
|
+
# Parse RFC3339 timestamps and return nil on blank/invalid values.
# Whitespace-only and nil inputs short-circuit to nil; unparseable text is
# swallowed (ArgumentError from Time.parse) and also yields nil.
def parse_time_or_nil( text: )
  candidate = text.to_s.strip
  return nil if candidate.empty?
  begin
    Time.parse( candidate )
  rescue ArgumentError
    nil
  end
end
|
|
38
|
+
|
|
39
|
+
# Removes duplicate finding URLs while preserving first occurrence ordering.
# Entries whose :url is blank are dropped entirely; nil input yields [].
def deduplicate_findings_by_url( items: )
  seen_urls = {}
  deduplicated = []
  Array( items ).each do |entry|
    url = entry.fetch( :url ).to_s
    next if url.empty?
    next if seen_urls.key?( url )
    seen_urls[ url ] = true
    deduplicated << entry
  end
  deduplicated
end
|
|
49
|
+
|
|
50
|
+
# Shared report writer for JSON plus Markdown pairs in global report output.
# Ensures the report directory exists, writes the report as pretty JSON and
# as rendered Markdown, and returns [markdown_path, json_path].
def write_report( report:, markdown_name:, json_name:, renderer: )
  directory = report_dir_path
  FileUtils.mkdir_p( directory )
  paths = [ markdown_name, json_name ].map { |name| File.join( directory, name ) }
  markdown_path, json_path = paths
  File.write( json_path, JSON.pretty_generate( report ) )
  File.write( markdown_path, renderer.call( report: report ) )
  paths
end
|
|
60
|
+
end
|
|
61
|
+
end
|
|
62
|
+
end
|
|
63
|
+
end
|
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
require_relative "review/query_text"
|
|
2
|
+
require_relative "review/data_access"
|
|
3
|
+
require_relative "review/gate_support"
|
|
4
|
+
require_relative "review/sweep_support"
|
|
5
|
+
require_relative "review/utility"
|
|
6
|
+
|
|
7
|
+
module Carson
|
|
8
|
+
class Runtime
|
|
9
|
+
module Review
|
|
10
|
+
include QueryText
|
|
11
|
+
include DataAccess
|
|
12
|
+
include GateSupport
|
|
13
|
+
include SweepSupport
|
|
14
|
+
include Utility
|
|
15
|
+
|
|
16
|
+
# Review gate command: blocks the workflow until PR review activity has
# settled and every actionable item is resolved/acknowledged.
#
# Flow: fingerprint pre-check, gh availability check, PR resolution (by
# override number or current branch), warm-up wait, then up to
# review_max_polls snapshot polls — two consecutive identical snapshot
# signatures count as convergence. Blocks on: no PR, non-convergence,
# unresolved threads, or unacknowledged actionable comments/reviews.
# Writes a gate report in every outcome except gh-unavailable.
#
# @return [Integer] EXIT_OK, EXIT_BLOCK, or EXIT_ERROR
# NOTE(review): if config.review_max_polls is 0 the poll loop never runs and
# last_snapshot stays nil, so the later fetch calls would raise and be
# converted to EXIT_ERROR by the StandardError rescue — confirm config
# guarantees max_polls >= 1.
def review_gate!
  fingerprint_status = block_if_outsider_fingerprints!
  return fingerprint_status unless fingerprint_status.nil?
  print_header "Review Gate"
  unless gh_available?
    puts_line "ERROR: gh CLI not available in PATH."
    return EXIT_ERROR
  end

  owner, repo = repository_coordinates
  pr_number_override = carson_pr_number_override
  # Explicit PR override skips branch lookup and fetches details directly.
  pr_summary =
    if pr_number_override.nil?
      current_pull_request_for_branch( branch_name: current_branch )
    else
      details = pull_request_details( owner: owner, repo: repo, pr_number: pr_number_override )
      {
        number: details.fetch( :number ),
        title: details.fetch( :title ),
        url: details.fetch( :url ),
        state: details.fetch( :state )
      }
    end
  if pr_summary.nil?
    # No PR for this branch: emit a block report with empty finding lists.
    puts_line "BLOCK: no pull request found for branch #{current_branch}."
    report = {
      generated_at: Time.now.utc.iso8601,
      branch: current_branch,
      status: "block",
      converged: false,
      wait_seconds: config.review_wait_seconds,
      poll_seconds: config.review_poll_seconds,
      max_polls: config.review_max_polls,
      block_reasons: [ "no pull request found for current branch" ],
      pr: nil,
      unresolved_threads: [],
      actionable_top_level: [],
      unacknowledged_actionable: []
    }
    write_review_gate_report( report: report )
    return EXIT_BLOCK
  end

  wait_for_review_warmup
  converged = false
  last_snapshot = nil
  last_signature = nil
  poll_attempts = 0

  # Poll until two consecutive snapshots share a signature (stable) or the
  # poll budget is exhausted.
  config.review_max_polls.times do |index|
    poll_attempts = index + 1
    snapshot = review_gate_snapshot( owner: owner, repo: repo, pr_number: pr_summary.fetch( :number ) )
    last_snapshot = snapshot
    signature = review_gate_signature( snapshot: snapshot )
    puts_line "poll_attempt: #{poll_attempts}/#{config.review_max_polls}"
    puts_line "latest_activity: #{snapshot.fetch( :latest_activity ) || 'unknown'}"
    puts_line "unresolved_threads: #{snapshot.fetch( :unresolved_threads ).count}"
    puts_line "unacknowledged_actionable: #{snapshot.fetch( :unacknowledged_actionable ).count}"
    if !last_signature.nil? && signature == last_signature
      converged = true
      puts_line "convergence: stable"
      break
    end
    last_signature = signature
    # No sleep after the final attempt.
    wait_for_review_poll if index < config.review_max_polls - 1
  end

  block_reasons = []
  block_reasons << "review snapshot did not converge within #{config.review_max_polls} polls" unless converged
  if last_snapshot.fetch( :unresolved_threads ).any?
    block_reasons << "unresolved review threads remain (#{last_snapshot.fetch( :unresolved_threads ).count})"
  end
  if last_snapshot.fetch( :unacknowledged_actionable ).any?
    block_reasons << "actionable top-level comments/reviews without required disposition (#{last_snapshot.fetch( :unacknowledged_actionable ).count})"
  end

  report = {
    generated_at: Time.now.utc.iso8601,
    branch: current_branch,
    status: block_reasons.empty? ? "ok" : "block",
    converged: converged,
    wait_seconds: config.review_wait_seconds,
    poll_seconds: config.review_poll_seconds,
    max_polls: config.review_max_polls,
    poll_attempts: poll_attempts,
    block_reasons: block_reasons,
    pr: {
      number: pr_summary.fetch( :number ),
      title: pr_summary.fetch( :title ),
      url: pr_summary.fetch( :url ),
      state: pr_summary.fetch( :state )
    },
    unresolved_threads: last_snapshot.fetch( :unresolved_threads ),
    actionable_top_level: last_snapshot.fetch( :actionable_top_level ),
    unacknowledged_actionable: last_snapshot.fetch( :unacknowledged_actionable )
  }
  write_review_gate_report( report: report )
  if block_reasons.empty?
    puts_line "OK: review gate passed."
    return EXIT_OK
  end
  block_reasons.each { |reason| puts_line "BLOCK: #{reason}" }
  EXIT_BLOCK
rescue JSON::ParserError => e
  # Malformed gh output is an infrastructure error, not a review block.
  puts_line "ERROR: invalid gh JSON response (#{e.message})."
  EXIT_ERROR
rescue StandardError => e
  puts_line "ERROR: #{e.message}"
  EXIT_ERROR
end
|
|
126
|
+
|
|
127
|
+
# Scheduled sweep for late actionable review activity across recent pull
# requests.
#
# Flow: fingerprint pre-check, gh availability check, then scan PRs updated
# within review_sweep_window_days whose (open/closed) state is in
# review_sweep_states. Findings are collected per PR, sorted for stable
# output, mirrored into a rolling tracking issue, and written out as
# Markdown + JSON report artefacts.
#
# @return [Integer] EXIT_OK when clear, EXIT_BLOCK when findings exist,
#   EXIT_ERROR on gh/JSON failures
def review_sweep!
  fingerprint_status = block_if_outsider_fingerprints!
  return fingerprint_status unless fingerprint_status.nil?
  print_header "Review Sweep"
  unless gh_available?
    puts_line "ERROR: gh CLI not available in PATH."
    return EXIT_ERROR
  end

  owner, repo = repository_coordinates
  # 86_400 seconds per day; cutoff bounds the candidate PR window.
  cutoff_time = Time.now.utc - ( config.review_sweep_window_days * 86_400 )
  pull_requests = recent_pull_requests_for_sweep( owner: owner, repo: repo, cutoff_time: cutoff_time )
  puts_line "window_days: #{config.review_sweep_window_days}"
  puts_line "candidate_prs: #{pull_requests.count}"
  findings = []

  pull_requests.each do |entry|
    # Skip PRs whose mapped state ("open"/"closed") is not configured.
    next unless config.review_sweep_states.include?( sweep_state_for( pr_state: entry.fetch( :state ) ) )
    details = pull_request_details( owner: owner, repo: repo, pr_number: entry.fetch( :number ) )
    findings.concat( sweep_findings_for_pull_request( details: details ) )
  end

  # Deterministic ordering keeps issue bodies and reports diff-friendly.
  findings.sort_by! { |item| [ item.fetch( :pr_number ), item.fetch( :created_at ).to_s, item.fetch( :url ) ] }
  issue_result = upsert_review_sweep_tracking_issue( owner: owner, repo: repo, findings: findings )
  report = {
    generated_at: Time.now.utc.iso8601,
    status: findings.empty? ? "ok" : "block",
    window_days: config.review_sweep_window_days,
    states: config.review_sweep_states,
    cutoff_time: cutoff_time.utc.iso8601,
    candidate_count: pull_requests.count,
    finding_count: findings.count,
    findings: findings,
    tracking_issue: issue_result
  }
  write_review_sweep_report( report: report )
  puts_line "finding_count: #{findings.count}"
  if findings.empty?
    puts_line "OK: no actionable late review activity detected."
    return EXIT_OK
  end
  puts_line "BLOCK: actionable late review activity detected."
  EXIT_BLOCK
rescue JSON::ParserError => e
  # Malformed gh output is an infrastructure error, not a sweep block.
  puts_line "ERROR: invalid gh JSON response (#{e.message})."
  EXIT_ERROR
rescue StandardError => e
  puts_line "ERROR: #{e.message}"
  EXIT_ERROR
end
|
|
178
|
+
end
|
|
179
|
+
|
|
180
|
+
include Review
|
|
181
|
+
end
|
|
182
|
+
end
|
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
# Carson runtime wiring and shared helper layer.
|
|
2
|
+
# Centralises command-neutral concerns such as output contracts, path resolution,
|
|
3
|
+
# adapter invocation, and report-location policy.
|
|
4
|
+
require "fileutils"
|
|
5
|
+
require "json"
|
|
6
|
+
require "time"
|
|
7
|
+
|
|
8
|
+
module Carson
|
|
9
|
+
class Runtime
|
|
10
|
+
# Shared exit-code contract used by all commands and CI smoke assertions.
|
|
11
|
+
EXIT_OK = 0
|
|
12
|
+
EXIT_ERROR = 1
|
|
13
|
+
EXIT_BLOCK = 2
|
|
14
|
+
|
|
15
|
+
REPORT_MD = "pr_report_latest.md".freeze
|
|
16
|
+
REPORT_JSON = "pr_report_latest.json".freeze
|
|
17
|
+
REVIEW_GATE_REPORT_MD = "review_gate_latest.md".freeze
|
|
18
|
+
REVIEW_GATE_REPORT_JSON = "review_gate_latest.json".freeze
|
|
19
|
+
REVIEW_SWEEP_REPORT_MD = "review_sweep_latest.md".freeze
|
|
20
|
+
REVIEW_SWEEP_REPORT_JSON = "review_sweep_latest.json".freeze
|
|
21
|
+
DISPOSITION_TOKENS = %w[accepted rejected deferred].freeze
|
|
22
|
+
|
|
23
|
+
# Runtime wiring for repository context, tool paths, and output streams.
# Loads configuration from the repository and constructs the git/gh
# adapters used by all command wrappers below.
#
# @param repo_root [String] absolute path to the repository under inspection
# @param tool_root [String] absolute path to the carson tool installation
# @param out [IO] stream for normal command output (see puts_line)
# @param err [IO] stream for error output from wrapped commands
def initialize( repo_root:, tool_root:, out:, err: )
  @repo_root = repo_root
  @tool_root = tool_root
  @out = out
  @err = err
  @config = Config.load( repo_root: repo_root )
  @git_adapter = Adapters::Git.new( repo_root: repo_root )
  @github_adapter = Adapters::GitHub.new( repo_root: repo_root )
end
|
|
33
|
+
|
|
34
|
+
private
|
|
35
|
+
|
|
36
|
+
attr_reader :repo_root, :tool_root, :out, :err, :config, :git_adapter, :github_adapter
|
|
37
|
+
|
|
38
|
+
# Current local branch name via `git rev-parse --abbrev-ref HEAD`.
# NOTE(review): on a detached HEAD this yields the literal "HEAD" — confirm
# callers tolerate that.
# @return [String] branch name with trailing newline stripped
def current_branch
  git_capture!( "rev-parse", "--abbrev-ref", "HEAD" ).strip
end
|
|
42
|
+
|
|
43
|
+
# Checks local branch existence before restore attempts in ensure blocks.
# Uses `git show-ref --verify --quiet` so the check itself produces no
# output; only the exit status matters.
# @param branch_name [String] local branch name (without refs/heads/ prefix)
# @return [Boolean]
def branch_exists?( branch_name: )
  _, _, success, = git_run( "show-ref", "--verify", "--quiet", "refs/heads/#{branch_name}" )
  success
end
|
|
48
|
+
|
|
49
|
+
# Human-readable plural suffix helper for audit messaging: "" for exactly
# one, "s" otherwise (including zero and negative counts).
def plural_suffix( count: )
  return "" if count.to_i == 1
  "s"
end
|
|
53
|
+
|
|
54
|
+
# Section heading printer for command output: a blank separator line
# followed by the bracketed title.
def print_header( title )
  [ "", "[#{title}]" ].each { |line| puts_line line }
end
|
|
59
|
+
|
|
60
|
+
# Single output funnel to keep messaging style consistent.
# All command output goes through the injected out stream so tests and
# callers can capture it.
# @param message [String]
def puts_line( message )
  out.puts message
end
|
|
64
|
+
|
|
65
|
+
# Converts absolute paths into repo-relative output paths.
# Uses delete_prefix so only a leading "#{repo_root}/" is removed; the
# previous unanchored String#sub would strip the first occurrence of the
# root substring anywhere in the path (e.g. "/data#{repo_root}/x" lost its
# middle segment). Paths outside the repository are returned unchanged.
#
# @param absolute_path [String]
# @return [String] path relative to repo_root, or the input when it does
#   not start with the repository root
def relative_path( absolute_path )
  absolute_path.delete_prefix( "#{repo_root}/" )
end
|
|
69
|
+
|
|
70
|
+
# Resolves a repo-relative path and blocks traversal outside repository
# root: the expanded path must begin with "#{repo_root}/" or a ConfigError
# is raised, naming the offending setting via label.
def resolve_repo_path!( relative_path:, label: )
  resolved = File.expand_path( relative_path.to_s, repo_root )
  rooted_prefix = File.join( repo_root, "" )
  unless resolved.start_with?( rooted_prefix )
    raise ConfigError, "#{label} must stay within repository root"
  end
  resolved
end
|
|
77
|
+
|
|
78
|
+
# Resolves report output precedence:
# 1) ~/.cache/carson when HOME is an absolute path
# 2) TMPDIR/carson when HOME is invalid and TMPDIR is absolute
# 3) /tmp/carson as final safety fallback (also on any unexpected error)
def report_dir_path
  home = ENV.fetch( "HOME", "" ).to_s
  if absolute_env_path?( path: home )
    File.join( home, ".cache", "carson" )
  else
    tmpdir = ENV.fetch( "TMPDIR", "" ).to_s
    absolute_env_path?( path: tmpdir ) ? File.join( tmpdir, "carson" ) : "/tmp/carson"
  end
rescue StandardError
  "/tmp/carson"
end
|
|
93
|
+
|
|
94
|
+
# Treats empty or non-absolute environment paths as invalid.
# Only non-empty strings beginning with "/" qualify.
def absolute_env_path?( path: )
  candidate = path.to_s
  return false if candidate.empty?
  candidate.start_with?( "/" )
end
|
|
99
|
+
|
|
100
|
+
# Soft capability check for GitHub CLI presence.
# Runs `gh --version` through the adapter and reports only the success
# flag; never raises.
# @return [Boolean]
def gh_available?
  _, _, success, = gh_run( "--version" )
  success
end
|
|
105
|
+
|
|
106
|
+
# Keeps check output fields stable even when gh returns blanks.
# Each raw entry hash is mapped to symbol keys; blank workflow/name/state
# values are coalesced to explicit defaults and link is stringified.
def normalise_check_entries( entries: )
  defaults = { "workflow" => "workflow", "name" => "check", "state" => "UNKNOWN" }
  Array( entries ).map do |entry|
    normalised = defaults.map do |field, fallback|
      [ field.to_sym, blank_to( value: entry[ field ], default: fallback ) ]
    end.to_h
    normalised.merge( link: entry[ "link" ].to_s )
  end
end
|
|
117
|
+
|
|
118
|
+
# Coalesces blank strings to explicit defaults.
# The value is stringified and stripped first, so whitespace-only and nil
# inputs both fall back to the default.
def blank_to( value:, default: )
  stripped = value.to_s.strip
  return default if stripped.empty?
  stripped
end
|
|
123
|
+
|
|
124
|
+
# Chooses best available error text from gh stderr/stdout.
# stderr takes precedence in ordering; non-blank streams are joined with
# " | "; the fallback is used only when both streams are blank.
def gh_error_text( stdout_text:, stderr_text:, fallback: )
  parts = [ stderr_text, stdout_text ].map { |part| part.to_s.strip }
  combined = parts.reject( &:empty? ).join( " | " )
  combined.empty? ? fallback : combined
end
|
|
129
|
+
|
|
130
|
+
# Runs gh command and raises with best available stderr/stdout details on failure.
# @param args [Array<String>] gh CLI arguments
# @return [String] captured stdout on success
# @raise [RuntimeError] with combined stderr/stdout text on non-zero exit
def gh_system!( *args )
  stdout_text, stderr_text, success, = gh_run( *args )
  raise gh_error_text( stdout_text: stdout_text, stderr_text: stderr_text, fallback: "gh #{args.join( ' ' )} failed" ) unless success
  stdout_text
end
|
|
136
|
+
|
|
137
|
+
# Captures gh output without raising so callers can fall back when host
# metadata is unavailable.
# @param args [Array<String>] gh CLI arguments
# @return [Array(String, String, Boolean)] stdout, stderr, success flag
def gh_capture_soft( *args )
  stdout_text, stderr_text, success, = gh_run( *args )
  [ stdout_text, stderr_text, success ]
end
|
|
142
|
+
|
|
143
|
+
# Runs git command, streams outputs, and raises on non-zero exit.
# Both stdout and stderr are forwarded to the runtime's streams regardless
# of outcome, so the user sees git's own messages before any raise.
# @param args [Array<String>] git arguments
# @raise [RuntimeError] when the command exits non-zero
def git_system!( *args )
  stdout_text, stderr_text, success, = git_run( *args )
  out.print stdout_text unless stdout_text.empty?
  err.print stderr_text unless stderr_text.empty?
  raise "git #{args.join( ' ' )} failed" unless success
end
|
|
150
|
+
|
|
151
|
+
# Captures git stdout and raises on non-zero exit.
# On failure, git's stderr is forwarded to the error stream before raising
# so diagnostics are not lost.
# @param args [Array<String>] git arguments
# @return [String] captured stdout on success
# @raise [RuntimeError] when the command exits non-zero
def git_capture!( *args )
  stdout_text, stderr_text, success, = git_run( *args )
  unless success
    err.print stderr_text unless stderr_text.empty?
    raise "git #{args.join( ' ' )} failed"
  end
  stdout_text
end
|
|
160
|
+
|
|
161
|
+
# Captures git output without raising so caller can decide behaviour.
# @param args [Array<String>] git arguments
# @return [Array(String, String, Boolean)] stdout, stderr, success flag
def git_capture_soft( *args )
  stdout_text, stderr_text, success, = git_run( *args )
  [ stdout_text, stderr_text, success ]
end
|
|
166
|
+
|
|
167
|
+
# Low-level git invocation wrapper.
# Delegates to the injected git adapter; every higher-level git helper in
# this class funnels through here.
def git_run( *args )
  git_adapter.run( *args )
end
|
|
171
|
+
|
|
172
|
+
# Low-level gh invocation wrapper.
# Delegates to the injected GitHub adapter; every higher-level gh helper in
# this class funnels through here.
def gh_run( *args )
  github_adapter.run( *args )
end
|
|
176
|
+
end
|
|
177
|
+
end
|
|
178
|
+
|
|
179
|
+
require_relative "runtime/local"
|
|
180
|
+
require_relative "runtime/lint"
|
|
181
|
+
require_relative "runtime/audit"
|
|
182
|
+
require_relative "runtime/review"
|