carson 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.github/copilot-instructions.md +12 -0
- data/.github/pull_request_template.md +14 -0
- data/.github/workflows/carson_policy.yml +90 -0
- data/API.md +114 -0
- data/LICENSE +21 -0
- data/MANUAL.md +170 -0
- data/README.md +48 -0
- data/RELEASE.md +592 -0
- data/VERSION +1 -0
- data/assets/hooks/pre-commit +19 -0
- data/assets/hooks/pre-merge-commit +8 -0
- data/assets/hooks/pre-push +13 -0
- data/assets/hooks/prepare-commit-msg +8 -0
- data/carson.gemspec +37 -0
- data/exe/carson +13 -0
- data/lib/carson/adapters/git.rb +20 -0
- data/lib/carson/adapters/github.rb +20 -0
- data/lib/carson/cli.rb +189 -0
- data/lib/carson/config.rb +348 -0
- data/lib/carson/policy/ruby/lint.rb +61 -0
- data/lib/carson/runtime/audit.rb +793 -0
- data/lib/carson/runtime/lint.rb +177 -0
- data/lib/carson/runtime/local.rb +661 -0
- data/lib/carson/runtime/review/data_access.rb +253 -0
- data/lib/carson/runtime/review/gate_support.rb +224 -0
- data/lib/carson/runtime/review/query_text.rb +164 -0
- data/lib/carson/runtime/review/sweep_support.rb +252 -0
- data/lib/carson/runtime/review/utility.rb +63 -0
- data/lib/carson/runtime/review.rb +182 -0
- data/lib/carson/runtime.rb +182 -0
- data/lib/carson/version.rb +4 -0
- data/lib/carson.rb +6 -0
- data/templates/.github/copilot-instructions.md +12 -0
- data/templates/.github/pull_request_template.md +14 -0
- metadata +80 -0
|
@@ -0,0 +1,793 @@
|
|
|
1
|
+
require "cgi"
|
|
2
|
+
require "open3"
|
|
3
|
+
|
|
4
|
+
module Carson
|
|
5
|
+
class Runtime
|
|
6
|
+
module Audit
|
|
7
|
+
# Runs the full local audit pass: repository facts, working tree, hook health,
# local lint quality, main-branch sync, PR/required checks, default-branch CI
# baseline and scope integrity. Prints a sectioned report, persists the PR
# monitor report, and returns EXIT_BLOCK when a hard policy block was found,
# otherwise EXIT_OK.
def audit!
  # The fingerprint guard can short-circuit the whole audit with its own status.
  fingerprint_status = block_if_outsider_fingerprints!
  return fingerprint_status unless fingerprint_status.nil?
  # audit_state only escalates: "ok" -> "attention" -> "block".
  audit_state = "ok"
  print_header "Repository"
  puts_line "root: #{repo_root}"
  puts_line "current_branch: #{current_branch}"
  print_header "Working Tree"
  puts_line git_capture!( "status", "--short", "--branch" ).strip
  print_header "Hooks"
  hooks_ok = hooks_health_report
  audit_state = "block" unless hooks_ok
  print_header "Local Lint Quality"
  local_lint_quality = local_lint_quality_report
  audit_state = "block" if local_lint_quality.fetch( :status ) == "block"
  print_header "Main Sync Status"
  ahead_count, behind_count, main_error = main_sync_counts
  if main_error
    # Sync state unknown: degrade to attention, never block on a lookup error.
    puts_line "main_vs_remote_main: unknown"
    puts_line "WARN: unable to calculate main sync status (#{main_error})."
    audit_state = "attention" if audit_state == "ok"
  elsif ahead_count.positive?
    # Local main ahead of remote main is treated as drift and hard-blocks.
    puts_line "main_vs_remote_main_ahead: #{ahead_count}"
    puts_line "main_vs_remote_main_behind: #{behind_count}"
    puts_line "ACTION: local #{config.main_branch} is ahead of #{config.git_remote}/#{config.main_branch} by #{ahead_count} commit#{plural_suffix( count: ahead_count )}; reset local drift before commit/push workflows."
    audit_state = "block"
  elsif behind_count.positive?
    # Behind remote main is recoverable via sync: attention only.
    puts_line "main_vs_remote_main_ahead: #{ahead_count}"
    puts_line "main_vs_remote_main_behind: #{behind_count}"
    puts_line "ACTION: local #{config.main_branch} is behind #{config.git_remote}/#{config.main_branch} by #{behind_count} commit#{plural_suffix( count: behind_count )}; run carson sync."
    audit_state = "attention" if audit_state == "ok"
  else
    puts_line "main_vs_remote_main_ahead: 0"
    puts_line "main_vs_remote_main_behind: 0"
    puts_line "ACTION: local #{config.main_branch} is in sync with #{config.git_remote}/#{config.main_branch}."
  end
  print_header "PR and Required Checks (gh)"
  monitor_report = pr_and_check_report
  audit_state = "attention" if audit_state == "ok" && monitor_report.fetch( :status ) != "ok"
  print_header "Default Branch CI Baseline (gh)"
  default_branch_baseline = default_branch_ci_baseline_report
  audit_state = "block" if default_branch_baseline.fetch( :status ) == "block"
  audit_state = "attention" if audit_state == "ok" && default_branch_baseline.fetch( :status ) != "ok"
  scope_guard = print_scope_integrity_guard
  audit_state = "block" if scope_guard.fetch( :split_required )
  audit_state = "attention" if audit_state == "ok" && scope_guard.fetch( :status ) == "attention"
  # Persist the combined monitor report with the final audit status embedded.
  write_and_print_pr_monitor_report(
    report: monitor_report.merge(
      local_lint_quality: local_lint_quality,
      default_branch_baseline: default_branch_baseline,
      audit_status: audit_state
    )
  )
  print_header "Audit Result"
  puts_line "status: #{audit_state}"
  puts_line( audit_state == "block" ? "ACTION: local policy block must be resolved before commit/push." : "ACTION: no local hard block detected." )
  audit_state == "block" ? EXIT_BLOCK : EXIT_OK
end
|
|
65
|
+
|
|
66
|
+
private
|
|
67
|
+
# Reads the current branch's PR and its required checks via the gh CLI and
# prints a summary. Returns a report hash whose :status is "ok", "attention"
# (checks failing/pending/unavailable) or "skipped" (gh missing, no PR, or
# malformed JSON).
def pr_and_check_report
  report = {
    generated_at: Time.now.utc.iso8601,
    branch: current_branch,
    status: "ok",
    skip_reason: nil,
    pr: nil,
    checks: {
      status: "unknown",
      skip_reason: nil,
      required_total: 0,
      failing_count: 0,
      pending_count: 0,
      failing: [],
      pending: []
    }
  }
  unless gh_available?
    report[ :status ] = "skipped"
    report[ :skip_reason ] = "gh CLI not available in PATH"
    puts_line "SKIP: #{report.fetch( :skip_reason )}"
    return report
  end
  pr_stdout, pr_stderr, pr_success, = gh_run( "pr", "view", current_branch, "--json", "number,title,url,state,reviewDecision" )
  unless pr_success
    # No PR (or gh failure) is not a block: the section is skipped.
    error_text = gh_error_text( stdout_text: pr_stdout, stderr_text: pr_stderr, fallback: "unable to read PR for branch #{current_branch}" )
    report[ :status ] = "skipped"
    report[ :skip_reason ] = error_text
    puts_line "SKIP: #{error_text}"
    return report
  end
  pr_data = JSON.parse( pr_stdout )
  report[ :pr ] = {
    number: pr_data[ "number" ],
    title: pr_data[ "title" ].to_s,
    url: pr_data[ "url" ].to_s,
    state: pr_data[ "state" ].to_s,
    review_decision: blank_to( value: pr_data[ "reviewDecision" ], default: "NONE" )
  }
  puts_line "pr: ##{report.dig( :pr, :number )} #{report.dig( :pr, :title )}"
  puts_line "url: #{report.dig( :pr, :url )}"
  puts_line "review_decision: #{report.dig( :pr, :review_decision )}"
  checks_stdout, checks_stderr, checks_success, checks_exit = gh_run( "pr", "checks", report.dig( :pr, :number ).to_s, "--required", "--json", "name,state,bucket,workflow,link" )
  if checks_stdout.to_s.strip.empty?
    # Empty output means the checks data is unusable; flag attention.
    error_text = gh_error_text( stdout_text: checks_stdout, stderr_text: checks_stderr, fallback: "required checks unavailable" )
    report[ :checks ][ :status ] = "skipped"
    report[ :checks ][ :skip_reason ] = error_text
    report[ :status ] = "attention"
    puts_line "checks: SKIP (#{error_text})"
    return report
  end
  checks_data = JSON.parse( checks_stdout )
  # A check counts as failing when gh buckets it "fail" or reports FAILURE state.
  failing = checks_data.select { |entry| entry[ "bucket" ].to_s == "fail" || entry[ "state" ].to_s.upcase == "FAILURE" }
  pending = checks_data.select { |entry| entry[ "bucket" ].to_s == "pending" }
  # Exit code 8 from `gh pr checks` is mapped to "pending" rather than failure.
  report[ :checks ][ :status ] = checks_success ? "ok" : ( checks_exit == 8 ? "pending" : "attention" )
  report[ :checks ][ :required_total ] = checks_data.count
  report[ :checks ][ :failing_count ] = failing.count
  report[ :checks ][ :pending_count ] = pending.count
  report[ :checks ][ :failing ] = normalise_check_entries( entries: failing )
  report[ :checks ][ :pending ] = normalise_check_entries( entries: pending )
  puts_line "required_checks_total: #{report.dig( :checks, :required_total )}"
  puts_line "required_checks_failing: #{report.dig( :checks, :failing_count )}"
  puts_line "required_checks_pending: #{report.dig( :checks, :pending_count )}"
  report.dig( :checks, :failing ).each { |entry| puts_line "check_fail: #{entry.fetch( :workflow )} / #{entry.fetch( :name )} #{entry.fetch( :link )}".strip }
  report.dig( :checks, :pending ).each { |entry| puts_line "check_pending: #{entry.fetch( :workflow )} / #{entry.fetch( :name )} #{entry.fetch( :link )}".strip }
  report[ :status ] = "attention" if report.dig( :checks, :failing_count ).positive? || report.dig( :checks, :pending_count ).positive?
  report
rescue JSON::ParserError => e
  # Malformed gh output degrades this section to skipped instead of raising.
  report[ :status ] = "skipped"
  report[ :skip_reason ] = "invalid gh JSON response (#{e.message})"
  puts_line "SKIP: #{report.fetch( :skip_reason )}"
  report
end
|
|
140
|
+
|
|
141
|
+
# Enforces configured multi-language lint policy before governance passes.
# Runs each configured lint language against the resolved target files and
# returns a report hash; :status is "block" when any language blocks or when
# the check itself raises.
def local_lint_quality_report
  target_files, target_source = lint_target_files
  report = {
    status: "ok",
    skip_reason: nil,
    target_source: target_source,
    target_files_count: target_files.count,
    blocking_languages: 0,
    languages: []
  }
  puts_line "lint_target_source: #{target_source}"
  puts_line "lint_target_files_total: #{target_files.count}"
  config.lint_languages.each do |language, entry|
    language_report = lint_language_report(
      language: language,
      entry: entry,
      target_files: target_files
    )
    report.fetch( :languages ) << language_report
    next unless language_report.fetch( :status ) == "block"

    report[ :status ] = "block"
    report[ :blocking_languages ] += 1
  end
  puts_line "lint_blocking_languages: #{report.fetch( :blocking_languages )}"
  report
rescue StandardError => e
  # Fail closed: any unexpected error counts as a lint block. `report` can
  # still be nil here when lint_target_files itself raised, hence the ||=.
  report ||= {
    status: "block",
    skip_reason: nil,
    target_source: "unknown",
    target_files_count: 0,
    blocking_languages: 0,
    languages: []
  }
  report[ :status ] = "block"
  report[ :skip_reason ] = e.message
  puts_line "BLOCK: local lint quality check failed (#{e.message})."
  report
end
|
|
182
|
+
|
|
183
|
+
# File selection precedence:
# 1) staged files for local commit-time execution
# 2) PR changed files in GitHub pull_request events
# 3) full repository tracked files in GitHub non-PR events
# 4) local working-tree changed files as fallback
# Returns [files, source_label].
def lint_target_files
  staged_targets = existing_repo_files( paths: staged_files )
  return [ staged_targets, "staged" ] unless staged_targets.empty?

  if github_pull_request_event?
    pr_targets = lint_target_files_for_pull_request
    return [ pr_targets, "github_pull_request" ] unless pr_targets.nil?

    puts_line "WARN: unable to resolve pull request changed files; falling back to full repository files."
  end

  return [ lint_target_files_for_non_pr_ci, "github_full_repository" ] if github_actions_environment?

  [ existing_repo_files( paths: changed_files ), "working_tree" ]
end
|
|
204
|
+
|
|
205
|
+
# Resolves the changed files of the current pull request by diffing HEAD
# against the fetched base branch. Returns nil when the base ref, remote,
# fetch or diff cannot be resolved, so callers can fall back to a broader
# file set.
def lint_target_files_for_pull_request
  base_ref = ENV.fetch( "GITHUB_BASE_REF", "" ).to_s.strip
  return nil if base_ref.empty?

  preferred_remote = config.git_remote.to_s.strip
  remote_name = nil

  # Remote preference: configured remote, then conventional names, then the
  # first remote git reports.
  remotes_stdout, _, remotes_success, = git_run( "remote" )
  if remotes_success
    available_remotes = remotes_stdout.lines.map { |line| line.to_s.strip }.reject( &:empty? )
    candidates = [ preferred_remote, "origin", "github" ].map( &:to_s ).map( &:strip ).reject( &:empty? ).uniq
    remote_name = candidates.find { |candidate| available_remotes.include?( candidate ) }
    remote_name ||= available_remotes.first unless available_remotes.empty?
  end

  remote_name ||= ( preferred_remote.empty? ? "origin" : preferred_remote )

  # A shallow fetch is enough: only the base tip is needed for the diff below.
  _, _, fetch_success, = git_run( "fetch", "--no-tags", "--depth", "1", remote_name, base_ref )
  return nil unless fetch_success

  base = "#{remote_name}/#{base_ref}"
  # Triple-dot diff: files changed on HEAD since the merge-base with the base.
  stdout_text, _, success, = git_run(
    "diff", "--name-only", "--diff-filter=ACMRTUXB", "#{base}...HEAD"
  )
  return nil unless success

  paths = stdout_text.lines.map { |line| line.to_s.strip }.reject( &:empty? )
  existing_repo_files( paths: paths )
end
|
|
234
|
+
|
|
235
|
+
# Full-repository lint targets for non-PR CI runs: every git-tracked file
# that still exists in the working tree.
def lint_target_files_for_non_pr_ci
  tracked_lines = git_capture!( "ls-files" ).lines
  tracked_paths = tracked_lines.map { |entry| entry.to_s.strip }.reject( &:empty? )
  existing_repo_files( paths: tracked_paths )
end
|
|
240
|
+
|
|
241
|
+
# True when running inside GitHub Actions: the GITHUB_ACTIONS environment
# variable equals "true" (whitespace-trimmed, case-insensitive).
def github_actions_environment?
  flag = ENV.fetch( "GITHUB_ACTIONS", "" ).to_s.strip
  flag.casecmp( "true" ).zero?
end
|
|
244
|
+
|
|
245
|
+
# True only inside GitHub Actions when the triggering event is one of the
# pull-request event types.
def github_pull_request_event?
  return false unless github_actions_environment?

  %w[pull_request pull_request_target].include?( ENV.fetch( "GITHUB_EVENT_NAME", "" ).to_s.strip )
end
|
|
251
|
+
|
|
252
|
+
# Filters paths down to unique, non-blank entries that resolve to regular
# files inside the repository; relative paths are returned unchanged.
def existing_repo_files( paths: )
  selected = Array( paths ).each_with_object( [] ) do |relative, kept|
    next if relative.to_s.strip.empty?

    absolute = resolve_repo_path!( relative_path: relative, label: "lint target file #{relative}" )
    kept << relative if File.file?( absolute )
  end
  selected.uniq
end
|
|
260
|
+
|
|
261
|
+
# Runs a single language's lint policy against the subset of target_files
# matching the language's globs and returns a per-language report hash.
# Blocks on: a forbidden repo-local RuboCop config (outsider mode), missing
# policy config files, a missing/unavailable lint command, or a failing lint
# run. Disabled languages and languages with no matching files pass trivially.
def lint_language_report( language:, entry:, target_files: )
  globs = entry.fetch( :globs )
  candidate_files = Array( target_files ).select do |path|
    globs.any? { |pattern| pattern_matches_path?( pattern: pattern, path: path ) }
  end
  report = {
    language: language,
    enabled: entry.fetch( :enabled ),
    status: "ok",
    reason: nil,
    file_count: candidate_files.count,
    files: candidate_files,
    command: entry.fetch( :command ),
    config_files: entry.fetch( :config_files ),
    exit_code: 0
  }
  puts_line "lint_language: #{language} enabled=#{report.fetch( :enabled )} files=#{report.fetch( :file_count )}"
  # Outsider mode forbids a repo-local RuboCop config; the shared policy file
  # under ~/AI/CODING must be used instead. Checked even for disabled ruby.
  if language == "ruby" && outsider_mode?
    local_rubocop_path = File.join( repo_root, ".rubocop.yml" )
    if File.file?( local_rubocop_path )
      report[ :status ] = "block"
      report[ :reason ] = "repo-local RuboCop config is forbidden: #{relative_path( local_rubocop_path )}; remove it and use ~/AI/CODING/rubocop.yml."
      report[ :exit_code ] = EXIT_BLOCK
      puts_line "lint_#{language}_status: block"
      puts_line "lint_#{language}_reason: #{report.fetch( :reason )}"
      puts_line "ACTION: remove .rubocop.yml from this repository and run carson lint setup --source <path-or-git-url>."
      return report
    end
  end
  return report unless report.fetch( :enabled )
  return report if candidate_files.empty?

  # All declared policy config files must exist before the lint runs.
  missing_config_files = entry.fetch( :config_files ).reject { |path| File.file?( path ) }
  unless missing_config_files.empty?
    report[ :status ] = "block"
    report[ :reason ] = "missing config files: #{missing_config_files.join( ', ' )}"
    report[ :exit_code ] = EXIT_BLOCK
    puts_line "lint_#{language}_status: block"
    puts_line "lint_#{language}_reason: #{report.fetch( :reason )}"
    puts_line "ACTION: run carson lint setup --source <path-or-git-url> to prepare ~/AI/CODING policy files."
    return report
  end

  command = Array( entry.fetch( :command ) )
  command_name = command.first.to_s.strip
  if command_name.empty?
    report[ :status ] = "block"
    report[ :reason ] = "missing lint command"
    report[ :exit_code ] = EXIT_BLOCK
    puts_line "lint_#{language}_status: block"
    puts_line "lint_#{language}_reason: #{report.fetch( :reason )}"
    return report
  end
  unless command_available_for_lint?( command_name: command_name )
    report[ :status ] = "block"
    report[ :reason ] = "command not available: #{command_name}"
    report[ :exit_code ] = EXIT_BLOCK
    puts_line "lint_#{language}_status: block"
    puts_line "lint_#{language}_reason: #{report.fetch( :reason )}"
    return report
  end

  # Run the lint command against the candidate files; a non-zero exit blocks
  # with a compacted one-line summary of its output as the reason.
  args = expanded_lint_command_args( command: command, files: candidate_files )
  stdout_text, stderr_text, success, exit_code = local_command( *args )
  report[ :exit_code ] = exit_code
  unless success
    report[ :status ] = "block"
    report[ :reason ] = summarise_command_output(
      stdout_text: stdout_text,
      stderr_text: stderr_text,
      fallback: "lint command failed for #{language}"
    )
  end
  puts_line "lint_#{language}_status: #{report.fetch( :status )}"
  puts_line "lint_#{language}_exit: #{report.fetch( :exit_code )}"
  puts_line "lint_#{language}_reason: #{report.fetch( :reason )}" unless report.fetch( :reason ).nil?
  report
end
|
|
339
|
+
|
|
340
|
+
# True when the lint command can be executed. Path-like names (containing "/")
# must point at an executable file: "~" is expanded, absolute paths are used
# as-is, and other relative paths are resolved against repo_root. Bare names
# are searched on PATH.
def command_available_for_lint?( command_name: )
  return false if command_name.to_s.strip.empty?

  if command_name.include?( "/" )
    resolved = if command_name.start_with?( "~" )
                 File.expand_path( command_name )
               elsif command_name.start_with?( "/" )
                 command_name
               else
                 File.expand_path( command_name, repo_root )
               end
    return File.executable?( resolved )
  end

  ENV.fetch( "PATH", "" ).split( File::PATH_SEPARATOR ).any? do |dir|
    !dir.to_s.strip.empty? && File.executable?( File.join( dir, command_name ) )
  end
end
|
|
359
|
+
|
|
360
|
+
# Builds the argv for a lint run: expands "~"-prefixed arguments and resolves
# repo-relative path arguments against repo_root, then substitutes the
# "{files}" placeholder in place — or appends the files when no placeholder
# is present.
def expanded_lint_command_args( command:, files: )
  argv = Array( command ).map do |raw|
    token = raw.to_s
    next token if token == "{files}"

    if token.start_with?( "~" )
      File.expand_path( token )
    elsif token.include?( "/" ) && !token.start_with?( "/" )
      File.expand_path( token, repo_root )
    else
      token
    end
  end
  return argv + Array( files ) unless argv.include?( "{files}" )

  argv.flat_map { |token| token == "{files}" ? Array( files ) : token }
end
|
|
379
|
+
|
|
380
|
+
# Local command runner for repository-context tools used by audit lint checks.
# Executes argv with the repository root as working directory and returns
# [stdout, stderr, success?, exit_code].
def local_command( *args )
  out_text, err_text, status = Open3.capture3( *args, chdir: repo_root )
  [ out_text, err_text, status.success?, status.exitstatus ]
end
|
|
385
|
+
|
|
386
|
+
# Compacts command output to one-line diagnostics for audit logs and JSON
# report payloads: stderr lines come first, blanks are dropped, at most the
# first 12 lines are joined with " | ", and fallback is returned when there
# is no output at all.
def summarise_command_output( stdout_text:, stderr_text:, fallback: )
  merged = "#{stderr_text}\n#{stdout_text}"
  cleaned = merged.lines.map( &:strip ).reject( &:empty? )
  return fallback if cleaned.empty?

  cleaned.first( 12 ).join( " | " )
end
|
|
393
|
+
|
|
394
|
+
# Evaluates default-branch CI health so stale workflow drift blocks before merge.
# Blocks when the default-branch head has failing or pending check-runs, or has
# workflow files but no check-run evidence at all. gh/API failures and bad JSON
# downgrade the section to "skipped" instead of blocking.
def default_branch_ci_baseline_report
  report = {
    status: "ok",
    skip_reason: nil,
    repository: nil,
    default_branch: nil,
    head_sha: nil,
    workflows_total: 0,
    check_runs_total: 0,
    failing_count: 0,
    pending_count: 0,
    no_check_evidence: false,
    failing: [],
    pending: []
  }
  unless gh_available?
    report[ :status ] = "skipped"
    report[ :skip_reason ] = "gh CLI not available in PATH"
    puts_line "baseline: SKIP (#{report.fetch( :skip_reason )})"
    return report
  end
  owner, repo = repository_coordinates
  report[ :repository ] = "#{owner}/#{repo}"
  repository_data = gh_json_payload!(
    "api", "repos/#{owner}/#{repo}",
    "--method", "GET",
    fallback: "unable to read repository metadata for #{owner}/#{repo}"
  )
  # Trust the API's default branch; fall back to the configured main branch.
  default_branch = blank_to( value: repository_data[ "default_branch" ], default: config.main_branch )
  report[ :default_branch ] = default_branch
  branch_data = gh_json_payload!(
    "api", "repos/#{owner}/#{repo}/branches/#{CGI.escape( default_branch )}",
    "--method", "GET",
    fallback: "unable to read default branch #{default_branch}"
  )
  head_sha = branch_data.dig( "commit", "sha" ).to_s.strip
  raise "default branch #{default_branch} has no commit SHA" if head_sha.empty?
  report[ :head_sha ] = head_sha
  workflow_entries = default_branch_workflow_entries(
    owner: owner,
    repo: repo,
    default_branch: default_branch
  )
  report[ :workflows_total ] = workflow_entries.count
  check_runs_payload = gh_json_payload!(
    "api", "repos/#{owner}/#{repo}/commits/#{head_sha}/check-runs",
    "--method", "GET",
    fallback: "unable to read check-runs for #{default_branch}@#{head_sha}"
  )
  check_runs = Array( check_runs_payload[ "check_runs" ] )
  failing, pending = partition_default_branch_check_runs( check_runs: check_runs )
  report[ :check_runs_total ] = check_runs.count
  report[ :failing ] = normalise_default_branch_check_entries( entries: failing )
  report[ :pending ] = normalise_default_branch_check_entries( entries: pending )
  report[ :failing_count ] = report.fetch( :failing ).count
  report[ :pending_count ] = report.fetch( :pending ).count
  # Workflows exist but produced no check-runs: CI evidence is missing.
  report[ :no_check_evidence ] = report.fetch( :workflows_total ).positive? && report.fetch( :check_runs_total ).zero?
  report[ :status ] = "block" if report.fetch( :failing_count ).positive?
  report[ :status ] = "block" if report.fetch( :pending_count ).positive?
  report[ :status ] = "block" if report.fetch( :no_check_evidence )
  puts_line "default_branch_repository: #{report.fetch( :repository )}"
  puts_line "default_branch_name: #{report.fetch( :default_branch )}"
  puts_line "default_branch_head_sha: #{report.fetch( :head_sha )}"
  puts_line "default_branch_workflows_total: #{report.fetch( :workflows_total )}"
  puts_line "default_branch_check_runs_total: #{report.fetch( :check_runs_total )}"
  puts_line "default_branch_failing: #{report.fetch( :failing_count )}"
  puts_line "default_branch_pending: #{report.fetch( :pending_count )}"
  report.fetch( :failing ).each { |entry| puts_line "default_branch_check_fail: #{entry.fetch( :workflow )} / #{entry.fetch( :name )} #{entry.fetch( :link )}".strip }
  report.fetch( :pending ).each { |entry| puts_line "default_branch_check_pending: #{entry.fetch( :workflow )} / #{entry.fetch( :name )} #{entry.fetch( :link )}".strip }
  if report.fetch( :no_check_evidence )
    puts_line "ACTION: default branch has workflow files but no check-runs; align workflow triggers and branch protection check names."
  end
  report
rescue JSON::ParserError => e
  # More-specific rescue first: malformed gh JSON is reported distinctly.
  report[ :status ] = "skipped"
  report[ :skip_reason ] = "invalid gh JSON response (#{e.message})"
  puts_line "baseline: SKIP (#{report.fetch( :skip_reason )})"
  report
rescue StandardError => e
  report[ :status ] = "skipped"
  report[ :skip_reason ] = e.message
  puts_line "baseline: SKIP (#{report.fetch( :skip_reason )})"
  report
end
|
|
479
|
+
|
|
480
|
+
# Reads JSON API payloads via gh, raising a RuntimeError with a descriptive
# message (built from stdout/stderr or the fallback) when gh reports
# non-success.
def gh_json_payload!( *args, fallback: )
  out_text, err_text, ok, = gh_run( *args )
  raise gh_error_text( stdout_text: out_text, stderr_text: err_text, fallback: fallback ) unless ok

  JSON.parse( out_text )
end
|
|
489
|
+
|
|
490
|
+
# Reads workflow files from the default branch via the GitHub contents API.
# A missing .github/workflows directory (HTTP 404) is a valid state and
# returns an empty list; any other gh failure raises. Only YAML file entries
# are returned.
def default_branch_workflow_entries( owner:, repo:, default_branch: )
  out_text, err_text, ok, = gh_run(
    "api", "repos/#{owner}/#{repo}/contents/.github/workflows",
    "--method", "GET",
    "-f", "ref=#{default_branch}"
  )
  unless ok
    message = gh_error_text(
      stdout_text: out_text,
      stderr_text: err_text,
      fallback: "unable to read workflow files for #{default_branch}"
    )
    # No workflows directory at all is fine — report zero workflow files.
    return [] if message.match?( /\b404\b/ )

    raise message
  end
  Array( JSON.parse( out_text ) ).select do |item|
    item.is_a?( Hash ) &&
      item[ "type" ].to_s == "file" &&
      item[ "name" ].to_s.match?( /\.ya?ml\z/i )
  end
end
|
|
513
|
+
|
|
514
|
+
# Splits default-branch check-runs into failing and pending policy buckets;
# runs that are neither (e.g. successful) are dropped. Returns
# [failing, pending].
def partition_default_branch_check_runs( check_runs: )
  buckets = { failing: [], pending: [] }
  Array( check_runs ).each do |run|
    if default_branch_check_run_failing?( entry: run )
      buckets[ :failing ] << run
    elsif default_branch_check_run_pending?( entry: run )
      buckets[ :pending ] << run
    end
  end
  [ buckets.fetch( :failing ), buckets.fetch( :pending ) ]
end
|
|
527
|
+
|
|
528
|
+
# Failing means the run completed with a non-empty conclusion that is not one
# of the acceptable outcomes (success, neutral, skipped).
def default_branch_check_run_failing?( entry: )
  return false unless entry[ "status" ].to_s.strip.downcase == "completed"

  verdict = entry[ "conclusion" ].to_s.strip.downcase
  !verdict.empty? && !%w[success neutral skipped].include?( verdict )
end
|
|
534
|
+
|
|
535
|
+
# Pending includes non-completed checks (any status other than "completed",
# blank included) and completed checks that are missing a conclusion.
def default_branch_check_run_pending?( entry: )
  phase = entry[ "status" ].to_s.strip.downcase
  return true if phase != "completed"

  entry[ "conclusion" ].to_s.strip.downcase.empty?
end
|
|
544
|
+
|
|
545
|
+
# Normalises default-branch check-runs to the report layout used by markdown
# output: completed runs expose their conclusion as state, all others expose
# their status; both are upcased with an UNKNOWN fallback.
def normalise_default_branch_check_entries( entries: )
  Array( entries ).map do |raw|
    completed = raw[ "status" ].to_s.strip.downcase == "completed"
    state_source = completed ? raw[ "conclusion" ] : raw[ "status" ]
    {
      workflow: blank_to( value: raw.dig( "app", "name" ), default: "workflow" ),
      name: blank_to( value: raw[ "name" ], default: "check" ),
      state: blank_to( value: state_source, default: "UNKNOWN" ).upcase,
      link: raw[ "html_url" ].to_s
    }
  end
end
|
|
561
|
+
|
|
562
|
+
# Writes monitor report artefacts and prints their locations; persistence
# failures are non-fatal and surface as a single SKIP line.
def write_and_print_pr_monitor_report( report: )
  md_location, json_location = write_pr_monitor_report( report: report )
  puts_line "report_markdown: #{md_location}"
  puts_line "report_json: #{json_location}"
rescue StandardError => e
  puts_line "report_write: SKIP (#{e.message})"
end
|
|
570
|
+
|
|
571
|
+
# Persists report in both machine-readable JSON and human-readable Markdown
# under the report directory (created on demand). Returns
# [markdown_path, json_path].
def write_pr_monitor_report( report: )
  destination = report_dir_path
  FileUtils.mkdir_p( destination )
  md_file = File.join( destination, REPORT_MD )
  json_file = File.join( destination, REPORT_JSON )
  File.write( json_file, JSON.pretty_generate( report ) )
  File.write( md_file, render_pr_monitor_markdown( report: report ) )
  [ md_file, json_file ]
end
|
|
581
|
+
|
|
582
|
+
# Renders Markdown summary used by humans during merge-readiness reviews.
#
# The document mirrors the JSON report: a header block followed by PR,
# required-checks, local-lint and default-branch-baseline sections. Optional
# sections (:pr, :local_lint_quality, :default_branch_baseline) may be nil and
# render as "- not available"; the top-level keys :generated_at, :branch,
# :status, :skip_reason and :checks are required and raise KeyError if absent.
#
# @param report [Hash] monitor report assembled by the audit pipeline
# @return [String] complete Markdown document joined with newlines
def render_pr_monitor_markdown( report: )
  lines = []
  lines << "# Carson PR Monitor Report"
  lines << ""
  lines << "- Generated at: #{report.fetch( :generated_at )}"
  lines << "- Branch: #{report.fetch( :branch )}"
  # :audit_status is the only defaulted header field, keeping older reports renderable.
  lines << "- Audit status: #{report.fetch( :audit_status, 'unknown' )}"
  lines << "- Monitor status: #{report.fetch( :status )}"
  lines << "- Skip reason: #{report.fetch( :skip_reason )}" unless report.fetch( :skip_reason ).nil?
  lines.concat( pr_monitor_pr_section( pr: report[ :pr ] ) )
  lines.concat( pr_monitor_checks_section( checks: report.fetch( :checks ) ) )
  lines.concat( pr_monitor_lint_section( lint_quality: report[ :local_lint_quality ] ) )
  lines.concat( pr_monitor_baseline_section( baseline: report[ :default_branch_baseline ] ) )
  lines << ""
  lines.join( "\n" )
end

# Renders the "## PR" section; nil means PR data could not be fetched.
def pr_monitor_pr_section( pr: )
  lines = [ "", "## PR" ]
  if pr.nil?
    lines << "- not available"
  else
    lines << "- Number: ##{pr.fetch( :number )}"
    lines << "- Title: #{pr.fetch( :title )}"
    lines << "- URL: #{pr.fetch( :url )}"
    lines << "- State: #{pr.fetch( :state )}"
    lines << "- Review decision: #{pr.fetch( :review_decision )}"
  end
  lines
end

# Renders the "## Required Checks" section with failing/pending sub-lists.
def pr_monitor_checks_section( checks: )
  lines = [ "", "## Required Checks" ]
  lines << "- Status: #{checks.fetch( :status )}"
  lines << "- Skip reason: #{checks.fetch( :skip_reason )}" unless checks.fetch( :skip_reason ).nil?
  lines << "- Total: #{checks.fetch( :required_total )}"
  lines << "- Failing: #{checks.fetch( :failing_count )}"
  lines << "- Pending: #{checks.fetch( :pending_count )}"
  lines.concat( pr_monitor_entry_list( heading: "### Failing", entries: checks.fetch( :failing ) ) )
  lines.concat( pr_monitor_entry_list( heading: "### Pending", entries: checks.fetch( :pending ) ) )
  lines
end

# Renders the "## Local Lint Quality" section; nil means local lint did not run.
def pr_monitor_lint_section( lint_quality: )
  lines = [ "", "## Local Lint Quality" ]
  if lint_quality.nil?
    lines << "- not available"
    return lines
  end
  lines << "- Status: #{lint_quality.fetch( :status )}"
  lines << "- Skip reason: #{lint_quality.fetch( :skip_reason )}" unless lint_quality.fetch( :skip_reason ).nil?
  lines << "- Target source: #{lint_quality.fetch( :target_source )}"
  lines << "- Target files: #{lint_quality.fetch( :target_files_count )}"
  lines << "- Blocking languages: #{lint_quality.fetch( :blocking_languages )}"
  lines << ""
  lines << "### Language Results"
  languages = lint_quality.fetch( :languages )
  if languages.empty?
    lines << "- none"
  else
    languages.each do |entry|
      lines << "- #{entry.fetch( :language )}: status=#{entry.fetch( :status )} files=#{entry.fetch( :file_count )} exit=#{entry.fetch( :exit_code )}"
      lines << "  reason: #{entry.fetch( :reason )}" unless entry.fetch( :reason ).nil?
    end
  end
  lines
end

# Renders the "## Default Branch CI Baseline" section; nil means the baseline
# lookup was skipped or unavailable.
def pr_monitor_baseline_section( baseline: )
  lines = [ "", "## Default Branch CI Baseline" ]
  if baseline.nil?
    lines << "- not available"
    return lines
  end
  lines << "- Status: #{baseline.fetch( :status )}"
  lines << "- Skip reason: #{baseline.fetch( :skip_reason )}" unless baseline.fetch( :skip_reason ).nil?
  lines << "- Repository: #{baseline.fetch( :repository )}" unless baseline.fetch( :repository ).nil?
  lines << "- Branch: #{baseline.fetch( :default_branch )}" unless baseline.fetch( :default_branch ).nil?
  lines << "- Head SHA: #{baseline.fetch( :head_sha )}" unless baseline.fetch( :head_sha ).nil?
  lines << "- Workflow files: #{baseline.fetch( :workflows_total )}"
  lines << "- Check-runs: #{baseline.fetch( :check_runs_total )}"
  lines << "- Failing: #{baseline.fetch( :failing_count )}"
  lines << "- Pending: #{baseline.fetch( :pending_count )}"
  lines << "- No check evidence: #{baseline.fetch( :no_check_evidence )}"
  lines.concat( pr_monitor_entry_list( heading: "### Baseline Failing", entries: baseline.fetch( :failing ) ) )
  lines.concat( pr_monitor_entry_list( heading: "### Baseline Pending", entries: baseline.fetch( :pending ) ) )
  lines
end

# Shared renderer for failing/pending check-entry lists (previously duplicated
# four times). The trailing strip drops the separator space when an entry has
# a blank link.
def pr_monitor_entry_list( heading:, entries: )
  lines = [ "", heading ]
  if entries.empty?
    lines << "- none"
  else
    entries.each do |entry|
      lines << "- #{entry.fetch( :workflow )} / #{entry.fetch( :name )} (#{entry.fetch( :state )}) #{entry.fetch( :link )}".strip
    end
  end
  lines
end
|
|
682
|
+
|
|
683
|
+
# Evaluates scope integrity using staged paths first, then working-tree paths as fallback.
#
# Prints the full scope-guard report (file source, group classification,
# checklist verdicts and the final ACTION line) and returns a compact summary.
#
# @return [Hash] { status:, split_required: } for the caller's gate decision
def print_scope_integrity_guard
  staged_paths = staged_files
  use_staged = !staged_paths.empty?
  paths = use_staged ? staged_paths : changed_files
  return { status: "ok", split_required: false } if paths.empty?

  scope = scope_integrity_status( files: paths, branch: current_branch )
  split_needed = scope.fetch( :split_required )
  misc_found = scope.fetch( :misc_present )
  print_header "Scope Integrity Guard"
  puts_line "scope_file_source: #{use_staged ? 'staged' : 'working_tree'}"
  puts_line "scope_file_count: #{paths.count}"
  puts_line "branch: #{scope.fetch( :branch )}"
  puts_line "scope_basis: changed_paths_only"
  puts_line "detected_groups: #{scope.fetch( :detected_groups ).sort.join( ', ' )}"
  # core/non-doc group lines share the empty-list fallback, so emit them together.
  [ [ "core_groups", :core_groups ], [ "non_doc_groups", :non_doc_groups ] ].each do |label, key|
    groups = scope.fetch( key )
    puts_line "#{label}: #{groups.empty? ? 'none' : groups.sort.join( ', ' )}"
  end
  puts_line "docs_only_changes: #{scope.fetch( :docs_only )}"
  puts_line "unmatched_paths_count: #{scope.fetch( :unmatched_paths ).count}"
  scope.fetch( :unmatched_paths ).each { |unmatched| puts_line "unmatched_path: #{unmatched}" }
  puts_line "violating_files_count: #{scope.fetch( :violating_files ).count}"
  scope.fetch( :violating_files ).each do |violating|
    puts_line "violating_file: #{violating} (group=#{scope.fetch( :grouped_paths ).fetch( violating )})"
  end
  puts_line "checklist_single_business_intent: #{split_needed ? 'needs_review' : 'pass'}"
  puts_line "checklist_single_scope_group: #{split_needed ? 'needs_split' : 'pass'}"
  puts_line "checklist_cross_boundary_changes_justified: #{( split_needed || misc_found ) ? 'needs_explanation' : 'pass'}"
  if split_needed
    puts_line "ACTION: split/re-branch is required before commit; multiple module groups detected."
  elsif misc_found
    puts_line "ACTION: unmatched paths detected; classify via scope.path_groups for stricter module checks."
  else
    puts_line "ACTION: scope integrity is within commit policy."
  end
  { status: scope.fetch( :status ), split_required: split_needed }
end
|
|
716
|
+
|
|
717
|
+
# Evaluates whether changed files stay within one core module group.
#
# Every path is classified via scope_group_for_path; "docs" entries are
# ignored for grouping, "test" and "misc" are treated as supporting changes.
# Touching more than one core group forces a split before commit.
#
# @param files [Array<String>] changed paths to classify
# @param branch [String] branch name, echoed back in the result
# @return [Hash] scope summary consumed by print_scope_integrity_guard
def scope_integrity_status( files:, branch: )
  grouped_paths = {}
  files.each { |changed_path| grouped_paths[ changed_path ] = scope_group_for_path( path: changed_path ) }
  detected_groups = grouped_paths.values.uniq
  non_doc_groups = detected_groups - [ "docs" ]
  # Tests are supporting changes; they may travel with one core module group.
  core_groups = non_doc_groups - [ "test", "misc" ]
  mixed_core_groups = core_groups.length > 1
  misc_present = non_doc_groups.include?( "misc" )
  split_required = mixed_core_groups
  unmatched_paths = files.select { |changed_path| grouped_paths.fetch( changed_path ) == "misc" }
  violating_files = []
  if split_required
    # Every non-supporting path belongs to a core group, so rejecting the
    # supporting groups yields exactly the core-group offenders.
    violating_files = files.reject do |changed_path|
      [ "docs", "test", "misc" ].include?( grouped_paths.fetch( changed_path ) )
    end
  end
  {
    branch: branch,
    grouped_paths: grouped_paths,
    detected_groups: detected_groups,
    non_doc_groups: non_doc_groups,
    core_groups: core_groups,
    docs_only: non_doc_groups.empty?,
    mixed_core_groups: mixed_core_groups,
    misc_present: misc_present,
    split_required: split_required,
    unmatched_paths: unmatched_paths,
    violating_files: violating_files,
    status: ( split_required || misc_present ) ? "attention" : "ok"
  }
end
|
|
752
|
+
|
|
753
|
+
# Resolves a path to configured scope group; unmatched paths become misc.
#
# Groups are tried in config.path_groups insertion order and the first group
# with a matching pattern wins.
def scope_group_for_path( path: )
  matched = config.path_groups.find do |_group, patterns|
    patterns.any? { |pattern| pattern_matches_path?( pattern: pattern, path: path ) }
  end
  matched ? matched.first : "misc"
end
|
|
760
|
+
|
|
761
|
+
# Supports directory-wide /** prefixes and fnmatch for other patterns.
#
# "dir/**" matches the directory itself and everything beneath it; all other
# patterns use File.fnmatch? with FNM_PATHNAME (so "*" never crosses "/") and
# FNM_DOTMATCH (dotfiles are matchable).
def pattern_matches_path?( pattern:, path: )
  unless pattern.end_with?( "/**" )
    return File.fnmatch?( pattern, path, File::FNM_PATHNAME | File::FNM_DOTMATCH )
  end
  prefix = pattern.delete_suffix( "/**" )
  path == prefix || path.start_with?( "#{prefix}/" )
end
|
|
769
|
+
|
|
770
|
+
# Uses index-only paths so commit hooks evaluate exactly what is being committed.
#
# Reads `git diff --cached --name-only`, drops blank lines, and keeps only the
# rename target when a line is formatted as "old -> new".
def staged_files
  raw_output = git_capture!( "diff", "--cached", "--name-only" )
  raw_output.lines.each_with_object( [] ) do |raw_line, paths|
    candidate = raw_line.to_s.strip
    paths << candidate.split( " -> " ).last unless candidate.empty?
  end
end
|
|
778
|
+
|
|
779
|
+
# Parses `git status --porcelain` and normalises rename targets.
#
# Porcelain rows are "XY <path>" — two status columns plus a space — so the
# path starts at column 3. Rename rows keep only the target path.
def changed_files
  git_capture!( "status", "--porcelain" ).lines.each_with_object( [] ) do |status_line, paths|
    candidate = status_line[ 3.. ].to_s.strip
    paths << candidate.split( " -> " ).last unless candidate.empty?
  end
end
|
|
787
|
+
|
|
788
|
+
# NOTE(review): orphaned comment — the predicate it described ("true when there
# are no staged/unstaged/untracked file changes") is not defined in this module;
# restore that method here or remove this comment.
|
|
789
|
+
end
|
|
790
|
+
|
|
791
|
+
include Audit
|
|
792
|
+
end
|
|
793
|
+
end
|