carson 3.22.0 → 3.22.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,68 @@
4
4
  module Carson
5
5
  class Runtime
6
6
  module Local
7
+ # Returns a plan hash describing what prune! would do, without executing any mutations.
8
+ # Does NOT fetch — branch staleness reflects whatever the last fetch left behind.
9
+ # Returns: { stale: [...], orphan: [...], absorbed: [...] }
10
+ # Each item: { branch:, action: :delete|:skip, reason:, type: }
11
+ def prune_plan( dry_run: true ) # rubocop:disable Lint/UnusedMethodArgument
12
+ active_branch = current_branch
13
+ cwd_branch = cwd_worktree_branch
14
+
15
+ stale = stale_local_branches.map do |entry|
16
+ branch = entry.fetch( :branch )
17
+ upstream = entry.fetch( :upstream )
18
+ if config.protected_branches.include?( branch )
19
+ { action: :skip, branch: branch, upstream: upstream, name: branch, type: "stale", reason: "protected branch" }
20
+ elsif branch == active_branch
21
+ { action: :skip, branch: branch, upstream: upstream, name: branch, type: "stale", reason: "current branch" }
22
+ elsif cwd_branch && branch == cwd_branch
23
+ { action: :skip, branch: branch, upstream: upstream, name: branch, type: "stale", reason: "checked out in CWD worktree" }
24
+ else
25
+ { action: :delete, branch: branch, upstream: upstream, name: branch, type: "stale", reason: "upstream gone" }
26
+ end
27
+ end
28
+
29
+ orphan = orphan_local_branches( active_branch: active_branch, cwd_branch: cwd_branch ).map do |branch|
30
+ if gh_available?
31
+ tip_sha = begin
32
+ git_capture!( "rev-parse", "--verify", branch ).strip
33
+ rescue StandardError
34
+ nil
35
+ end
36
+
37
+ if tip_sha
38
+ merged_pr, = merged_pr_for_branch( branch: branch, branch_tip_sha: tip_sha )
39
+ if merged_pr.nil? && branch_absorbed_into_main?( branch: branch )
40
+ merged_pr = { url: "absorbed into #{config.main_branch}" }
41
+ end
42
+
43
+ if merged_pr
44
+ { action: :delete, branch: branch, upstream: "", name: branch, type: "orphan", reason: "merged — #{merged_pr[ :url ]}" }
45
+ else
46
+ { action: :skip, branch: branch, upstream: "", name: branch, type: "orphan", reason: "no merged PR evidence" }
47
+ end
48
+ else
49
+ { action: :skip, branch: branch, upstream: "", name: branch, type: "orphan", reason: "cannot read branch tip SHA" }
50
+ end
51
+ else
52
+ { action: :skip, branch: branch, upstream: "", name: branch, type: "orphan", reason: "gh CLI not available" }
53
+ end
54
+ end
55
+
56
+ absorbed = absorbed_local_branches( active_branch: active_branch, cwd_branch: cwd_branch ).map do |entry|
57
+ branch = entry.fetch( :branch )
58
+ upstream = entry.fetch( :upstream )
59
+ if gh_available? && branch_has_open_pr?( branch: branch )
60
+ { action: :skip, branch: branch, upstream: upstream, name: branch, type: "absorbed", reason: "open PR exists" }
61
+ else
62
+ { action: :delete, branch: branch, upstream: upstream, name: branch, type: "absorbed", reason: "content already on main" }
63
+ end
64
+ end
65
+
66
+ { stale: stale, orphan: orphan, absorbed: absorbed }
67
+ end
68
+
7
69
  def prune!( json_output: false )
8
70
  fingerprint_status = block_if_outsider_fingerprints!
9
71
  unless fingerprint_status.nil?
@@ -197,8 +259,8 @@ module Carson
197
259
 
198
260
  # Returns the worktree path for a branch, or nil if not checked out in any worktree.
199
261
  def worktree_path_for_branch( branch: )
200
- entry = worktree_list.find { |worktree| worktree.fetch( :branch, nil ) == branch }
201
- entry&.fetch( :path, nil )
262
+ entry = worktree_list.find { |worktree| worktree.branch == branch }
263
+ entry&.path
202
264
  end
203
265
 
204
266
  # Detects local branches whose upstream tracking is marked [gone] after fetch --prune.
@@ -433,6 +495,32 @@ module Carson
433
495
 
434
496
  # Finds merged PR evidence for the exact local branch tip.
435
497
  def merged_pr_for_branch( branch:, branch_tip_sha: )
498
+ closed_prs, error = closed_prs_for_branch( branch: branch, branch_tip_sha: branch_tip_sha )
499
+ return [ nil, error ] unless error.nil?
500
+
501
+ latest = Array( closed_prs )
502
+ .select { |entry| !entry[ :merged_at ].nil? }
503
+ .max_by { |entry| entry.fetch( :merged_at ) }
504
+ return [ nil, "no merged PR evidence for branch tip #{branch_tip_sha} into #{config.main_branch}" ] if latest.nil?
505
+
506
+ [ latest, nil ]
507
+ end
508
+
509
+ # Finds closed-but-unmerged PR evidence for the exact local branch tip.
510
+ def abandoned_pr_for_branch( branch:, branch_tip_sha: )
511
+ closed_prs, error = closed_prs_for_branch( branch: branch, branch_tip_sha: branch_tip_sha )
512
+ return [ nil, error ] unless error.nil?
513
+
514
+ latest = Array( closed_prs )
515
+ .select { |entry| entry[ :merged_at ].nil? && !entry[ :closed_at ].nil? }
516
+ .max_by { |entry| entry.fetch( :closed_at ) }
517
+ return [ nil, "no abandoned PR evidence for branch tip #{branch_tip_sha} into #{config.main_branch}" ] if latest.nil?
518
+
519
+ [ latest, nil ]
520
+ end
521
+
522
+ # Queries all closed PRs for the branch tip, regardless of merge state.
523
+ def closed_prs_for_branch( branch:, branch_tip_sha: )
436
524
  owner, repo = repository_coordinates
437
525
  results = []
438
526
  page = 1
@@ -450,7 +538,7 @@ module Carson
450
538
  "-f", "page=#{page}"
451
539
  )
452
540
  unless success
453
- error_text = gh_error_text( stdout_text: stdout_text, stderr_text: stderr_text, fallback: "unable to query merged PR evidence for branch #{branch}" )
541
+ error_text = gh_error_text( stdout_text: stdout_text, stderr_text: stderr_text, fallback: "unable to query closed PR evidence for branch #{branch}" )
454
542
  return [ nil, error_text ]
455
543
  end
456
544
  page_nodes = Array( JSON.parse( stdout_text ) )
@@ -461,42 +549,39 @@ module Carson
461
549
  next unless entry.dig( "base", "ref" ).to_s == config.main_branch
462
550
  next unless entry.dig( "head", "sha" ).to_s == branch_tip_sha
463
551
 
464
- merged_at = parse_time_or_nil( text: entry[ "merged_at" ] )
465
- next if merged_at.nil?
466
-
467
552
  results << {
468
553
  number: entry[ "number" ],
469
554
  url: entry[ "html_url" ].to_s,
470
- merged_at: merged_at.utc.iso8601,
555
+ merged_at: parse_time_or_nil( text: entry[ "merged_at" ] )&.utc&.iso8601,
556
+ closed_at: parse_time_or_nil( text: entry[ "closed_at" ] )&.utc&.iso8601,
471
557
  head_sha: entry.dig( "head", "sha" ).to_s
472
558
  }
559
+ end
560
+
561
+ if page >= max_pages
562
+ probe_stdout_text, probe_stderr_text, probe_success, = gh_run(
563
+ "api", "repos/#{owner}/#{repo}/pulls",
564
+ "--method", "GET",
565
+ "-f", "state=closed",
566
+ "-f", "base=#{config.main_branch}",
567
+ "-f", "head=#{owner}:#{branch}",
568
+ "-f", "sort=updated",
569
+ "-f", "direction=desc",
570
+ "-f", "per_page=100",
571
+ "-f", "page=#{page + 1}"
572
+ )
573
+ unless probe_success
574
+ error_text = gh_error_text( stdout_text: probe_stdout_text, stderr_text: probe_stderr_text, fallback: "unable to verify closed PR pagination limit for branch #{branch}" )
575
+ return [ nil, error_text ]
473
576
  end
474
- if page >= max_pages
475
- probe_stdout_text, probe_stderr_text, probe_success, = gh_run(
476
- "api", "repos/#{owner}/#{repo}/pulls",
477
- "--method", "GET",
478
- "-f", "state=closed",
479
- "-f", "base=#{config.main_branch}",
480
- "-f", "head=#{owner}:#{branch}",
481
- "-f", "sort=updated",
482
- "-f", "direction=desc",
483
- "-f", "per_page=100",
484
- "-f", "page=#{page + 1}"
485
- )
486
- unless probe_success
487
- error_text = gh_error_text( stdout_text: probe_stdout_text, stderr_text: probe_stderr_text, fallback: "unable to verify merged PR pagination limit for branch #{branch}" )
488
- return [ nil, error_text ]
489
- end
490
- probe_nodes = Array( JSON.parse( probe_stdout_text ) )
491
- return [ nil, "merged PR lookup exceeded pagination safety limit (#{max_pages} pages) for branch #{branch}" ] unless probe_nodes.empty?
492
- break
493
- end
494
- page += 1
577
+ probe_nodes = Array( JSON.parse( probe_stdout_text ) )
578
+ return [ nil, "closed PR lookup exceeded pagination safety limit (#{max_pages} pages) for branch #{branch}" ] unless probe_nodes.empty?
579
+ break
495
580
  end
496
- latest = results.max_by { |item| item.fetch( :merged_at ) }
497
- return [ nil, "no merged PR evidence for branch tip #{branch_tip_sha} into #{config.main_branch}" ] if latest.nil?
581
+ page += 1
582
+ end
498
583
 
499
- [ latest, nil ]
584
+ [ results, nil ]
500
585
  rescue JSON::ParserError => exception
501
586
  [ nil, "invalid gh JSON response (#{exception.message})" ]
502
587
  rescue StandardError => exception
@@ -9,7 +9,7 @@ module Carson
9
9
 
10
10
  unless working_tree_clean?
11
11
  return sync_finish(
12
- result: { command: "sync", status: "block", error: "working tree is dirty", recovery: "git add -A && git commit, then carson sync" },
12
+ result: sync_dirty_result,
13
13
  exit_code: EXIT_BLOCK, json_output: json_output
14
14
  )
15
15
  end
@@ -59,7 +59,7 @@ module Carson
59
59
  repos.each do |repo_path|
60
60
  repo_name = File.basename( repo_path )
61
61
  unless Dir.exist?( repo_path )
62
- puts_line "#{repo_name}: FAIL (path not found)"
62
+ puts_line "#{repo_name}: not found"
63
63
  record_batch_skip( command: "sync", repo_path: repo_path, reason: "path not found" )
64
64
  failed += 1
65
65
  next
@@ -73,12 +73,12 @@ module Carson
73
73
  clear_batch_success( command: "sync", repo_path: repo_path )
74
74
  synced += 1
75
75
  else
76
- puts_line "#{repo_name}: FAIL" unless verbose?
76
+ puts_line "#{repo_name}: could not sync" unless verbose?
77
77
  record_batch_skip( command: "sync", repo_path: repo_path, reason: "sync failed" )
78
78
  failed += 1
79
79
  end
80
80
  rescue StandardError => exception
81
- puts_line "#{repo_name}: FAIL (#{exception.message})"
81
+ puts_line "#{repo_name}: could not sync (#{exception.message})"
82
82
  record_batch_skip( command: "sync", repo_path: repo_path, reason: exception.message )
83
83
  failed += 1
84
84
  end
@@ -117,8 +117,8 @@ module Carson
117
117
  # Human-readable output for sync results.
118
118
  def print_sync_human( result: )
119
119
  if result[ :error ]
120
- puts_line "BLOCK: #{result[ :error ]}."
121
- puts_line " Recovery: #{result[ :recovery ]}" if result[ :recovery ]
120
+ puts_line "#{result[ :error ]}."
121
+ puts_line " #{result[ :recovery ]}" if result[ :recovery ]
122
122
  return
123
123
  end
124
124
 
@@ -144,6 +144,28 @@ module Carson
144
144
  git_capture!( "status", "--porcelain" ).strip.empty?
145
145
  end
146
146
 
147
+ def sync_dirty_result
148
+ if main_worktree_context?
149
+ {
150
+ command: "sync",
151
+ status: "block",
152
+ error: "main working tree has uncommitted changes",
153
+ recovery: "create a worktree first: carson worktree create <name>; then move or discard the accidental main-tree changes before retrying carson sync"
154
+ }
155
+ else
156
+ {
157
+ command: "sync",
158
+ status: "block",
159
+ error: "working tree is dirty",
160
+ recovery: "git add -A && git commit, then carson sync"
161
+ }
162
+ end
163
+ end
164
+
165
+ def main_worktree_context?
166
+ realpath_safe( repo_root ) == realpath_safe( main_worktree_root )
167
+ end
168
+
147
169
  def inside_git_work_tree?
148
170
  stdout_text, = git_capture_soft( "rev-parse", "--is-inside-work-tree" )
149
171
  stdout_text.to_s.strip == "true"
@@ -162,7 +184,7 @@ module Carson
162
184
  violations = outsider_fingerprint_violations
163
185
  return nil if violations.empty?
164
186
 
165
- violations.each { |entry| puts_line "BLOCK: #{entry}" }
187
+ violations.each { |entry| puts_line entry }
166
188
  EXIT_BLOCK
167
189
  end
168
190
 
@@ -6,8 +6,17 @@ module Carson
6
6
 
7
7
  SUPERSEDED = [
8
8
  ".github/carson-instructions.md",
9
+ ".github/biome.json",
10
+ ".github/erb-lint.yml",
11
+ ".github/rubocop.yml",
12
+ ".github/ruff.toml",
9
13
  ".github/workflows/carson-lint.yml",
10
- ".github/.mega-linter.yml"
14
+ ".github/.mega-linter.yml",
15
+ ".github/carson.md",
16
+ ".github/copilot-instructions.md",
17
+ ".github/CLAUDE.md",
18
+ ".github/AGENTS.md",
19
+ ".github/pull_request_template.md"
11
20
  ].freeze
12
21
 
13
22
  # Read-only template drift check; returns block when managed files are out of sync.
@@ -62,7 +71,7 @@ module Carson
62
71
  repos.each do |repo_path|
63
72
  repo_name = File.basename( repo_path )
64
73
  unless Dir.exist?( repo_path )
65
- puts_line "#{repo_name}: FAIL (path not found)"
74
+ puts_line "#{repo_name}: not found"
66
75
  record_batch_skip( command: "template_check", repo_path: repo_path, reason: "path not found" )
67
76
  failed += 1
68
77
  next
@@ -80,7 +89,7 @@ module Carson
80
89
  drifted += 1
81
90
  end
82
91
  rescue StandardError => exception
83
- puts_line "#{repo_name}: FAIL (#{exception.message})"
92
+ puts_line "#{repo_name}: could not complete (#{exception.message})"
84
93
  record_batch_skip( command: "template_check", repo_path: repo_path, reason: exception.message )
85
94
  failed += 1
86
95
  end
@@ -312,7 +321,7 @@ module Carson
312
321
 
313
322
  def template_superseded_present_in( root: )
314
323
  SUPERSEDED.select do |file|
315
- File.file?( File.join( root, file ) )
324
+ File.file?( File.join( root, file ) ) && !config.template_managed_files.include?( file )
316
325
  end
317
326
  end
318
327
 
@@ -323,7 +332,7 @@ module Carson
323
332
  def template_superseded_present
324
333
  SUPERSEDED.select do |file|
325
334
  file_path = resolve_repo_path!( relative_path: file, label: "superseded file #{file}" )
326
- File.file?( file_path )
335
+ File.file?( file_path ) && !config.template_managed_files.include?( file )
327
336
  end
328
337
  end
329
338
 
@@ -352,10 +361,19 @@ module Carson
352
361
  def template_source_path( managed_file: )
353
362
  relative_within_github = managed_file.delete_prefix( ".github/" )
354
363
 
355
- canonical = config.template_canonical
364
+ canonical = config.lint_canonical
356
365
  if canonical && !canonical.empty?
357
- canonical_path = File.join( canonical, relative_within_github )
358
- return canonical_path if File.file?( canonical_path )
366
+ canonical_candidates = [
367
+ File.join( canonical, relative_within_github ),
368
+ File.join( canonical, ".github", relative_within_github )
369
+ ]
370
+ if relative_within_github.start_with?( "linters/" ) && relative_within_github.count( "/" ) == 1
371
+ canonical_candidates << File.join( canonical, relative_within_github.delete_prefix( "linters/" ) )
372
+ end
373
+
374
+ canonical_candidates.each do |canonical_path|
375
+ return canonical_path if File.file?( canonical_path )
376
+ end
359
377
  end
360
378
 
361
379
  template_path = File.join( github_templates_dir, relative_within_github )