claude-attribution 1.9.0 → 1.9.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -68,6 +68,51 @@ Up to three workflows are installed into repos that use this tool — one always
68
68
 
69
69
  ---
70
70
 
71
+ ## Publishing this package to npm
72
+
73
+ This repo is set up to publish to npm from GitHub Actions on the **self-hosted runner** using an npm publish token. That avoids local OTP prompts while still fitting the repo's network restrictions.
74
+
75
+ ### One-time npm + GitHub setup
76
+
77
+ This part must be done by a package owner:
78
+
79
+ 1. In npm, open the `claude-attribution` package and set **Publishing access** to **Require two-factor authentication or a granular access token with bypass 2fa enabled**.
80
+ 2. Create a **granular npm access token** that can publish `claude-attribution` and has **bypass 2FA** enabled.
81
+ 3. In GitHub, save that token as the `NPM_TOKEN` Actions secret for this repository (or an org secret exposed to this repo).
82
+ 4. Optional: you can leave the npm trusted publisher entry in place, but this self-hosted workflow authenticates with `NPM_TOKEN`.
83
+ 5. Plan to rotate the npm token periodically. npm currently caps granular token lifetime at 90 days.
84
+
85
+ After that, the workflow in `.github/workflows/publish-npm.yml` can publish on the self-hosted runner without an interactive OTP prompt.
86
+
87
+ ### Release process
88
+
89
+ This repo uses a **bump → merge → tag** publish flow:
90
+
91
+ 1. Bump `package.json` to the release version and update `CHANGELOG.md` in the PR.
92
+ 2. Merge that PR to `main`.
93
+ 3. Tag the merged commit from `main`:
94
+
95
+ ```bash
96
+ git checkout main
97
+ git pull --ff-only
98
+ git tag v1.9.1
99
+ git push origin v1.9.1
100
+ ```
101
+
102
+ 4. GitHub Actions runs `publish-npm.yml` and publishes that exact version to npm.
103
+
104
+ ### Important rules
105
+
106
+ - The git tag, stripped of its leading `v`, must exactly match the `version` field in `package.json`.
107
+ - Example: tag `v1.9.1` requires `"version": "1.9.1"` in `package.json`
108
+ - The workflow fails if the tag and package version do not match.
109
+ - The workflow also fails if that package version is already published.
110
+ - `NPM_TOKEN` must exist in GitHub Actions before the tag is pushed.
111
+ - The npm token must be a granular publish token with bypass-2FA enabled for this package.
112
+ - The self-hosted workflow publishes **without** `--provenance` because npm provenance requires a supported cloud-hosted CI runner (such as GitHub-hosted runners) and is not accepted from self-hosted runners.
113
+
114
+ ---
115
+
71
116
  ## For Repo Maintainers: Installing Into a Repo
72
117
 
73
118
  ### Prerequisites
@@ -164,7 +209,7 @@ git push origin refs/notes/claude-attribution-map
164
209
  Marks every currently tracked file as AI-written at HEAD. After this, PR metrics will show:
165
210
  ```
166
211
  Codebase: ~100% AI (4150 / 4150 lines)
167
- This PR: 184 lines changed (4% of codebase) · 77% Claude edits · 142 AI lines
212
+ This PR: 184 lines changed (4% of codebase) · 77% AI edits · 142 AI-attributed changed lines
168
213
  ```
169
214
 
170
215
  **Option 2 — Repo is human-written, or a mix (`--human` / no flag):**
@@ -251,7 +296,7 @@ The metrics block injected into the PR body looks like (when the cumulative mini
251
296
  > ## AI Coding Metrics
252
297
  >
253
298
  > **Codebase: ~77% AI** (3200 / 4150 lines)
254
- > **This PR:** 184 lines changed (4% of codebase) · 77% AI edits · 142 AI lines
299
+ > **This PR:** 184 lines changed (4% of codebase) · 77% AI edits · 142 AI-attributed changed lines
255
300
  > **Session:** 12 prompts · 24m total (18m AI · 6m human)
256
301
  > **Assistant runtime:** Claude Code (claude-sonnet-4-6)
257
302
  >
@@ -278,6 +323,8 @@ For **Copilot CLI** sessions, the same block is rendered with provider-aware dif
278
323
  - model usage shows **Known Tokens** instead of Claude-style input/output/cache columns
279
324
  - cost is shown as **unavailable** unless durable local billing data exists
280
325
 
326
+ The `This PR` line is based on the branch diff against the base branch, not the full final size of every touched file.
327
+
281
328
  The block is wrapped in HTML comments for idempotent updates — re-running replaces the existing block rather than appending:
282
329
 
283
330
  ```
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "claude-attribution",
3
- "version": "1.9.0",
3
+ "version": "1.9.4",
4
4
  "description": "AI code attribution tracking for Claude Code and GitHub Copilot sessions",
5
5
  "type": "module",
6
6
  "bin": {
@@ -2,10 +2,14 @@ import { describe, expect, test } from "bun:test";
2
2
  import { mkdir, readFile, writeFile } from "fs/promises";
3
3
  import { join } from "path";
4
4
  import { saveCheckpoint } from "../attribution/checkpoint.ts";
5
- import type { AttributionResult } from "../attribution/differ.ts";
5
+ import { hashLine, type AttributionResult } from "../attribution/differ.ts";
6
6
  import { NOTES_REF, writeNote } from "../attribution/git-notes.ts";
7
7
  import { pushNotesRef } from "../attribution/notes-sync.ts";
8
- import { writeMinimap, type MinimapResult } from "../attribution/minimap.ts";
8
+ import {
9
+ hashSetToString,
10
+ writeMinimap,
11
+ type MinimapResult,
12
+ } from "../attribution/minimap.ts";
9
13
  import { readInstalledRepoRegistry } from "../setup/installed-repos.ts";
10
14
  import {
11
15
  CLI_BIN,
@@ -94,8 +98,9 @@ describe("integration", () => {
94
98
  cwd: ctx.repo,
95
99
  env: {
96
100
  HOME: ctx.home,
97
- COPILOT_CLI: "1",
98
- COPILOT_CLI_BINARY_VERSION: "1.0.14",
101
+ COPILOT_CLI: "",
102
+ COPILOT_CLI_BINARY_VERSION: "",
103
+ ANTHROPIC_MODEL: "claude-sonnet-4-6",
99
104
  },
100
105
  });
101
106
 
@@ -117,9 +122,9 @@ describe("integration", () => {
117
122
  expect(note.sessionMetrics?.humanPromptCount).toBe(1);
118
123
  expect(note.sessionMetrics?.activeMinutes).toBe(2);
119
124
  expect(note.assistantRuntime).toEqual({
120
- vendor: "copilot",
121
- client: "GitHub Copilot CLI",
122
- clientVersion: "1.0.14",
125
+ vendor: "claude",
126
+ client: "Claude Code",
127
+ modelFamily: "claude-sonnet-4-6",
123
128
  });
124
129
  } finally {
125
130
  await ctx.cleanup();
@@ -214,6 +219,105 @@ describe("integration", () => {
214
219
  }
215
220
  });
216
221
 
222
+ test("metrics headline uses diff-scoped PR stats instead of full touched file totals", async () => {
223
+ const ctx = await createTempContext("claude-attribution-pr-diff");
224
+ try {
225
+ await initGitRepo(ctx.repo);
226
+ await runCommand("git", ["branch", "-M", "main"], { cwd: ctx.repo });
227
+
228
+ const remote = join(ctx.root, "origin.git");
229
+ await runCommand("git", ["init", "--bare", remote], { cwd: ctx.root });
230
+ await runCommand("git", ["remote", "add", "origin", remote], {
231
+ cwd: ctx.repo,
232
+ });
233
+
234
+ const baseContent =
235
+ Array.from({ length: 100 }, (_, i) => `line ${i + 1}`).join("\n") + "\n";
236
+ await writeFile(join(ctx.repo, "README.md"), baseContent);
237
+ await commitAll(ctx.repo, "initial");
238
+ const baseSha = await currentSha(ctx.repo);
239
+ await runCommand("git", ["push", "-u", "origin", "main"], { cwd: ctx.repo });
240
+
241
+ const baseMinimap: MinimapResult = {
242
+ commit: baseSha,
243
+ timestamp: "2026-03-01T10:00:00.000Z",
244
+ files: [
245
+ {
246
+ path: "README.md",
247
+ ai_hashes: "",
248
+ ai: 0,
249
+ human: 100,
250
+ total: 100,
251
+ pctAi: 0,
252
+ },
253
+ ],
254
+ totals: { ai: 0, human: 100, total: 100, pctAi: 0 },
255
+ };
256
+ await writeMinimap(baseMinimap, ctx.repo, baseSha);
257
+
258
+ await runCommand("git", ["checkout", "-b", "feature/diff-metrics"], {
259
+ cwd: ctx.repo,
260
+ });
261
+ await writeFile(
262
+ join(ctx.repo, "README.md"),
263
+ `${baseContent}AI-added line 1\nAI-added line 2\n`,
264
+ );
265
+ await commitAll(ctx.repo, "feature work");
266
+ const headSha = await currentSha(ctx.repo);
267
+
268
+ const note: AttributionResult = {
269
+ commit: headSha,
270
+ session: "session-pr-diff-1",
271
+ branch: "feature/diff-metrics",
272
+ timestamp: "2026-03-01T10:05:00.000Z",
273
+ files: [
274
+ {
275
+ path: "README.md",
276
+ ai: 2,
277
+ human: 100,
278
+ mixed: 0,
279
+ total: 102,
280
+ pctAi: 2,
281
+ },
282
+ ],
283
+ totals: { ai: 2, human: 100, mixed: 0, total: 102, pctAi: 2 },
284
+ };
285
+ await writeNote(note, ctx.repo, headSha);
286
+
287
+ const aiHashes = hashSetToString(
288
+ new Set([hashLine("AI-added line 1"), hashLine("AI-added line 2")]),
289
+ );
290
+ const headMinimap: MinimapResult = {
291
+ commit: headSha,
292
+ timestamp: "2026-03-01T10:05:00.000Z",
293
+ files: [
294
+ {
295
+ path: "README.md",
296
+ ai_hashes: aiHashes,
297
+ ai: 2,
298
+ human: 100,
299
+ total: 102,
300
+ pctAi: 2,
301
+ },
302
+ ],
303
+ totals: { ai: 2, human: 100, total: 102, pctAi: 2 },
304
+ };
305
+ await writeMinimap(headMinimap, ctx.repo, headSha);
306
+
307
+ const metrics = await runCommand(CLI_BIN, ["metrics"], {
308
+ cwd: ctx.repo,
309
+ env: { HOME: ctx.home },
310
+ });
311
+
312
+ expect(metrics.stdout).toContain(
313
+ "**This PR:** 2 lines changed (2% of codebase) · 100% AI edits · 2 AI-attributed changed lines",
314
+ );
315
+ expect(metrics.stdout).not.toContain("**This PR:** 102 lines changed");
316
+ } finally {
317
+ await ctx.cleanup();
318
+ }
319
+ });
320
+
217
321
  test("post-commit falls back to Copilot session-state metadata", async () => {
218
322
  const ctx = await createTempContext("copilot-attribution-commit");
219
323
  try {
@@ -314,6 +418,151 @@ describe("integration", () => {
314
418
  }
315
419
  });
316
420
 
421
+ test("Copilot sessions win over stale Claude session markers and transcripts", async () => {
422
+ const ctx = await createTempContext("copilot-session-precedence");
423
+ try {
424
+ await initGitRepo(ctx.repo);
425
+ const filePath = join(ctx.repo, "src.ts");
426
+ await writeFile(filePath, "const value = 1;\n");
427
+ await commitAll(ctx.repo, "initial");
428
+
429
+ const staleClaudeSession = "claude-session-stale-1";
430
+ await mkdir(join(ctx.repo, ".claude", "attribution-state"), {
431
+ recursive: true,
432
+ });
433
+ await writeFile(
434
+ join(ctx.repo, ".claude", "attribution-state", "current-session"),
435
+ staleClaudeSession,
436
+ );
437
+ await writeJsonl(join(ctx.repo, ".claude", "logs", "tool-usage.jsonl"), [
438
+ {
439
+ timestamp: "2026-03-30T01:09:44.678Z",
440
+ session: staleClaudeSession,
441
+ tool: "Bash",
442
+ },
443
+ ]);
444
+ await writeTranscript(ctx.home, ctx.repo, staleClaudeSession, [
445
+ {
446
+ type: "user",
447
+ timestamp: "2026-03-30T01:00:00.000Z",
448
+ },
449
+ {
450
+ type: "assistant",
451
+ timestamp: "2026-03-30T01:01:00.000Z",
452
+ message: {
453
+ model: "claude-sonnet-4-6",
454
+ usage: {
455
+ input_tokens: 2000,
456
+ output_tokens: 500,
457
+ cache_creation_input_tokens: 100,
458
+ cache_read_input_tokens: 800,
459
+ },
460
+ },
461
+ },
462
+ ]);
463
+
464
+ await writeFile(filePath, "const value = 1;\nconst nextValue = 2;\n");
465
+ await commitAll(ctx.repo, "feature work");
466
+
467
+ const branch = (
468
+ await runCommand("git", ["rev-parse", "--abbrev-ref", "HEAD"], {
469
+ cwd: ctx.repo,
470
+ })
471
+ ).stdout.trim();
472
+ const copilotSession = "copilot-session-precedence-1";
473
+ await writeCopilotSession(ctx.home, copilotSession, [
474
+ {
475
+ type: "session.start",
476
+ timestamp: "2026-04-01T10:00:00.000Z",
477
+ data: {
478
+ context: {
479
+ cwd: ctx.repo,
480
+ gitRoot: ctx.repo,
481
+ branch,
482
+ },
483
+ },
484
+ },
485
+ {
486
+ type: "user.message",
487
+ timestamp: "2026-04-01T10:00:00.000Z",
488
+ data: { content: "start work" },
489
+ },
490
+ {
491
+ type: "tool.execution_complete",
492
+ timestamp: "2026-04-01T10:01:00.000Z",
493
+ data: { model: "gpt-5.4" },
494
+ },
495
+ {
496
+ type: "assistant.message",
497
+ timestamp: "2026-04-01T10:01:00.000Z",
498
+ data: { outputTokens: 240 },
499
+ },
500
+ {
501
+ type: "assistant.turn_end",
502
+ timestamp: "2026-04-01T10:01:00.000Z",
503
+ },
504
+ ]);
505
+
506
+ await runCommand(CLI_BIN, ["hook", "post-commit"], {
507
+ cwd: ctx.repo,
508
+ env: {
509
+ HOME: ctx.home,
510
+ COPILOT_CLI: "1",
511
+ COPILOT_CLI_BINARY_VERSION: "1.0.15",
512
+ },
513
+ });
514
+
515
+ const note = JSON.parse(
516
+ (
517
+ await runCommand(
518
+ "git",
519
+ ["notes", "--ref", "claude-attribution", "show", "HEAD"],
520
+ { cwd: ctx.repo },
521
+ )
522
+ ).stdout,
523
+ ) as AttributionResult;
524
+
525
+ expect(note.session).toBe(copilotSession);
526
+ expect(note.assistantRuntime).toEqual({
527
+ vendor: "copilot",
528
+ client: "GitHub Copilot CLI",
529
+ clientVersion: "1.0.15",
530
+ modelFamily: "gpt-5.4",
531
+ });
532
+ expect(note.modelUsage).toEqual([
533
+ {
534
+ modelFull: "gpt-5.4",
535
+ modelShort: "Unknown",
536
+ calls: 1,
537
+ inputTokens: 0,
538
+ outputTokens: 240,
539
+ cacheCreationTokens: 0,
540
+ cacheReadTokens: 0,
541
+ },
542
+ ]);
543
+
544
+ const metrics = await runCommand(CLI_BIN, ["metrics"], {
545
+ cwd: ctx.repo,
546
+ env: {
547
+ HOME: ctx.home,
548
+ COPILOT_CLI: "1",
549
+ COPILOT_CLI_BINARY_VERSION: "1.0.15",
550
+ },
551
+ });
552
+
553
+ expect(metrics.stdout).toContain(
554
+ "**Assistant runtime:** GitHub Copilot CLI (v1.0.15 · gpt-5.4)",
555
+ );
556
+ expect(metrics.stdout).toContain("| Model | Calls | Known Tokens |");
557
+ expect(metrics.stdout).toContain("| gpt-5.4 | 1 | 240 |");
558
+ expect(metrics.stdout).toContain("**Estimated cost:** unavailable");
559
+ expect(metrics.stdout).not.toContain("claude-sonnet-4-6");
560
+ expect(metrics.stdout).not.toContain("**Estimated cost:** ~$");
561
+ } finally {
562
+ await ctx.cleanup();
563
+ }
564
+ });
565
+
317
566
  test("metrics synthesizes hosted Copilot bot commits without local notes", async () => {
318
567
  const ctx = await createTempContext("copilot-hosted-metrics");
319
568
  try {
@@ -47,6 +47,23 @@ await ctx.cleanup();
47
47
  }
48
48
  });
49
49
 
50
+ test("all-AI baseline records blank lines as AI-preserving hashes", async () => {
51
+ const ctx = await createTempContext("claude-attribution-minimap-blank-carry");
52
+ try {
53
+ await initGitRepo(ctx.repo);
54
+ await writeFile(join(ctx.repo, "doc.md"), "alpha\n\nbeta\n");
55
+ await runCommand("git", ["add", "."], { cwd: ctx.repo });
56
+ await runCommand("git", ["commit", "-m", "baseline"], { cwd: ctx.repo });
57
+
58
+ const baseline = await buildAllAiMinimap(ctx.repo);
59
+ const prevEntry = baseline.files.find((file) => file.path === "doc.md");
60
+ expect(prevEntry).toMatchObject({ ai: 3, human: 0, total: 3, pctAi: 100 });
61
+ expect(prevEntry?.ai_hashes.length).toBeGreaterThanOrEqual(16);
62
+ } finally {
63
+ await ctx.cleanup();
64
+ }
65
+ });
66
+
50
67
  test("marks only recently changed files as AI in ai-since minimaps", async () => {
51
68
  const ctx = await createTempContext("claude-attribution-minimap-ai-since");
52
69
  try {
@@ -99,7 +99,7 @@ describe("computeMinimapFile", () => {
99
99
  expect(hashSetFromString(result.ai_hashes).has(hash)).toBe(false);
100
100
  });
101
101
 
102
- test("blank line always human (never in ai_hashes)", () => {
102
+ test("blank line stays AI when the blank-line marker is present", () => {
103
103
  const aiHash = hashLine("");
104
104
  const result = computeMinimapFile(
105
105
  "foo.ts",
@@ -107,12 +107,12 @@ describe("computeMinimapFile", () => {
107
107
  new Set([aiHash]),
108
108
  new Set([aiHash]),
109
109
  );
110
- expect(result.ai).toBe(0);
111
- expect(result.human).toBe(1);
112
- expect(result.ai_hashes).toBe("");
110
+ expect(result.ai).toBe(1);
111
+ expect(result.human).toBe(0);
112
+ expect(result.ai_hashes).toBe(aiHash);
113
113
  });
114
114
 
115
- test("whitespace-only line treated as blank (always human)", () => {
115
+ test("whitespace-only line treated as blank and can carry AI attribution", () => {
116
116
  const line = " ";
117
117
  const aiHash = hashLine(line);
118
118
  const result = computeMinimapFile(
@@ -121,8 +121,8 @@ describe("computeMinimapFile", () => {
121
121
  new Set([aiHash]),
122
122
  new Set([aiHash]),
123
123
  );
124
- expect(result.ai).toBe(0);
125
- expect(result.human).toBe(1);
124
+ expect(result.ai).toBe(1);
125
+ expect(result.human).toBe(0);
126
126
  });
127
127
 
128
128
  test("MIXED line — not in currentAiHashes → human even if in prevAiHashSet", () => {
@@ -91,7 +91,12 @@ async function resolveSessionForCommit(
91
91
  repoRoot: string,
92
92
  changedFiles: string[],
93
93
  branch: string | null,
94
+ runtime: AssistantRuntimeInfo | null,
94
95
  ): Promise<string | null> {
96
+ if (runtime?.vendor === "copilot") {
97
+ return await resolveCopilotSessionId(repoRoot, branch).catch(() => null);
98
+ }
99
+
95
100
  const fromCurrentSession = await readCurrentSession(repoRoot);
96
101
  if (fromCurrentSession) return fromCurrentSession;
97
102
 
@@ -164,7 +169,13 @@ async function main() {
164
169
  .catch(() => null as string | null),
165
170
  renamedFilesInCommit(repoRoot),
166
171
  ]);
167
- const sessionId = await resolveSessionForCommit(repoRoot, changedFiles, branch);
172
+ const detectedRuntime = detectAssistantRuntime();
173
+ const sessionId = await resolveSessionForCommit(
174
+ repoRoot,
175
+ changedFiles,
176
+ branch,
177
+ detectedRuntime,
178
+ );
168
179
 
169
180
  // Process files in parallel — each file attribution is independent.
170
181
  // Return type includes attribution[] so the minimap block can build currentAiHashes.
@@ -250,8 +261,6 @@ async function main() {
250
261
  };
251
262
  const notesRefsToSync = [NOTES_REF];
252
263
 
253
- const detectedRuntime = detectAssistantRuntime();
254
-
255
264
  // Attach session usage metadata (non-fatal if unavailable)
256
265
  if (sessionId) {
257
266
  const [tx, toolEntries, agentEntries] = await Promise.all([
@@ -185,17 +185,21 @@ export async function renamedFilesInCommit(
185
185
  * Falls back to all commits reachable from HEAD if no remote ref is found.
186
186
  */
187
187
  export async function getBranchCommitShas(repoRoot: string): Promise<string[]> {
188
- const base = await run("git", ["merge-base", "HEAD", "origin/HEAD"], repoRoot)
189
- .catch(() => run("git", ["merge-base", "HEAD", "origin/main"], repoRoot))
190
- .catch(() => run("git", ["merge-base", "HEAD", "origin/master"], repoRoot))
191
- .catch(() => null);
192
- const range = base ? `${(base as string).trim()}..HEAD` : "HEAD";
188
+ const base = await getBranchBaseSha(repoRoot);
189
+ const range = base ? `${base}..HEAD` : "HEAD";
193
190
  const out = await run("git", ["log", "--format=%H", range], repoRoot).catch(
194
191
  () => "",
195
192
  );
196
193
  return out ? out.split("\n").filter(Boolean) : [];
197
194
  }
198
195
 
196
+ export async function getBranchBaseSha(repoRoot: string): Promise<string | null> {
197
+ return await run("git", ["merge-base", "HEAD", "origin/HEAD"], repoRoot)
198
+ .catch(() => run("git", ["merge-base", "HEAD", "origin/main"], repoRoot))
199
+ .catch(() => run("git", ["merge-base", "HEAD", "origin/master"], repoRoot))
200
+ .catch(() => null);
201
+ }
202
+
199
203
  /**
200
204
  * Read the committed content of a file at HEAD as a raw string.
201
205
  *
@@ -8,7 +8,8 @@
8
8
  * Design:
9
9
  * - Only `ai_hashes` is stored; Human = any committed line NOT in ai_hashes.
10
10
  * - ai_hashes is a concatenated string of 16-char hex hashes (no separator).
11
- * - Blank lines are always Human and never appear in ai_hashes.
11
+ * - Blank lines use the hash of the empty string so `init --ai` style baselines
12
+ * can preserve all-AI files across later commits.
12
13
  * - Full state (all tracked files) is stored on every commit for simple reads.
13
14
  *
14
15
  * Carry-forward algorithm (per committed line hash):
@@ -24,6 +25,7 @@ import { join } from "path";
24
25
  import { hashLine } from "./differ.ts";
25
26
 
26
27
  const execFileAsync = promisify(execFile);
28
+ const BLANK_LINE_HASH = hashLine("");
27
29
 
28
30
  export const MINIMAP_NOTES_REF = "refs/notes/claude-attribution-map";
29
31
 
@@ -81,8 +83,15 @@ export function computeMinimapFile(
81
83
 
82
84
  for (const line of committedLines) {
83
85
  if (line.trim() === "") {
84
- // Blank lines carry no attribution signal — always Human
85
- human++;
86
+ if (
87
+ currentAiHashes.has(BLANK_LINE_HASH) ||
88
+ prevAiHashSet.has(BLANK_LINE_HASH)
89
+ ) {
90
+ newAiHashes.add(BLANK_LINE_HASH);
91
+ ai++;
92
+ } else {
93
+ human++;
94
+ }
86
95
  continue;
87
96
  }
88
97
  const hash = hashLine(line);
@@ -232,7 +241,9 @@ export async function buildAllAiMinimap(
232
241
  const total = lines.length;
233
242
  const aiHashes = new Set<string>();
234
243
  for (const line of lines) {
235
- if (line.trim() !== "") aiHashes.add(hashLine(line));
244
+ aiHashes.add(
245
+ line.trim() === "" ? BLANK_LINE_HASH : hashLine(line),
246
+ );
236
247
  }
237
248
  return {
238
249
  path: relPath,
@@ -315,7 +326,9 @@ export async function buildAiSinceMinimap(
315
326
  }
316
327
  const aiHashes = new Set<string>();
317
328
  for (const line of lines) {
318
- if (line.trim() !== "") aiHashes.add(hashLine(line));
329
+ aiHashes.add(
330
+ line.trim() === "" ? BLANK_LINE_HASH : hashLine(line),
331
+ );
319
332
  }
320
333
  return {
321
334
  path: relPath,
@@ -18,18 +18,28 @@ import { type TranscriptResult } from "./transcript.ts";
18
18
  import {
19
19
  listNotes,
20
20
  readNote,
21
+ getBranchBaseSha,
21
22
  getBranchCommitShas,
22
23
  getCommitMeta,
23
24
  isKnownAiActorCommit,
24
25
  buildAllAiResult,
26
+ committedContent,
27
+ committedContentAt,
28
+ currentBranch,
25
29
  } from "../attribution/git-notes.ts";
26
30
  import {
27
31
  aggregateTotals,
28
32
  type AttributionResult,
29
33
  type FileAttribution,
34
+ hashLine,
30
35
  } from "../attribution/differ.ts";
31
36
  import { SESSION_ID_RE } from "../attribution/checkpoint.ts";
32
- import { readMinimap, listMinimapNotes } from "../attribution/minimap.ts";
37
+ import {
38
+ hashSetFromString,
39
+ readMinimap,
40
+ listMinimapNotes,
41
+ } from "../attribution/minimap.ts";
42
+ import { detectAssistantRuntime } from "../attribution/runtime.ts";
33
43
  import {
34
44
  buildSessionMetrics,
35
45
  mergeCountRecords,
@@ -58,6 +68,11 @@ export interface MetricsData {
58
68
  total: number;
59
69
  pctAi: number;
60
70
  } | null;
71
+ prDiffStats: {
72
+ ai: number;
73
+ total: number;
74
+ pctAi: number;
75
+ } | null;
61
76
  }
62
77
 
63
78
  async function readSessionStart(repoRoot: string): Promise<Date | null> {
@@ -78,15 +93,9 @@ async function readSessionStart(repoRoot: string): Promise<Date | null> {
78
93
  }
79
94
 
80
95
  async function getBranchStartTime(repoRoot: string): Promise<Date | null> {
81
- for (const ref of ["origin/HEAD", "origin/main", "origin/master"]) {
96
+ const forkPoint = await getBranchBaseSha(repoRoot);
97
+ if (forkPoint) {
82
98
  try {
83
- const { stdout: base } = await execFileAsync(
84
- "git",
85
- ["merge-base", "HEAD", ref],
86
- { cwd: repoRoot },
87
- );
88
- const forkPoint = base.trim();
89
- if (!forkPoint) continue;
90
99
  const { stdout: log } = await execFileAsync(
91
100
  "git",
92
101
  ["log", "--reverse", "--format=%ct", `${forkPoint}..HEAD`],
@@ -95,12 +104,191 @@ async function getBranchStartTime(repoRoot: string): Promise<Date | null> {
95
104
  const firstTs = log.trim().split("\n").filter(Boolean)[0];
96
105
  if (firstTs) return new Date(parseInt(firstTs, 10) * 1000);
97
106
  } catch {
98
- continue;
107
+ // Fall through.
99
108
  }
100
109
  }
101
110
  return null;
102
111
  }
103
112
 
113
+ function parseChangedLineNumbers(diff: string): {
114
+ added: number[];
115
+ removed: number[];
116
+ } {
117
+ const added: number[] = [];
118
+ const removed: number[] = [];
119
+ let nextNewLine = 0;
120
+ let nextOldLine = 0;
121
+
122
+ for (const line of diff.split("\n")) {
123
+ const hunk = /^@@ -(\d+)(?:,\d+)? \+(\d+)(?:,\d+)? @@/.exec(line);
124
+ if (hunk) {
125
+ nextOldLine = parseInt(hunk[1] ?? "0", 10);
126
+ nextNewLine = parseInt(hunk[2] ?? "0", 10);
127
+ continue;
128
+ }
129
+
130
+ if (line.startsWith("+++") || line.startsWith("---") || line === "") {
131
+ continue;
132
+ }
133
+
134
+ if (line.startsWith("+")) {
135
+ added.push(nextNewLine);
136
+ nextNewLine++;
137
+ continue;
138
+ }
139
+
140
+ if (line.startsWith("-")) {
141
+ removed.push(nextOldLine);
142
+ nextOldLine++;
143
+ continue;
144
+ }
145
+
146
+ if (line.startsWith("\\")) continue;
147
+
148
+ nextOldLine++;
149
+ nextNewLine++;
150
+ }
151
+
152
+ return { added, removed };
153
+ }
154
+
155
+ async function getBranchDiffStats(
156
+ repoRoot: string,
157
+ ): Promise<{ ai: number; total: number; pctAi: number } | null> {
158
+ const baseSha = await getBranchBaseSha(repoRoot);
159
+ if (!baseSha) return null;
160
+
161
+ const [headMinimap, baseMinimap, changedFilesOutput, renameStatusOutput] =
162
+ await Promise.all([
163
+ readMinimap(repoRoot, "HEAD"),
164
+ readMinimap(repoRoot, baseSha).catch(() => null),
165
+ execFileAsync(
166
+ "git",
167
+ ["diff", "--name-only", "--find-renames", `${baseSha}..HEAD`],
168
+ { cwd: repoRoot },
169
+ )
170
+ .then(({ stdout }) => stdout)
171
+ .catch(() => ""),
172
+ execFileAsync(
173
+ "git",
174
+ ["diff", "--name-status", "--find-renames", `${baseSha}..HEAD`],
175
+ { cwd: repoRoot },
176
+ )
177
+ .then(({ stdout }) => stdout)
178
+ .catch(() => ""),
179
+ ]);
180
+ if (!headMinimap) return null;
181
+
182
+ const headAiByPath = new Map(
183
+ headMinimap.files.map((file) => [file.path, hashSetFromString(file.ai_hashes)]),
184
+ );
185
+ const baseAiByPath = new Map(
186
+ (baseMinimap?.files ?? []).map((file) => [
187
+ file.path,
188
+ hashSetFromString(file.ai_hashes),
189
+ ]),
190
+ );
191
+ const renamedBasePathByCurrentPath = new Map<string, string>();
192
+ for (const line of renameStatusOutput.split("\n").filter(Boolean)) {
193
+ const [status, oldPath, newPath] = line.split("\t");
194
+ if (!status?.startsWith("R") || !oldPath || !newPath) continue;
195
+ renamedBasePathByCurrentPath.set(newPath, oldPath);
196
+ }
197
+
198
+ const changedFiles = changedFilesOutput.split("\n").filter(Boolean);
199
+ if (changedFiles.length === 0) {
200
+ return { ai: 0, total: 0, pctAi: 0 };
201
+ }
202
+
203
+ const fileStats = await Promise.all(
204
+ changedFiles.map(async (path) => {
205
+ const basePath = renamedBasePathByCurrentPath.get(path) ?? path;
206
+ const [numstatResult, diffResult, headContent, baseContent] = await Promise.all([
207
+ execFileAsync(
208
+ "git",
209
+ [
210
+ "diff",
211
+ "--numstat",
212
+ "--find-renames",
213
+ `${baseSha}..HEAD`,
214
+ "--",
215
+ path,
216
+ ],
217
+ { cwd: repoRoot },
218
+ )
219
+ .then(({ stdout }) => stdout.trim())
220
+ .catch(() => ""),
221
+ execFileAsync(
222
+ "git",
223
+ [
224
+ "diff",
225
+ "--unified=0",
226
+ "--no-color",
227
+ "--find-renames",
228
+ `${baseSha}..HEAD`,
229
+ "--",
230
+ path,
231
+ ],
232
+ { cwd: repoRoot },
233
+ )
234
+ .then(({ stdout }) => stdout)
235
+ .catch(() => ""),
236
+ committedContent(repoRoot, path),
237
+ committedContentAt(repoRoot, baseSha, basePath),
238
+ ]);
239
+
240
+ const numstatLine = numstatResult.split("\n").find(Boolean);
241
+ if (!numstatLine) return { ai: 0, total: 0 };
242
+
243
+ const [additionsRaw, deletionsRaw] = numstatLine.split("\t");
244
+ if (!additionsRaw || !deletionsRaw) return { ai: 0, total: 0 };
245
+ if (additionsRaw === "-" || deletionsRaw === "-") {
246
+ return { ai: 0, total: 0 };
247
+ }
248
+
249
+ const additions = parseInt(additionsRaw, 10);
250
+ const deletions = parseInt(deletionsRaw, 10);
251
+ const total = additions + deletions;
252
+ if (total === 0) return { ai: 0, total: 0 };
253
+
254
+ const { added, removed } = parseChangedLineNumbers(diffResult);
255
+ const headLines = headContent?.split("\n") ?? [];
256
+ const baseLines = baseContent?.split("\n") ?? [];
257
+ const headAiHashes = headAiByPath.get(path) ?? new Set<string>();
258
+ const baseAiHashes = baseAiByPath.get(basePath) ?? new Set<string>();
259
+
260
+ let ai = 0;
261
+ for (const lineNumber of added) {
262
+ const line = headLines[lineNumber - 1];
263
+ if (line !== undefined && headAiHashes.has(hashLine(line))) {
264
+ ai++;
265
+ }
266
+ }
267
+ for (const lineNumber of removed) {
268
+ const line = baseLines[lineNumber - 1];
269
+ if (line !== undefined && baseAiHashes.has(hashLine(line))) {
270
+ ai++;
271
+ }
272
+ }
273
+
274
+ return { ai, total };
275
+ }),
276
+ );
277
+
278
+ const totals = fileStats.reduce(
279
+ (acc, file) => ({
280
+ ai: acc.ai + file.ai,
281
+ total: acc.total + file.total,
282
+ }),
283
+ { ai: 0, total: 0 },
284
+ );
285
+ return {
286
+ ai: totals.ai,
287
+ total: totals.total,
288
+ pctAi: totals.total > 0 ? Math.round((totals.ai / totals.total) * 100) : 0,
289
+ };
290
+ }
291
+
104
292
  /**
105
293
  * Resolve the most recent Claude session ID for this repo.
106
294
  *
@@ -113,6 +301,18 @@ async function getBranchStartTime(repoRoot: string): Promise<Date | null> {
113
301
  * table, tool counts) even when attribution was installed after the editing session.
114
302
  */
115
303
  async function resolveSessionId(repoRoot: string): Promise<string | null> {
304
+ const runtime = detectAssistantRuntime();
305
+ if (runtime?.vendor === "copilot") {
306
+ const branch = await currentBranch(repoRoot).catch(() => null);
307
+ const copilotSessionId = await resolveCopilotSessionId(repoRoot, branch).catch(
308
+ () => null,
309
+ );
310
+ if (copilotSessionId && SESSION_ID_RE.test(copilotSessionId)) {
311
+ return copilotSessionId;
312
+ }
313
+ return null;
314
+ }
315
+
116
316
  // Strategy 1: tool-usage.jsonl
117
317
  const toolLog = join(repoRoot, ".claude", "logs", "tool-usage.jsonl");
118
318
  if (existsSync(toolLog)) {
@@ -322,9 +522,10 @@ export async function collectMetrics(
322
522
  const sessionStart =
323
523
  (await readSessionStart(root)) ?? (await getBranchStartTime(root));
324
524
 
325
- const [attributions, minimapTotals] = await Promise.all([
525
+ const [attributions, minimapTotals, prDiffStats] = await Promise.all([
326
526
  getBranchAttribution(root, sessionStart),
327
527
  getMinimapTotals(root),
528
+ getBranchDiffStats(root),
328
529
  ]);
329
530
 
330
531
  // Last-wins per file for attribution
@@ -420,9 +621,10 @@ export async function collectMetrics(
420
621
  )
421
622
  .map(([id]) => id);
422
623
  const primarySessionId =
624
+ orderedSessionIds.find((id) => transcriptsBySession.has(id)) ??
423
625
  (SESSION_ID_RE.test(sessionId) && transcriptsBySession.has(sessionId)
424
626
  ? sessionId
425
- : orderedSessionIds.find((id) => transcriptsBySession.has(id))) ?? null;
627
+ : null);
426
628
  const transcript = primarySessionId
427
629
  ? transcriptsBySession.get(primarySessionId) ?? null
428
630
  : null;
@@ -439,6 +641,7 @@ export async function collectMetrics(
439
641
  lastSeenByFile,
440
642
  allTranscripts,
441
643
  minimapTotals,
644
+ prDiffStats,
442
645
  };
443
646
  }
444
647
 
@@ -528,6 +731,7 @@ export function renderMetrics(data: MetricsData): string {
528
731
  lastSeenByFile,
529
732
  allTranscripts,
530
733
  minimapTotals,
734
+ prDiffStats,
531
735
  } = data;
532
736
 
533
737
  const lines: string[] = [];
@@ -555,18 +759,16 @@ export function renderMetrics(data: MetricsData): string {
555
759
  out(
556
760
  `**Codebase: ~${minimapTotals.pctAi}% AI** (${minimapTotals.ai} / ${minimapTotals.total} lines)`,
557
761
  );
558
- if (hasAttribution) {
559
- const {
560
- ai: prAi,
561
- total: prTotal,
562
- pctAi: prPctAi,
563
- } = aggregateTotals(allFileStats);
762
+ const fallbackPrStats = hasAttribution ? aggregateTotals(allFileStats) : null;
763
+ const effectivePrStats = prDiffStats ?? fallbackPrStats;
764
+ if (effectivePrStats) {
765
+ const { ai: prAi, total: prTotal, pctAi: prPctAi } = effectivePrStats;
564
766
  const codebasePct =
565
767
  minimapTotals.total > 0
566
768
  ? Math.round((prTotal / minimapTotals.total) * 100)
567
769
  : 0;
568
770
  out(
569
- `**This PR:** ${prTotal} lines changed (${codebasePct}% of codebase) · ${prPctAi}% AI edits · ${prAi} AI lines`,
771
+ `**This PR:** ${prTotal} lines changed (${codebasePct}% of codebase) · ${prPctAi}% AI edits · ${prAi} AI-attributed changed lines`,
570
772
  );
571
773
  }
572
774
  out();