@mesadev/agentblame 0.2.2 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/blame.js +8 -8
- package/dist/blame.js.map +1 -1
- package/dist/capture.js +23 -23
- package/dist/capture.js.map +1 -1
- package/dist/index.js +43 -4
- package/dist/index.js.map +1 -1
- package/dist/lib/database.d.ts +14 -14
- package/dist/lib/database.js +23 -16
- package/dist/lib/database.js.map +1 -1
- package/dist/lib/git/gitDiff.js +43 -43
- package/dist/lib/git/gitDiff.js.map +1 -1
- package/dist/lib/git/gitNotes.js +5 -5
- package/dist/lib/git/gitNotes.js.map +1 -1
- package/dist/lib/hooks.d.ts +1 -0
- package/dist/lib/hooks.js +22 -20
- package/dist/lib/hooks.js.map +1 -1
- package/dist/lib/types.d.ts +87 -30
- package/dist/post-merge.js +799 -0
- package/dist/post-merge.js.map +1 -0
- package/dist/process.js +27 -25
- package/dist/process.js.map +1 -1
- package/dist/sync.js +10 -10
- package/dist/sync.js.map +1 -1
- package/package.json +1 -1
- package/dist/cleanup.d.ts +0 -10
- package/dist/cleanup.js +0 -136
- package/dist/cleanup.js.map +0 -1
- package/dist/lib/db.d.ts +0 -54
- package/dist/lib/db.js +0 -291
- package/dist/lib/db.js.map +0 -1
- package/dist/transfer-notes.js +0 -426
- package/dist/transfer-notes.js.map +0 -1
- /package/dist/{transfer-notes.d.ts → post-merge.d.ts} +0 -0
|
@@ -0,0 +1,799 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
"use strict";
|
|
3
|
+
/**
|
|
4
|
+
* Agent Blame - Transfer Notes Action
|
|
5
|
+
*
|
|
6
|
+
* Transfers git notes from PR commits to merge/squash/rebase commits.
|
|
7
|
+
* Runs as part of GitHub Actions workflow after PR merge.
|
|
8
|
+
*
|
|
9
|
+
* Environment variables (set by GitHub Actions):
|
|
10
|
+
* PR_NUMBER - The PR number
|
|
11
|
+
* PR_TITLE - The PR title
|
|
12
|
+
* BASE_REF - Target branch (e.g., main)
|
|
13
|
+
* BASE_SHA - Base commit SHA before merge
|
|
14
|
+
* HEAD_SHA - Last commit SHA on feature branch
|
|
15
|
+
* MERGE_SHA - The merge commit SHA (for merge/squash)
|
|
16
|
+
*/
|
|
17
|
+
Object.defineProperty(exports, "__esModule", { value: true });
const node_child_process_1 = require("node:child_process");
const node_crypto_1 = require("node:crypto");
|
|
19
|
+
// Get environment variables (set by the GitHub Actions workflow).
// Every value falls back to a safe default so module load never throws;
// downstream code treats "" as "not provided".
const PR_NUMBER = process.env.PR_NUMBER || "";
const PR_TITLE = process.env.PR_TITLE || "";
const BASE_SHA = process.env.BASE_SHA || "";
const HEAD_SHA = process.env.HEAD_SHA || "";
const MERGE_SHA = process.env.MERGE_SHA || "";
const PR_AUTHOR = process.env.PR_AUTHOR || "unknown";
// Analytics notes ref (separate from attribution notes)
const ANALYTICS_REF = "refs/notes/agentblame-analytics";
// We store analytics on the repo's first commit (root),
// reachable via a tag with this name (see getOrCreateAnalyticsAnchor).
const ANALYTICS_ANCHOR = "agentblame-analytics-anchor";
|
|
30
|
+
function run(cmd) {
    // Execute a shell command and return its trimmed stdout. Any failure
    // (non-zero exit, spawn error) is deliberately treated as "no output"
    // so callers can branch on an empty string.
    let output;
    try {
        output = (0, node_child_process_1.execSync)(cmd, { encoding: "utf8" });
    }
    catch {
        return "";
    }
    return output.trim();
}
|
|
38
|
+
function log(msg) {
    // Prefix all output so it is easy to spot/grep in the Actions log.
    const prefixed = `[agentblame] ${msg}`;
    console.log(prefixed);
}
|
|
41
|
+
/**
|
|
42
|
+
* Detect what type of merge was performed
|
|
43
|
+
*/
|
|
44
|
+
function detectMergeType() {
    // Detect how the PR landed: "merge_commit", "squash", or "rebase".
    const mergeCommit = MERGE_SHA;
    if (!mergeCommit) {
        log("No merge commit SHA, assuming rebase");
        return "rebase";
    }
    // Check number of parents
    const parents = run(`git rev-list --parents -n 1 ${mergeCommit}`).split(" ");
    const parentCount = parents.length - 1; // First element is the commit itself
    if (parentCount > 1) {
        // Multiple parents = merge commit
        log("Detected: Merge commit (multiple parents)");
        return "merge_commit";
    }
    // Single parent - could be squash or rebase.
    // Check if commit message contains PR number (squash pattern).
    const commitMsg = run(`git log -1 --format=%s ${mergeCommit}`);
    // Bug fix: String.includes("") is always true, so empty PR_NUMBER or
    // PR_TITLE env vars previously classified EVERY single-parent commit
    // as a squash merge. Only match on non-empty values.
    const referencesPR = (PR_NUMBER !== "" && commitMsg.includes(`#${PR_NUMBER}`)) ||
        (PR_TITLE !== "" && commitMsg.includes(PR_TITLE));
    if (referencesPR) {
        log("Detected: Squash merge (single commit with PR reference)");
        return "squash";
    }
    log("Detected: Rebase merge");
    return "rebase";
}
|
|
69
|
+
/**
|
|
70
|
+
* Get all commits that were in the PR (between base and head)
|
|
71
|
+
*/
|
|
72
|
+
function getPRCommits() {
    // Commits reachable from HEAD_SHA but not from BASE_SHA, i.e. the
    // commits the feature branch added.
    const revList = run(`git rev-list ${BASE_SHA}..${HEAD_SHA}`);
    return revList ? revList.split("\n").filter(Boolean) : [];
}
|
|
79
|
+
/**
|
|
80
|
+
* Read agentblame note from a commit
|
|
81
|
+
*/
|
|
82
|
+
function readNote(sha) {
    // Read and parse the agentblame attribution note for a commit.
    // Missing note or malformed JSON both yield null.
    const raw = run(`git notes --ref=refs/notes/agentblame show ${sha} 2>/dev/null`);
    if (!raw) {
        return null;
    }
    try {
        return JSON.parse(raw);
    }
    catch {
        return null;
    }
}
|
|
93
|
+
/**
|
|
94
|
+
* Write agentblame note to a commit
|
|
95
|
+
*/
|
|
96
|
+
function writeNote(sha, attribution) {
    // Serialize the attribution payload and attach it as a git note,
    // overwriting (-f) any existing note on the commit.
    const payload = JSON.stringify(attribution);
    try {
        // Use spawnSync with array args to avoid shell injection
        const proc = (0, node_child_process_1.spawnSync)("git", ["notes", "--ref=refs/notes/agentblame", "add", "-f", "-m", payload, sha], { encoding: "utf8" });
        if (proc.status === 0) {
            return true;
        }
        log(`Failed to write note to ${sha}: ${proc.stderr}`);
    }
    catch (err) {
        log(`Failed to write note to ${sha}: ${err}`);
    }
    return false;
}
|
|
112
|
+
/**
|
|
113
|
+
* Collect all attributions from PR commits, including original content
|
|
114
|
+
*
|
|
115
|
+
* The contentHash in attributions is the hash of the FIRST line in the range.
|
|
116
|
+
* We need to find that line in the commit's diff to extract the full content.
|
|
117
|
+
*/
|
|
118
|
+
function collectPRAttributions(prCommits) {
    // byHash: first-line contentHash -> every attribution sharing that hash.
    const byHash = new Map();
    // withContent: attributions augmented with the full original text of
    // their line range (used later for containment matching).
    const withContent = [];
    for (const sha of prCommits) {
        const note = readNote(sha);
        if (!note?.attributions)
            continue;
        // Get the commit's diff with per-line hashes
        const hunks = getCommitHunks(sha);
        // Build a map from per-line contentHash to line data
        // Also build a map from path+lineNumber to content for range extraction
        const linesByHash = new Map();
        const linesByLocation = new Map();
        for (const hunk of hunks) {
            for (const line of hunk.lines) {
                linesByHash.set(line.contentHash, {
                    path: hunk.path,
                    lineNumber: line.lineNumber,
                    content: line.content,
                });
                linesByLocation.set(`${hunk.path}:${line.lineNumber}`, line.content);
            }
        }
        for (const attr of note.attributions) {
            const hash = attr.contentHash;
            if (!byHash.has(hash)) {
                byHash.set(hash, []);
            }
            byHash.get(hash)?.push(attr);
            // Extract the full content for this attribution range
            // The contentHash is for the first line; we need to get all lines in the range
            const rangeLines = [];
            for (let lineNum = attr.startLine; lineNum <= attr.endLine; lineNum++) {
                const lineContent = linesByLocation.get(`${attr.path}:${lineNum}`);
                if (lineContent !== undefined) {
                    // Lines missing from the diff (e.g. context lines) are
                    // silently skipped, so rangeLines may be a partial range.
                    rangeLines.push(lineContent);
                }
            }
            if (rangeLines.length > 0) {
                withContent.push({ ...attr, originalContent: rangeLines.join("\n") });
            }
            else {
                // Fallback: try to find by hash (first line)
                const lineData = linesByHash.get(hash);
                if (lineData) {
                    withContent.push({ ...attr, originalContent: lineData.content });
                }
            }
        }
    }
    // NOTE(review): attributions whose range cannot be resolved at all are
    // counted in byHash but absent from withContent — confirm intended.
    return { byHash, withContent };
}
|
|
170
|
+
/**
|
|
171
|
+
* Get the diff of a commit and extract content with per-line hashes
|
|
172
|
+
* This matches the behavior of lib/git/gitDiff.ts parseDiff()
|
|
173
|
+
*/
|
|
174
|
+
function getCommitHunks(sha) {
    // Parse `git diff-tree -p` output into hunks of ADDED lines only.
    // Each hunk carries a whole-hunk contentHash plus per-line hashes,
    // matching the behavior of lib/git/gitDiff.ts parseDiff().
    const diff = run(`git diff-tree -p ${sha}`);
    if (!diff)
        return [];
    const hunks = [];
    let currentFile = "";
    let lineNumber = 0;
    let hunkLines = [];
    let startLine = 0;
    // Flush the accumulated run of added lines as one hunk entry.
    // (The original inlined this identical block four times.)
    const flushHunk = () => {
        if (hunkLines.length === 0 || !currentFile)
            return;
        const content = hunkLines.map((l) => l.content).join("\n");
        hunks.push({
            path: currentFile,
            startLine,
            endLine: startLine + hunkLines.length - 1,
            content,
            contentHash: computeHash(content),
            lines: hunkLines,
        });
        hunkLines = [];
    };
    for (const line of diff.split("\n")) {
        // New file header
        if (line.startsWith("+++ b/")) {
            flushHunk();
            currentFile = line.slice(6);
            continue;
        }
        // Hunk header: @@ -old,count +new,count @@
        if (line.startsWith("@@")) {
            flushHunk();
            const match = line.match(/@@ -\d+(?:,\d+)? \+(\d+)/);
            if (match) {
                lineNumber = parseInt(match[1], 10);
                startLine = lineNumber;
            }
            continue;
        }
        // Added line
        if (line.startsWith("+") && !line.startsWith("+++")) {
            if (hunkLines.length === 0) {
                startLine = lineNumber;
            }
            const content = line.slice(1);
            hunkLines.push({
                lineNumber,
                content,
                contentHash: computeHash(content),
            });
            lineNumber++;
            continue;
        }
        // Context line (removed lines do not advance the new-file counter)
        if (!line.startsWith("-")) {
            // A non-added line ends the current run of added lines.
            flushHunk();
            lineNumber++;
        }
    }
    // Save last hunk
    flushHunk();
    return hunks;
}
|
|
271
|
+
/**
|
|
272
|
+
* Compute SHA256 hash of content
|
|
273
|
+
*/
|
|
274
|
+
function computeHash(content) {
    // node:crypto is now required once at module load (top of file)
    // instead of re-requiring on every call in this hot path.
    const digest = node_crypto_1.createHash("sha256").update(content).digest("hex");
    return `sha256:${digest}`;
}
|
|
278
|
+
/**
|
|
279
|
+
* Find attributions whose content is contained within the hunk content
|
|
280
|
+
* Returns attributions with calculated precise line numbers
|
|
281
|
+
*/
|
|
282
|
+
function findContainedAttributions(hunk, attributions) {
    const results = [];
    // Hoisted out of the loop: invariant per hunk.
    const hunkFileName = hunk.path.split("/").pop();
    for (const attr of attributions) {
        // Check if file paths match. Paths may be recorded relative to
        // different roots, so compare basenames and path suffixes.
        const attrFileName = attr.path.split("/").pop();
        // Bug fix: the original used hunk.path.endsWith(attrFileName || ""),
        // and endsWith("") is always true — an empty basename made every
        // file "match". Require a non-empty basename for the suffix test.
        const sameFile = attrFileName === hunkFileName ||
            attr.path.endsWith(hunk.path) ||
            (!!attrFileName && hunk.path.endsWith(attrFileName));
        if (!sameFile)
            continue;
        // Check if AI content is contained in the hunk
        const aiContent = attr.originalContent.trim();
        // Bug fix: String.includes("") is always true, so an attribution
        // whose content trimmed to "" fabricated a match at the top of
        // every hunk. Skip empty content outright.
        if (!aiContent)
            continue;
        const hunkContent = hunk.content;
        if (!hunkContent.includes(aiContent))
            continue;
        // Calculate precise line numbers from the offset of the match.
        const offset = hunkContent.indexOf(aiContent);
        let startLine = hunk.startLine;
        if (offset > 0) {
            const contentBeforeAI = hunkContent.slice(0, offset);
            const linesBeforeAI = contentBeforeAI.split("\n").length - 1;
            startLine = hunk.startLine + linesBeforeAI;
        }
        const aiLineCount = aiContent.split("\n").length;
        const endLine = startLine + aiLineCount - 1;
        // Create clean attribution without originalContent
        const { originalContent: _, ...cleanAttr } = attr;
        results.push({
            ...cleanAttr,
            path: hunk.path,
            startLine: startLine,
            endLine: endLine,
        });
        log(` Contained match: ${hunk.path}:${startLine}-${endLine} (${attr.provider})`);
    }
    return results;
}
|
|
320
|
+
/**
|
|
321
|
+
* Transfer notes for a squash merge
|
|
322
|
+
*/
|
|
323
|
+
function handleSquashMerge(prCommits) {
    // Re-attaches AI attributions from the (now orphaned) PR commits onto
    // the single squash commit, by matching line content hashes and then
    // multi-line containment.
    log(`Transferring notes from ${prCommits.length} PR commits to squash commit ${MERGE_SHA}`);
    // Collect all attributions from PR commits
    const { byHash, withContent } = collectPRAttributions(prCommits);
    if (byHash.size === 0) {
        log("No attributions found in PR commits");
        return;
    }
    log(`Found ${byHash.size} unique content hashes, ${withContent.length} with content`);
    // Get hunks from the squash commit (with per-line hashes)
    const hunks = getCommitHunks(MERGE_SHA);
    log(`Squash commit has ${hunks.length} hunks`);
    // Build a map of per-line hashes in the squash commit
    const squashLinesByHash = new Map();
    for (const hunk of hunks) {
        for (const line of hunk.lines) {
            squashLinesByHash.set(line.contentHash, {
                path: hunk.path,
                lineNumber: line.lineNumber,
                content: line.content,
            });
        }
    }
    // Match attributions to squash commit
    const newAttributions = [];
    const matchedContentHashes = new Set();
    // First pass: exact line hash matches
    for (const [hash, attrs] of byHash) {
        const squashLine = squashLinesByHash.get(hash);
        if (squashLine && attrs.length > 0) {
            // Several PR attributions can share one hash; only the first is
            // carried over here.
            const attr = attrs[0];
            // For now, create single-line attribution
            // TODO: could try to find consecutive matched lines and merge them
            newAttributions.push({
                ...attr,
                path: squashLine.path,
                startLine: squashLine.lineNumber,
                endLine: squashLine.lineNumber,
            });
            matchedContentHashes.add(hash);
            log(` Line hash match: ${squashLine.path}:${squashLine.lineNumber} (${attr.provider})`);
        }
    }
    // Second pass: containment matching for multi-line attributions
    for (const hunk of hunks) {
        const unmatchedAttrs = withContent.filter((a) => !matchedContentHashes.has(a.contentHash));
        if (unmatchedAttrs.length === 0)
            continue;
        const containedMatches = findContainedAttributions(hunk, unmatchedAttrs);
        for (const match of containedMatches) {
            newAttributions.push(match);
            matchedContentHashes.add(match.contentHash);
        }
    }
    if (newAttributions.length === 0) {
        log("No attributions matched to squash commit");
        return;
    }
    // Merge consecutive attributions with same provider
    const mergedAttributions = mergeConsecutiveAttributions(newAttributions);
    // Write note to squash commit
    const note = {
        version: 2,
        timestamp: new Date().toISOString(),
        attributions: mergedAttributions,
    };
    if (writeNote(MERGE_SHA, note)) {
        log(`✓ Attached ${mergedAttributions.length} attribution(s) to squash commit`);
    }
}
|
|
393
|
+
/**
|
|
394
|
+
* Merge consecutive attributions with the same provider into ranges
|
|
395
|
+
*/
|
|
396
|
+
function mergeConsecutiveAttributions(attrs) {
    // Collapse touching/overlapping ranges in the same file with the same
    // provider into a single range; keeps the lowest confidence of the group.
    if (attrs.length === 0)
        return [];
    // Order by file first, then by position within the file.
    const sorted = [...attrs].sort((a, b) => (a.path !== b.path ? a.path.localeCompare(b.path) : a.startLine - b.startLine));
    const merged = [];
    let current = { ...sorted[0] };
    for (const next of sorted.slice(1)) {
        const joinable = current.path === next.path &&
            current.endLine >= next.startLine - 1 &&
            current.provider === next.provider;
        if (joinable) {
            // Extend the running range.
            current.endLine = Math.max(current.endLine, next.endLine);
            current.confidence = Math.min(current.confidence, next.confidence);
        }
        else {
            merged.push(current);
            current = { ...next };
        }
    }
    merged.push(current);
    return merged;
}
|
|
425
|
+
/**
|
|
426
|
+
* Transfer notes for a rebase merge
|
|
427
|
+
*/
|
|
428
|
+
function handleRebaseMerge(prCommits) {
    // Re-attaches AI attributions onto the rewritten commits created by a
    // rebase merge; matching mirrors handleSquashMerge but runs per commit.
    log(`Handling rebase merge: ${prCommits.length} original commits`);
    // Collect all attributions from PR commits
    const { byHash, withContent } = collectPRAttributions(prCommits);
    if (byHash.size === 0) {
        log("No attributions found in PR commits");
        return;
    }
    // Find the new commits on target branch after the base
    const newCommits = run(`git rev-list ${BASE_SHA}..HEAD`)
        .split("\n")
        .filter(Boolean);
    log(`Found ${newCommits.length} new commits after rebase`);
    let totalTransferred = 0;
    for (const newSha of newCommits) {
        const hunks = getCommitHunks(newSha);
        const newAttributions = [];
        const matchedContentHashes = new Set();
        // Build a map of per-line hashes for this commit
        const linesByHash = new Map();
        for (const hunk of hunks) {
            for (const line of hunk.lines) {
                linesByHash.set(line.contentHash, {
                    path: hunk.path,
                    lineNumber: line.lineNumber,
                });
            }
        }
        // First pass: exact line hash matches
        for (const [hash, attrs] of byHash) {
            const lineInfo = linesByHash.get(hash);
            if (lineInfo && attrs.length > 0) {
                // Only the first attribution sharing this hash is carried over.
                const attr = attrs[0];
                newAttributions.push({
                    ...attr,
                    path: lineInfo.path,
                    startLine: lineInfo.lineNumber,
                    endLine: lineInfo.lineNumber,
                });
                matchedContentHashes.add(hash);
            }
        }
        // Second pass: containment matching
        for (const hunk of hunks) {
            const unmatchedAttrs = withContent.filter((a) => !matchedContentHashes.has(a.contentHash));
            if (unmatchedAttrs.length === 0)
                continue;
            const containedMatches = findContainedAttributions(hunk, unmatchedAttrs);
            for (const match of containedMatches) {
                newAttributions.push(match);
                matchedContentHashes.add(match.contentHash);
            }
        }
        if (newAttributions.length > 0) {
            // Merge consecutive attributions
            const merged = mergeConsecutiveAttributions(newAttributions);
            const note = {
                version: 2,
                timestamp: new Date().toISOString(),
                attributions: merged,
            };
            if (writeNote(newSha, note)) {
                log(` ✓ ${newSha.slice(0, 7)}: ${merged.length} attribution(s)`);
                totalTransferred += merged.length;
            }
        }
    }
    log(`✓ Transferred ${totalTransferred} attribution(s) across ${newCommits.length} commits`);
}
|
|
497
|
+
// =============================================================================
|
|
498
|
+
// Analytics Aggregation
|
|
499
|
+
// =============================================================================
|
|
500
|
+
/**
|
|
501
|
+
* Get the root commit SHA (first commit in repo)
|
|
502
|
+
*/
|
|
503
|
+
function getRootCommit() {
    // A repo normally has one parentless commit; take the first if there
    // are several, and "" when the command yields nothing.
    const roots = run("git rev-list --max-parents=0 HEAD").split("\n");
    return roots[0] || "";
}
|
|
506
|
+
/**
|
|
507
|
+
* Get or create the analytics anchor tag
|
|
508
|
+
* Returns the SHA the tag points to (root commit)
|
|
509
|
+
*/
|
|
510
|
+
function getOrCreateAnalyticsAnchor() {
    // Fast path: the anchor tag already exists — return what it points to.
    const existingTag = run(`git rev-parse ${ANALYTICS_ANCHOR} 2>/dev/null`);
    if (existingTag) {
        return existingTag;
    }
    // Slow path: tag the repository's root commit as the anchor.
    const rootSha = getRootCommit();
    if (!rootSha) {
        log("Warning: Could not find root commit for analytics anchor");
        return "";
    }
    const result = (0, node_child_process_1.spawnSync)("git", ["tag", ANALYTICS_ANCHOR, rootSha], { encoding: "utf8" });
    if (result.status !== 0) {
        log(`Warning: Could not create analytics anchor tag: ${result.stderr}`);
        return "";
    }
    log(`Created analytics anchor tag at ${rootSha.slice(0, 7)}`);
    return rootSha;
}
|
|
530
|
+
/**
|
|
531
|
+
* Read existing analytics note
|
|
532
|
+
*/
|
|
533
|
+
function readAnalyticsNote() {
    // Analytics live in a single note attached to the anchor commit.
    const anchorSha = getOrCreateAnalyticsAnchor();
    if (!anchorSha) {
        return null;
    }
    const raw = run(`git notes --ref=${ANALYTICS_REF} show ${anchorSha} 2>/dev/null`);
    if (!raw) {
        return null;
    }
    try {
        const parsed = JSON.parse(raw);
        // Only the current schema version is usable; anything else is
        // treated as absent.
        return parsed.version === 2 ? parsed : null;
    }
    catch {
        return null;
    }
}
|
|
551
|
+
/**
|
|
552
|
+
* Write analytics note
|
|
553
|
+
*/
|
|
554
|
+
function writeAnalyticsNote(analytics) {
    const anchorSha = getOrCreateAnalyticsAnchor();
    if (!anchorSha) {
        return false;
    }
    const noteJson = JSON.stringify(analytics);
    // Overwrite (-f) the single analytics note on the anchor commit.
    const result = (0, node_child_process_1.spawnSync)("git", ["notes", `--ref=${ANALYTICS_REF}`, "add", "-f", "-m", noteJson, anchorSha], { encoding: "utf8" });
    if (result.status === 0) {
        return true;
    }
    log(`Failed to write analytics note: ${result.stderr}`);
    return false;
}
|
|
566
|
+
/**
|
|
567
|
+
* Get PR diff stats (additions/deletions)
|
|
568
|
+
*/
|
|
569
|
+
function getPRDiffStats() {
    // Shortstat looks like " 5 files changed, 120 insertions(+), 30 deletions(-)";
    // either clause may be absent when the diff only adds or only removes.
    const stat = run(`git diff --shortstat ${BASE_SHA}..${MERGE_SHA || "HEAD"}`);
    const insertions = stat.match(/(\d+) insertion/);
    const deletions = stat.match(/(\d+) deletion/);
    return {
        additions: insertions ? parseInt(insertions[1], 10) : 0,
        deletions: deletions ? parseInt(deletions[1], 10) : 0,
    };
}
|
|
579
|
+
/**
|
|
580
|
+
* Aggregate PR statistics from attribution notes
|
|
581
|
+
*/
|
|
582
|
+
function aggregatePRStats(attributions) {
    // Total AI-authored line count plus per-provider / per-model breakdowns.
    const byProvider = {};
    const byModel = {};
    let aiLines = 0;
    for (const { startLine, endLine, provider, model } of attributions) {
        // Ranges are inclusive on both ends.
        const lineCount = endLine - startLine + 1;
        aiLines += lineCount;
        byProvider[provider] = (byProvider[provider] ?? 0) + lineCount;
        if (model) {
            byModel[model] = (byModel[model] ?? 0) + lineCount;
        }
    }
    return { aiLines, byProvider, byModel };
}
|
|
599
|
+
/**
|
|
600
|
+
* Merge provider breakdowns
|
|
601
|
+
*/
|
|
602
|
+
function mergeProviders(a, b) {
    // Sum per-provider line counts; missing/undefined counts count as 0.
    const combined = { ...a };
    for (const [provider, count] of Object.entries(b)) {
        combined[provider] = (combined[provider] || 0) + (count || 0);
    }
    return combined;
}
|
|
610
|
+
/**
|
|
611
|
+
* Merge model breakdowns
|
|
612
|
+
*/
|
|
613
|
+
function mergeModels(a, b) {
    // Sum per-model line counts.
    const result = { ...a };
    for (const [key, value] of Object.entries(b)) {
        // Consistency fix: guard the incoming value like mergeProviders
        // does — an undefined count would otherwise poison the sum with NaN.
        result[key] = (result[key] || 0) + (value || 0);
    }
    return result;
}
|
|
620
|
+
/**
|
|
621
|
+
* Update analytics with current PR data
|
|
622
|
+
*/
|
|
623
|
+
function updateAnalytics(existing, prAttributions) {
    // Fold this PR's attribution data into the repo-wide analytics record
    // (summary totals, per-contributor stats, and a capped history list).
    const prStats = aggregatePRStats(prAttributions);
    const diffStats = getPRDiffStats();
    const now = new Date().toISOString();
    const today = now.split("T")[0];
    // Create history entry for this PR
    const historyEntry = {
        date: today,
        pr: parseInt(PR_NUMBER, 10) || 0,
        title: PR_TITLE.slice(0, 100), // Truncate long titles
        author: PR_AUTHOR,
        added: diffStats.additions,
        removed: diffStats.deletions,
        aiLines: prStats.aiLines,
        // undefined fields are dropped by JSON.stringify, keeping notes small.
        providers: Object.keys(prStats.byProvider).length > 0 ? prStats.byProvider : undefined,
        models: Object.keys(prStats.byModel).length > 0 ? prStats.byModel : undefined,
    };
    if (existing) {
        // Update existing analytics
        const newSummary = {
            totalLines: existing.summary.totalLines + diffStats.additions,
            aiLines: existing.summary.aiLines + prStats.aiLines,
            // NOTE(review): can go negative if aiLines exceeds additions for
            // this PR — confirm acceptable.
            humanLines: existing.summary.humanLines +
                (diffStats.additions - prStats.aiLines),
            providers: mergeProviders(existing.summary.providers, prStats.byProvider),
            models: mergeModels(existing.summary.models, prStats.byModel),
            updated: now,
        };
        // Update contributor stats
        // NOTE(review): { ...existing.contributors } is a shallow copy, so
        // authorStats below is the same object as in `existing` and is
        // mutated in place — confirm `existing` is not reused afterwards.
        const contributors = { ...existing.contributors };
        if (!contributors[PR_AUTHOR]) {
            contributors[PR_AUTHOR] = {
                totalLines: 0,
                aiLines: 0,
                providers: {},
                models: {},
                prCount: 0,
            };
        }
        const authorStats = contributors[PR_AUTHOR];
        authorStats.totalLines += diffStats.additions;
        authorStats.aiLines += prStats.aiLines;
        authorStats.providers = mergeProviders(authorStats.providers, prStats.byProvider);
        authorStats.models = mergeModels(authorStats.models, prStats.byModel);
        authorStats.prCount += 1;
        // Add to history (keep last 100 PRs)
        const history = [historyEntry, ...existing.history].slice(0, 100);
        return {
            version: 2,
            summary: newSummary,
            contributors,
            history,
        };
    }
    // Create new analytics
    const contributors = {
        [PR_AUTHOR]: {
            totalLines: diffStats.additions,
            aiLines: prStats.aiLines,
            providers: prStats.byProvider,
            models: prStats.byModel,
            prCount: 1,
        },
    };
    return {
        version: 2,
        summary: {
            totalLines: diffStats.additions,
            aiLines: prStats.aiLines,
            humanLines: diffStats.additions - prStats.aiLines,
            providers: prStats.byProvider,
            models: prStats.byModel,
            updated: now,
        },
        contributors,
        history: [historyEntry],
    };
}
|
|
701
|
+
/**
|
|
702
|
+
* Collect all attributions from the merge result
|
|
703
|
+
*/
|
|
704
|
+
function collectMergeAttributions(mergeType) {
    // Gathers every attribution that survived the merge, reading from
    // wherever the notes ended up for the given merge strategy.
    if (mergeType === "merge_commit") {
        // For merge commits, notes survive on original commits
        // Collect from all PR commits
        const prCommits = getPRCommits();
        const allAttributions = [];
        for (const sha of prCommits) {
            const note = readNote(sha);
            if (note?.attributions) {
                allAttributions.push(...note.attributions);
            }
        }
        return allAttributions;
    }
    // For squash/rebase, read from the merge commit(s)
    if (mergeType === "squash" && MERGE_SHA) {
        const note = readNote(MERGE_SHA);
        return note?.attributions || [];
    }
    if (mergeType === "rebase") {
        // Collect from all new commits after rebase
        const newCommits = run(`git rev-list ${BASE_SHA}..HEAD`)
            .split("\n")
            .filter(Boolean);
        const allAttributions = [];
        for (const sha of newCommits) {
            const note = readNote(sha);
            if (note?.attributions) {
                allAttributions.push(...note.attributions);
            }
        }
        return allAttributions;
    }
    // Unknown merge type (or squash without MERGE_SHA): nothing to collect.
    return [];
}
|
|
739
|
+
/**
|
|
740
|
+
* Update repository analytics after PR merge
|
|
741
|
+
*/
|
|
742
|
+
function updateRepositoryAnalytics(mergeType) {
    // Refresh the repo-wide analytics note with this PR's attribution data.
    log("Updating repository analytics...");
    // Collect all attributions from this PR
    const attributions = collectMergeAttributions(mergeType);
    log(`Collected ${attributions.length} attributions from PR`);
    // Read existing analytics
    const existing = readAnalyticsNote();
    if (existing) {
        log(`Found existing analytics: ${existing.history.length} PRs, ${existing.summary.totalLines} total lines`);
    }
    else {
        log("No existing analytics found, creating new");
    }
    // Update analytics
    const updated = updateAnalytics(existing, attributions);
    // Write updated analytics
    if (writeAnalyticsNote(updated)) {
        // Bug fix: totalLines can be 0 (e.g. a PR that only deletes lines),
        // and 0/0 previously logged "NaN%". Report 0% in that case.
        const pct = updated.summary.totalLines > 0
            ? Math.round((updated.summary.aiLines / updated.summary.totalLines) * 100)
            : 0;
        log(`✓ Updated analytics: ${updated.summary.aiLines}/${updated.summary.totalLines} AI lines (${pct}%)`);
    }
}
|
|
762
|
+
/**
|
|
763
|
+
* Main entry point
|
|
764
|
+
*/
|
|
765
|
+
async function main() {
    // Banner + merge context for the Actions log.
    log("Agent Blame - Transfer Notes");
    log(`PR #${PR_NUMBER}: ${PR_TITLE}`);
    log(`Base: ${BASE_SHA.slice(0, 7)}, Head: ${HEAD_SHA.slice(0, 7)}, Merge: ${MERGE_SHA.slice(0, 7)}`);
    const mergeType = detectMergeType();
    if (mergeType === "merge_commit") {
        // Notes already live on the original commits; only analytics need
        // refreshing.
        log("Merge commit detected - notes survive automatically on original commits");
        updateRepositoryAnalytics(mergeType);
        log("Done");
        return;
    }
    const prCommits = getPRCommits();
    if (prCommits.length === 0) {
        log("No PR commits found");
        return;
    }
    log(`Found ${prCommits.length} commits in PR`);
    switch (mergeType) {
        case "squash":
            handleSquashMerge(prCommits);
            break;
        case "rebase":
            handleRebaseMerge(prCommits);
            break;
    }
    // Update repository analytics (runs for all merge types)
    updateRepositoryAnalytics(mergeType);
    log("Done");
}
|
|
795
|
+
// Entry point: surface any uncaught error and exit non-zero so the
// GitHub Actions step is marked as failed.
main().catch((err) => {
    console.error("[agentblame] Error:", err);
    process.exit(1);
});
|
|
799
|
+
//# sourceMappingURL=post-merge.js.map
|