@mesadev/agentblame 0.2.11 → 3.0.0
This diff shows the changes between publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
- package/dist/agentblame.js +3500 -0
- package/dist/blame.d.ts +4 -1
- package/dist/blame.js +280 -78
- package/dist/blame.js.map +1 -1
- package/dist/capture.d.ts +4 -7
- package/dist/capture.js +464 -486
- package/dist/capture.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +334 -72
- package/dist/index.js.map +1 -1
- package/dist/lib/analytics.d.ts +179 -0
- package/dist/lib/analytics.js +833 -0
- package/dist/lib/analytics.js.map +1 -0
- package/dist/lib/attribution.d.ts +54 -0
- package/dist/lib/attribution.js +266 -0
- package/dist/lib/attribution.js.map +1 -0
- package/dist/lib/checkpoint.d.ts +97 -0
- package/dist/lib/checkpoint.js +441 -0
- package/dist/lib/checkpoint.js.map +1 -0
- package/dist/lib/config.d.ts +46 -0
- package/dist/lib/config.js +123 -0
- package/dist/lib/config.js.map +1 -0
- package/dist/lib/database.d.ts +115 -85
- package/dist/lib/database.js +305 -325
- package/dist/lib/database.js.map +1 -1
- package/dist/lib/delta.d.ts +78 -0
- package/dist/lib/delta.js +309 -0
- package/dist/lib/delta.js.map +1 -0
- package/dist/lib/git/gitBlame.js +9 -4
- package/dist/lib/git/gitBlame.js.map +1 -1
- package/dist/lib/git/gitConfig.d.ts +5 -3
- package/dist/lib/git/gitConfig.js +41 -6
- package/dist/lib/git/gitConfig.js.map +1 -1
- package/dist/lib/git/gitDiff.d.ts +13 -1
- package/dist/lib/git/gitDiff.js +39 -7
- package/dist/lib/git/gitDiff.js.map +1 -1
- package/dist/lib/git/gitNotes.d.ts +30 -4
- package/dist/lib/git/gitNotes.js +140 -24
- package/dist/lib/git/gitNotes.js.map +1 -1
- package/dist/lib/hooks.d.ts +1 -0
- package/dist/lib/hooks.js +148 -27
- package/dist/lib/hooks.js.map +1 -1
- package/dist/lib/index.d.ts +7 -0
- package/dist/lib/index.js +13 -0
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/storage.d.ts +163 -0
- package/dist/lib/storage.js +823 -0
- package/dist/lib/storage.js.map +1 -0
- package/dist/lib/trace.d.ts +118 -0
- package/dist/lib/trace.js +499 -0
- package/dist/lib/trace.js.map +1 -0
- package/dist/lib/types.d.ts +322 -114
- package/dist/lib/types.js +2 -1
- package/dist/lib/types.js.map +1 -1
- package/dist/lib/util.d.ts +8 -8
- package/dist/lib/util.js +18 -22
- package/dist/lib/util.js.map +1 -1
- package/dist/lib/watcher.d.ts +104 -0
- package/dist/lib/watcher.js +398 -0
- package/dist/lib/watcher.js.map +1 -0
- package/dist/post-merge.js +460 -421
- package/dist/post-merge.js.map +1 -1
- package/dist/process.d.ts +6 -5
- package/dist/process.js +182 -158
- package/dist/process.js.map +1 -1
- package/dist/sync.js +172 -131
- package/dist/sync.js.map +1 -1
- package/package.json +3 -2
package/dist/lib/storage.js
@@ -0,0 +1,823 @@
+"use strict";
+/**
+ * Storage Module v3
+ *
+ * Handles file snapshot storage using git blobs and working log management.
+ * Uses git's native object store for snapshots (deduplication, compression, gc-managed).
+ *
+ * Storage locations:
+ * - .git/agentblame/agentblame.db - SQLite database
+ * - .git/agentblame/working/{base_sha}/snapshots.jsonl - Snapshot chain
+ * - .git/agentblame/working/{base_sha}/INITIAL - Uncommitted attributions
+ * - .git/objects/ - Git blob storage (via git hash-object)
+ */
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.storeSnapshot = storeSnapshot;
+exports.loadSnapshot = loadSnapshot;
+exports.loadSnapshotsBatch = loadSnapshotsBatch;
+exports.blobExists = blobExists;
+exports.getAgentBlameGitDir = getAgentBlameGitDir;
+exports.getWorkingDir = getWorkingDir;
+exports.getSnapshotsPath = getSnapshotsPath;
+exports.getInitialPath = getInitialPath;
+exports.getDatabasePath = getDatabasePath;
+exports.ensureAgentBlameDirs = ensureAgentBlameDirs;
+exports.ensureWorkingDir = ensureWorkingDir;
+exports.appendToWorkingLog = appendToWorkingLog;
+exports.readWorkingLog = readWorkingLog;
+exports.readWorkingLogForFile = readWorkingLogForFile;
+exports.getLastSnapshotForFile = getLastSnapshotForFile;
+exports.getSnapshotChainForFile = getSnapshotChainForFile;
+exports.getModifiedFiles = getModifiedFiles;
+exports.clearWorkingLog = clearWorkingLog;
+exports.readInitialFile = readInitialFile;
+exports.writeInitialFile = writeInitialFile;
+exports.updateInitialFile = updateInitialFile;
+exports.clearInitialFile = clearInitialFile;
+exports.cleanupWorkingDir = cleanupWorkingDir;
+exports.getActiveBaseShas = getActiveBaseShas;
+exports.cleanupProcessedWorkingDirs = cleanupProcessedWorkingDirs;
+exports.cleanupStaleWorkingDirs = cleanupStaleWorkingDirs;
+exports.getGitHead = getGitHead;
+exports.getParentCommit = getParentCommit;
+exports.getFileAtCommit = getFileAtCommit;
+exports.getRootCommit = getRootCommit;
+exports.readFileContent = readFileContent;
+exports.fileExists = fileExists;
+exports.captureFileSnapshot = captureFileSnapshot;
+const fs = __importStar(require("node:fs"));
+const path = __importStar(require("node:path"));
+const node_child_process_1 = require("node:child_process");
+// =============================================================================
+// Git Blob Storage
+// =============================================================================
+/**
+ * Store content as a git blob and return its SHA
+ * Uses git hash-object -w --stdin for native storage
+ */
+async function storeSnapshot(repoRoot, content) {
+    return new Promise((resolve, reject) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["hash-object", "-w", "--stdin"], {
+            cwd: repoRoot,
+            stdio: ["pipe", "pipe", "pipe"],
+        });
+        let stdout = "";
+        let stderr = "";
+        proc.stdout.on("data", (data) => {
+            stdout += data.toString();
+        });
+        proc.stderr.on("data", (data) => {
+            stderr += data.toString();
+        });
+        proc.on("close", (code) => {
+            if (code === 0) {
+                resolve(stdout.trim());
+            }
+            else {
+                reject(new Error(`git hash-object failed: ${stderr}`));
+            }
+        });
+        proc.on("error", (err) => {
+            reject(err);
+        });
+        // Write content to stdin and close
+        proc.stdin.write(content);
+        proc.stdin.end();
+    });
+}
+/**
+ * Load content from a git blob by SHA
+ * Uses git cat-file blob <sha>
+ */
+async function loadSnapshot(repoRoot, blobSha) {
+    return new Promise((resolve, reject) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["cat-file", "blob", blobSha], {
+            cwd: repoRoot,
+            stdio: ["ignore", "pipe", "pipe"],
+        });
+        let stdout = "";
+        let stderr = "";
+        proc.stdout.on("data", (data) => {
+            stdout += data.toString();
+        });
+        proc.stderr.on("data", (data) => {
+            stderr += data.toString();
+        });
+        proc.on("close", (code) => {
+            if (code === 0) {
+                resolve(stdout);
+            }
+            else {
+                reject(new Error(`git cat-file failed: ${stderr}`));
+            }
+        });
+        proc.on("error", (err) => {
+            reject(err);
+        });
+    });
+}
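
For orientation, a round trip through these two helpers looks like the sketch below (repo path and content are hypothetical; assumes an async context inside a git repository). Because git's object store is content-addressed, identical content always maps to the same blob SHA, so repeated snapshots of an unchanged file cost nothing extra in `.git/objects`.

```js
// Hypothetical round trip through storeSnapshot/loadSnapshot.
const repoRoot = "/path/to/repo";
const sha = await storeSnapshot(repoRoot, "const x = 1;\n"); // blob SHA, e.g. 40 hex chars
const text = await loadSnapshot(repoRoot, sha);              // exact bytes back out
console.log(text === "const x = 1;\n"); // true
```
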
+/**
+ * Batch load multiple blobs in a single git process
+ * Uses git cat-file --batch for efficiency
+ * Returns a Map of blobSha -> content
+ */
+async function loadSnapshotsBatch(repoRoot, blobShas) {
+    if (blobShas.length === 0) {
+        return new Map();
+    }
+    // For single blob, use regular method
+    if (blobShas.length === 1) {
+        const content = await loadSnapshot(repoRoot, blobShas[0]);
+        return new Map([[blobShas[0], content]]);
+    }
+    return new Promise((resolve, reject) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["cat-file", "--batch"], {
+            cwd: repoRoot,
+            stdio: ["pipe", "pipe", "pipe"],
+        });
+        const results = new Map();
+        let buffer = Buffer.alloc(0);
+        let currentSha = null;
+        let expectedSize = 0;
+        let headerParsed = false;
+        proc.stdout.on("data", (data) => {
+            buffer = Buffer.concat([buffer, data]);
+            // Process buffer
+            while (buffer.length > 0) {
+                if (!headerParsed) {
+                    // Look for header line: <sha> <type> <size>\n
+                    const newlineIdx = buffer.indexOf(10); // '\n'
+                    if (newlineIdx === -1)
+                        break;
+                    const headerLine = buffer.subarray(0, newlineIdx).toString();
+                    buffer = buffer.subarray(newlineIdx + 1);
+                    // Parse header: "sha blob size" or "sha missing"
+                    const parts = headerLine.split(" ");
+                    if (parts.length >= 3 && parts[1] === "blob") {
+                        const size = parseInt(parts[2], 10);
+                        if (!isNaN(size) && size >= 0) {
+                            currentSha = parts[0];
+                            expectedSize = size;
+                            headerParsed = true;
+                        }
+                        else {
+                            // Invalid size, skip this blob
+                            currentSha = null;
+                            headerParsed = false;
+                        }
+                    }
+                    else {
+                        // Missing or not a blob, skip
+                        currentSha = null;
+                        headerParsed = false;
+                    }
+                }
+                else {
+                    // Read content + trailing newline
+                    if (buffer.length >= expectedSize + 1) {
+                        const content = buffer.subarray(0, expectedSize).toString();
+                        buffer = buffer.subarray(expectedSize + 1); // +1 for trailing newline
+                        if (currentSha) {
+                            results.set(currentSha, content);
+                        }
+                        currentSha = null;
+                        expectedSize = 0;
+                        headerParsed = false;
+                    }
+                    else {
+                        break; // Need more data
+                    }
+                }
+            }
+        });
+        proc.stderr.on("data", (data) => {
+            // Ignore stderr for batch mode
+        });
+        proc.on("close", (code) => {
+            if (code === 0) {
+                resolve(results);
+            }
+            else {
+                reject(new Error(`git cat-file --batch failed`));
+            }
+        });
+        proc.on("error", (err) => {
+            reject(err);
+        });
+        // Write all SHAs to stdin
+        for (const sha of blobShas) {
+            proc.stdin.write(sha + "\n");
+        }
+        proc.stdin.end();
+    });
+}
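
The parser above speaks git's `cat-file --batch` protocol: each SHA written to stdin is answered with a header line (`<sha> blob <size>`, or `<sha> missing` for unknown objects) followed, for blobs, by exactly `<size>` bytes of content and a trailing newline, which is what the buffer loop consumes. A hedged usage sketch (SHA variables hypothetical):

```js
// Fetch several snapshots with one git process instead of N.
const contents = await loadSnapshotsBatch(repoRoot, [shaA, shaB, shaC]);
for (const [sha, text] of contents) {
    console.log(sha.slice(0, 8), text.length);
}
// SHAs answered with "<sha> missing" are simply absent from the Map.
```
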
+/**
+ * Check if a blob exists in the git object store
+ */
+async function blobExists(repoRoot, blobSha) {
+    return new Promise((resolve) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["cat-file", "-e", blobSha], {
+            cwd: repoRoot,
+            stdio: ["ignore", "ignore", "ignore"],
+        });
+        proc.on("close", (code) => {
+            resolve(code === 0);
+        });
+        proc.on("error", () => {
+            resolve(false);
+        });
+    });
+}
+// =============================================================================
+// Path Helpers
+// =============================================================================
+/**
+ * Get the agentblame directory inside .git
+ */
+function getAgentBlameGitDir(repoRoot) {
+    return path.join(repoRoot, ".git", "agentblame");
+}
+/**
+ * Get the working directory for a specific base SHA
+ */
+function getWorkingDir(repoRoot, baseSha) {
+    return path.join(getAgentBlameGitDir(repoRoot), "working", baseSha);
+}
+/**
+ * Get the snapshots.jsonl path for a base SHA
+ */
+function getSnapshotsPath(repoRoot, baseSha) {
+    return path.join(getWorkingDir(repoRoot, baseSha), "snapshots.jsonl");
+}
+/**
+ * Get the INITIAL file path for a base SHA
+ */
+function getInitialPath(repoRoot, baseSha) {
+    return path.join(getWorkingDir(repoRoot, baseSha), "INITIAL");
+}
+/**
+ * Get the database path
+ */
+function getDatabasePath(repoRoot) {
+    return path.join(getAgentBlameGitDir(repoRoot), "agentblame.db");
+}
+/**
+ * Ensure the agentblame directory structure exists
+ */
+function ensureAgentBlameDirs(repoRoot) {
+    const agentBlameDir = getAgentBlameGitDir(repoRoot);
+    const workingDir = path.join(agentBlameDir, "working");
+    if (!fs.existsSync(agentBlameDir)) {
+        fs.mkdirSync(agentBlameDir, { recursive: true });
+    }
+    if (!fs.existsSync(workingDir)) {
+        fs.mkdirSync(workingDir, { recursive: true });
+    }
+}
+/**
+ * Ensure working directory for a base SHA exists
+ */
+function ensureWorkingDir(repoRoot, baseSha) {
+    const workingDir = getWorkingDir(repoRoot, baseSha);
+    if (!fs.existsSync(workingDir)) {
+        fs.mkdirSync(workingDir, { recursive: true });
+    }
+}
+// =============================================================================
+// Working Log (snapshots.jsonl)
+// =============================================================================
+/**
+ * Append an entry to the working log
+ */
+function appendToWorkingLog(repoRoot, baseSha, entry) {
+    ensureWorkingDir(repoRoot, baseSha);
+    const logPath = getSnapshotsPath(repoRoot, baseSha);
+    const line = JSON.stringify(entry) + "\n";
+    fs.appendFileSync(logPath, line, "utf8");
+}
+/**
+ * Read all entries from the working log
+ */
+function readWorkingLog(repoRoot, baseSha) {
+    const logPath = getSnapshotsPath(repoRoot, baseSha);
+    if (!fs.existsSync(logPath)) {
+        return [];
+    }
+    const content = fs.readFileSync(logPath, "utf8");
+    const lines = content.trim().split("\n").filter(Boolean);
+    return lines.map((line) => JSON.parse(line));
+}
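
Each line of snapshots.jsonl is a single JSON object in the shape built by `captureFileSnapshot` at the bottom of this file. A representative parsed entry, with hypothetical values (the `"ai_edit"` type string is illustrative; this module itself only names `"human_edit"`):

```js
// One working-log entry as parsed from snapshots.jsonl (values hypothetical):
const entry = {
    ts: "2024-01-15T10:30:00.000Z", // ISO timestamp of the snapshot
    file: "src/app.ts",             // repo-relative path
    blob: "9b4862f19c3e...",        // git blob SHA holding the file content
    session: "sess-123",            // agent session that produced the edit
    type: "ai_edit",                // edit kind (illustrative value)
};
```
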
+/**
+ * Read entries from the working log filtered by file path
+ * More efficient than readWorkingLog + filter for single file queries
+ */
+function readWorkingLogForFile(repoRoot, baseSha, filePath) {
+    const logPath = getSnapshotsPath(repoRoot, baseSha);
+    if (!fs.existsSync(logPath)) {
+        return [];
+    }
+    const content = fs.readFileSync(logPath, "utf8");
+    const results = [];
+    // Quick string check before JSON parse
+    const filePattern = `"file":"${filePath}"`;
+    for (const line of content.split("\n")) {
+        if (!line)
+            continue;
+        // Fast rejection: skip lines that don't contain the file path
+        if (!line.includes(filePattern))
+            continue;
+        const entry = JSON.parse(line);
+        if (entry.file === filePath) {
+            results.push(entry);
+        }
+    }
+    return results;
+}
+/**
+ * Get the last snapshot entry for a file (for dedup checking)
+ */
+function getLastSnapshotForFile(repoRoot, baseSha, filePath) {
+    const logPath = getSnapshotsPath(repoRoot, baseSha);
+    if (!fs.existsSync(logPath)) {
+        return null;
+    }
+    const content = fs.readFileSync(logPath, "utf8");
+    const filePattern = `"file":"${filePath}"`;
+    let lastEntry = null;
+    for (const line of content.split("\n")) {
+        if (!line)
+            continue;
+        if (!line.includes(filePattern))
+            continue;
+        const entry = JSON.parse(line);
+        if (entry.file === filePath) {
+            lastEntry = entry;
+        }
+    }
+    return lastEntry;
+}
+/**
+ * Get the snapshot chain for a specific file
+ * Returns snapshots in chronological order (oldest first)
+ * Prepends the original git content (baseSha) as the first entry with sessionId: null
+ */
+async function getSnapshotChainForFile(repoRoot, baseSha, filePath) {
+    // Use filtered read for efficiency
+    const entries = readWorkingLogForFile(repoRoot, baseSha, filePath);
+    const aiSnapshots = entries.map((entry) => ({
+        timestamp: entry.ts,
+        filePath: entry.file,
+        blobSha: entry.blob,
+        sessionId: entry.session,
+        type: entry.type,
+    }));
+    if (aiSnapshots.length === 0) {
+        return [];
+    }
+    // Get the original content from git (before any AI edits)
+    const originalBlob = await getFileBlobAtCommit(repoRoot, baseSha, filePath);
+    if (originalBlob) {
+        // Prepend the original content as a snapshot with sessionId: null
+        const originalSnapshot = {
+            timestamp: new Date(0).toISOString(), // Earliest possible time
+            filePath,
+            blobSha: originalBlob,
+            sessionId: null,
+            type: "human_edit",
+        };
+        return [originalSnapshot, ...aiSnapshots];
+    }
+    // File is NEW (didn't exist before AI edits)
+    // Return just AI snapshots - trace algorithm handles this case by
+    // attributing lines that exist in all snapshots to the first AI snapshot
+    return aiSnapshots;
+}
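
Combined with the batch loader, reconstructing a file's full edit history for one base commit is a two-step read. A sketch, assuming an async context and hypothetical inputs:

```js
// Oldest-first chain; the pre-edit git content (sessionId: null) is
// prepended when the file already existed at baseSha.
const chain = await getSnapshotChainForFile(repoRoot, baseSha, "src/app.ts");
const blobs = await loadSnapshotsBatch(repoRoot, chain.map((s) => s.blobSha));
for (const snap of chain) {
    console.log(snap.sessionId ?? "original", blobs.get(snap.blobSha)?.length);
}
```
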
+/**
+ * Get the blob SHA for a file at a specific commit
+ */
+async function getFileBlobAtCommit(repoRoot, commitSha, filePath) {
+    const { spawn } = await Promise.resolve().then(() => __importStar(require("node:child_process")));
+    return new Promise((resolve) => {
+        const proc = spawn("git", ["ls-tree", commitSha, filePath], {
+            cwd: repoRoot,
+            stdio: ["ignore", "pipe", "pipe"],
+        });
+        let stdout = "";
+        proc.stdout.on("data", (data) => {
+            stdout += data.toString();
+        });
+        proc.on("close", (code) => {
+            if (code === 0 && stdout.trim()) {
+                // Output format: mode type blob\tpath
+                const parts = stdout.trim().split(/\s+/);
+                if (parts.length >= 3) {
+                    resolve(parts[2]); // The blob SHA
+                }
+                else {
+                    resolve(null);
+                }
+            }
+            else {
+                resolve(null);
+            }
+        });
+        proc.on("error", () => {
+            resolve(null);
+        });
+    });
+}
+/**
+ * Get all files that have been modified since base SHA
+ */
+function getModifiedFiles(repoRoot, baseSha) {
+    const entries = readWorkingLog(repoRoot, baseSha);
+    const files = new Set();
+    for (const entry of entries) {
+        files.add(entry.file);
+    }
+    return Array.from(files);
+}
+/**
+ * Clear the working log for a base SHA
+ */
+function clearWorkingLog(repoRoot, baseSha) {
+    const logPath = getSnapshotsPath(repoRoot, baseSha);
+    if (fs.existsSync(logPath)) {
+        fs.unlinkSync(logPath);
+    }
+}
+// =============================================================================
+// INITIAL File (Uncommitted Attributions)
+// =============================================================================
+/**
+ * Read the INITIAL file
+ */
+function readInitialFile(repoRoot, baseSha) {
+    const initialPath = getInitialPath(repoRoot, baseSha);
+    if (!fs.existsSync(initialPath)) {
+        return null;
+    }
+    try {
+        const content = fs.readFileSync(initialPath, "utf8");
+        return JSON.parse(content);
+    }
+    catch {
+        return null;
+    }
+}
+/**
+ * Write the INITIAL file
+ */
+function writeInitialFile(repoRoot, baseSha, data) {
+    ensureWorkingDir(repoRoot, baseSha);
+    const initialPath = getInitialPath(repoRoot, baseSha);
+    fs.writeFileSync(initialPath, JSON.stringify(data, null, 2), "utf8");
+}
+/**
+ * Update INITIAL file with new attribution
+ */
+function updateInitialFile(repoRoot, baseSha, filePath, blobSha, sessionId, lines) {
+    let initial = readInitialFile(repoRoot, baseSha);
+    if (!initial) {
+        initial = {
+            baseSha,
+            files: {},
+        };
+    }
+    if (!initial.files[filePath]) {
+        initial.files[filePath] = {
+            blobSha,
+            attributions: [],
+        };
+    }
+    // Update blob SHA to latest
+    initial.files[filePath].blobSha = blobSha;
+    // Add attribution (merge overlapping ranges later during commit processing)
+    initial.files[filePath].attributions.push({
+        lines,
+        sessionId,
+    });
+    writeInitialFile(repoRoot, baseSha, initial);
+}
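
After a few `updateInitialFile` calls, the INITIAL file holds one record per touched file. A sketch of its shape (SHAs, paths, and the `lines` payload are hypothetical; `lines` is stored exactly as the caller passes it):

```js
const initial = {
    baseSha: "4f2a9c1d...",
    files: {
        "src/app.ts": {
            blobSha: "9b4862f19c3e...", // latest snapshot blob for this file
            attributions: [
                { lines: [[10, 24]], sessionId: "sess-123" },
                { lines: [[30, 31]], sessionId: "sess-456" },
            ],
        },
    },
};
```
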
+/**
+ * Clear the INITIAL file
+ */
+function clearInitialFile(repoRoot, baseSha) {
+    const initialPath = getInitialPath(repoRoot, baseSha);
+    if (fs.existsSync(initialPath)) {
+        fs.unlinkSync(initialPath);
+    }
+}
+// =============================================================================
+// Working Directory Cleanup
+// =============================================================================
+/**
+ * Clean up working directory for a base SHA (after commit)
+ */
+function cleanupWorkingDir(repoRoot, baseSha) {
+    const workingDir = getWorkingDir(repoRoot, baseSha);
+    if (fs.existsSync(workingDir)) {
+        fs.rmSync(workingDir, { recursive: true });
+    }
+}
+/**
+ * Get all base SHAs that have working directories
+ */
+function getActiveBaseShas(repoRoot) {
+    const workingBaseDir = path.join(getAgentBlameGitDir(repoRoot), "working");
+    if (!fs.existsSync(workingBaseDir)) {
+        return [];
+    }
+    try {
+        return fs.readdirSync(workingBaseDir).filter((name) => {
+            const stat = fs.statSync(path.join(workingBaseDir, name));
+            return stat.isDirectory();
+        });
+    }
+    catch {
+        return [];
+    }
+}
+/**
+ * Smart cleanup of old working directories.
+ * Only cleans up if:
+ * 1. The commit is an ancestor of HEAD (we've moved past it)
+ * 2. The commit has git notes (deltas were processed)
+ *
+ * This preserves deltas for:
+ * - Current HEAD (uncommitted changes)
+ * - Reset/recommit scenarios
+ * - Stashed changes
+ */
+async function cleanupProcessedWorkingDirs(repoRoot, currentBaseSha) {
+    const baseShas = getActiveBaseShas(repoRoot);
+    for (const baseSha of baseShas) {
+        // Never clean up current baseSha
+        if (baseSha === currentBaseSha) {
+            continue;
+        }
+        // Check if this commit is an ancestor of HEAD and has been processed (has notes)
+        const isAncestor = await isAncestorOf(repoRoot, baseSha, currentBaseSha);
+        const hasNotes = await commitHasNotes(repoRoot, baseSha);
+        if (isAncestor && hasNotes) {
+            cleanupWorkingDir(repoRoot, baseSha);
+            if (process.env.AGENTBLAME_DEBUG) {
+                console.error(`[agentblame] Cleaned up processed working dir: ${baseSha.slice(0, 8)}`);
+            }
+        }
+        else if (process.env.AGENTBLAME_DEBUG) {
+            console.error(`[agentblame] Keeping working dir: ${baseSha.slice(0, 8)} (ancestor=${isAncestor}, hasNotes=${hasNotes})`);
+        }
+    }
+}
+/**
+ * Check if commitA is an ancestor of commitB
+ */
+async function isAncestorOf(repoRoot, commitA, commitB) {
+    return new Promise((resolve) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["merge-base", "--is-ancestor", commitA, commitB], {
+            cwd: repoRoot,
+            stdio: ["ignore", "ignore", "ignore"],
+        });
+        proc.on("close", (code) => {
+            resolve(code === 0);
+        });
+        proc.on("error", () => {
+            resolve(false);
+        });
+    });
+}
+/**
+ * Check if a commit has agentblame git notes
+ */
+async function commitHasNotes(repoRoot, commitSha) {
+    return new Promise((resolve) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["notes", "--ref=agentblame", "show", commitSha], {
+            cwd: repoRoot,
+            stdio: ["ignore", "ignore", "ignore"],
+        });
+        proc.on("close", (code) => {
+            resolve(code === 0);
+        });
+        proc.on("error", () => {
+            resolve(false);
+        });
+    });
+}
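
How the policy composes in practice: after a commit, the new HEAD becomes the current base SHA, and any older working directory whose commit is both an ancestor of HEAD and already annotated with agentblame notes can be dropped. A hedged sketch (the hook wiring is hypothetical; `getGitHead` is defined later in this file):

```js
const head = await getGitHead(repoRoot);
if (head) {
    await cleanupProcessedWorkingDirs(repoRoot, head);
}
```
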
+/**
+ * Clean up stale working directories (for commits that no longer exist)
+ */
+async function cleanupStaleWorkingDirs(repoRoot) {
+    const baseShas = getActiveBaseShas(repoRoot);
+    const cleaned = [];
+    const kept = [];
+    for (const baseSha of baseShas) {
+        // Check if commit exists
+        const exists = await commitExists(repoRoot, baseSha);
+        if (!exists) {
+            cleanupWorkingDir(repoRoot, baseSha);
+            cleaned.push(baseSha);
+        }
+        else {
+            kept.push(baseSha);
+        }
+    }
+    return { cleaned, kept };
+}
+/**
+ * Check if a commit exists in the repository
+ */
+async function commitExists(repoRoot, commitSha) {
+    return new Promise((resolve) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["cat-file", "-e", commitSha], {
+            cwd: repoRoot,
+            stdio: ["ignore", "ignore", "ignore"],
+        });
+        proc.on("close", (code) => {
+            resolve(code === 0);
+        });
+        proc.on("error", () => {
+            resolve(false);
+        });
+    });
+}
+// =============================================================================
+// Git HEAD Operations
+// =============================================================================
+/**
+ * Get the current HEAD commit SHA
+ */
+async function getGitHead(repoRoot) {
+    return new Promise((resolve) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["rev-parse", "HEAD"], {
+            cwd: repoRoot,
+            stdio: ["ignore", "pipe", "pipe"],
+        });
+        let stdout = "";
+        proc.stdout.on("data", (data) => {
+            stdout += data.toString();
+        });
+        proc.on("close", (code) => {
+            if (code === 0) {
+                resolve(stdout.trim());
+            }
+            else {
+                resolve(null);
+            }
+        });
+        proc.on("error", () => {
+            resolve(null);
+        });
+    });
+}
+/**
+ * Get the parent commit SHA
+ */
+async function getParentCommit(repoRoot, commitSha) {
+    return new Promise((resolve) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["rev-parse", `${commitSha}^`], {
+            cwd: repoRoot,
+            stdio: ["ignore", "pipe", "pipe"],
+        });
+        let stdout = "";
+        proc.stdout.on("data", (data) => {
+            stdout += data.toString();
+        });
+        proc.on("close", (code) => {
+            if (code === 0) {
+                resolve(stdout.trim());
+            }
+            else {
+                // No parent (initial commit)
+                resolve(null);
+            }
+        });
+        proc.on("error", () => {
+            resolve(null);
+        });
+    });
+}
+/**
+ * Get file content at a specific commit
+ */
+async function getFileAtCommit(repoRoot, commitSha, filePath) {
+    return new Promise((resolve) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["show", `${commitSha}:${filePath}`], {
+            cwd: repoRoot,
+            stdio: ["ignore", "pipe", "pipe"],
+        });
+        let stdout = "";
+        proc.stdout.on("data", (data) => {
+            stdout += data.toString();
+        });
+        proc.on("close", (code) => {
+            if (code === 0) {
+                resolve(stdout);
+            }
+            else {
+                resolve(null);
+            }
+        });
+        proc.on("error", () => {
+            resolve(null);
+        });
+    });
+}
+/**
+ * Get the root commit (first commit in repository)
+ */
+async function getRootCommit(repoRoot) {
+    return new Promise((resolve) => {
+        const proc = (0, node_child_process_1.spawn)("git", ["rev-list", "--max-parents=0", "HEAD"], {
+            cwd: repoRoot,
+            stdio: ["ignore", "pipe", "pipe"],
+        });
+        let stdout = "";
+        proc.stdout.on("data", (data) => {
+            stdout += data.toString();
+        });
+        proc.on("close", (code) => {
+            if (code === 0) {
+                const commits = stdout.trim().split("\n");
+                resolve(commits[0] || null);
+            }
+            else {
+                resolve(null);
+            }
+        });
+        proc.on("error", () => {
+            resolve(null);
+        });
+    });
+}
+// =============================================================================
+// File Content Helpers
+// =============================================================================
+/**
+ * Read current file content from working directory
+ */
+function readFileContent(filePath) {
+    try {
+        return fs.readFileSync(filePath, "utf8");
+    }
+    catch {
+        return null;
+    }
+}
+/**
+ * Check if file exists
+ */
+function fileExists(filePath) {
+    return fs.existsSync(filePath);
+}
+/**
+ * Store file content as snapshot and update working log
+ */
+async function captureFileSnapshot(repoRoot, baseSha, filePath, content, sessionId, type) {
+    // Store content as git blob
+    const blobSha = await storeSnapshot(repoRoot, content);
+    // Append to working log
+    const entry = {
+        ts: new Date().toISOString(),
+        file: filePath,
+        blob: blobSha,
+        session: sessionId,
+        type,
+    };
+    appendToWorkingLog(repoRoot, baseSha, entry);
+    return blobSha;
+}
+//# sourceMappingURL=storage.js.map
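
Taken end to end, the capture path is: read the file, write it into `.git/objects` as a blob, and append a pointer entry to the working log keyed by the current HEAD. A sketch under those assumptions (paths, session id, and edit type hypothetical; assumes an async context):

```js
const repoRoot = "/path/to/repo";
const filePath = "src/app.ts";
const head = await getGitHead(repoRoot);               // base SHA for the working log
const content = readFileContent(repoRoot + "/" + filePath);
if (head && content !== null) {
    const blobSha = await captureFileSnapshot(repoRoot, head, filePath, content, "sess-123", "ai_edit");
    // The blob now lives in .git/objects; the JSONL entry in
    // .git/agentblame/working/<head>/snapshots.jsonl points at it.
    console.log("captured", blobSha.slice(0, 8));
}
```
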