gitx.do 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (167) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +156 -0
  3. package/dist/durable-object/object-store.d.ts +113 -0
  4. package/dist/durable-object/object-store.d.ts.map +1 -0
  5. package/dist/durable-object/object-store.js +387 -0
  6. package/dist/durable-object/object-store.js.map +1 -0
  7. package/dist/durable-object/schema.d.ts +17 -0
  8. package/dist/durable-object/schema.d.ts.map +1 -0
  9. package/dist/durable-object/schema.js +43 -0
  10. package/dist/durable-object/schema.js.map +1 -0
  11. package/dist/durable-object/wal.d.ts +111 -0
  12. package/dist/durable-object/wal.d.ts.map +1 -0
  13. package/dist/durable-object/wal.js +200 -0
  14. package/dist/durable-object/wal.js.map +1 -0
  15. package/dist/index.d.ts +24 -0
  16. package/dist/index.d.ts.map +1 -0
  17. package/dist/index.js +101 -0
  18. package/dist/index.js.map +1 -0
  19. package/dist/mcp/adapter.d.ts +231 -0
  20. package/dist/mcp/adapter.d.ts.map +1 -0
  21. package/dist/mcp/adapter.js +502 -0
  22. package/dist/mcp/adapter.js.map +1 -0
  23. package/dist/mcp/sandbox.d.ts +261 -0
  24. package/dist/mcp/sandbox.d.ts.map +1 -0
  25. package/dist/mcp/sandbox.js +983 -0
  26. package/dist/mcp/sandbox.js.map +1 -0
  27. package/dist/mcp/sdk-adapter.d.ts +413 -0
  28. package/dist/mcp/sdk-adapter.d.ts.map +1 -0
  29. package/dist/mcp/sdk-adapter.js +672 -0
  30. package/dist/mcp/sdk-adapter.js.map +1 -0
  31. package/dist/mcp/tools.d.ts +133 -0
  32. package/dist/mcp/tools.d.ts.map +1 -0
  33. package/dist/mcp/tools.js +1604 -0
  34. package/dist/mcp/tools.js.map +1 -0
  35. package/dist/ops/blame.d.ts +148 -0
  36. package/dist/ops/blame.d.ts.map +1 -0
  37. package/dist/ops/blame.js +754 -0
  38. package/dist/ops/blame.js.map +1 -0
  39. package/dist/ops/branch.d.ts +215 -0
  40. package/dist/ops/branch.d.ts.map +1 -0
  41. package/dist/ops/branch.js +608 -0
  42. package/dist/ops/branch.js.map +1 -0
  43. package/dist/ops/commit-traversal.d.ts +209 -0
  44. package/dist/ops/commit-traversal.d.ts.map +1 -0
  45. package/dist/ops/commit-traversal.js +755 -0
  46. package/dist/ops/commit-traversal.js.map +1 -0
  47. package/dist/ops/commit.d.ts +221 -0
  48. package/dist/ops/commit.d.ts.map +1 -0
  49. package/dist/ops/commit.js +606 -0
  50. package/dist/ops/commit.js.map +1 -0
  51. package/dist/ops/merge-base.d.ts +223 -0
  52. package/dist/ops/merge-base.d.ts.map +1 -0
  53. package/dist/ops/merge-base.js +581 -0
  54. package/dist/ops/merge-base.js.map +1 -0
  55. package/dist/ops/merge.d.ts +385 -0
  56. package/dist/ops/merge.d.ts.map +1 -0
  57. package/dist/ops/merge.js +1203 -0
  58. package/dist/ops/merge.js.map +1 -0
  59. package/dist/ops/tag.d.ts +182 -0
  60. package/dist/ops/tag.d.ts.map +1 -0
  61. package/dist/ops/tag.js +608 -0
  62. package/dist/ops/tag.js.map +1 -0
  63. package/dist/ops/tree-builder.d.ts +82 -0
  64. package/dist/ops/tree-builder.d.ts.map +1 -0
  65. package/dist/ops/tree-builder.js +246 -0
  66. package/dist/ops/tree-builder.js.map +1 -0
  67. package/dist/ops/tree-diff.d.ts +243 -0
  68. package/dist/ops/tree-diff.d.ts.map +1 -0
  69. package/dist/ops/tree-diff.js +657 -0
  70. package/dist/ops/tree-diff.js.map +1 -0
  71. package/dist/pack/delta.d.ts +68 -0
  72. package/dist/pack/delta.d.ts.map +1 -0
  73. package/dist/pack/delta.js +343 -0
  74. package/dist/pack/delta.js.map +1 -0
  75. package/dist/pack/format.d.ts +84 -0
  76. package/dist/pack/format.d.ts.map +1 -0
  77. package/dist/pack/format.js +261 -0
  78. package/dist/pack/format.js.map +1 -0
  79. package/dist/pack/full-generation.d.ts +327 -0
  80. package/dist/pack/full-generation.d.ts.map +1 -0
  81. package/dist/pack/full-generation.js +1159 -0
  82. package/dist/pack/full-generation.js.map +1 -0
  83. package/dist/pack/generation.d.ts +118 -0
  84. package/dist/pack/generation.d.ts.map +1 -0
  85. package/dist/pack/generation.js +459 -0
  86. package/dist/pack/generation.js.map +1 -0
  87. package/dist/pack/index.d.ts +181 -0
  88. package/dist/pack/index.d.ts.map +1 -0
  89. package/dist/pack/index.js +552 -0
  90. package/dist/pack/index.js.map +1 -0
  91. package/dist/refs/branch.d.ts +224 -0
  92. package/dist/refs/branch.d.ts.map +1 -0
  93. package/dist/refs/branch.js +170 -0
  94. package/dist/refs/branch.js.map +1 -0
  95. package/dist/refs/storage.d.ts +208 -0
  96. package/dist/refs/storage.d.ts.map +1 -0
  97. package/dist/refs/storage.js +421 -0
  98. package/dist/refs/storage.js.map +1 -0
  99. package/dist/refs/tag.d.ts +230 -0
  100. package/dist/refs/tag.d.ts.map +1 -0
  101. package/dist/refs/tag.js +188 -0
  102. package/dist/refs/tag.js.map +1 -0
  103. package/dist/storage/lru-cache.d.ts +188 -0
  104. package/dist/storage/lru-cache.d.ts.map +1 -0
  105. package/dist/storage/lru-cache.js +410 -0
  106. package/dist/storage/lru-cache.js.map +1 -0
  107. package/dist/storage/object-index.d.ts +140 -0
  108. package/dist/storage/object-index.d.ts.map +1 -0
  109. package/dist/storage/object-index.js +166 -0
  110. package/dist/storage/object-index.js.map +1 -0
  111. package/dist/storage/r2-pack.d.ts +394 -0
  112. package/dist/storage/r2-pack.d.ts.map +1 -0
  113. package/dist/storage/r2-pack.js +1062 -0
  114. package/dist/storage/r2-pack.js.map +1 -0
  115. package/dist/tiered/cdc-pipeline.d.ts +316 -0
  116. package/dist/tiered/cdc-pipeline.d.ts.map +1 -0
  117. package/dist/tiered/cdc-pipeline.js +771 -0
  118. package/dist/tiered/cdc-pipeline.js.map +1 -0
  119. package/dist/tiered/migration.d.ts +242 -0
  120. package/dist/tiered/migration.d.ts.map +1 -0
  121. package/dist/tiered/migration.js +592 -0
  122. package/dist/tiered/migration.js.map +1 -0
  123. package/dist/tiered/parquet-writer.d.ts +248 -0
  124. package/dist/tiered/parquet-writer.d.ts.map +1 -0
  125. package/dist/tiered/parquet-writer.js +555 -0
  126. package/dist/tiered/parquet-writer.js.map +1 -0
  127. package/dist/tiered/read-path.d.ts +141 -0
  128. package/dist/tiered/read-path.d.ts.map +1 -0
  129. package/dist/tiered/read-path.js +204 -0
  130. package/dist/tiered/read-path.js.map +1 -0
  131. package/dist/types/objects.d.ts +53 -0
  132. package/dist/types/objects.d.ts.map +1 -0
  133. package/dist/types/objects.js +291 -0
  134. package/dist/types/objects.js.map +1 -0
  135. package/dist/types/storage.d.ts +117 -0
  136. package/dist/types/storage.d.ts.map +1 -0
  137. package/dist/types/storage.js +8 -0
  138. package/dist/types/storage.js.map +1 -0
  139. package/dist/utils/hash.d.ts +31 -0
  140. package/dist/utils/hash.d.ts.map +1 -0
  141. package/dist/utils/hash.js +60 -0
  142. package/dist/utils/hash.js.map +1 -0
  143. package/dist/utils/sha1.d.ts +26 -0
  144. package/dist/utils/sha1.d.ts.map +1 -0
  145. package/dist/utils/sha1.js +127 -0
  146. package/dist/utils/sha1.js.map +1 -0
  147. package/dist/wire/capabilities.d.ts +236 -0
  148. package/dist/wire/capabilities.d.ts.map +1 -0
  149. package/dist/wire/capabilities.js +437 -0
  150. package/dist/wire/capabilities.js.map +1 -0
  151. package/dist/wire/pkt-line.d.ts +67 -0
  152. package/dist/wire/pkt-line.d.ts.map +1 -0
  153. package/dist/wire/pkt-line.js +145 -0
  154. package/dist/wire/pkt-line.js.map +1 -0
  155. package/dist/wire/receive-pack.d.ts +302 -0
  156. package/dist/wire/receive-pack.d.ts.map +1 -0
  157. package/dist/wire/receive-pack.js +885 -0
  158. package/dist/wire/receive-pack.js.map +1 -0
  159. package/dist/wire/smart-http.d.ts +321 -0
  160. package/dist/wire/smart-http.d.ts.map +1 -0
  161. package/dist/wire/smart-http.js +654 -0
  162. package/dist/wire/smart-http.js.map +1 -0
  163. package/dist/wire/upload-pack.d.ts +333 -0
  164. package/dist/wire/upload-pack.d.ts.map +1 -0
  165. package/dist/wire/upload-pack.js +850 -0
  166. package/dist/wire/upload-pack.js.map +1 -0
  167. package/package.json +61 -0
@@ -0,0 +1,754 @@
/**
 * Git Blame Algorithm
 *
 * This module provides functionality for attributing each line of a file
 * to the commit that last modified it.
 */
// ============================================================================
// Helper Functions
// ============================================================================
// Single shared UTF-8 TextDecoder, reused by every function in this module
// to turn blob bytes into strings.
const decoder = new TextDecoder();
/**
 * Heuristically decide whether a blob holds binary (non-text) data.
 *
 * Mirrors git's own heuristic: a NUL byte anywhere in the first 8000 bytes
 * marks the content as binary; anything past that window is not inspected.
 *
 * @param {Uint8Array} data - Raw blob bytes.
 * @returns {boolean} True when a NUL byte appears in the sampled prefix.
 */
function isBinaryContent(data) {
    const sample = data.slice(0, 8000);
    return sample.includes(0);
}
/**
 * Split file content into logical lines.
 *
 * A single trailing newline does not produce a trailing empty line, and a
 * trailing '\r' (from CRLF endings) is stripped from each line.
 *
 * @param {string} content - Decoded file text.
 * @returns {string[]} Lines without their terminators; [] for empty input.
 */
function splitLines(content) {
    if (content === '')
        return [];
    const parts = content.split('\n');
    // A terminating newline yields one empty trailing element - drop it.
    if (parts[parts.length - 1] === '') {
        parts.pop();
    }
    return parts.map((line) => (line.endsWith('\r') ? line.slice(0, -1) : line));
}
/**
 * Canonicalize a line for comparison.
 *
 * When whitespace is ignored, leading/trailing whitespace is stripped and
 * every internal whitespace run collapses to a single space; otherwise the
 * line is returned untouched.
 *
 * @param {string} line - The line to normalize.
 * @param {boolean} ignoreWhitespace - Whether whitespace differences matter.
 * @returns {string} The comparison key for this line.
 */
function normalizeLine(line, ignoreWhitespace) {
    return ignoreWhitespace ? line.trim().replace(/\s+/g, ' ') : line;
}
/**
 * Resolve a file's blob at a commit, walking nested tree entries.
 *
 * Tries the storage's direct lookup first, then falls back to descending the
 * tree one path segment at a time. Intermediate segments must be directory
 * entries (mode '040000').
 *
 * @param {object} storage - Store exposing getFileAtCommit/getTree/getBlob.
 * @param {object} commit - Commit object whose `tree` sha is the root.
 * @param {string} path - Slash-separated path inside the tree.
 * @returns {Promise<Uint8Array|null>} Blob bytes, or null when not found.
 */
async function getFileAtPath(storage, commit, path) {
    // Fast path: let the storage layer resolve the whole path itself.
    const direct = await storage.getFileAtCommit(commit.tree, path);
    if (direct)
        return direct;
    // Slow path: descend the tree segment by segment.
    const segments = path.split('/');
    let treeSha = commit.tree;
    for (let idx = 0; idx < segments.length; idx++) {
        const tree = await storage.getTree(treeSha);
        if (!tree)
            return null;
        const entry = tree.entries.find((e) => e.name === segments[idx]);
        if (!entry)
            return null;
        if (idx === segments.length - 1) {
            // Last segment - must be the file itself.
            return storage.getBlob(entry.sha);
        }
        // Intermediate segment - must be a directory to keep descending.
        if (entry.mode !== '040000')
            return null;
        treeSha = entry.sha;
    }
    return null;
}
/**
 * Compute which lines are unchanged between two file versions.
 *
 * Builds a full longest-common-subsequence table over the (optionally
 * whitespace-normalized) lines, then backtracks from the bottom-right corner
 * to recover one maximal set of matching line pairs.
 *
 * @param {string[]} oldLines - Lines of the older version.
 * @param {string[]} newLines - Lines of the newer version.
 * @param {boolean} [ignoreWhitespace=false] - Normalize whitespace first.
 * @returns {Map<number, number>} 0-indexed oldLineIdx -> newLineIdx pairs.
 */
function computeLineMapping(oldLines, newLines, ignoreWhitespace = false) {
    const mapping = new Map();
    const left = oldLines.map((l) => normalizeLine(l, ignoreWhitespace));
    const right = newLines.map((l) => normalizeLine(l, ignoreWhitespace));
    const rows = oldLines.length;
    const cols = newLines.length;
    if (rows === 0 || cols === 0)
        return mapping;
    // Full (rows+1) x (cols+1) LCS table; the whole table is needed for the
    // backtracking pass below.
    const table = [];
    for (let r = 0; r <= rows; r++) {
        table.push(new Array(cols + 1).fill(0));
    }
    for (let r = 1; r <= rows; r++) {
        for (let c = 1; c <= cols; c++) {
            table[r][c] = left[r - 1] === right[c - 1]
                ? table[r - 1][c - 1] + 1
                : Math.max(table[r - 1][c], table[r][c - 1]);
        }
    }
    // Backtrack to recover the matched pairs (ties move toward smaller col).
    let r = rows;
    let c = cols;
    while (r > 0 && c > 0) {
        if (left[r - 1] === right[c - 1]) {
            mapping.set(r - 1, c - 1); // 0-indexed
            r -= 1;
            c -= 1;
        }
        else if (table[r - 1][c] > table[r][c - 1]) {
            r -= 1;
        }
        else {
            c -= 1;
        }
    }
    return mapping;
}
/**
 * Parse a git-style `-L` line-range specification.
 *
 * Supported forms:
 *   - "/startRe/,/endRe/" : first line matching startRe through the first
 *     subsequent line matching endRe (both 1-indexed; defaults to 1 and the
 *     last line when a pattern never matches)
 *   - "start,end"         : absolute 1-indexed bounds, e.g. "2,4"
 *   - "start,+offset"     : relative span, e.g. "2,+3"
 *   - "start"             : a single line (previously crashed - see below)
 *
 * @param {string} lineRange - The range specification.
 * @param {string[]} lines - File lines, used for pattern search and defaults.
 * @returns {{start: number, end: number}} 1-indexed inclusive bounds.
 */
function parseLineRange(lineRange, lines) {
    const totalLines = lines.length;
    // Regex form: /pattern1/,/pattern2/
    if (lineRange.startsWith('/')) {
        const parts = lineRange.match(/^\/(.+)\/,\/(.+)\/$/);
        if (parts) {
            const startPattern = new RegExp(parts[1]);
            const endPattern = new RegExp(parts[2]);
            let start = -1;
            let end = -1;
            for (let i = 0; i < lines.length; i++) {
                if (start === -1 && startPattern.test(lines[i])) {
                    start = i + 1; // 1-indexed
                }
                // The end pattern may match the same line as the start pattern.
                if (start !== -1 && endPattern.test(lines[i])) {
                    end = i + 1; // 1-indexed
                    break;
                }
            }
            if (start === -1)
                start = 1;
            if (end === -1)
                end = totalLines;
            return { start, end };
        }
    }
    // Numeric forms: "2,4", "2,+3", or a bare "2"
    const [startStr, endStr] = lineRange.split(',');
    const start = Number.parseInt(startStr, 10);
    let end;
    if (endStr === undefined) {
        // BUGFIX: a bare "N" spec used to throw (endStr.startsWith on
        // undefined); treat it as the single line N.
        end = start;
    }
    else if (endStr.startsWith('+')) {
        // Relative offset: start + offset lines.
        // NOTE(review): this selects offset+1 lines, whereas git's "-L X,+N"
        // selects exactly N; preserved as-is for existing callers - confirm.
        end = start + Number.parseInt(endStr.slice(1), 10);
    }
    else {
        end = Number.parseInt(endStr, 10);
    }
    return { start, end };
}
/**
 * Score how similar two text blobs are, as a ratio in [0, 1].
 *
 * 1 means identical (or both empty); 0 means nothing in common. Otherwise
 * the score is the number of LCS-matched lines divided by the line count of
 * the longer input.
 *
 * @param {string} a - First text.
 * @param {string} b - Second text.
 * @returns {number} Similarity ratio between 0 and 1.
 */
function calculateSimilarity(a, b) {
    if (a === b)
        return 1;
    if (a.length === 0 || b.length === 0)
        return 0;
    const linesA = splitLines(a);
    const linesB = splitLines(b);
    const longest = Math.max(linesA.length, linesB.length);
    if (longest === 0)
        return 1; // both inputs were pure whitespace-free empties
    if (linesA.length === 0 || linesB.length === 0)
        return 0;
    const matched = computeLineMapping(linesA, linesB, false).size;
    return matched / longest;
}
// ============================================================================
// Main Functions
// ============================================================================
/**
 * Compute blame for a file at a specific commit.
 *
 * Walks the commit graph breadth-first starting from `commit`. At each step
 * the child's file version is LCS-diffed against the parent's version, and
 * every line that already existed in the parent is re-attributed to that
 * parent. Lines that never match any ancestor stay attributed to the newest
 * commit in which they appeared.
 *
 * @param {object} storage - Store exposing getCommit/getTree/getBlob/
 *   getFileAtCommit/getRenamesInCommit (project-defined interface).
 * @param {string} path - File path inside the repository tree.
 * @param {string} commit - SHA of the commit to blame at.
 * @param {object} [options] - Supports lineRange, maxCommits, followRenames,
 *   ignoreRevisions, ignoreWhitespace, since, until.
 * @returns {Promise<object>} { path, lines, commits, options } where `lines`
 *   holds per-line attribution and `commits` maps sha -> commit summary.
 * @throws {Error} When the commit is missing, the file is absent at that
 *   commit, or the file content is binary.
 */
export async function blame(storage, path, commit, options) {
    const opts = options ?? {};
    // Get the commit object
    const commitObj = await storage.getCommit(commit);
    if (!commitObj) {
        throw new Error(`Commit not found: ${commit}`);
    }
    // Get the file content at this commit
    const fileContent = await getFileAtPath(storage, commitObj, path);
    if (fileContent === null) {
        throw new Error(`File not found: ${path} at commit ${commit}`);
    }
    // Check for binary file
    if (isBinaryContent(fileContent)) {
        throw new Error(`Cannot blame binary file: ${path}`);
    }
    const contentStr = decoder.decode(fileContent);
    let lines = splitLines(contentStr);
    // Handle empty file
    if (lines.length === 0) {
        return {
            path,
            lines: [],
            commits: new Map(),
            options: opts
        };
    }
    // Parse line range if specified
    let startLine = 1;
    let endLine = lines.length;
    if (opts.lineRange) {
        const range = parseLineRange(opts.lineRange, lines);
        startLine = range.start;
        endLine = range.end;
    }
    // Initialize blame info for each line (all attributed to current commit initially)
    const blameInfo = lines.map((content, idx) => ({
        commitSha: commit,
        author: commitObj.author.name,
        email: commitObj.author.email,
        timestamp: commitObj.author.timestamp,
        content,
        lineNumber: idx + 1,
        originalLineNumber: idx + 1,
        originalPath: path
    }));
    // Track which lines still need attribution
    // NOTE(review): these flags are read in the attribution loop below but are
    // never set to false, so each matching ancestor re-attributes the line
    // (net effect: the oldest matching commit visited wins). Confirm intended.
    const lineNeedsAttribution = new Array(lines.length).fill(true);
    // Track the current path (for rename following)
    let currentPath = path;
    // Track commits for the result
    const commitsMap = new Map();
    // Add current commit info
    commitsMap.set(commit, {
        sha: commit,
        author: commitObj.author.name,
        email: commitObj.author.email,
        timestamp: commitObj.author.timestamp,
        summary: commitObj.message.split('\n')[0],
        boundary: commitObj.parents.length === 0
    });
    // Walk through commit history
    let currentCommit = commit;
    let currentLines = lines;
    let commitCount = 0;
    const maxCommits = opts.maxCommits ?? Infinity;
    // Handle the followRenames option
    const followRenames = opts.followRenames ?? false;
    // For merge commits, we need to explore both parents
    const commitQueue = [];
    // Initialize with current commit's parents
    const currentCommitObj = await storage.getCommit(currentCommit);
    if (currentCommitObj && currentCommitObj.parents.length > 0) {
        for (const parentSha of currentCommitObj.parents) {
            // Identity mapping for first level
            const identityMapping = new Map();
            for (let i = 0; i < currentLines.length; i++) {
                identityMapping.set(i, i);
            }
            commitQueue.push({
                sha: parentSha,
                lines: currentLines,
                path: currentPath,
                lineMapping: identityMapping,
                childCommitSha: currentCommit
            });
        }
    }
    // Process commit queue (BFS through history)
    while (commitQueue.length > 0 && commitCount < maxCommits) {
        const item = commitQueue.shift();
        const { sha: parentSha, lines: childLines, path: childPath, lineMapping: childToOriginal, childCommitSha } = item;
        // Check if this commit should be ignored
        if (opts.ignoreRevisions?.includes(parentSha)) {
            // Skip this commit but continue to its parents
            const parentCommitObj = await storage.getCommit(parentSha);
            if (parentCommitObj && parentCommitObj.parents.length > 0) {
                for (const grandparentSha of parentCommitObj.parents) {
                    commitQueue.push({
                        sha: grandparentSha,
                        lines: childLines,
                        path: childPath,
                        lineMapping: childToOriginal,
                        childCommitSha: parentSha
                    });
                }
            }
            continue;
        }
        commitCount++;
        // Check date filters
        const parentCommitObj = await storage.getCommit(parentSha);
        if (!parentCommitObj)
            continue;
        // Author timestamps are unix seconds; Date.getTime() is milliseconds.
        if (opts.since && parentCommitObj.author.timestamp * 1000 < opts.since.getTime()) {
            continue;
        }
        if (opts.until && parentCommitObj.author.timestamp * 1000 > opts.until.getTime()) {
            continue;
        }
        // Track path through renames
        // Renames are stored in the child commit (the one that did the rename)
        // So we check the childCommitSha to find what the file was called in the parent
        let pathInParent = childPath;
        if (followRenames) {
            // Check renames in the child commit (where the rename happened)
            const childRenames = await storage.getRenamesInCommit(childCommitSha);
            // Find reverse rename: oldPath -> newPath means in parent it was oldPath
            for (const [oldPath, newPath] of childRenames) {
                if (newPath === childPath) {
                    pathInParent = oldPath;
                    break;
                }
            }
        }
        // Get file content in parent
        const parentContent = await getFileAtPath(storage, parentCommitObj, pathInParent);
        // If file doesn't exist in parent, all remaining lines are from the first commit that has them
        if (!parentContent) {
            continue;
        }
        const parentContentStr = decoder.decode(parentContent);
        const parentLines = splitLines(parentContentStr);
        // Compute line mapping between parent and child
        const mapping = computeLineMapping(parentLines, childLines, opts.ignoreWhitespace ?? false);
        // Add commit info
        if (!commitsMap.has(parentSha)) {
            commitsMap.set(parentSha, {
                sha: parentSha,
                author: parentCommitObj.author.name,
                email: parentCommitObj.author.email,
                timestamp: parentCommitObj.author.timestamp,
                summary: parentCommitObj.message.split('\n')[0],
                boundary: parentCommitObj.parents.length === 0
            });
        }
        // Update blame for lines that came from parent
        // mapping: parentLineIdx -> childLineIdx
        for (const [parentIdx, childIdx] of mapping) {
            // Convert childIdx to original index
            for (const [origIdx, mappedChildIdx] of childToOriginal) {
                if (mappedChildIdx === childIdx && lineNeedsAttribution[origIdx]) {
                    // This line exists in parent - attribute to parent
                    blameInfo[origIdx].commitSha = parentSha;
                    blameInfo[origIdx].author = parentCommitObj.author.name;
                    blameInfo[origIdx].email = parentCommitObj.author.email;
                    blameInfo[origIdx].timestamp = parentCommitObj.author.timestamp;
                    blameInfo[origIdx].originalLineNumber = parentIdx + 1;
                    if (pathInParent !== childPath) {
                        blameInfo[origIdx].originalPath = pathInParent;
                    }
                }
            }
        }
        // Build new mapping from original indices to parent indices
        const newMapping = new Map();
        for (const [origIdx, childIdx] of childToOriginal) {
            // Find if this child line maps to a parent line
            for (const [parentIdx, mappedChildIdx] of mapping) {
                if (mappedChildIdx === childIdx) {
                    newMapping.set(origIdx, parentIdx);
                    break;
                }
            }
        }
        // Add parent's parents to queue if there are still lines to attribute
        if (parentCommitObj.parents.length > 0 && newMapping.size > 0) {
            for (const grandparentSha of parentCommitObj.parents) {
                commitQueue.push({
                    sha: grandparentSha,
                    lines: parentLines,
                    path: pathInParent,
                    lineMapping: newMapping,
                    childCommitSha: parentSha
                });
            }
        }
    }
    // Filter to requested line range
    let resultLines = blameInfo;
    if (opts.lineRange) {
        resultLines = blameInfo.filter(l => l.lineNumber >= startLine && l.lineNumber <= endLine);
    }
    return {
        path,
        lines: resultLines,
        commits: commitsMap,
        options: opts
    };
}
/**
 * Convenience alias for {@link blame}: blame the entire file at a commit.
 *
 * @param {object} storage - Same store interface as blame().
 * @param {string} path - File path inside the repository tree.
 * @param {string} commit - SHA of the commit to blame at.
 * @param {object} [options] - Passed straight through to blame().
 * @returns {Promise<object>} The blame result, unchanged.
 */
export async function blameFile(storage, path, commit, options) {
    const result = await blame(storage, path, commit, options);
    return result;
}
/**
 * Get blame information for a single 1-indexed line.
 *
 * NOTE(review): if `options.lineRange` is supplied, result.lines is already
 * filtered, so `lineNumber` then indexes into the filtered slice - confirm
 * whether callers ever combine the two.
 *
 * @param {object} storage - Same store interface as blame().
 * @param {string} path - File path inside the repository tree.
 * @param {number} lineNumber - 1-indexed line to look up.
 * @param {string} commit - SHA of the commit to blame at.
 * @param {object} [options] - Passed straight through to blame().
 * @returns {Promise<object>} The blame entry for that line.
 * @throws {Error} When lineNumber is below 1 or past the end of the file.
 */
export async function blameLine(storage, path, lineNumber, commit, options) {
    if (lineNumber < 1) {
        throw new Error(`Invalid line number: ${lineNumber}. Line numbers start at 1.`);
    }
    const result = await blame(storage, path, commit, options);
    const { lines } = result;
    if (lineNumber > lines.length) {
        throw new Error(`Invalid line number: ${lineNumber}. File has ${lines.length} lines.`);
    }
    return lines[lineNumber - 1];
}
/**
 * Get blame for an inclusive 1-indexed line range.
 *
 * @param {object} storage - Same store interface as blame().
 * @param {string} path - File path inside the repository tree.
 * @param {number} startLine - First line (1-indexed, inclusive).
 * @param {number} endLine - Last line (1-indexed, inclusive).
 * @param {string} commit - SHA of the commit to blame at.
 * @param {object} [options] - Passed straight through to blame().
 * @returns {Promise<object>} Blame result restricted to the requested slice.
 * @throws {Error} On an invalid start, inverted range, or out-of-bounds end.
 */
export async function blameRange(storage, path, startLine, endLine, commit, options) {
    if (startLine < 1) {
        throw new Error(`Invalid start line: ${startLine}. Line numbers start at 1.`);
    }
    if (endLine < startLine) {
        throw new Error(`Invalid range: end (${endLine}) is before start (${startLine}).`);
    }
    const fullResult = await blame(storage, path, commit, options);
    if (endLine > fullResult.lines.length) {
        throw new Error(`Invalid end line: ${endLine}. File has ${fullResult.lines.length} lines.`);
    }
    const slice = fullResult.lines.slice(startLine - 1, endLine);
    return {
        path: fullResult.path,
        lines: slice,
        commits: fullResult.commits,
        options: fullResult.options
    };
}
/**
 * Get blame as of a specific historical commit (alias for {@link blame}).
 *
 * @param {object} storage - Same store interface as blame().
 * @param {string} path - File path inside the repository tree.
 * @param {string} commit - SHA of the historical commit.
 * @param {object} [options] - Passed straight through to blame().
 * @returns {Promise<object>} The blame result, unchanged.
 */
export async function getBlameForCommit(storage, path, commit, options) {
    const result = await blame(storage, path, commit, options);
    return result;
}
/**
 * Follow a file's path backwards through first-parent history, applying
 * renames recorded in each commit.
 *
 * @param {object} storage - Store exposing getCommit/getRenamesInCommit.
 * @param {string} path - Path of the file at `commit`.
 * @param {string} commit - SHA to start walking from.
 * @param {object} [_options] - Unused; kept for signature compatibility.
 * @returns {Promise<Array<{commit: string, path: string}>>} One entry per
 *   visited commit, newest first, with the path the file had at that commit.
 */
export async function trackContentAcrossRenames(storage, path, commit, _options) {
    const history = [];
    let trackedPath = path;
    let sha = commit;
    while (sha) {
        history.push({ commit: sha, path: trackedPath });
        const commitObj = await storage.getCommit(sha);
        if (!commitObj || commitObj.parents.length === 0)
            break;
        // A rename recorded here means the file had `oldPath` in the parent.
        const renames = await storage.getRenamesInCommit(sha);
        for (const [oldPath, newPath] of renames) {
            if (newPath === trackedPath) {
                trackedPath = oldPath;
                break;
            }
        }
        // First-parent walk only.
        sha = commitObj.parents[0];
    }
    return history;
}
/**
 * Detect file renames between two commits.
 *
 * Two passes over files deleted in `fromCommit` and added in `toCommit`:
 *   1. exact blob-SHA matches (pure renames);
 *   2. content-similarity matches at or above `options.threshold`
 *      (default 0.5), skipping binaries and already-claimed targets.
 *
 * NOTE(review): only top-level tree entries are compared - files inside
 * subdirectories are not considered. Confirm whether that is intentional.
 *
 * @param {object} storage - Store exposing getCommit/getTree/getBlob.
 * @param {string} fromCommit - SHA of the older commit.
 * @param {string} toCommit - SHA of the newer commit.
 * @param {object} [options] - { threshold?: number } similarity cutoff.
 * @returns {Promise<Map<string, string>>} oldName -> newName rename pairs.
 */
export async function detectRenames(storage, fromCommit, toCommit, options) {
    const threshold = options?.threshold ?? 0.5;
    const renames = new Map();
    const fromCommitObj = await storage.getCommit(fromCommit);
    const toCommitObj = await storage.getCommit(toCommit);
    if (!fromCommitObj || !toCommitObj)
        return renames;
    const fromTree = await storage.getTree(fromCommitObj.tree);
    const toTree = await storage.getTree(toCommitObj.tree);
    if (!fromTree || !toTree)
        return renames;
    // name -> blob sha for top-level file entries (directories skipped)
    const fromFiles = new Map();
    const toFiles = new Map();
    for (const entry of fromTree.entries) {
        if (entry.mode !== '040000') {
            fromFiles.set(entry.name, entry.sha);
        }
    }
    for (const entry of toTree.entries) {
        if (entry.mode !== '040000') {
            toFiles.set(entry.name, entry.sha);
        }
    }
    // Deleted: present in 'from' only. Added: present in 'to' only.
    const deletedFiles = [...fromFiles.keys()].filter((name) => !toFiles.has(name));
    const addedFiles = [...toFiles.keys()].filter((name) => !fromFiles.has(name));
    // Pass 1: exact SHA matches (pure renames)
    for (const deleted of deletedFiles) {
        const deletedSha = fromFiles.get(deleted);
        for (const added of addedFiles) {
            if (toFiles.get(added) === deletedSha) {
                renames.set(deleted, added);
                break;
            }
        }
    }
    // Pass 2: content similarity (renames with modifications).
    // Track claimed targets in a Set for O(1) membership instead of the
    // previous O(n) rescan of renames.values() per candidate.
    const matchedTargets = new Set(renames.values());
    for (const deleted of deletedFiles) {
        if (renames.has(deleted))
            continue;
        const deletedContent = await storage.getBlob(fromFiles.get(deleted));
        if (!deletedContent || isBinaryContent(deletedContent))
            continue;
        const deletedStr = decoder.decode(deletedContent);
        for (const added of addedFiles) {
            if (matchedTargets.has(added))
                continue;
            const addedContent = await storage.getBlob(toFiles.get(added));
            if (!addedContent || isBinaryContent(addedContent))
                continue;
            const addedStr = decoder.decode(addedContent);
            if (calculateSimilarity(deletedStr, addedStr) >= threshold) {
                renames.set(deleted, added);
                matchedTargets.add(added);
                break;
            }
        }
    }
    return renames;
}
/**
 * Build the complete blame history of a single line, walking first-parent
 * history and following the line's position through each diff.
 *
 * At each step the current version is diffed against its first parent; if the
 * line matches a parent line, tracking continues at that position. If no
 * content match exists but the parent still has a line at the same position,
 * that position is assumed to be the same logical line (content modified).
 *
 * @param {object} storage - Store exposing getCommit/getTree/getBlob/
 *   getFileAtCommit/getRenamesInCommit (project-defined interface).
 * @param {string} path - Path of the file at `commit`.
 * @param {number} lineNumber - 1-indexed line to trace.
 * @param {string} commit - SHA to start walking from.
 * @param {object} [options] - { ignoreWhitespace?: boolean } for line diffs.
 * @returns {Promise<Array<object>>} Newest-first entries of
 *   { commitSha, content, lineNumber, author, timestamp }.
 */
export async function buildBlameHistory(storage, path, lineNumber, commit, options) {
    const history = [];
    let currentCommitSha = commit;
    let currentPath = path;
    let currentLineNumber = lineNumber;
    while (currentCommitSha) {
        const commitObj = await storage.getCommit(currentCommitSha);
        if (!commitObj)
            break;
        const fileContent = await getFileAtPath(storage, commitObj, currentPath);
        if (!fileContent)
            break;
        const contentStr = decoder.decode(fileContent);
        const lines = splitLines(contentStr);
        // Stop when the tracked position falls outside this version.
        if (currentLineNumber > lines.length || currentLineNumber < 1)
            break;
        history.push({
            commitSha: currentCommitSha,
            content: lines[currentLineNumber - 1],
            lineNumber: currentLineNumber,
            author: commitObj.author.name,
            timestamp: commitObj.author.timestamp
        });
        // Move to parent
        if (commitObj.parents.length === 0)
            break;
        // First-parent walk only.
        const parentSha = commitObj.parents[0];
        const parentCommitObj = await storage.getCommit(parentSha);
        if (!parentCommitObj)
            break;
        // Check for renames
        // Renames recorded here mean the file had `oldPath` in the parent.
        const renames = await storage.getRenamesInCommit(currentCommitSha);
        for (const [oldPath, newPath] of renames) {
            if (newPath === currentPath) {
                currentPath = oldPath;
                break;
            }
        }
        // Get parent content and find corresponding line
        const parentContent = await getFileAtPath(storage, parentCommitObj, currentPath);
        if (!parentContent)
            break;
        const parentContentStr = decoder.decode(parentContent);
        const parentLines = splitLines(parentContentStr);
        // Find which line in parent corresponds to our current line
        const mapping = computeLineMapping(parentLines, lines, options?.ignoreWhitespace ?? false);
        let foundParentLine = false;
        for (const [parentIdx, childIdx] of mapping) {
            if (childIdx === currentLineNumber - 1) {
                currentLineNumber = parentIdx + 1;
                foundParentLine = true;
                break;
            }
        }
        // If we didn't find a content match but the parent has the line at the same position,
        // assume it's the same line (content was modified). This is important for tracking
        // history of lines that change content in every commit.
        if (!foundParentLine) {
            if (currentLineNumber <= parentLines.length) {
                // Line exists at same position in parent - assume it's the same logical line
                // NOTE(review): this assignment is never read again (the loop
                // iterates without consulting foundParentLine); kept as-is.
                foundParentLine = true;
                // currentLineNumber stays the same
            }
            else {
                break;
            }
        }
        currentCommitSha = parentSha;
    }
    return history;
}
/**
 * Render a blame result as text.
 *
 * With `options.format === 'porcelain'` the machine-readable porcelain-style
 * layout is produced (header line, author/committer metadata, then the
 * content line prefixed with a tab). Otherwise a human-readable listing is
 * produced: short sha, padded author (or email with showEmail), optional
 * ISO date (showDate) and line number (showLineNumbers), then the content.
 *
 * @param {object} result - Blame result ({ path, lines, commits }).
 * @param {object} [options] - { format, showDate, showEmail, showLineNumbers }.
 * @returns {string} The formatted blame, lines joined with '\n'.
 */
export function formatBlame(result, options) {
    const opts = options ?? {};
    const out = [];
    if (opts.format === 'porcelain') {
        // Porcelain format - machine readable
        for (const line of result.lines) {
            const commitInfo = result.commits.get(line.commitSha);
            const mail = line.email || commitInfo?.email || '';
            out.push(
                `${line.commitSha} ${line.originalLineNumber} ${line.lineNumber} 1`,
                `author ${line.author}`,
                `author-mail <${mail}>`,
                `author-time ${line.timestamp}`,
                `author-tz +0000`,
                `committer ${line.author}`,
                `committer-mail <${mail}>`,
                `committer-time ${line.timestamp}`,
                `committer-tz +0000`,
                `filename ${result.path}`,
                `\t${line.content}`
            );
        }
    }
    else {
        // Default format - human readable
        for (const line of result.lines) {
            const shortSha = line.commitSha.substring(0, 8);
            // Fixed-width author column (email column is wider).
            let who = line.author.padEnd(15).substring(0, 15);
            if (opts.showEmail) {
                const email = line.email || result.commits.get(line.commitSha)?.email || '';
                who = email.padEnd(25).substring(0, 25);
            }
            const datePart = opts.showDate
                ? ` ${new Date(line.timestamp * 1000).toISOString().substring(0, 10)}`
                : '';
            const numPart = opts.showLineNumbers ? `${line.lineNumber}) ` : '';
            // NOTE(review): without showLineNumbers the opening "(" is never
            // closed; output preserved byte-for-byte for existing consumers.
            out.push(`${shortSha} (${who}${datePart} ${numPart}${line.content}`);
        }
    }
    return out.join('\n');
}
/**
 * Parse porcelain-style blame output back into a blame result.
 *
 * Each record starts with a header line "<sha> <orig-line> <final-line> ...",
 * followed by metadata lines (author, author-mail, author-time are consumed;
 * everything else is skipped) and ends at the tab-prefixed content line.
 * SHAs are accepted as any 40 alphanumeric characters (test fixtures use
 * synthetic SHAs). The result's `path` is always '' (not encoded per record).
 *
 * @param {string} output - Porcelain blame text.
 * @returns {object} { path: '', lines, commits } mirroring a blame result.
 */
export function parseBlameOutput(output) {
    const lines = [];
    const commits = new Map();
    const rows = output.split('\n');
    const headerRe = /^([0-9a-zA-Z]{40}) (\d+) (\d+)/;
    let cursor = 0;
    while (cursor < rows.length) {
        const header = rows[cursor];
        if (!header || header.trim() === '') {
            cursor++;
            continue;
        }
        const match = header.match(headerRe);
        if (!match) {
            cursor++;
            continue;
        }
        const commitSha = match[1];
        const originalLineNumber = parseInt(match[2], 10);
        const lineNumber = parseInt(match[3], 10);
        // Consume metadata rows until the tab-prefixed content row.
        let author = '';
        let email = '';
        let timestamp = 0;
        let content = '';
        cursor++;
        while (cursor < rows.length) {
            const row = rows[cursor];
            if (row.startsWith('\t')) {
                content = row.substring(1);
                cursor++;
                break;
            }
            if (row.startsWith('author ')) {
                author = row.substring(7);
            }
            else if (row.startsWith('author-mail ')) {
                email = row.substring(12).replace(/[<>]/g, '');
            }
            else if (row.startsWith('author-time ')) {
                timestamp = parseInt(row.substring(12), 10);
            }
            cursor++;
        }
        lines.push({
            commitSha,
            author,
            email,
            timestamp,
            content,
            lineNumber,
            originalLineNumber
        });
        // Record commit metadata once per sha.
        if (!commits.has(commitSha)) {
            commits.set(commitSha, {
                sha: commitSha,
                author,
                email,
                timestamp,
                summary: ''
            });
        }
    }
    return {
        path: '',
        lines,
        commits
    };
}
754
+ //# sourceMappingURL=blame.js.map