gitx.do 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (167)
  1. package/LICENSE +21 -0
  2. package/README.md +156 -0
  3. package/dist/durable-object/object-store.d.ts +113 -0
  4. package/dist/durable-object/object-store.d.ts.map +1 -0
  5. package/dist/durable-object/object-store.js +387 -0
  6. package/dist/durable-object/object-store.js.map +1 -0
  7. package/dist/durable-object/schema.d.ts +17 -0
  8. package/dist/durable-object/schema.d.ts.map +1 -0
  9. package/dist/durable-object/schema.js +43 -0
  10. package/dist/durable-object/schema.js.map +1 -0
  11. package/dist/durable-object/wal.d.ts +111 -0
  12. package/dist/durable-object/wal.d.ts.map +1 -0
  13. package/dist/durable-object/wal.js +200 -0
  14. package/dist/durable-object/wal.js.map +1 -0
  15. package/dist/index.d.ts +24 -0
  16. package/dist/index.d.ts.map +1 -0
  17. package/dist/index.js +101 -0
  18. package/dist/index.js.map +1 -0
  19. package/dist/mcp/adapter.d.ts +231 -0
  20. package/dist/mcp/adapter.d.ts.map +1 -0
  21. package/dist/mcp/adapter.js +502 -0
  22. package/dist/mcp/adapter.js.map +1 -0
  23. package/dist/mcp/sandbox.d.ts +261 -0
  24. package/dist/mcp/sandbox.d.ts.map +1 -0
  25. package/dist/mcp/sandbox.js +983 -0
  26. package/dist/mcp/sandbox.js.map +1 -0
  27. package/dist/mcp/sdk-adapter.d.ts +413 -0
  28. package/dist/mcp/sdk-adapter.d.ts.map +1 -0
  29. package/dist/mcp/sdk-adapter.js +672 -0
  30. package/dist/mcp/sdk-adapter.js.map +1 -0
  31. package/dist/mcp/tools.d.ts +133 -0
  32. package/dist/mcp/tools.d.ts.map +1 -0
  33. package/dist/mcp/tools.js +1604 -0
  34. package/dist/mcp/tools.js.map +1 -0
  35. package/dist/ops/blame.d.ts +148 -0
  36. package/dist/ops/blame.d.ts.map +1 -0
  37. package/dist/ops/blame.js +754 -0
  38. package/dist/ops/blame.js.map +1 -0
  39. package/dist/ops/branch.d.ts +215 -0
  40. package/dist/ops/branch.d.ts.map +1 -0
  41. package/dist/ops/branch.js +608 -0
  42. package/dist/ops/branch.js.map +1 -0
  43. package/dist/ops/commit-traversal.d.ts +209 -0
  44. package/dist/ops/commit-traversal.d.ts.map +1 -0
  45. package/dist/ops/commit-traversal.js +755 -0
  46. package/dist/ops/commit-traversal.js.map +1 -0
  47. package/dist/ops/commit.d.ts +221 -0
  48. package/dist/ops/commit.d.ts.map +1 -0
  49. package/dist/ops/commit.js +606 -0
  50. package/dist/ops/commit.js.map +1 -0
  51. package/dist/ops/merge-base.d.ts +223 -0
  52. package/dist/ops/merge-base.d.ts.map +1 -0
  53. package/dist/ops/merge-base.js +581 -0
  54. package/dist/ops/merge-base.js.map +1 -0
  55. package/dist/ops/merge.d.ts +385 -0
  56. package/dist/ops/merge.d.ts.map +1 -0
  57. package/dist/ops/merge.js +1203 -0
  58. package/dist/ops/merge.js.map +1 -0
  59. package/dist/ops/tag.d.ts +182 -0
  60. package/dist/ops/tag.d.ts.map +1 -0
  61. package/dist/ops/tag.js +608 -0
  62. package/dist/ops/tag.js.map +1 -0
  63. package/dist/ops/tree-builder.d.ts +82 -0
  64. package/dist/ops/tree-builder.d.ts.map +1 -0
  65. package/dist/ops/tree-builder.js +246 -0
  66. package/dist/ops/tree-builder.js.map +1 -0
  67. package/dist/ops/tree-diff.d.ts +243 -0
  68. package/dist/ops/tree-diff.d.ts.map +1 -0
  69. package/dist/ops/tree-diff.js +657 -0
  70. package/dist/ops/tree-diff.js.map +1 -0
  71. package/dist/pack/delta.d.ts +68 -0
  72. package/dist/pack/delta.d.ts.map +1 -0
  73. package/dist/pack/delta.js +343 -0
  74. package/dist/pack/delta.js.map +1 -0
  75. package/dist/pack/format.d.ts +84 -0
  76. package/dist/pack/format.d.ts.map +1 -0
  77. package/dist/pack/format.js +261 -0
  78. package/dist/pack/format.js.map +1 -0
  79. package/dist/pack/full-generation.d.ts +327 -0
  80. package/dist/pack/full-generation.d.ts.map +1 -0
  81. package/dist/pack/full-generation.js +1159 -0
  82. package/dist/pack/full-generation.js.map +1 -0
  83. package/dist/pack/generation.d.ts +118 -0
  84. package/dist/pack/generation.d.ts.map +1 -0
  85. package/dist/pack/generation.js +459 -0
  86. package/dist/pack/generation.js.map +1 -0
  87. package/dist/pack/index.d.ts +181 -0
  88. package/dist/pack/index.d.ts.map +1 -0
  89. package/dist/pack/index.js +552 -0
  90. package/dist/pack/index.js.map +1 -0
  91. package/dist/refs/branch.d.ts +224 -0
  92. package/dist/refs/branch.d.ts.map +1 -0
  93. package/dist/refs/branch.js +170 -0
  94. package/dist/refs/branch.js.map +1 -0
  95. package/dist/refs/storage.d.ts +208 -0
  96. package/dist/refs/storage.d.ts.map +1 -0
  97. package/dist/refs/storage.js +421 -0
  98. package/dist/refs/storage.js.map +1 -0
  99. package/dist/refs/tag.d.ts +230 -0
  100. package/dist/refs/tag.d.ts.map +1 -0
  101. package/dist/refs/tag.js +188 -0
  102. package/dist/refs/tag.js.map +1 -0
  103. package/dist/storage/lru-cache.d.ts +188 -0
  104. package/dist/storage/lru-cache.d.ts.map +1 -0
  105. package/dist/storage/lru-cache.js +410 -0
  106. package/dist/storage/lru-cache.js.map +1 -0
  107. package/dist/storage/object-index.d.ts +140 -0
  108. package/dist/storage/object-index.d.ts.map +1 -0
  109. package/dist/storage/object-index.js +166 -0
  110. package/dist/storage/object-index.js.map +1 -0
  111. package/dist/storage/r2-pack.d.ts +394 -0
  112. package/dist/storage/r2-pack.d.ts.map +1 -0
  113. package/dist/storage/r2-pack.js +1062 -0
  114. package/dist/storage/r2-pack.js.map +1 -0
  115. package/dist/tiered/cdc-pipeline.d.ts +316 -0
  116. package/dist/tiered/cdc-pipeline.d.ts.map +1 -0
  117. package/dist/tiered/cdc-pipeline.js +771 -0
  118. package/dist/tiered/cdc-pipeline.js.map +1 -0
  119. package/dist/tiered/migration.d.ts +242 -0
  120. package/dist/tiered/migration.d.ts.map +1 -0
  121. package/dist/tiered/migration.js +592 -0
  122. package/dist/tiered/migration.js.map +1 -0
  123. package/dist/tiered/parquet-writer.d.ts +248 -0
  124. package/dist/tiered/parquet-writer.d.ts.map +1 -0
  125. package/dist/tiered/parquet-writer.js +555 -0
  126. package/dist/tiered/parquet-writer.js.map +1 -0
  127. package/dist/tiered/read-path.d.ts +141 -0
  128. package/dist/tiered/read-path.d.ts.map +1 -0
  129. package/dist/tiered/read-path.js +204 -0
  130. package/dist/tiered/read-path.js.map +1 -0
  131. package/dist/types/objects.d.ts +53 -0
  132. package/dist/types/objects.d.ts.map +1 -0
  133. package/dist/types/objects.js +291 -0
  134. package/dist/types/objects.js.map +1 -0
  135. package/dist/types/storage.d.ts +117 -0
  136. package/dist/types/storage.d.ts.map +1 -0
  137. package/dist/types/storage.js +8 -0
  138. package/dist/types/storage.js.map +1 -0
  139. package/dist/utils/hash.d.ts +31 -0
  140. package/dist/utils/hash.d.ts.map +1 -0
  141. package/dist/utils/hash.js +60 -0
  142. package/dist/utils/hash.js.map +1 -0
  143. package/dist/utils/sha1.d.ts +26 -0
  144. package/dist/utils/sha1.d.ts.map +1 -0
  145. package/dist/utils/sha1.js +127 -0
  146. package/dist/utils/sha1.js.map +1 -0
  147. package/dist/wire/capabilities.d.ts +236 -0
  148. package/dist/wire/capabilities.d.ts.map +1 -0
  149. package/dist/wire/capabilities.js +437 -0
  150. package/dist/wire/capabilities.js.map +1 -0
  151. package/dist/wire/pkt-line.d.ts +67 -0
  152. package/dist/wire/pkt-line.d.ts.map +1 -0
  153. package/dist/wire/pkt-line.js +145 -0
  154. package/dist/wire/pkt-line.js.map +1 -0
  155. package/dist/wire/receive-pack.d.ts +302 -0
  156. package/dist/wire/receive-pack.d.ts.map +1 -0
  157. package/dist/wire/receive-pack.js +885 -0
  158. package/dist/wire/receive-pack.js.map +1 -0
  159. package/dist/wire/smart-http.d.ts +321 -0
  160. package/dist/wire/smart-http.d.ts.map +1 -0
  161. package/dist/wire/smart-http.js +654 -0
  162. package/dist/wire/smart-http.js.map +1 -0
  163. package/dist/wire/upload-pack.d.ts +333 -0
  164. package/dist/wire/upload-pack.d.ts.map +1 -0
  165. package/dist/wire/upload-pack.js +850 -0
  166. package/dist/wire/upload-pack.js.map +1 -0
  167. package/package.json +61 -0
package/dist/ops/merge.js
@@ -0,0 +1,1203 @@
1
+ /**
2
+ * Three-way merge implementation for Git
3
+ *
4
+ * This module provides functionality for merging branches using
5
+ * a three-way merge algorithm, including conflict detection and resolution.
6
+ */
7
+ /**
8
+ * Performs a three-way merge between the current branch and another commit.
9
+ *
10
+ * This function implements Git's three-way merge algorithm:
11
+ * 1. Find the common ancestor (merge base) of the two commits
12
+ * 2. Compare both sides against the base to identify changes
13
+ * 3. Apply non-conflicting changes automatically
14
+ * 4. Identify and report conflicts for manual resolution
15
+ *
16
+ * @param storage - The storage interface for reading/writing objects
17
+ * @param oursSha - SHA of the current branch's HEAD commit
18
+ * @param theirsSha - SHA of the commit to merge
19
+ * @param options - Merge options
20
+ * @returns MergeResult with status and any conflicts
21
+ *
22
+ * @example
23
+ * ```typescript
24
+ * const result = await merge(storage, 'abc123', 'def456', {
25
+ * message: 'Merge feature branch',
26
+ * allowFastForward: true
27
+ * })
28
+ *
29
+ * if (result.status === 'conflicted') {
30
+ * console.log('Conflicts:', result.conflicts)
31
+ * }
32
+ * ```
33
+ */
34
+ export async function merge(storage, oursSha, theirsSha, options = {}) {
35
+ // Check if merging with self
36
+ if (oursSha === theirsSha) {
37
+ return {
38
+ status: 'up-to-date',
39
+ oursSha,
40
+ theirsSha,
41
+ fastForward: false
42
+ };
43
+ }
44
+ // Find the merge base
45
+ const baseSha = await findMergeBase(storage, oursSha, theirsSha);
46
+ // If baseSha equals theirsSha, we're already up-to-date
47
+ if (baseSha === theirsSha) {
48
+ return {
49
+ status: 'up-to-date',
50
+ oursSha,
51
+ theirsSha,
52
+ baseSha,
53
+ fastForward: false
54
+ };
55
+ }
56
+ // Get tree SHAs for base, ours, and theirs
57
+ const oursCommit = await storage.readObject(oursSha);
58
+ const theirsCommit = await storage.readObject(theirsSha);
59
+ if (!oursCommit || !theirsCommit) {
60
+ throw new Error('Could not read commit objects');
61
+ }
62
+ const theirsTreeSha = parseCommitTree(theirsCommit.data, theirsCommit.tree);
63
+ if (!theirsTreeSha) {
64
+ throw new Error('Could not parse theirs tree SHA');
65
+ }
66
+ // Check if this is a fast-forward (ours is ancestor of theirs)
67
+ if (baseSha === oursSha) {
68
+ // A fast-forward is possible here, so a fastForwardOnly request is satisfied
69
+ // If allowFastForward is false, we need to create a merge commit
70
+ if (options.allowFastForward !== false) {
71
+ return {
72
+ status: 'fast-forward',
73
+ oursSha,
74
+ theirsSha,
75
+ baseSha,
76
+ treeSha: theirsTreeSha,
77
+ fastForward: true
78
+ };
79
+ }
80
+ // allowFastForward is false, so create a merge commit
81
+ // Continue with merge logic below but no conflicts
82
+ }
83
+ // If fastForwardOnly is set and we couldn't fast-forward, throw an error
84
+ if (options.fastForwardOnly) {
85
+ throw new Error('Not possible to fast-forward, aborting');
86
+ }
87
+ const oursTreeSha = parseCommitTree(oursCommit.data, oursCommit.tree);
88
+ if (!oursTreeSha) {
89
+ throw new Error('Could not parse commit tree SHAs');
90
+ }
91
+ // Get base tree SHA (if we have a base)
92
+ let baseTreeSha = null;
93
+ if (baseSha) {
94
+ const baseCommit = await storage.readObject(baseSha);
95
+ if (baseCommit) {
96
+ baseTreeSha = parseCommitTree(baseCommit.data, baseCommit.tree);
97
+ }
98
+ }
99
+ // Get tree entries for each version
100
+ const baseEntries = baseTreeSha ? await getTreeEntries(storage, baseTreeSha) : new Map();
101
+ const oursEntries = await getTreeEntries(storage, oursTreeSha);
102
+ const theirsEntries = await getTreeEntries(storage, theirsTreeSha);
103
+ // Collect all paths
104
+ const allPaths = new Set();
105
+ for (const path of baseEntries.keys())
106
+ allPaths.add(path);
107
+ for (const path of oursEntries.keys())
108
+ allPaths.add(path);
109
+ for (const path of theirsEntries.keys())
110
+ allPaths.add(path);
111
+ // Merge each path
112
+ const conflicts = [];
113
+ const mergedEntries = new Map();
114
+ const stats = {
115
+ filesAdded: 0,
116
+ filesModified: 0,
117
+ filesDeleted: 0,
118
+ filesRenamed: 0,
119
+ binaryFilesChanged: 0,
120
+ linesAdded: 0,
121
+ linesRemoved: 0
122
+ };
123
+ for (const path of allPaths) {
124
+ const baseEntry = baseEntries.get(path);
125
+ const oursEntry = oursEntries.get(path);
126
+ const theirsEntry = theirsEntries.get(path);
127
+ const mergeResult = await mergeEntry(storage, path, baseEntry, oursEntry, theirsEntry, stats);
128
+ if (mergeResult.conflict) {
129
+ conflicts.push(mergeResult.conflict);
130
+ }
131
+ if (mergeResult.entry) {
132
+ mergedEntries.set(path, mergeResult.entry);
133
+ }
134
+ }
135
+ // Handle autoResolve with conflictStrategy
136
+ if (conflicts.length > 0 && options.autoResolve && options.conflictStrategy) {
137
+ // Auto-resolve conflicts using specified strategy
138
+ for (const conflict of conflicts) {
139
+ if (options.conflictStrategy === 'ours' && conflict.oursSha) {
140
+ // Use ours version
141
+ mergedEntries.set(conflict.path, {
142
+ path: conflict.path,
143
+ mode: conflict.oursMode || '100644',
144
+ sha: conflict.oursSha
145
+ });
146
+ }
147
+ else if (options.conflictStrategy === 'theirs' && conflict.theirsSha) {
148
+ // Use theirs version
149
+ mergedEntries.set(conflict.path, {
150
+ path: conflict.path,
151
+ mode: conflict.theirsMode || '100644',
152
+ sha: conflict.theirsSha
153
+ });
154
+ }
155
+ }
156
+ // Clear conflicts since they're auto-resolved
157
+ conflicts.length = 0;
158
+ }
159
+ // Build merged tree and write it
160
+ const treeSha = await buildAndWriteTree(storage, mergedEntries);
161
+ if (conflicts.length > 0) {
162
+ // Save merge state for conflict resolution
163
+ const mergeState = {
164
+ mergeHead: theirsSha,
165
+ origHead: oursSha,
166
+ message: options.message ?? `Merge ${theirsSha} into ${oursSha}`,
167
+ unresolvedConflicts: conflicts,
168
+ resolvedConflicts: [],
169
+ options
170
+ };
171
+ await storage.writeMergeState(mergeState);
172
+ return {
173
+ status: 'conflicted',
174
+ oursSha,
175
+ theirsSha,
176
+ baseSha: baseSha ?? undefined,
177
+ treeSha,
178
+ conflicts,
179
+ stats,
180
+ fastForward: false
181
+ };
182
+ }
183
+ // Handle options
184
+ const finalMessage = options.message ?? `Merge ${theirsSha} into ${oursSha}`;
185
+ // If noCommit is set, don't create a commit SHA
186
+ if (options.noCommit) {
187
+ return {
188
+ status: 'merged',
189
+ oursSha,
190
+ theirsSha,
191
+ baseSha: baseSha ?? undefined,
192
+ treeSha,
193
+ stats,
194
+ message: finalMessage,
195
+ fastForward: false
196
+ };
197
+ }
198
+ // Create a synthetic merge commit SHA (placeholder; no commit object is written here)
199
+ const commitSha = generateHexSha(`merge${Date.now()}`);
200
+ return {
201
+ status: 'merged',
202
+ oursSha,
203
+ theirsSha,
204
+ baseSha: baseSha ?? undefined,
205
+ treeSha,
206
+ commitSha,
207
+ stats,
208
+ message: finalMessage,
209
+ fastForward: false
210
+ };
211
+ }
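A hedged sketch of how a caller might branch on the statuses `merge()` returns ('up-to-date', 'fast-forward', 'conflicted', 'merged'); `storage`, `oursHeadSha`, and `theirsHeadSha` are placeholders, not identifiers from this package.

```typescript
// Illustrative only: handle each status merge() can produce.
const result = await merge(storage, oursHeadSha, theirsHeadSha, { message: 'Merge feature' })
switch (result.status) {
  case 'up-to-date':
    break // theirs is already contained in ours; nothing to do
  case 'fast-forward':
    // move the branch ref to result.theirsSha; result.treeSha is theirs' tree
    break
  case 'conflicted':
    // result.conflicts lists the paths to fix via resolveConflict(), then continueMerge()
    break
  case 'merged':
    // result.commitSha is set (unless noCommit was passed) and result.stats summarizes changes
    break
}
```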
212
+ /**
213
+ * Generate a deterministic, SHA-shaped 40-character hex string (placeholder, not a content hash)
214
+ */
215
+ function generateHexSha(seed) {
216
+ // Derive a simple 32-bit hash from the seed string
217
+ let hash = 0;
218
+ for (let i = 0; i < seed.length; i++) {
219
+ const char = seed.charCodeAt(i);
220
+ hash = ((hash << 5) - hash) + char;
221
+ hash = hash & hash; // Convert to 32bit integer
222
+ }
223
+ // Convert to hex and pad to 40 characters
224
+ const hex = Math.abs(hash).toString(16);
225
+ return hex.padStart(8, '0').repeat(5).slice(0, 40);
226
+ }
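For comparison, a real Git object id is the SHA-1 of the header `<type> <byteLength>\0` followed by the object body. A minimal sketch assuming a Web Crypto runtime (this helper is not part of the package, which ships its own `dist/utils/sha1`):

```typescript
// Sketch: compute a genuine Git object id instead of the synthetic placeholder above.
async function gitObjectId(type: string, body: Uint8Array): Promise<string> {
  const header = new TextEncoder().encode(`${type} ${body.byteLength}\0`)
  const full = new Uint8Array(header.length + body.length)
  full.set(header)
  full.set(body, header.length)
  const digest = await crypto.subtle.digest('SHA-1', full)
  return Array.from(new Uint8Array(digest))
    .map(b => b.toString(16).padStart(2, '0'))
    .join('')
}
```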
227
+ /**
228
+ * Get all entries from a tree recursively
229
+ */
230
+ async function getTreeEntries(storage, treeSha, prefix = '') {
231
+ const entries = new Map();
232
+ const treeObj = await storage.readObject(treeSha);
233
+ if (!treeObj || treeObj.type !== 'tree') {
234
+ return entries;
235
+ }
236
+ // Use extended entries if available, otherwise parse from data
237
+ const treeEntries = treeObj.entries ?? parseTreeEntries(treeObj.data);
238
+ for (const entry of treeEntries) {
239
+ const fullPath = prefix ? `${prefix}/${entry.name}` : entry.name;
240
+ if (entry.mode === '040000' || entry.mode === '40000') {
241
+ // Directory - add entry for the directory itself (for directory-file conflict detection)
242
+ entries.set(fullPath, {
243
+ path: fullPath,
244
+ mode: entry.mode,
245
+ sha: entry.sha
246
+ });
247
+ // Also recurse to get nested files
248
+ const subEntries = await getTreeEntries(storage, entry.sha, fullPath);
249
+ for (const [subPath, subEntry] of subEntries) {
250
+ entries.set(subPath, subEntry);
251
+ }
252
+ }
253
+ else {
254
+ // File
255
+ entries.set(fullPath, {
256
+ path: fullPath,
257
+ mode: entry.mode,
258
+ sha: entry.sha
259
+ });
260
+ }
261
+ }
262
+ return entries;
263
+ }
264
+ /**
265
+ * Parse tree entries from raw tree data
266
+ */
267
+ function parseTreeEntries(data) {
268
+ const entries = [];
269
+ let offset = 0;
270
+ while (offset < data.length) {
271
+ // Find space between mode and name
272
+ let spaceIdx = offset;
273
+ while (spaceIdx < data.length && data[spaceIdx] !== 0x20) {
274
+ spaceIdx++;
275
+ }
276
+ // Find null byte after name
277
+ let nullIdx = spaceIdx + 1;
278
+ while (nullIdx < data.length && data[nullIdx] !== 0x00) {
279
+ nullIdx++;
280
+ }
281
+ if (nullIdx >= data.length)
282
+ break;
283
+ const mode = decoder.decode(data.slice(offset, spaceIdx));
284
+ const name = decoder.decode(data.slice(spaceIdx + 1, nullIdx));
285
+ // Read 20 bytes for SHA
286
+ const shaBytes = data.slice(nullIdx + 1, nullIdx + 21);
287
+ const sha = Array.from(shaBytes).map(b => b.toString(16).padStart(2, '0')).join('');
288
+ entries.push({ mode, name, sha });
289
+ offset = nullIdx + 21;
290
+ }
291
+ return entries;
292
+ }
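Each raw tree entry is `<mode> <name>\0` followed by a 20-byte binary SHA-1. A small round-trip through this parser, using fabricated bytes:

```typescript
const enc = new TextEncoder()
const head = enc.encode('100644 hello.txt\0')
const raw = new Uint8Array(head.length + 20)
raw.set(head)
raw.set(Uint8Array.from({ length: 20 }, (_, i) => i), head.length) // fake 20-byte SHA
// parseTreeEntries(raw) => [{ mode: '100644', name: 'hello.txt',
//   sha: '000102030405060708090a0b0c0d0e0f10111213' }]
```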
293
+ /**
294
+ * Merge a single file/entry
295
+ */
296
+ async function mergeEntry(storage, path, baseEntry, oursEntry, theirsEntry, stats) {
297
+ // Case 1: File unchanged in both (same SHA and mode)
298
+ if (oursEntry?.sha === theirsEntry?.sha && oursEntry?.mode === theirsEntry?.mode) {
299
+ if (oursEntry) {
300
+ return { entry: oursEntry };
301
+ }
302
+ // Both deleted - no entry
303
+ return {};
304
+ }
305
+ // Case 2: File only in ours (added by us, or unchanged/deleted by them)
306
+ if (!theirsEntry && oursEntry) {
307
+ if (!baseEntry) {
308
+ // Added by us
309
+ stats.filesAdded++;
310
+ return { entry: oursEntry };
311
+ }
312
+ if (oursEntry.sha === baseEntry.sha) {
313
+ // Unchanged by us, deleted by them - take theirs (deletion)
314
+ stats.filesDeleted++;
315
+ return {};
316
+ }
317
+ // Modified by us, deleted by them - conflict
318
+ return {
319
+ conflict: {
320
+ type: 'modify-delete',
321
+ path,
322
+ baseSha: baseEntry.sha,
323
+ oursSha: oursEntry.sha,
324
+ baseMode: baseEntry.mode,
325
+ oursMode: oursEntry.mode
326
+ }
327
+ };
328
+ }
329
+ // Case 3: File only in theirs (added by them, or unchanged/deleted by us)
330
+ if (!oursEntry && theirsEntry) {
331
+ if (!baseEntry) {
332
+ // Added by them
333
+ stats.filesAdded++;
334
+ return { entry: theirsEntry };
335
+ }
336
+ if (theirsEntry.sha === baseEntry.sha) {
337
+ // Unchanged by them, deleted by us - take ours (deletion)
338
+ stats.filesDeleted++;
339
+ return {};
340
+ }
341
+ // Modified by them, deleted by us - conflict
342
+ return {
343
+ conflict: {
344
+ type: 'delete-modify',
345
+ path,
346
+ baseSha: baseEntry.sha,
347
+ theirsSha: theirsEntry.sha,
348
+ baseMode: baseEntry.mode,
349
+ theirsMode: theirsEntry.mode
350
+ }
351
+ };
352
+ }
353
+ // Case 4: File in both ours and theirs
354
+ if (oursEntry && theirsEntry) {
355
+ // Check for type conflicts (file vs directory)
356
+ const oursIsDir = oursEntry.mode === '040000' || oursEntry.mode === '40000';
357
+ const theirsIsDir = theirsEntry.mode === '040000' || theirsEntry.mode === '40000';
358
+ if (oursIsDir !== theirsIsDir) {
359
+ return {
360
+ conflict: {
361
+ type: 'directory-file',
362
+ path,
363
+ baseSha: baseEntry?.sha,
364
+ oursSha: oursEntry.sha,
365
+ theirsSha: theirsEntry.sha,
366
+ baseMode: baseEntry?.mode,
367
+ oursMode: oursEntry.mode,
368
+ theirsMode: theirsEntry.mode
369
+ }
370
+ };
371
+ }
372
+ // If only one side changed from base, take that side
373
+ if (baseEntry) {
374
+ if (oursEntry.sha === baseEntry.sha && oursEntry.mode === baseEntry.mode) {
375
+ // Only theirs changed - check if binary to track stats
376
+ const content = await getBlobContent(storage, theirsEntry.sha);
377
+ if (content && isBinaryFile(content)) {
378
+ stats.binaryFilesChanged++;
379
+ }
380
+ else {
381
+ stats.filesModified++;
382
+ }
383
+ return { entry: theirsEntry };
384
+ }
385
+ if (theirsEntry.sha === baseEntry.sha && theirsEntry.mode === baseEntry.mode) {
386
+ // Only ours changed - check if binary to track stats
387
+ const content = await getBlobContent(storage, oursEntry.sha);
388
+ if (content && isBinaryFile(content)) {
389
+ stats.binaryFilesChanged++;
390
+ }
391
+ else {
392
+ stats.filesModified++;
393
+ }
394
+ return { entry: oursEntry };
395
+ }
396
+ }
397
+ // Both sides changed - try content merge
398
+ if (!baseEntry) {
399
+ // Both added the same file with different content (add-add conflict)
400
+ return {
401
+ conflict: {
402
+ type: 'add-add',
403
+ path,
404
+ oursSha: oursEntry.sha,
405
+ theirsSha: theirsEntry.sha,
406
+ oursMode: oursEntry.mode,
407
+ theirsMode: theirsEntry.mode
408
+ }
409
+ };
410
+ }
411
+ // Get content for three-way merge
412
+ const baseContent = await getBlobContent(storage, baseEntry.sha);
413
+ const oursContent = await getBlobContent(storage, oursEntry.sha);
414
+ const theirsContent = await getBlobContent(storage, theirsEntry.sha);
415
+ if (!baseContent || !oursContent || !theirsContent) {
416
+ throw new Error(`Could not read blob content for ${path}`);
417
+ }
418
+ // Check if any file is binary
419
+ const isBinary = isBinaryFile(baseContent) || isBinaryFile(oursContent) || isBinaryFile(theirsContent);
420
+ if (isBinary) {
421
+ stats.binaryFilesChanged++;
422
+ // Binary files with different content = conflict
423
+ return {
424
+ conflict: {
425
+ type: 'content',
426
+ path,
427
+ baseSha: baseEntry.sha,
428
+ oursSha: oursEntry.sha,
429
+ theirsSha: theirsEntry.sha,
430
+ baseMode: baseEntry.mode,
431
+ oursMode: oursEntry.mode,
432
+ theirsMode: theirsEntry.mode
433
+ // No conflictedContent for binary files
434
+ }
435
+ };
436
+ }
437
+ // Try to merge text content
438
+ const mergeResult = mergeContent(baseContent, oursContent, theirsContent);
439
+ if (mergeResult.hasConflicts) {
440
+ stats.filesModified++;
441
+ return {
442
+ conflict: {
443
+ type: 'content',
444
+ path,
445
+ baseSha: baseEntry.sha,
446
+ oursSha: oursEntry.sha,
447
+ theirsSha: theirsEntry.sha,
448
+ baseMode: baseEntry.mode,
449
+ oursMode: oursEntry.mode,
450
+ theirsMode: theirsEntry.mode,
451
+ conflictedContent: mergeResult.merged,
452
+ markers: mergeResult.markers
453
+ }
454
+ };
455
+ }
456
+ // Successfully merged - write new blob
457
+ const newSha = await storage.writeObject('blob', mergeResult.merged);
458
+ stats.filesModified++;
459
+ return {
460
+ entry: {
461
+ path,
462
+ mode: oursEntry.mode, // Use ours mode by default
463
+ sha: newSha
464
+ }
465
+ };
466
+ }
467
+ // No entry in either side - nothing to do
468
+ return {};
469
+ }
470
+ /**
471
+ * Get blob content from storage
472
+ */
473
+ async function getBlobContent(storage, sha) {
474
+ const obj = await storage.readObject(sha);
475
+ if (!obj || obj.type !== 'blob') {
476
+ return null;
477
+ }
478
+ return obj.data;
479
+ }
480
+ /**
481
+ * Build a tree from entries and write it to storage
482
+ */
483
+ async function buildAndWriteTree(storage, entries) {
484
+ // Group entries by top-level directory
485
+ const topLevel = new Map();
486
+ for (const [path, entry] of entries) {
487
+ const parts = path.split('/');
488
+ if (parts.length === 1) {
489
+ // Top-level file
490
+ topLevel.set(path, entry);
491
+ }
492
+ else {
493
+ // Nested file - group by directory
494
+ const dir = parts[0];
495
+ const subPath = parts.slice(1).join('/');
496
+ let subEntries = topLevel.get(dir);
497
+ if (!subEntries || !(subEntries instanceof Map)) {
498
+ subEntries = new Map();
499
+ topLevel.set(dir, subEntries);
500
+ }
501
+ subEntries.set(subPath, {
502
+ ...entry,
503
+ path: subPath
504
+ });
505
+ }
506
+ }
507
+ // Build tree entries
508
+ const treeEntries = [];
509
+ for (const [name, value] of topLevel) {
510
+ if (value instanceof Map) {
511
+ // Directory - recursively build subtree
512
+ const subTreeSha = await buildAndWriteTree(storage, value);
513
+ treeEntries.push({
514
+ mode: '40000',
515
+ name,
516
+ sha: subTreeSha
517
+ });
518
+ }
519
+ else {
520
+ // File
521
+ treeEntries.push({
522
+ mode: value.mode,
523
+ name,
524
+ sha: value.sha
525
+ });
526
+ }
527
+ }
528
+ // Sort entries as Git does: directories compare as if they had a trailing '/' (localeCompare approximates Git's bytewise order for ASCII names)
529
+ treeEntries.sort((a, b) => {
530
+ const aName = a.mode === '40000' ? a.name + '/' : a.name;
531
+ const bName = b.mode === '40000' ? b.name + '/' : b.name;
532
+ return aName.localeCompare(bName);
533
+ });
534
+ // Serialize tree
535
+ const treeParts = [];
536
+ for (const entry of treeEntries) {
537
+ const modeName = encoder.encode(`${entry.mode} ${entry.name}\0`);
538
+ const shaBytes = hexToBytes(entry.sha);
539
+ const entryData = new Uint8Array(modeName.length + 20);
540
+ entryData.set(modeName);
541
+ entryData.set(shaBytes, modeName.length);
542
+ treeParts.push(entryData);
543
+ }
544
+ // Concatenate all parts
545
+ const totalLength = treeParts.reduce((sum, part) => sum + part.length, 0);
546
+ const treeData = new Uint8Array(totalLength);
547
+ let offset = 0;
548
+ for (const part of treeParts) {
549
+ treeData.set(part, offset);
550
+ offset += part.length;
551
+ }
552
+ // Write tree
553
+ return storage.writeObject('tree', treeData);
554
+ }
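A rough usage sketch with a stubbed `storage.writeObject` (hypothetical; the package's real storage interface is defined elsewhere): nested paths produce one subtree write per directory plus one write for the root tree.

```typescript
const written: string[] = []
const storage = {
  writeObject: async (type: string, _data: Uint8Array) => { written.push(type); return 'f'.repeat(40) },
}
const entries = new Map([
  ['README.md', { path: 'README.md', mode: '100644', sha: 'a'.repeat(40) }],
  ['src/index.js', { path: 'src/index.js', mode: '100644', sha: 'b'.repeat(40) }],
])
await buildAndWriteTree(storage, entries)
// written === ['tree', 'tree']: the "src" subtree is written first, then the root tree
```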
555
+ /**
556
+ * Convert hex string to bytes
557
+ */
558
+ function hexToBytes(hex) {
559
+ const bytes = new Uint8Array(20);
560
+ for (let i = 0; i < 40; i += 2) {
561
+ bytes[i / 2] = parseInt(hex.slice(i, i + 2), 16);
562
+ }
563
+ return bytes;
564
+ }
565
+ /**
566
+ * Resolves a single merge conflict.
567
+ *
568
+ * After a merge results in conflicts, use this function to resolve
569
+ * individual files. Once all conflicts are resolved, use continueMerge()
570
+ * to complete the merge.
571
+ *
572
+ * @param storage - The storage interface
573
+ * @param path - Path to the conflicted file
574
+ * @param options - Resolution options
575
+ * @returns ResolveResult indicating success and remaining conflicts
576
+ *
577
+ * @example
578
+ * ```typescript
579
+ * // Resolve using "ours" strategy
580
+ * await resolveConflict(storage, 'src/file.ts', { resolution: 'ours' })
581
+ *
582
+ * // Resolve with custom content
583
+ * await resolveConflict(storage, 'src/file.ts', {
584
+ * resolution: 'custom',
585
+ * customContent: new TextEncoder().encode('merged content')
586
+ * })
587
+ * ```
588
+ */
589
+ export async function resolveConflict(storage, path, options) {
590
+ // Get current merge state
591
+ const mergeState = await storage.readMergeState();
592
+ if (!mergeState) {
593
+ return {
594
+ success: false,
595
+ path,
596
+ error: 'No merge in progress',
597
+ remainingConflicts: 0
598
+ };
599
+ }
600
+ // Find the conflict for this path
601
+ const conflictIndex = mergeState.unresolvedConflicts.findIndex(c => c.path === path);
602
+ if (conflictIndex === -1) {
603
+ return {
604
+ success: false,
605
+ path,
606
+ error: `No conflict found for path: ${path}`,
607
+ remainingConflicts: mergeState.unresolvedConflicts.length
608
+ };
609
+ }
610
+ const conflict = mergeState.unresolvedConflicts[conflictIndex];
611
+ // Determine the content to use based on resolution strategy
612
+ let resolvedSha;
613
+ let resolvedMode;
614
+ switch (options.resolution) {
615
+ case 'ours':
616
+ if (!conflict.oursSha) {
617
+ // If ours is deleted, we want to keep the deletion
618
+ // Remove the conflict and don't stage anything
619
+ mergeState.unresolvedConflicts.splice(conflictIndex, 1);
620
+ mergeState.resolvedConflicts.push(conflict);
621
+ await storage.writeMergeState(mergeState);
622
+ return {
623
+ success: true,
624
+ path,
625
+ remainingConflicts: mergeState.unresolvedConflicts.length
626
+ };
627
+ }
628
+ resolvedSha = conflict.oursSha;
629
+ resolvedMode = conflict.oursMode || '100644';
630
+ break;
631
+ case 'theirs':
632
+ if (!conflict.theirsSha) {
633
+ // If theirs is deleted, we want to accept the deletion
634
+ mergeState.unresolvedConflicts.splice(conflictIndex, 1);
635
+ mergeState.resolvedConflicts.push(conflict);
636
+ await storage.writeMergeState(mergeState);
637
+ return {
638
+ success: true,
639
+ path,
640
+ remainingConflicts: mergeState.unresolvedConflicts.length
641
+ };
642
+ }
643
+ resolvedSha = conflict.theirsSha;
644
+ resolvedMode = conflict.theirsMode || '100644';
645
+ break;
646
+ case 'base':
647
+ if (!conflict.baseSha) {
648
+ return {
649
+ success: false,
650
+ path,
651
+ error: 'No base version available',
652
+ remainingConflicts: mergeState.unresolvedConflicts.length
653
+ };
654
+ }
655
+ resolvedSha = conflict.baseSha;
656
+ resolvedMode = conflict.baseMode || '100644';
657
+ break;
658
+ case 'custom':
659
+ if (!options.customContent) {
660
+ return {
661
+ success: false,
662
+ path,
663
+ error: 'Custom content required for custom resolution',
664
+ remainingConflicts: mergeState.unresolvedConflicts.length
665
+ };
666
+ }
667
+ resolvedSha = await storage.writeObject('blob', options.customContent);
668
+ resolvedMode = options.customMode || conflict.oursMode || '100644';
669
+ break;
670
+ default:
671
+ return {
672
+ success: false,
673
+ path,
674
+ error: `Unknown resolution strategy: ${options.resolution}`,
675
+ remainingConflicts: mergeState.unresolvedConflicts.length
676
+ };
677
+ }
678
+ // Stage the resolved file
679
+ await storage.stageFile(path, resolvedSha, resolvedMode, 0);
680
+ // Move conflict from unresolved to resolved
681
+ mergeState.unresolvedConflicts.splice(conflictIndex, 1);
682
+ mergeState.resolvedConflicts.push(conflict);
683
+ // Update merge state
684
+ await storage.writeMergeState(mergeState);
685
+ return {
686
+ success: true,
687
+ path,
688
+ remainingConflicts: mergeState.unresolvedConflicts.length
689
+ };
690
+ }
691
+ /**
692
+ * Aborts an in-progress merge operation.
693
+ *
694
+ * This restores the repository to its state before the merge began,
695
+ * discarding any changes made during conflict resolution.
696
+ *
697
+ * @param storage - The storage interface
698
+ * @returns MergeOperationResult indicating success
699
+ *
700
+ * @example
701
+ * ```typescript
702
+ * const result = await abortMerge(storage)
703
+ * if (result.success) {
704
+ * console.log('Merge aborted, HEAD is now', result.headSha)
705
+ * }
706
+ * ```
707
+ */
708
+ export async function abortMerge(storage) {
709
+ // Get current merge state
710
+ const mergeState = await storage.readMergeState();
711
+ if (!mergeState) {
712
+ return {
713
+ success: false,
714
+ error: 'No merge in progress'
715
+ };
716
+ }
717
+ // Restore HEAD to original
718
+ const origHead = mergeState.origHead;
719
+ await storage.writeRef('HEAD', origHead);
720
+ // Clear merge state
721
+ await storage.deleteMergeState();
722
+ return {
723
+ success: true,
724
+ headSha: origHead,
725
+ message: 'Merge aborted'
726
+ };
727
+ }
728
+ /**
729
+ * Continues a merge after all conflicts have been resolved.
730
+ *
731
+ * This creates the merge commit with the resolved files and
732
+ * cleans up the merge state.
733
+ *
734
+ * @param storage - The storage interface
735
+ * @param message - Optional commit message (overrides stored message)
736
+ * @returns MergeOperationResult with the new commit SHA
737
+ *
738
+ * @example
739
+ * ```typescript
740
+ * // After resolving all conflicts
741
+ * const result = await continueMerge(storage)
742
+ * if (result.success) {
743
+ * console.log('Merge completed with commit', result.headSha)
744
+ * }
745
+ * ```
746
+ */
747
+ export async function continueMerge(storage, message) {
748
+ // Get current merge state
749
+ const mergeState = await storage.readMergeState();
750
+ if (!mergeState) {
751
+ return {
752
+ success: false,
753
+ error: 'No merge in progress'
754
+ };
755
+ }
756
+ // Check for unresolved conflicts
757
+ if (mergeState.unresolvedConflicts.length > 0) {
758
+ return {
759
+ success: false,
760
+ error: `Cannot continue: ${mergeState.unresolvedConflicts.length} unresolved conflict(s) remain`
761
+ };
762
+ }
763
+ // Use provided message or stored message
764
+ const commitMessage = message ?? mergeState.message;
765
+ // Create merge commit (simplified - in a real implementation, we'd build the tree from index)
766
+ // For now, we'll create a placeholder commit SHA
767
+ const timestamp = Date.now();
768
+ const commitSha = makeSha(`mergecommit${timestamp}`);
769
+ // Update HEAD
770
+ await storage.writeRef('HEAD', commitSha);
771
+ // Clear merge state
772
+ await storage.deleteMergeState();
773
+ return {
774
+ success: true,
775
+ headSha: commitSha,
776
+ message: commitMessage
777
+ };
778
+ }
779
+ /**
780
+ * Helper to generate a 40-character placeholder SHA-like string (not a real hash)
781
+ */
782
+ function makeSha(prefix) {
783
+ return prefix.padEnd(40, '0');
784
+ }
785
+ /**
786
+ * Finds a common ancestor (merge base) of two commits using breadth-first search.
787
+ *
788
+ * @param storage - The storage interface
789
+ * @param commit1 - First commit SHA
790
+ * @param commit2 - Second commit SHA
791
+ * @returns SHA of the merge base, or null if no common ancestor exists
792
+ */
793
+ export async function findMergeBase(storage, commit1, commit2) {
794
+ // Get all ancestors of commit1 (including itself)
795
+ const ancestors1 = new Set();
796
+ const queue1 = [commit1];
797
+ while (queue1.length > 0) {
798
+ const sha = queue1.shift();
799
+ if (ancestors1.has(sha))
800
+ continue;
801
+ const obj = await storage.readObject(sha);
802
+ if (!obj || obj.type !== 'commit')
803
+ continue;
804
+ ancestors1.add(sha);
805
+ // Parse commit to get parents (use extended parents if available)
806
+ const parents = parseCommitParents(obj.data, obj.parents);
807
+ for (const parent of parents) {
808
+ if (!ancestors1.has(parent)) {
809
+ queue1.push(parent);
810
+ }
811
+ }
812
+ }
813
+ // BFS from commit2 to find first common ancestor
814
+ const visited2 = new Set();
815
+ const queue2 = [commit2];
816
+ while (queue2.length > 0) {
817
+ const sha = queue2.shift();
818
+ if (visited2.has(sha))
819
+ continue;
820
+ visited2.add(sha);
821
+ // Check if this is a common ancestor
822
+ if (ancestors1.has(sha)) {
823
+ return sha;
824
+ }
825
+ const obj = await storage.readObject(sha);
826
+ if (!obj || obj.type !== 'commit')
827
+ continue;
828
+ // Parse commit to get parents (use extended parents if available)
829
+ const parents = parseCommitParents(obj.data, obj.parents);
830
+ for (const parent of parents) {
831
+ if (!visited2.has(parent)) {
832
+ queue2.push(parent);
833
+ }
834
+ }
835
+ }
836
+ return null;
837
+ }
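A toy illustration with an in-memory `readObject` stub using the extended commit shape (`type` plus `parents`) that this function already understands; the SHAs are fabricated:

```typescript
const A = 'a'.repeat(40), B = 'b'.repeat(40), C = 'c'.repeat(40)
const commits = new Map([
  [A, { type: 'commit', data: new Uint8Array(), parents: [] }],
  [B, { type: 'commit', data: new Uint8Array(), parents: [A] }], // tip of branch 1
  [C, { type: 'commit', data: new Uint8Array(), parents: [A] }], // tip of branch 2
])
const storage = { readObject: async (sha: string) => commits.get(sha) ?? null }
await findMergeBase(storage, B, C) // => A, the commit both branches fork from
```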
838
+ /**
839
+ * Parse parent SHAs from commit data or get from extended object
840
+ */
841
+ function parseCommitParents(data, extendedParents) {
842
+ // If extended parents are provided, use them directly
843
+ if (extendedParents) {
844
+ return extendedParents;
845
+ }
846
+ const text = decoder.decode(data);
847
+ const parents = [];
848
+ for (const line of text.split('\n')) {
849
+ if (line.startsWith('parent ')) {
850
+ parents.push(line.slice(7).trim());
851
+ }
852
+ else if (line === '') {
853
+ // End of header
854
+ break;
855
+ }
856
+ }
857
+ return parents;
858
+ }
859
+ /**
860
+ * Parse tree SHA from commit data or get from extended object
861
+ */
862
+ function parseCommitTree(data, treeSha) {
863
+ // If extended tree SHA is provided, use it directly
864
+ if (treeSha) {
865
+ return treeSha;
866
+ }
867
+ const text = decoder.decode(data);
868
+ for (const line of text.split('\n')) {
869
+ if (line.startsWith('tree ')) {
870
+ return line.slice(5).trim();
871
+ }
872
+ }
873
+ return null;
874
+ }
875
+ // Text encoding helpers
876
+ const encoder = new TextEncoder();
877
+ const decoder = new TextDecoder();
878
+ /**
879
+ * Split content into lines (the line endings themselves are stripped; both \n and \r\n are handled)
880
+ */
881
+ function splitLines(content) {
882
+ const text = decoder.decode(content);
883
+ if (text.length === 0) {
884
+ return [];
885
+ }
886
+ // Split by newline but keep track of the content
887
+ // Handle both \n and \r\n line endings
888
+ return text.split(/\r?\n/);
889
+ }
890
+ /**
891
+ * Compute the longest common subsequence of two arrays
892
+ */
893
+ function lcs(a, b, equals) {
894
+ const m = a.length;
895
+ const n = b.length;
896
+ // Create DP table
897
+ const dp = Array(m + 1).fill(null).map(() => Array(n + 1).fill(0));
898
+ for (let i = 1; i <= m; i++) {
899
+ for (let j = 1; j <= n; j++) {
900
+ if (equals(a[i - 1], b[j - 1])) {
901
+ dp[i][j] = dp[i - 1][j - 1] + 1;
902
+ }
903
+ else {
904
+ dp[i][j] = Math.max(dp[i - 1][j], dp[i][j - 1]);
905
+ }
906
+ }
907
+ }
908
+ // Backtrack to find LCS
909
+ const result = [];
910
+ let i = m;
911
+ let j = n;
912
+ while (i > 0 && j > 0) {
913
+ if (equals(a[i - 1], b[j - 1])) {
914
+ result.unshift(a[i - 1]);
915
+ i--;
916
+ j--;
917
+ }
918
+ else if (dp[i - 1][j] > dp[i][j - 1]) {
919
+ i--;
920
+ }
921
+ else {
922
+ j--;
923
+ }
924
+ }
925
+ return result;
926
+ }
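A quick worked example with plain string equality as the comparator:

```typescript
lcs(['a', 'b', 'c', 'd'], ['a', 'c', 'x', 'd'], (x, y) => x === y)
// => ['a', 'c', 'd'] (the longest sequence of lines common to both, in order)
```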
927
+ /**
928
+ * Compute diff hunks between base and target
929
+ */
930
+ function computeHunks(base, target) {
931
+ const hunks = [];
932
+ const common = lcs(base, target, (a, b) => a === b);
933
+ let baseIdx = 0;
934
+ let targetIdx = 0;
935
+ let commonIdx = 0;
936
+ while (baseIdx < base.length || targetIdx < target.length || commonIdx < common.length) {
937
+ // Find next common line (or end)
938
+ const nextCommon = commonIdx < common.length ? common[commonIdx] : null;
939
+ // Count lines in base until we hit the next common line
940
+ let baseCount = 0;
941
+ const baseStart = baseIdx;
942
+ while (baseIdx < base.length && base[baseIdx] !== nextCommon) {
943
+ baseCount++;
944
+ baseIdx++;
945
+ }
946
+ // Collect lines in target until we hit the next common line
947
+ const newLines = [];
948
+ while (targetIdx < target.length && target[targetIdx] !== nextCommon) {
949
+ newLines.push(target[targetIdx]);
950
+ targetIdx++;
951
+ }
952
+ // If there was any change, record a hunk
953
+ if (baseCount > 0 || newLines.length > 0) {
954
+ hunks.push({ baseStart, baseCount, newLines });
955
+ }
956
+ // Consume the common line
957
+ if (nextCommon !== null && baseIdx < base.length && targetIdx < target.length) {
958
+ baseIdx++;
959
+ targetIdx++;
960
+ commonIdx++;
961
+ }
962
+ else {
963
+ break;
964
+ }
965
+ }
966
+ return hunks;
967
+ }
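A small worked example: replacing one middle line yields a single hunk covering that base line.

```typescript
computeHunks(['a', 'b', 'c'], ['a', 'x', 'c'])
// => [{ baseStart: 1, baseCount: 1, newLines: ['x'] }]
```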
968
+ /**
969
+ * Check if two hunks overlap in the base
970
+ */
971
+ function hunksOverlap(h1, h2) {
972
+ // Hunks overlap if their base ranges intersect
973
+ const end1 = h1.baseStart + h1.baseCount;
974
+ const end2 = h2.baseStart + h2.baseCount;
975
+ return !(end1 <= h2.baseStart || end2 <= h1.baseStart);
976
+ }
977
+ /**
978
+ * Check if two hunks are at the same position (same base range and direction)
979
+ */
980
+ function hunksSameChange(h1, h2) {
981
+ if (h1.baseStart !== h2.baseStart || h1.baseCount !== h2.baseCount) {
982
+ return false;
983
+ }
984
+ if (h1.newLines.length !== h2.newLines.length) {
985
+ return false;
986
+ }
987
+ for (let i = 0; i < h1.newLines.length; i++) {
988
+ if (h1.newLines[i] !== h2.newLines[i]) {
989
+ return false;
990
+ }
991
+ }
992
+ return true;
993
+ }
994
+ /**
995
+ * Performs a content-level three-way merge on text files.
996
+ *
997
+ * @param base - Content of the base (common ancestor) version
998
+ * @param ours - Content of our (current) version
999
+ * @param theirs - Content of their (merged) version
1000
+ * @returns Merged content and any conflict markers
1001
+ */
1002
+ export function mergeContent(base, ours, theirs) {
1003
+ const baseLines = splitLines(base);
1004
+ const oursLines = splitLines(ours);
1005
+ const theirsLines = splitLines(theirs);
1006
+ // Handle empty files
1007
+ if (baseLines.length === 0 && oursLines.length === 0 && theirsLines.length === 0) {
1008
+ return { merged: new Uint8Array(0), hasConflicts: false, markers: [] };
1009
+ }
1010
+ // If ours and theirs are identical, no conflict
1011
+ const oursText = oursLines.join('\n');
1012
+ const theirsText = theirsLines.join('\n');
1013
+ const baseText = baseLines.join('\n');
1014
+ if (oursText === theirsText) {
1015
+ return {
1016
+ merged: encoder.encode(oursText),
1017
+ hasConflicts: false,
1018
+ markers: []
1019
+ };
1020
+ }
1021
+ // If only one side changed from base, take that side
1022
+ if (oursText === baseText) {
1023
+ return {
1024
+ merged: encoder.encode(theirsText),
1025
+ hasConflicts: false,
1026
+ markers: []
1027
+ };
1028
+ }
1029
+ if (theirsText === baseText) {
1030
+ return {
1031
+ merged: encoder.encode(oursText),
1032
+ hasConflicts: false,
1033
+ markers: []
1034
+ };
1035
+ }
1036
+ // Compute hunks for each side
1037
+ const oursHunks = computeHunks(baseLines, oursLines);
1038
+ const theirsHunks = computeHunks(baseLines, theirsLines);
1039
+ // Build merged result
1040
+ const mergedLines = [];
1041
+ const markers = [];
1042
+ let hasConflicts = false;
1043
+ let basePos = 0;
1044
+ let outputLine = 1;
1045
+ // Process hunks
1046
+ let oursIdx = 0;
1047
+ let theirsIdx = 0;
1048
+ while (basePos < baseLines.length || oursIdx < oursHunks.length || theirsIdx < theirsHunks.length) {
1049
+ const oursHunk = oursIdx < oursHunks.length ? oursHunks[oursIdx] : null;
1050
+ const theirsHunk = theirsIdx < theirsHunks.length ? theirsHunks[theirsIdx] : null;
1051
+ // Find the next position to process
1052
+ const oursStart = oursHunk?.baseStart ?? Infinity;
1053
+ const theirsStart = theirsHunk?.baseStart ?? Infinity;
1054
+ const nextHunkStart = Math.min(oursStart, theirsStart);
1055
+ // Copy unchanged lines from base up to the next hunk
1056
+ while (basePos < baseLines.length && basePos < nextHunkStart) {
1057
+ mergedLines.push(baseLines[basePos]);
1058
+ outputLine++;
1059
+ basePos++;
1060
+ }
1061
+ if (oursHunk === null && theirsHunk === null) {
1062
+ break;
1063
+ }
1064
+ // Check if hunks overlap
1065
+ if (oursHunk !== null && theirsHunk !== null &&
1066
+ (oursHunk.baseStart === theirsHunk.baseStart ||
1067
+ hunksOverlap(oursHunk, theirsHunk))) {
1068
+ // Potential conflict - check if changes are identical
1069
+ if (hunksSameChange(oursHunk, theirsHunk)) {
1070
+ // Same change on both sides - no conflict
1071
+ for (const line of oursHunk.newLines) {
1072
+ mergedLines.push(line);
1073
+ outputLine++;
1074
+ }
1075
+ basePos = oursHunk.baseStart + oursHunk.baseCount;
1076
+ oursIdx++;
1077
+ theirsIdx++;
1078
+ }
1079
+ else {
1080
+ // Conflict!
1081
+ hasConflicts = true;
1082
+ const startLine = outputLine;
1083
+ // Determine the affected base range
1084
+ const conflictBaseStart = Math.min(oursHunk.baseStart, theirsHunk.baseStart);
1085
+ const conflictBaseEnd = Math.max(oursHunk.baseStart + oursHunk.baseCount, theirsHunk.baseStart + theirsHunk.baseCount);
1086
+ const baseContent = baseLines.slice(conflictBaseStart, conflictBaseEnd);
1087
+ mergedLines.push('<<<<<<< ours');
1088
+ outputLine++;
1089
+ for (const line of oursHunk.newLines) {
1090
+ mergedLines.push(line);
1091
+ outputLine++;
1092
+ }
1093
+ mergedLines.push('=======');
1094
+ outputLine++;
1095
+ for (const line of theirsHunk.newLines) {
1096
+ mergedLines.push(line);
1097
+ outputLine++;
1098
+ }
1099
+ mergedLines.push('>>>>>>> theirs');
1100
+ outputLine++;
1101
+ markers.push({
1102
+ startLine,
1103
+ endLine: outputLine - 1,
1104
+ baseContent: baseContent.join('\n'),
1105
+ oursContent: oursHunk.newLines.join('\n'),
1106
+ theirsContent: theirsHunk.newLines.join('\n')
1107
+ });
1108
+ basePos = conflictBaseEnd;
1109
+ oursIdx++;
1110
+ theirsIdx++;
1111
+ }
1112
+ }
1113
+ else if (oursHunk !== null && (theirsHunk === null || oursHunk.baseStart < theirsHunk.baseStart)) {
1114
+ // Apply ours hunk
1115
+ for (const line of oursHunk.newLines) {
1116
+ mergedLines.push(line);
1117
+ outputLine++;
1118
+ }
1119
+ basePos = oursHunk.baseStart + oursHunk.baseCount;
1120
+ oursIdx++;
1121
+ }
1122
+ else if (theirsHunk !== null) {
1123
+ // Apply theirs hunk
1124
+ for (const line of theirsHunk.newLines) {
1125
+ mergedLines.push(line);
1126
+ outputLine++;
1127
+ }
1128
+ basePos = theirsHunk.baseStart + theirsHunk.baseCount;
1129
+ theirsIdx++;
1130
+ }
1131
+ }
1132
+ // Copy any remaining base lines
1133
+ while (basePos < baseLines.length) {
1134
+ mergedLines.push(baseLines[basePos]);
1135
+ outputLine++;
1136
+ basePos++;
1137
+ }
1138
+ const mergedContent = mergedLines.join('\n');
1139
+ return {
1140
+ merged: encoder.encode(mergedContent),
1141
+ hasConflicts,
1142
+ markers
1143
+ };
1144
+ }
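A minimal sketch of a conflicting merge showing the marker output; the inputs are throwaway strings.

```typescript
const enc = new TextEncoder()
const res = mergeContent(
  enc.encode('line1\nline2\n'),  // base
  enc.encode('line1\nours\n'),   // ours
  enc.encode('line1\ntheirs\n'), // theirs
)
// res.hasConflicts === true and the merged text reads:
// line1
// <<<<<<< ours
// ours
// =======
// theirs
// >>>>>>> theirs
```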
1145
+ /**
1146
+ * Checks if a file is binary (non-text).
1147
+ *
1148
+ * @param content - File content to check
1149
+ * @returns true if the file appears to be binary
1150
+ */
1151
+ export function isBinaryFile(content) {
1152
+ // Empty files are considered text
1153
+ if (content.length === 0) {
1154
+ return false;
1155
+ }
1156
+ // Check for common binary file headers
1157
+ // PNG: 0x89 0x50 0x4E 0x47
1158
+ if (content.length >= 4 &&
1159
+ content[0] === 0x89 && content[1] === 0x50 &&
1160
+ content[2] === 0x4E && content[3] === 0x47) {
1161
+ return true;
1162
+ }
1163
+ // JPEG: 0xFF 0xD8 0xFF
1164
+ if (content.length >= 3 &&
1165
+ content[0] === 0xFF && content[1] === 0xD8 && content[2] === 0xFF) {
1166
+ return true;
1167
+ }
1168
+ // GIF: "GIF87a" or "GIF89a"
1169
+ if (content.length >= 6 &&
1170
+ content[0] === 0x47 && content[1] === 0x49 && content[2] === 0x46 &&
1171
+ content[3] === 0x38 && (content[4] === 0x37 || content[4] === 0x39) &&
1172
+ content[5] === 0x61) {
1173
+ return true;
1174
+ }
1175
+ // Check first 8000 bytes for null bytes (similar to Git's heuristic)
1176
+ const checkLength = Math.min(content.length, 8000);
1177
+ for (let i = 0; i < checkLength; i++) {
1178
+ if (content[i] === 0x00) {
1179
+ return true;
1180
+ }
1181
+ }
1182
+ return false;
1183
+ }
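Two quick checks of the heuristic:

```typescript
isBinaryFile(new TextEncoder().encode('plain text'))          // => false
isBinaryFile(new Uint8Array([0x89, 0x50, 0x4e, 0x47, 0, 0]))  // => true (PNG magic bytes)
```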
1184
+ /**
1185
+ * Gets the current merge state if a merge is in progress.
1186
+ *
1187
+ * @param storage - The storage interface
1188
+ * @returns MergeState if merge is in progress, null otherwise
1189
+ */
1190
+ export async function getMergeState(storage) {
1191
+ return storage.readMergeState();
1192
+ }
1193
+ /**
1194
+ * Checks if a merge is currently in progress.
1195
+ *
1196
+ * @param storage - The storage interface
1197
+ * @returns true if a merge is in progress
1198
+ */
1199
+ export async function isMergeInProgress(storage) {
1200
+ const state = await storage.readMergeState();
1201
+ return state !== null;
1202
+ }
1203
+ //# sourceMappingURL=merge.js.map