@fern-api/replay 0.9.0 → 0.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.cjs +321 -12
- package/dist/cli.cjs.map +1 -1
- package/dist/index.cjs +321 -12
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +3 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +321 -12
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
|
@@ -133,6 +133,14 @@ var init_GitClient = __esm({
|
|
|
133
133
|
return false;
|
|
134
134
|
}
|
|
135
135
|
}
|
|
136
|
+
async treeExists(treeHash) {
|
|
137
|
+
try {
|
|
138
|
+
const type = await this.exec(["cat-file", "-t", treeHash]);
|
|
139
|
+
return type.trim() === "tree";
|
|
140
|
+
} catch {
|
|
141
|
+
return false;
|
|
142
|
+
}
|
|
143
|
+
}
|
|
136
144
|
async getCommitBody(commitSha) {
|
|
137
145
|
return this.exec(["log", "-1", "--format=%B", commitSha]);
|
|
138
146
|
}
|
|
@@ -143,6 +151,255 @@ var init_GitClient = __esm({
|
|
|
143
151
|
}
|
|
144
152
|
});
|
|
145
153
|
|
|
154
|
+
// src/HybridReconstruction.ts
// Bundler-generated lazy module namespace: `__export` registers getters so
// the named functions are re-exported without being evaluated eagerly.
var HybridReconstruction_exports = {};
__export(HybridReconstruction_exports, {
  assembleHybrid: () => assembleHybrid,
  locateHunksInOurs: () => locateHunksInOurs,
  parseHunks: () => parseHunks,
  reconstructFromGhostPatch: () => reconstructFromGhostPatch
});
|
|
162
|
+
/**
 * Parse the hunks of a single-file unified diff into structured form.
 *
 * @param {string} fileDiff - Unified-diff text for one file.
 * @returns {Array<{oldStart: number, oldCount: number, newStart: number,
 *   newCount: number, lines: Array<{type: "context"|"add"|"remove",
 *   content: string}>}>} One entry per `@@` hunk; empty array when the
 *   input contains no hunk headers.
 */
function parseHunks(fileDiff) {
  const lines = fileDiff.split("\n");
  const hunks = [];
  let currentHunk = null;
  for (const line of lines) {
    // Hunk header: `@@ -oldStart[,oldCount] +newStart[,newCount] @@`.
    const headerMatch = line.match(
      /^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/
    );
    if (headerMatch) {
      if (currentHunk) {
        hunks.push(currentHunk);
      }
      currentHunk = {
        oldStart: parseInt(headerMatch[1], 10),
        // Per the unified-diff format, an omitted count defaults to 1.
        oldCount: headerMatch[2] != null ? parseInt(headerMatch[2], 10) : 1,
        newStart: parseInt(headerMatch[3], 10),
        newCount: headerMatch[4] != null ? parseInt(headerMatch[4], 10) : 1,
        lines: []
      };
      continue;
    }
    // Anything before the first hunk header (file headers, prose) is ignored.
    if (!currentHunk) continue;
    // Skip per-file metadata lines that may appear between hunks when
    // several file diffs were concatenated.
    if (line.startsWith("diff --git") || line.startsWith("index ") || line.startsWith("---") || line.startsWith("+++") || line.startsWith("old mode") || line.startsWith("new mode") || line.startsWith("similarity index") || line.startsWith("rename from") || line.startsWith("rename to") || line.startsWith("new file mode") || line.startsWith("deleted file mode")) {
      continue;
    }
    // FIX: this condition was garbled in the published text (`line === "\"`,
    // a syntax error). It is the check for the diff marker
    // `\ No newline at end of file`, which is metadata, not content.
    if (line.startsWith("\\")) {
      continue;
    }
    if (line.startsWith("-")) {
      currentHunk.lines.push({ type: "remove", content: line.slice(1) });
    } else if (line.startsWith("+")) {
      currentHunk.lines.push({ type: "add", content: line.slice(1) });
    } else if (line.startsWith(" ") || line === "") {
      // A truly empty line inside a hunk is a context line whose leading
      // space was trimmed; keep it as empty context.
      currentHunk.lines.push({
        type: "context",
        content: line.startsWith(" ") ? line.slice(1) : line
      });
    }
  }
  if (currentHunk) {
    hunks.push(currentHunk);
  }
  return hunks;
}
|
|
206
|
+
// Collect the contents of the unbroken run of context lines at the top of
// a hunk (stops at the first add/remove line).
function extractLeadingContext(hunk) {
  const leading = [];
  for (let i = 0; i < hunk.lines.length && hunk.lines[i].type === "context"; i++) {
    leading.push(hunk.lines[i].content);
  }
  return leading;
}
|
|
214
|
+
// Collect the contents of the unbroken run of context lines at the bottom
// of a hunk, preserving their original top-to-bottom order.
function extractTrailingContext(hunk) {
  const trailing = [];
  let i = hunk.lines.length - 1;
  while (i >= 0 && hunk.lines[i].type === "context") {
    trailing.unshift(hunk.lines[i].content);
    i--;
  }
  return trailing;
}
|
|
222
|
+
// Count how many context lines occur before the hunk's trailing-context
// run. (Helper-inlined restyle: the trailing-run start is computed directly
// by scanning back from the end, matching findTrailingContextStart.)
function countOursLinesBeforeTrailing(hunk) {
  let trailingStart = hunk.lines.length;
  while (trailingStart > 0 && hunk.lines[trailingStart - 1].type === "context") {
    trailingStart--;
  }
  let count = 0;
  for (let i = 0; i < trailingStart; i++) {
    if (hunk.lines[i].type === "context") count++;
  }
  return count;
}
|
|
230
|
+
// Index of the first line in the hunk's trailing context run; equals
// hunk.lines.length when the hunk does not end with context lines.
function findTrailingContextStart(hunk) {
  let start = hunk.lines.length;
  while (start > 0 && hunk.lines[start - 1].type === "context") {
    start--;
  }
  return start;
}
|
|
237
|
+
// True when every element of `needle` equals the corresponding element of
// `haystack` starting at `offset` (out-of-range reads yield undefined and
// therefore fail the comparison). An empty needle always matches.
function matchesAt(needle, haystack, offset) {
  return needle.every((item, i) => haystack[offset + i] === item);
}
|
|
243
|
+
// Locate `contextLines` as a contiguous slice of `oursLines`, searching
// outward from `hint` (a best-guess index) within a bounded window, never
// returning an index below `minIndex`. Returns -1 when not found.
function findContextInOurs(contextLines, oursLines, minIndex, hint) {
  const SEARCH_WINDOW = 200;
  const maxStart = oursLines.length - contextLines.length;
  // Local equality probe (same semantics as matchesAt).
  const matchesHere = (at) => {
    for (let i = 0; i < contextLines.length; i++) {
      if (oursLines[at + i] !== contextLines[i]) return false;
    }
    return true;
  };
  const clampedHint = Math.max(minIndex, Math.min(hint, maxStart));
  // Try the hint position first — the common case when the file has not
  // drifted far from the patch's expected line numbers.
  if (clampedHint >= minIndex && clampedHint <= maxStart && matchesHere(clampedHint)) {
    return clampedHint;
  }
  // Fan out from the hint, alternating forward/backward, up to the window.
  for (let delta = 1; delta <= SEARCH_WINDOW; delta++) {
    for (const sign of [1, -1]) {
      const idx = clampedHint + delta * sign;
      if (idx >= minIndex && idx <= maxStart && matchesHere(idx)) {
        return idx;
      }
    }
  }
  return -1;
}
|
|
263
|
+
// Determine how many lines of `ours`, starting at `oursOffset`, this hunk
// accounts for. Strategy: anchor on the hunk's trailing context when it can
// be found; otherwise fall back to the hunk's total context-line count
// (clamped so the span never runs past the end of `ours`).
function computeOursSpan(hunk, oursLines, oursOffset) {
  const leading = extractLeadingContext(hunk);
  const trailing = extractTrailingContext(hunk);
  if (trailing.length === 0) {
    // No trailing anchor available: assume the hunk spans exactly its
    // context lines in ours.
    const contextCount2 = hunk.lines.filter(
      (l) => l.type === "context"
    ).length;
    return Math.min(contextCount2, oursLines.length - oursOffset);
  }
  // Search forward (past the leading context) for the trailing-context run;
  // the span extends through the end of that run.
  const searchStart = oursOffset + leading.length;
  for (let i = searchStart; i <= oursLines.length - trailing.length; i++) {
    if (matchesAt(trailing, oursLines, i)) {
      return i + trailing.length - oursOffset;
    }
  }
  // Trailing context not found in ours — fall back to the context count.
  const contextCount = hunk.lines.filter(
    (l) => l.type === "context"
  ).length;
  return Math.min(contextCount, oursLines.length - oursOffset);
}
|
|
283
|
+
// Map each hunk to a position in `oursLines`. Returns an array of
// { hunk, oursOffset, oursSpan } records in order, or null when any hunk
// cannot be anchored (callers treat null as "reconstruction impossible").
// `minOursIndex` enforces that successive hunks land at non-overlapping,
// strictly advancing offsets.
function locateHunksInOurs(hunks, oursLines) {
  const located = [];
  let minOursIndex = 0;
  for (const hunk of hunks) {
    const contextLines = extractLeadingContext(hunk);
    let oursOffset;
    if (contextLines.length > 0) {
      // Primary anchor: the hunk's leading context, hinted by the hunk's
      // new-file start line (1-based in the diff, hence the -1).
      const found = findContextInOurs(
        contextLines,
        oursLines,
        minOursIndex,
        hunk.newStart - 1
      );
      if (found === -1) {
        // Secondary anchor: locate the trailing context instead and walk
        // back over the hunk's non-trailing context lines.
        const trailingContext = extractTrailingContext(hunk);
        if (trailingContext.length > 0) {
          const trailingFound = findContextInOurs(
            trailingContext,
            oursLines,
            minOursIndex,
            hunk.newStart - 1
          );
          if (trailingFound === -1) return null;
          const nonTrailingCount = countOursLinesBeforeTrailing(hunk);
          oursOffset = trailingFound - nonTrailingCount;
          // Derived offset must not collide with an earlier hunk's span.
          if (oursOffset < minOursIndex) return null;
        } else {
          // No anchor at all — give up on the whole reconstruction.
          return null;
        }
      } else {
        oursOffset = found;
      }
    } else if (hunk.oldStart === 1 && hunk.oldCount === 0) {
      // Pure insertion at the very top of the file.
      oursOffset = 0;
    } else {
      // Context-free hunk elsewhere: trust the diff's line number, but
      // never step backwards past previously placed hunks.
      oursOffset = Math.max(hunk.newStart - 1, minOursIndex);
    }
    const oursSpan = computeOursSpan(hunk, oursLines, oursOffset);
    located.push({ hunk, oursOffset, oursSpan });
    minOursIndex = oursOffset + oursSpan;
  }
  return located;
}
|
|
326
|
+
// Build synthetic `base` and `theirs` file contents from located hunks:
// lines of `ours` outside any hunk are copied to both sides; within a hunk,
// context lines go to both, removals only to base, additions only to theirs.
function assembleHybrid(locatedHunks, oursLines) {
  const base = [];
  const theirs = [];
  const copyToBoth = (chunk) => {
    base.push(...chunk);
    theirs.push(...chunk);
  };
  let cursor = 0;
  for (const { hunk, oursOffset, oursSpan } of locatedHunks) {
    // Untouched region between the previous hunk and this one is common.
    if (oursOffset > cursor) {
      copyToBoth(oursLines.slice(cursor, oursOffset));
    }
    for (const line of hunk.lines) {
      if (line.type === "context") {
        base.push(line.content);
        theirs.push(line.content);
      } else if (line.type === "remove") {
        base.push(line.content);
      } else if (line.type === "add") {
        theirs.push(line.content);
      }
    }
    cursor = oursOffset + oursSpan;
  }
  // Remainder of ours after the last hunk is common to both sides.
  if (cursor < oursLines.length) {
    copyToBoth(oursLines.slice(cursor));
  }
  return {
    base: base.join("\n"),
    theirs: theirs.join("\n")
  };
}
|
|
362
|
+
// Reconstruct synthetic base/theirs contents for a "ghost" patch — one whose
// original base tree is no longer reachable — using only the patch text and
// the user's current file (`ours`). Returns { base, theirs } or null when
// reconstruction is not possible/meaningful.
function reconstructFromGhostPatch(fileDiff, ours) {
  const hunks = parseHunks(fileDiff);
  if (hunks.length === 0) {
    return null;
  }
  // Pure additions need no reconstruction: there is no base content to
  // recover, so let the normal apply path handle them.
  const isPureAddition = hunks.every(
    (h) => h.oldCount === 0 && h.lines.every((l) => l.type !== "remove")
  );
  if (isPureAddition) {
    return null;
  }
  // Pure deletion: the base is exactly the deleted content (context plus
  // removed lines) and "theirs" is the empty file.
  const isPureDeletion = hunks.every(
    (h) => h.newCount === 0 && h.lines.every((l) => l.type !== "add")
  );
  if (isPureDeletion) {
    const baseLines = [];
    for (const hunk of hunks) {
      for (const line of hunk.lines) {
        if (line.type === "context" || line.type === "remove") {
          baseLines.push(line.content);
        }
      }
    }
    return {
      base: baseLines.join("\n"),
      theirs: ""
    };
  }
  // General case: anchor every hunk inside ours, then weave base/theirs
  // from the hunk lines plus the untouched regions of ours.
  const oursLines = ours.split("\n");
  const located = locateHunksInOurs(hunks, oursLines);
  if (!located) {
    return null;
  }
  return assembleHybrid(located, oursLines);
}
|
|
397
|
+
// Bundler-generated lazy initializer for src/HybridReconstruction.ts;
// the module body is evaluated on first call (see the dynamic import site
// that invokes init_HybridReconstruction before reading the exports).
var init_HybridReconstruction = __esm({
  "src/HybridReconstruction.ts"() {
    "use strict";
  }
});
|
|
402
|
+
|
|
146
403
|
// src/index.ts
|
|
147
404
|
var index_exports = {};
|
|
148
405
|
__export(index_exports, {
|
|
@@ -662,6 +919,7 @@ var ReplayApplicator = class {
|
|
|
662
919
|
lockManager;
|
|
663
920
|
outputDir;
|
|
664
921
|
renameCache = /* @__PURE__ */ new Map();
|
|
922
|
+
treeExistsCache = /* @__PURE__ */ new Map();
|
|
665
923
|
fileTheirsAccumulator = /* @__PURE__ */ new Map();
|
|
666
924
|
constructor(git, lockManager, outputDir) {
|
|
667
925
|
this.git = git;
|
|
@@ -858,14 +1116,33 @@ var ReplayApplicator = class {
|
|
|
858
1116
|
}
|
|
859
1117
|
const oursPath = (0, import_node_path2.join)(this.outputDir, resolvedPath);
|
|
860
1118
|
const ours = await (0, import_promises.readFile)(oursPath, "utf-8").catch(() => null);
|
|
861
|
-
let
|
|
862
|
-
|
|
863
|
-
|
|
864
|
-
|
|
865
|
-
|
|
866
|
-
|
|
867
|
-
|
|
868
|
-
|
|
1119
|
+
let ghostReconstructed = false;
|
|
1120
|
+
let theirs = null;
|
|
1121
|
+
if (!base && ours && !renameSourcePath) {
|
|
1122
|
+
const treeReachable = await this.isTreeReachable(baseGen.tree_hash);
|
|
1123
|
+
if (!treeReachable) {
|
|
1124
|
+
const fileDiff = this.extractFileDiff(patch.patch_content, filePath);
|
|
1125
|
+
if (fileDiff) {
|
|
1126
|
+
const { reconstructFromGhostPatch: reconstructFromGhostPatch2 } = await Promise.resolve().then(() => (init_HybridReconstruction(), HybridReconstruction_exports));
|
|
1127
|
+
const result = reconstructFromGhostPatch2(fileDiff, ours);
|
|
1128
|
+
if (result) {
|
|
1129
|
+
base = result.base;
|
|
1130
|
+
theirs = result.theirs;
|
|
1131
|
+
ghostReconstructed = true;
|
|
1132
|
+
}
|
|
1133
|
+
}
|
|
1134
|
+
}
|
|
1135
|
+
}
|
|
1136
|
+
if (!ghostReconstructed) {
|
|
1137
|
+
theirs = await this.applyPatchToContent(
|
|
1138
|
+
base,
|
|
1139
|
+
patch.patch_content,
|
|
1140
|
+
filePath,
|
|
1141
|
+
tempGit,
|
|
1142
|
+
tempDir,
|
|
1143
|
+
renameSourcePath
|
|
1144
|
+
);
|
|
1145
|
+
}
|
|
869
1146
|
let useAccumulatorAsMergeBase = false;
|
|
870
1147
|
const accumulatorEntry = this.fileTheirsAccumulator.get(resolvedPath);
|
|
871
1148
|
if (!theirs && accumulatorEntry) {
|
|
@@ -898,13 +1175,13 @@ var ReplayApplicator = class {
|
|
|
898
1175
|
baseMismatchSkipped = true;
|
|
899
1176
|
}
|
|
900
1177
|
}
|
|
901
|
-
if (
|
|
1178
|
+
if (base == null && !ours && effective_theirs) {
|
|
902
1179
|
const outDir2 = (0, import_node_path2.dirname)(oursPath);
|
|
903
1180
|
await (0, import_promises.mkdir)(outDir2, { recursive: true });
|
|
904
1181
|
await (0, import_promises.writeFile)(oursPath, effective_theirs);
|
|
905
1182
|
return { file: resolvedPath, status: "merged", reason: "new-file" };
|
|
906
1183
|
}
|
|
907
|
-
if (
|
|
1184
|
+
if (base == null && ours && effective_theirs) {
|
|
908
1185
|
const merged2 = threeWayMerge("", ours, effective_theirs);
|
|
909
1186
|
const outDir2 = (0, import_node_path2.dirname)(oursPath);
|
|
910
1187
|
await (0, import_promises.mkdir)(outDir2, { recursive: true });
|
|
@@ -927,7 +1204,7 @@ var ReplayApplicator = class {
|
|
|
927
1204
|
reason: "missing-content"
|
|
928
1205
|
};
|
|
929
1206
|
}
|
|
930
|
-
if (
|
|
1207
|
+
if (base == null && !useAccumulatorAsMergeBase || !ours) {
|
|
931
1208
|
return {
|
|
932
1209
|
file: resolvedPath,
|
|
933
1210
|
status: "skipped",
|
|
@@ -970,6 +1247,14 @@ var ReplayApplicator = class {
|
|
|
970
1247
|
};
|
|
971
1248
|
}
|
|
972
1249
|
}
|
|
1250
|
+
async isTreeReachable(treeHash) {
|
|
1251
|
+
let result = this.treeExistsCache.get(treeHash);
|
|
1252
|
+
if (result === void 0) {
|
|
1253
|
+
result = await this.git.treeExists(treeHash);
|
|
1254
|
+
this.treeExistsCache.set(treeHash, result);
|
|
1255
|
+
}
|
|
1256
|
+
return result;
|
|
1257
|
+
}
|
|
973
1258
|
isExcluded(patch) {
|
|
974
1259
|
const config = this.lockManager.getCustomizationsConfig();
|
|
975
1260
|
if (!config.exclude) return false;
|
|
@@ -2044,6 +2329,7 @@ async function bootstrap(outputDir, options) {
|
|
|
2044
2329
|
}
|
|
2045
2330
|
lockManager.save();
|
|
2046
2331
|
const fernignoreUpdated = ensureFernignoreEntries(outputDir);
|
|
2332
|
+
ensureGitattributesEntries(outputDir);
|
|
2047
2333
|
if (migrator.fernignoreExists() && fernignorePatterns.length > 0) {
|
|
2048
2334
|
const action = options?.fernignoreAction ?? "skip";
|
|
2049
2335
|
if (action === "migrate") {
|
|
@@ -2125,7 +2411,7 @@ function parseGitLog(log) {
|
|
|
2125
2411
|
return { sha, authorName, authorEmail, message };
|
|
2126
2412
|
});
|
|
2127
2413
|
}
|
|
2128
|
-
var REPLAY_FERNIGNORE_ENTRIES = [".fern/replay.lock", ".fern/replay.yml"];
|
|
2414
|
+
var REPLAY_FERNIGNORE_ENTRIES = [".fern/replay.lock", ".fern/replay.yml", ".gitattributes"];
|
|
2129
2415
|
function ensureFernignoreEntries(outputDir) {
|
|
2130
2416
|
const fernignorePath = (0, import_node_path5.join)(outputDir, ".fernignore");
|
|
2131
2417
|
let content = "";
|
|
@@ -2149,6 +2435,29 @@ function ensureFernignoreEntries(outputDir) {
|
|
|
2149
2435
|
(0, import_node_fs4.writeFileSync)(fernignorePath, content, "utf-8");
|
|
2150
2436
|
return true;
|
|
2151
2437
|
}
|
|
2438
|
+
// Attribute lines replay manages in the consumer repo's .gitattributes.
var GITATTRIBUTES_ENTRIES = [".fern/replay.lock linguist-generated=true"];
// Ensure each managed entry is present in <outputDir>/.gitattributes,
// appending only the missing ones and preserving existing content.
function ensureGitattributesEntries(outputDir) {
  const gitattributesPath = (0, import_node_path5.join)(outputDir, ".gitattributes");
  let content = (0, import_node_fs4.existsSync)(gitattributesPath)
    ? (0, import_node_fs4.readFileSync)(gitattributesPath, "utf-8")
    : "";
  // Compare trimmed lines so whitespace variations still count as present.
  const existing = content.split("\n").map((line) => line.trim());
  const missing = GITATTRIBUTES_ENTRIES.filter(
    (entry) => !existing.includes(entry)
  );
  if (missing.length === 0) {
    return;
  }
  if (content && !content.endsWith("\n")) {
    content += "\n";
  }
  content += missing.join("\n") + "\n";
  (0, import_node_fs4.writeFileSync)(gitattributesPath, content, "utf-8");
}
|
|
2152
2461
|
function computeContentHash(patchContent) {
|
|
2153
2462
|
const normalized = patchContent.split("\n").filter((line) => !line.startsWith("From ") && !line.startsWith("index ") && !line.startsWith("Date: ")).join("\n");
|
|
2154
2463
|
return `sha256:${(0, import_node_crypto3.createHash)("sha256").update(normalized).digest("hex")}`;
|