pushwork 1.1.2 → 1.1.4
This diff represents the content of publicly available package versions released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/.prettierrc +9 -0
- package/dist/cli/commands.d.ts +71 -0
- package/dist/cli/commands.d.ts.map +1 -0
- package/dist/cli/commands.js +794 -0
- package/dist/cli/commands.js.map +1 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +19 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/config/index.d.ts +71 -0
- package/dist/config/index.d.ts.map +1 -0
- package/dist/config/index.js +314 -0
- package/dist/config/index.js.map +1 -0
- package/dist/core/change-detection.d.ts +4 -3
- package/dist/core/change-detection.d.ts.map +1 -1
- package/dist/core/change-detection.js +78 -25
- package/dist/core/change-detection.js.map +1 -1
- package/dist/core/sync-engine.d.ts.map +1 -1
- package/dist/core/sync-engine.js +22 -5
- package/dist/core/sync-engine.js.map +1 -1
- package/dist/types/documents.d.ts +2 -0
- package/dist/types/documents.d.ts.map +1 -1
- package/dist/utils/content-similarity.d.ts +53 -0
- package/dist/utils/content-similarity.d.ts.map +1 -0
- package/dist/utils/content-similarity.js +155 -0
- package/dist/utils/content-similarity.js.map +1 -0
- package/dist/utils/network-sync.js +5 -5
- package/dist/utils/network-sync.js.map +1 -1
- package/package.json +1 -1
- package/src/core/change-detection.ts +108 -43
- package/src/core/sync-engine.ts +22 -5
- package/src/types/documents.ts +4 -2
- package/src/utils/network-sync.ts +5 -5
- package/test/integration/in-memory-sync.test.ts +201 -0
package/src/core/sync-engine.ts
CHANGED
|
@@ -266,7 +266,7 @@ export class SyncEngine {
|
|
|
266
266
|
const plainUrl = getPlainUrl(entry.url)
|
|
267
267
|
if (!failedUrls.has(plainUrl)) continue
|
|
268
268
|
|
|
269
|
-
debug(`recreate: recreating document for ${filePath} (${plainUrl
|
|
269
|
+
debug(`recreate: recreating document for ${filePath} (${plainUrl})`)
|
|
270
270
|
out.taskLine(`Recreating document for ${filePath}`)
|
|
271
271
|
|
|
272
272
|
try {
|
|
@@ -335,7 +335,7 @@ export class SyncEngine {
|
|
|
335
335
|
newHandles.push(newHandle)
|
|
336
336
|
newHandles.push(dirHandle)
|
|
337
337
|
|
|
338
|
-
debug(`recreate: created new doc for ${filePath} -> ${newHandle.url
|
|
338
|
+
debug(`recreate: created new doc for ${filePath} -> ${newHandle.url}`)
|
|
339
339
|
} catch (error) {
|
|
340
340
|
debug(`recreate: failed for ${filePath}: ${error}`)
|
|
341
341
|
out.taskLine(`Failed to recreate ${filePath}: ${error}`, true)
|
|
@@ -378,7 +378,7 @@ export class SyncEngine {
|
|
|
378
378
|
(await this.snapshotManager.load()) ||
|
|
379
379
|
this.snapshotManager.createEmpty()
|
|
380
380
|
|
|
381
|
-
debug(`sync: rootDirectoryUrl=${snapshot.rootDirectoryUrl
|
|
381
|
+
debug(`sync: rootDirectoryUrl=${snapshot.rootDirectoryUrl}, files=${snapshot.files.size}, dirs=${snapshot.directories.size}`)
|
|
382
382
|
|
|
383
383
|
// Wait for initial sync to receive any pending remote changes
|
|
384
384
|
if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
|
|
@@ -433,6 +433,8 @@ export class SyncEngine {
|
|
|
433
433
|
// Detect all changes
|
|
434
434
|
debug("sync: detecting changes")
|
|
435
435
|
out.update("Detecting local and remote changes")
|
|
436
|
+
// Capture pre-push snapshot file paths to detect deletions after push
|
|
437
|
+
const prePushFilePaths = new Set(snapshot.files.keys())
|
|
436
438
|
const changes = await this.changeDetector.detectChanges(snapshot)
|
|
437
439
|
|
|
438
440
|
// Detect moves
|
|
@@ -560,8 +562,18 @@ export class SyncEngine {
|
|
|
560
562
|
}
|
|
561
563
|
|
|
562
564
|
// Re-detect changes after network sync for fresh state
|
|
565
|
+
// Compute paths deleted during push so they aren't resurrected during pull
|
|
566
|
+
const deletedPaths = new Set<string>()
|
|
567
|
+
for (const p of prePushFilePaths) {
|
|
568
|
+
if (!snapshot.files.has(p)) {
|
|
569
|
+
deletedPaths.add(p)
|
|
570
|
+
}
|
|
571
|
+
}
|
|
572
|
+
if (deletedPaths.size > 0) {
|
|
573
|
+
debug(`sync: excluding ${deletedPaths.size} deleted paths from re-detection`)
|
|
574
|
+
}
|
|
563
575
|
debug("sync: re-detecting changes after network sync")
|
|
564
|
-
const freshChanges = await this.changeDetector.detectChanges(snapshot)
|
|
576
|
+
const freshChanges = await this.changeDetector.detectChanges(snapshot, deletedPaths)
|
|
565
577
|
const freshRemoteChanges = freshChanges.filter(
|
|
566
578
|
c =>
|
|
567
579
|
c.changeType === ChangeType.REMOTE_ONLY ||
|
|
@@ -805,7 +817,7 @@ export class SyncEngine {
|
|
|
805
817
|
}
|
|
806
818
|
)
|
|
807
819
|
result.filesChanged++
|
|
808
|
-
debug(`push: created ${change.path} -> ${handle.url
|
|
820
|
+
debug(`push: created ${change.path} -> ${handle.url}`)
|
|
809
821
|
}
|
|
810
822
|
} else {
|
|
811
823
|
// Update existing file
|
|
@@ -966,6 +978,11 @@ export class SyncEngine {
|
|
|
966
978
|
if (snapshotEntry) {
|
|
967
979
|
// Update existing entry
|
|
968
980
|
snapshotEntry.head = change.remoteHead
|
|
981
|
+
// If the remote document was replaced (new URL), update the snapshot URL
|
|
982
|
+
if (change.remoteUrl) {
|
|
983
|
+
const fileHandle = await this.repo.find<FileDocument>(change.remoteUrl)
|
|
984
|
+
snapshotEntry.url = this.getEntryUrl(fileHandle, change.path)
|
|
985
|
+
}
|
|
969
986
|
} else {
|
|
970
987
|
// Create new snapshot entry for newly discovered remote file
|
|
971
988
|
// We need to find the remote file's URL from the directory hierarchy
|
package/src/types/documents.ts
CHANGED
|
@@ -41,7 +41,7 @@ export interface FileDocument {
|
|
|
41
41
|
export enum FileType {
|
|
42
42
|
TEXT = "text",
|
|
43
43
|
BINARY = "binary",
|
|
44
|
-
DIRECTORY = "directory"
|
|
44
|
+
DIRECTORY = "directory"
|
|
45
45
|
}
|
|
46
46
|
|
|
47
47
|
/**
|
|
@@ -51,7 +51,7 @@ export enum ChangeType {
|
|
|
51
51
|
NO_CHANGE = "no_change",
|
|
52
52
|
LOCAL_ONLY = "local_only",
|
|
53
53
|
REMOTE_ONLY = "remote_only",
|
|
54
|
-
BOTH_CHANGED = "both_changed"
|
|
54
|
+
BOTH_CHANGED = "both_changed"
|
|
55
55
|
}
|
|
56
56
|
|
|
57
57
|
/**
|
|
@@ -86,4 +86,6 @@ export interface DetectedChange {
|
|
|
86
86
|
remoteContent: string | Uint8Array | null
|
|
87
87
|
localHead?: UrlHeads
|
|
88
88
|
remoteHead?: UrlHeads
|
|
89
|
+
/** New remote URL when the remote document was replaced (artifact URL change) */
|
|
90
|
+
remoteUrl?: AutomergeUrl
|
|
89
91
|
}
|
|
@@ -88,7 +88,7 @@ export async function waitForBidirectionalSync(
|
|
|
88
88
|
const changedDocs: string[] = [];
|
|
89
89
|
for (const [url, heads] of currentHeads) {
|
|
90
90
|
if (lastSeenHeads.get(url) !== heads) {
|
|
91
|
-
changedDocs.push(url
|
|
91
|
+
changedDocs.push(url);
|
|
92
92
|
}
|
|
93
93
|
}
|
|
94
94
|
const newDocs = currentHeads.size - lastSeenHeads.size;
|
|
@@ -236,13 +236,13 @@ function waitForHandleSync(
|
|
|
236
236
|
};
|
|
237
237
|
|
|
238
238
|
const onConverged = () => {
|
|
239
|
-
debug(`waitForSync: ${handle.url
|
|
239
|
+
debug(`waitForSync: ${handle.url}... converged in ${Date.now() - startTime}ms`);
|
|
240
240
|
cleanup();
|
|
241
241
|
resolve(handle);
|
|
242
242
|
};
|
|
243
243
|
|
|
244
244
|
const timeout = setTimeout(() => {
|
|
245
|
-
debug(`waitForSync: ${handle.url
|
|
245
|
+
debug(`waitForSync: ${handle.url}... timed out after ${timeoutMs}ms`);
|
|
246
246
|
cleanup();
|
|
247
247
|
reject(handle);
|
|
248
248
|
}, timeoutMs);
|
|
@@ -317,9 +317,9 @@ export async function waitForSync(
|
|
|
317
317
|
const remoteHeads = syncInfo?.lastHeads;
|
|
318
318
|
if (A.equals(heads, remoteHeads)) {
|
|
319
319
|
alreadySynced++;
|
|
320
|
-
debug(`waitForSync: ${handle.url
|
|
320
|
+
debug(`waitForSync: ${handle.url}... already synced`);
|
|
321
321
|
} else {
|
|
322
|
-
debug(`waitForSync: ${handle.url
|
|
322
|
+
debug(`waitForSync: ${handle.url}... needs sync (remoteHeads=${remoteHeads ? 'present' : 'missing'})`);
|
|
323
323
|
needsSync.push(handle);
|
|
324
324
|
}
|
|
325
325
|
}
|
|
@@ -583,6 +583,207 @@ describe("Sync Reliability Tests", () => {
|
|
|
583
583
|
await pushwork(["sync"], repoA);
|
|
584
584
|
expect(await pathExists(path.join(repoA, "parent", "child", "target.txt"))).toBe(false);
|
|
585
585
|
}, 60000);
|
|
586
|
+
|
|
587
|
+
it("deleted file in root directory should not resurrect", async () => {
|
|
588
|
+
const repoA = path.join(tmpDir, "repo-a");
|
|
589
|
+
await fs.mkdir(repoA);
|
|
590
|
+
|
|
591
|
+
await fs.writeFile(path.join(repoA, "root-file.txt"), "root content");
|
|
592
|
+
await fs.writeFile(path.join(repoA, "keep.txt"), "keep this");
|
|
593
|
+
await pushwork(["init", "."], repoA);
|
|
594
|
+
await pushwork(["sync"], repoA);
|
|
595
|
+
|
|
596
|
+
// Delete file in root
|
|
597
|
+
await fs.unlink(path.join(repoA, "root-file.txt"));
|
|
598
|
+
|
|
599
|
+
await pushwork(["sync"], repoA);
|
|
600
|
+
expect(await pathExists(path.join(repoA, "root-file.txt"))).toBe(false);
|
|
601
|
+
expect(await pathExists(path.join(repoA, "keep.txt"))).toBe(true);
|
|
602
|
+
|
|
603
|
+
// Sync again - should NOT come back
|
|
604
|
+
await pushwork(["sync"], repoA);
|
|
605
|
+
expect(await pathExists(path.join(repoA, "root-file.txt"))).toBe(false);
|
|
606
|
+
}, 60000);
|
|
607
|
+
|
|
608
|
+
it("deleted file in non-artifact subdirectory (src/) should not resurrect", async () => {
|
|
609
|
+
const repoA = path.join(tmpDir, "repo-a");
|
|
610
|
+
await fs.mkdir(repoA);
|
|
611
|
+
|
|
612
|
+
await fs.mkdir(path.join(repoA, "src"), { recursive: true });
|
|
613
|
+
await fs.writeFile(path.join(repoA, "src", "index.ts"), "export default 1");
|
|
614
|
+
await fs.writeFile(path.join(repoA, "src", "helper.ts"), "export function help() {}");
|
|
615
|
+
await pushwork(["init", "."], repoA);
|
|
616
|
+
await pushwork(["sync"], repoA);
|
|
617
|
+
|
|
618
|
+
// Delete one file in src/
|
|
619
|
+
await fs.unlink(path.join(repoA, "src", "helper.ts"));
|
|
620
|
+
|
|
621
|
+
await pushwork(["sync"], repoA);
|
|
622
|
+
expect(await pathExists(path.join(repoA, "src", "helper.ts"))).toBe(false);
|
|
623
|
+
expect(await pathExists(path.join(repoA, "src", "index.ts"))).toBe(true);
|
|
624
|
+
|
|
625
|
+
// Sync again - should NOT come back
|
|
626
|
+
await pushwork(["sync"], repoA);
|
|
627
|
+
expect(await pathExists(path.join(repoA, "src", "helper.ts"))).toBe(false);
|
|
628
|
+
}, 60000);
|
|
629
|
+
|
|
630
|
+
it("deleted files should not resurrect after multiple sync cycles", async () => {
|
|
631
|
+
// Simulate real-world usage: multiple syncs over time with deletions
|
|
632
|
+
const repoA = path.join(tmpDir, "repo-a");
|
|
633
|
+
await fs.mkdir(repoA);
|
|
634
|
+
|
|
635
|
+
await fs.mkdir(path.join(repoA, "src"), { recursive: true });
|
|
636
|
+
await fs.writeFile(path.join(repoA, "readme.txt"), "readme");
|
|
637
|
+
await fs.writeFile(path.join(repoA, "src", "app.ts"), "app");
|
|
638
|
+
await fs.writeFile(path.join(repoA, "src", "old.ts"), "old");
|
|
639
|
+
await pushwork(["init", "."], repoA);
|
|
640
|
+
await pushwork(["sync"], repoA);
|
|
641
|
+
|
|
642
|
+
// Cycle 1: delete root file
|
|
643
|
+
await fs.unlink(path.join(repoA, "readme.txt"));
|
|
644
|
+
await pushwork(["sync"], repoA);
|
|
645
|
+
expect(await pathExists(path.join(repoA, "readme.txt"))).toBe(false);
|
|
646
|
+
|
|
647
|
+
// Cycle 2: delete src file
|
|
648
|
+
await fs.unlink(path.join(repoA, "src", "old.ts"));
|
|
649
|
+
await pushwork(["sync"], repoA);
|
|
650
|
+
expect(await pathExists(path.join(repoA, "src", "old.ts"))).toBe(false);
|
|
651
|
+
|
|
652
|
+
// Cycle 3: just sync - nothing should come back
|
|
653
|
+
await pushwork(["sync"], repoA);
|
|
654
|
+
expect(await pathExists(path.join(repoA, "readme.txt"))).toBe(false);
|
|
655
|
+
expect(await pathExists(path.join(repoA, "src", "old.ts"))).toBe(false);
|
|
656
|
+
expect(await pathExists(path.join(repoA, "src", "app.ts"))).toBe(true);
|
|
657
|
+
}, 90000);
|
|
658
|
+
|
|
659
|
+
it("peer B should not see files deleted by peer A (root)", async () => {
|
|
660
|
+
const repoA = path.join(tmpDir, "repo-a");
|
|
661
|
+
const repoB = path.join(tmpDir, "repo-b");
|
|
662
|
+
await fs.mkdir(repoA);
|
|
663
|
+
await fs.mkdir(repoB);
|
|
664
|
+
|
|
665
|
+
await fs.writeFile(path.join(repoA, "keep.txt"), "keep");
|
|
666
|
+
await fs.writeFile(path.join(repoA, "delete-me.txt"), "gone");
|
|
667
|
+
await pushwork(["init", "."], repoA);
|
|
668
|
+
|
|
669
|
+
// Clone to B and converge
|
|
670
|
+
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
671
|
+
await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
|
|
672
|
+
await syncUntilConverged(repoA, repoB);
|
|
673
|
+
|
|
674
|
+
expect(await pathExists(path.join(repoB, "delete-me.txt"))).toBe(true);
|
|
675
|
+
|
|
676
|
+
// A deletes a root file
|
|
677
|
+
await fs.unlink(path.join(repoA, "delete-me.txt"));
|
|
678
|
+
await pushwork(["sync"], repoA);
|
|
679
|
+
|
|
680
|
+
// B syncs - should see the deletion
|
|
681
|
+
await pushwork(["sync"], repoB);
|
|
682
|
+
expect(await pathExists(path.join(repoB, "delete-me.txt"))).toBe(false);
|
|
683
|
+
expect(await pathExists(path.join(repoB, "keep.txt"))).toBe(true);
|
|
684
|
+
|
|
685
|
+
// B syncs again - should stay deleted
|
|
686
|
+
await pushwork(["sync"], repoB);
|
|
687
|
+
expect(await pathExists(path.join(repoB, "delete-me.txt"))).toBe(false);
|
|
688
|
+
}, 90000);
|
|
689
|
+
|
|
690
|
+
it("peer B should not see files deleted by peer A (src/)", async () => {
|
|
691
|
+
const repoA = path.join(tmpDir, "repo-a");
|
|
692
|
+
const repoB = path.join(tmpDir, "repo-b");
|
|
693
|
+
await fs.mkdir(repoA);
|
|
694
|
+
await fs.mkdir(repoB);
|
|
695
|
+
|
|
696
|
+
await fs.mkdir(path.join(repoA, "src"), { recursive: true });
|
|
697
|
+
await fs.writeFile(path.join(repoA, "src", "index.ts"), "export default 1");
|
|
698
|
+
await fs.writeFile(path.join(repoA, "src", "old.ts"), "old code");
|
|
699
|
+
await pushwork(["init", "."], repoA);
|
|
700
|
+
|
|
701
|
+
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
702
|
+
await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
|
|
703
|
+
await syncUntilConverged(repoA, repoB);
|
|
704
|
+
|
|
705
|
+
expect(await pathExists(path.join(repoB, "src", "old.ts"))).toBe(true);
|
|
706
|
+
|
|
707
|
+
// A deletes a file in src/
|
|
708
|
+
await fs.unlink(path.join(repoA, "src", "old.ts"));
|
|
709
|
+
await pushwork(["sync"], repoA);
|
|
710
|
+
|
|
711
|
+
// B syncs - should see the deletion
|
|
712
|
+
await pushwork(["sync"], repoB);
|
|
713
|
+
expect(await pathExists(path.join(repoB, "src", "old.ts"))).toBe(false);
|
|
714
|
+
expect(await pathExists(path.join(repoB, "src", "index.ts"))).toBe(true);
|
|
715
|
+
|
|
716
|
+
// B syncs again - should stay deleted
|
|
717
|
+
await pushwork(["sync"], repoB);
|
|
718
|
+
expect(await pathExists(path.join(repoB, "src", "old.ts"))).toBe(false);
|
|
719
|
+
}, 90000);
|
|
720
|
+
|
|
721
|
+
it("peer B should not see files deleted by peer A (dist/)", async () => {
|
|
722
|
+
const repoA = path.join(tmpDir, "repo-a");
|
|
723
|
+
const repoB = path.join(tmpDir, "repo-b");
|
|
724
|
+
await fs.mkdir(repoA);
|
|
725
|
+
await fs.mkdir(repoB);
|
|
726
|
+
|
|
727
|
+
await fs.mkdir(path.join(repoA, "dist", "assets"), { recursive: true });
|
|
728
|
+
await fs.writeFile(path.join(repoA, "dist", "index.js"), "// index");
|
|
729
|
+
await fs.writeFile(path.join(repoA, "dist", "assets", "app-ABC.js"), "// build 1");
|
|
730
|
+
await pushwork(["init", "."], repoA);
|
|
731
|
+
|
|
732
|
+
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
733
|
+
await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
|
|
734
|
+
await syncUntilConverged(repoA, repoB);
|
|
735
|
+
|
|
736
|
+
expect(await pathExists(path.join(repoB, "dist", "assets", "app-ABC.js"))).toBe(true);
|
|
737
|
+
|
|
738
|
+
// A rebuilds: delete old artifact, create new one
|
|
739
|
+
await fs.unlink(path.join(repoA, "dist", "assets", "app-ABC.js"));
|
|
740
|
+
await fs.writeFile(path.join(repoA, "dist", "assets", "app-XYZ.js"), "// build 2");
|
|
741
|
+
await pushwork(["sync"], repoA);
|
|
742
|
+
|
|
743
|
+
// A should not have resurrected
|
|
744
|
+
expect(await pathExists(path.join(repoA, "dist", "assets", "app-ABC.js"))).toBe(false);
|
|
745
|
+
|
|
746
|
+
// B syncs - should see new file, NOT old file
|
|
747
|
+
await pushwork(["sync"], repoB);
|
|
748
|
+
expect(await pathExists(path.join(repoB, "dist", "assets", "app-ABC.js"))).toBe(false);
|
|
749
|
+
expect(await pathExists(path.join(repoB, "dist", "assets", "app-XYZ.js"))).toBe(true);
|
|
750
|
+
|
|
751
|
+
// B syncs again - old file should stay gone
|
|
752
|
+
await pushwork(["sync"], repoB);
|
|
753
|
+
expect(await pathExists(path.join(repoB, "dist", "assets", "app-ABC.js"))).toBe(false);
|
|
754
|
+
}, 90000);
|
|
755
|
+
|
|
756
|
+
it("peer B should see artifact file content update after URL replacement", async () => {
|
|
757
|
+
// When peer A modifies an artifact file, the document is replaced entirely
|
|
758
|
+
// (new Automerge doc with a new URL). Peer B's snapshot still points to the
|
|
759
|
+
// old (now orphaned) URL. detectRemoteChanges sees no head change on the old
|
|
760
|
+
// doc, and detectNewRemoteDocuments skips paths already in the snapshot.
|
|
761
|
+
// Without URL replacement detection, B never sees the update.
|
|
762
|
+
const repoA = path.join(tmpDir, "repo-a");
|
|
763
|
+
const repoB = path.join(tmpDir, "repo-b");
|
|
764
|
+
await fs.mkdir(repoA);
|
|
765
|
+
await fs.mkdir(repoB);
|
|
766
|
+
|
|
767
|
+
await fs.mkdir(path.join(repoA, "dist"), { recursive: true });
|
|
768
|
+
await fs.writeFile(path.join(repoA, "dist", "app.js"), "// version 1");
|
|
769
|
+
await pushwork(["init", "."], repoA);
|
|
770
|
+
|
|
771
|
+
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
772
|
+
await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
|
|
773
|
+
await syncUntilConverged(repoA, repoB);
|
|
774
|
+
|
|
775
|
+
const bContentV1 = await fs.readFile(path.join(repoB, "dist", "app.js"), "utf-8");
|
|
776
|
+
expect(bContentV1).toBe("// version 1");
|
|
777
|
+
|
|
778
|
+
// A modifies the artifact file — this triggers nuclear replacement (new URL)
|
|
779
|
+
await fs.writeFile(path.join(repoA, "dist", "app.js"), "// version 2");
|
|
780
|
+
await pushwork(["sync"], repoA);
|
|
781
|
+
|
|
782
|
+
// B syncs — should pick up the new content despite the URL change
|
|
783
|
+
await pushwork(["sync"], repoB);
|
|
784
|
+
const bContentV2 = await fs.readFile(path.join(repoB, "dist", "app.js"), "utf-8");
|
|
785
|
+
expect(bContentV2).toBe("// version 2");
|
|
786
|
+
}, 90000);
|
|
586
787
|
});
|
|
587
788
|
|
|
588
789
|
describe("Move/Rename Detection", () => {
|