pushwork 2.0.0-a.sub.0 → 2.0.0-preview
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/branches.d.ts +19 -0
- package/dist/branches.d.ts.map +1 -0
- package/dist/branches.js +111 -0
- package/dist/branches.js.map +1 -0
- package/dist/cli.d.ts +1 -1
- package/dist/cli.d.ts.map +1 -1
- package/dist/cli.js +238 -272
- package/dist/cli.js.map +1 -1
- package/dist/config.d.ts +17 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +84 -0
- package/dist/config.js.map +1 -0
- package/dist/fs-tree.d.ts +6 -0
- package/dist/fs-tree.d.ts.map +1 -0
- package/dist/fs-tree.js +99 -0
- package/dist/fs-tree.js.map +1 -0
- package/dist/ignore.d.ts +6 -0
- package/dist/ignore.d.ts.map +1 -0
- package/dist/ignore.js +74 -0
- package/dist/ignore.js.map +1 -0
- package/dist/index.d.ts +8 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +34 -4
- package/dist/index.js.map +1 -1
- package/dist/log.d.ts +3 -0
- package/dist/log.d.ts.map +1 -0
- package/dist/log.js +14 -0
- package/dist/log.js.map +1 -0
- package/dist/pushwork.d.ts +115 -0
- package/dist/pushwork.d.ts.map +1 -0
- package/dist/pushwork.js +918 -0
- package/dist/pushwork.js.map +1 -0
- package/dist/repo.d.ts +14 -0
- package/dist/repo.d.ts.map +1 -0
- package/dist/repo.js +60 -0
- package/dist/repo.js.map +1 -0
- package/dist/shapes/custom.d.ts +3 -0
- package/dist/shapes/custom.d.ts.map +1 -0
- package/dist/shapes/custom.js +57 -0
- package/dist/shapes/custom.js.map +1 -0
- package/dist/shapes/file.d.ts +20 -0
- package/dist/shapes/file.d.ts.map +1 -0
- package/dist/shapes/file.js +140 -0
- package/dist/shapes/file.js.map +1 -0
- package/dist/shapes/index.d.ts +10 -0
- package/dist/shapes/index.d.ts.map +1 -0
- package/dist/shapes/index.js +35 -0
- package/dist/shapes/index.js.map +1 -0
- package/dist/shapes/patchwork-folder.d.ts +3 -0
- package/dist/shapes/patchwork-folder.d.ts.map +1 -0
- package/dist/shapes/patchwork-folder.js +160 -0
- package/dist/shapes/patchwork-folder.js.map +1 -0
- package/dist/shapes/types.d.ts +37 -0
- package/dist/shapes/types.d.ts.map +1 -0
- package/dist/shapes/types.js +52 -0
- package/dist/shapes/types.js.map +1 -0
- package/dist/shapes/vfs.d.ts +3 -0
- package/dist/shapes/vfs.d.ts.map +1 -0
- package/dist/shapes/vfs.js +88 -0
- package/dist/shapes/vfs.js.map +1 -0
- package/dist/stash.d.ts +23 -0
- package/dist/stash.d.ts.map +1 -0
- package/dist/stash.js +118 -0
- package/dist/stash.js.map +1 -0
- package/flake.lock +128 -0
- package/flake.nix +66 -0
- package/package.json +15 -48
- package/patches/@automerge__automerge-repo@2.6.0-subduction.15.patch +26 -0
- package/pnpm-workspace.yaml +5 -0
- package/src/branches.ts +93 -0
- package/src/cli.ts +258 -408
- package/src/config.ts +64 -0
- package/src/fs-tree.ts +70 -0
- package/src/ignore.ts +33 -0
- package/src/index.ts +38 -4
- package/src/log.ts +8 -0
- package/src/pushwork.ts +1055 -0
- package/src/repo.ts +76 -0
- package/src/shapes/custom.ts +29 -0
- package/src/shapes/file.ts +115 -0
- package/src/shapes/index.ts +19 -0
- package/src/shapes/patchwork-folder.ts +156 -0
- package/src/shapes/types.ts +79 -0
- package/src/shapes/vfs.ts +93 -0
- package/src/stash.ts +106 -0
- package/test/integration/branches.test.ts +389 -0
- package/test/integration/pushwork.test.ts +547 -0
- package/test/setup.ts +29 -0
- package/test/unit/doc-shape.test.ts +612 -0
- package/tsconfig.json +2 -3
- package/vitest.config.ts +14 -0
- package/ARCHITECTURE-ACCORDING-TO-CLAUDE.md +0 -248
- package/CLAUDE.md +0 -141
- package/README.md +0 -221
- package/babel.config.js +0 -5
- package/dist/cli/commands.d.ts +0 -71
- package/dist/cli/commands.d.ts.map +0 -1
- package/dist/cli/commands.js +0 -794
- package/dist/cli/commands.js.map +0 -1
- package/dist/cli/index.d.ts +0 -2
- package/dist/cli/index.d.ts.map +0 -1
- package/dist/cli/index.js +0 -19
- package/dist/cli/index.js.map +0 -1
- package/dist/commands.d.ts +0 -61
- package/dist/commands.d.ts.map +0 -1
- package/dist/commands.js +0 -861
- package/dist/commands.js.map +0 -1
- package/dist/config/index.d.ts +0 -71
- package/dist/config/index.d.ts.map +0 -1
- package/dist/config/index.js +0 -314
- package/dist/config/index.js.map +0 -1
- package/dist/core/change-detection.d.ts +0 -80
- package/dist/core/change-detection.d.ts.map +0 -1
- package/dist/core/change-detection.js +0 -523
- package/dist/core/change-detection.js.map +0 -1
- package/dist/core/config.d.ts +0 -81
- package/dist/core/config.d.ts.map +0 -1
- package/dist/core/config.js +0 -258
- package/dist/core/config.js.map +0 -1
- package/dist/core/index.d.ts +0 -6
- package/dist/core/index.d.ts.map +0 -1
- package/dist/core/index.js +0 -6
- package/dist/core/index.js.map +0 -1
- package/dist/core/move-detection.d.ts +0 -34
- package/dist/core/move-detection.d.ts.map +0 -1
- package/dist/core/move-detection.js +0 -121
- package/dist/core/move-detection.js.map +0 -1
- package/dist/core/snapshot.d.ts +0 -105
- package/dist/core/snapshot.d.ts.map +0 -1
- package/dist/core/snapshot.js +0 -217
- package/dist/core/snapshot.js.map +0 -1
- package/dist/core/sync-engine.d.ts +0 -151
- package/dist/core/sync-engine.d.ts.map +0 -1
- package/dist/core/sync-engine.js +0 -1346
- package/dist/core/sync-engine.js.map +0 -1
- package/dist/types/config.d.ts +0 -99
- package/dist/types/config.d.ts.map +0 -1
- package/dist/types/config.js +0 -5
- package/dist/types/config.js.map +0 -1
- package/dist/types/documents.d.ts +0 -88
- package/dist/types/documents.d.ts.map +0 -1
- package/dist/types/documents.js +0 -20
- package/dist/types/documents.js.map +0 -1
- package/dist/types/index.d.ts +0 -4
- package/dist/types/index.d.ts.map +0 -1
- package/dist/types/index.js +0 -4
- package/dist/types/index.js.map +0 -1
- package/dist/types/snapshot.d.ts +0 -64
- package/dist/types/snapshot.d.ts.map +0 -1
- package/dist/types/snapshot.js +0 -2
- package/dist/types/snapshot.js.map +0 -1
- package/dist/utils/content-similarity.d.ts +0 -53
- package/dist/utils/content-similarity.d.ts.map +0 -1
- package/dist/utils/content-similarity.js +0 -155
- package/dist/utils/content-similarity.js.map +0 -1
- package/dist/utils/content.d.ts +0 -10
- package/dist/utils/content.d.ts.map +0 -1
- package/dist/utils/content.js +0 -31
- package/dist/utils/content.js.map +0 -1
- package/dist/utils/directory.d.ts +0 -24
- package/dist/utils/directory.d.ts.map +0 -1
- package/dist/utils/directory.js +0 -52
- package/dist/utils/directory.js.map +0 -1
- package/dist/utils/fs.d.ts +0 -74
- package/dist/utils/fs.d.ts.map +0 -1
- package/dist/utils/fs.js +0 -248
- package/dist/utils/fs.js.map +0 -1
- package/dist/utils/index.d.ts +0 -5
- package/dist/utils/index.d.ts.map +0 -1
- package/dist/utils/index.js +0 -5
- package/dist/utils/index.js.map +0 -1
- package/dist/utils/mime-types.d.ts +0 -13
- package/dist/utils/mime-types.d.ts.map +0 -1
- package/dist/utils/mime-types.js +0 -209
- package/dist/utils/mime-types.js.map +0 -1
- package/dist/utils/network-sync.d.ts +0 -36
- package/dist/utils/network-sync.d.ts.map +0 -1
- package/dist/utils/network-sync.js +0 -250
- package/dist/utils/network-sync.js.map +0 -1
- package/dist/utils/node-polyfills.d.ts +0 -9
- package/dist/utils/node-polyfills.d.ts.map +0 -1
- package/dist/utils/node-polyfills.js +0 -9
- package/dist/utils/node-polyfills.js.map +0 -1
- package/dist/utils/output.d.ts +0 -129
- package/dist/utils/output.d.ts.map +0 -1
- package/dist/utils/output.js +0 -368
- package/dist/utils/output.js.map +0 -1
- package/dist/utils/repo-factory.d.ts +0 -13
- package/dist/utils/repo-factory.d.ts.map +0 -1
- package/dist/utils/repo-factory.js +0 -46
- package/dist/utils/repo-factory.js.map +0 -1
- package/dist/utils/string-similarity.d.ts +0 -14
- package/dist/utils/string-similarity.d.ts.map +0 -1
- package/dist/utils/string-similarity.js +0 -39
- package/dist/utils/string-similarity.js.map +0 -1
- package/dist/utils/text-diff.d.ts +0 -37
- package/dist/utils/text-diff.d.ts.map +0 -1
- package/dist/utils/text-diff.js +0 -93
- package/dist/utils/text-diff.js.map +0 -1
- package/dist/utils/trace.d.ts +0 -19
- package/dist/utils/trace.d.ts.map +0 -1
- package/dist/utils/trace.js +0 -63
- package/dist/utils/trace.js.map +0 -1
- package/src/commands.ts +0 -1134
- package/src/core/change-detection.ts +0 -712
- package/src/core/config.ts +0 -313
- package/src/core/index.ts +0 -5
- package/src/core/move-detection.ts +0 -169
- package/src/core/snapshot.ts +0 -275
- package/src/core/sync-engine.ts +0 -1758
- package/src/types/config.ts +0 -111
- package/src/types/documents.ts +0 -91
- package/src/types/index.ts +0 -3
- package/src/types/snapshot.ts +0 -67
- package/src/utils/content.ts +0 -34
- package/src/utils/directory.ts +0 -73
- package/src/utils/fs.ts +0 -297
- package/src/utils/index.ts +0 -4
- package/src/utils/mime-types.ts +0 -244
- package/src/utils/network-sync.ts +0 -319
- package/src/utils/node-polyfills.ts +0 -8
- package/src/utils/output.ts +0 -450
- package/src/utils/repo-factory.ts +0 -73
- package/src/utils/string-similarity.ts +0 -54
- package/src/utils/text-diff.ts +0 -101
- package/src/utils/trace.ts +0 -70
- package/test/integration/README.md +0 -328
- package/test/integration/clone-test.sh +0 -310
- package/test/integration/conflict-resolution-test.sh +0 -309
- package/test/integration/debug-both-nested.sh +0 -74
- package/test/integration/debug-concurrent-nested.sh +0 -87
- package/test/integration/debug-nested.sh +0 -73
- package/test/integration/deletion-behavior-test.sh +0 -487
- package/test/integration/deletion-sync-test-simple.sh +0 -193
- package/test/integration/deletion-sync-test.sh +0 -297
- package/test/integration/exclude-patterns.test.ts +0 -144
- package/test/integration/full-integration-test.sh +0 -363
- package/test/integration/fuzzer.test.ts +0 -818
- package/test/integration/in-memory-sync.test.ts +0 -830
- package/test/integration/init-sync.test.ts +0 -89
- package/test/integration/manual-sync-test.sh +0 -84
- package/test/integration/sync-deletion.test.ts +0 -280
- package/test/integration/sync-flow.test.ts +0 -291
- package/test/jest.setup.ts +0 -34
- package/test/run-tests.sh +0 -225
- package/test/unit/deletion-behavior.test.ts +0 -249
- package/test/unit/enhanced-mime-detection.test.ts +0 -244
- package/test/unit/snapshot.test.ts +0 -404
- package/test/unit/sync-convergence.test.ts +0 -298
- package/test/unit/sync-timing.test.ts +0 -134
- package/test/unit/utils.test.ts +0 -366
|
@@ -1,818 +0,0 @@
|
|
|
1
|
-
import * as fs from "fs/promises";
|
|
2
|
-
import * as path from "path";
|
|
3
|
-
import * as tmp from "tmp";
|
|
4
|
-
import { execFile } from "child_process";
|
|
5
|
-
import { promisify } from "util";
|
|
6
|
-
import * as crypto from "crypto";
|
|
7
|
-
import * as fc from "fast-check";
|
|
8
|
-
|
|
9
|
-
const execFilePromise = promisify(execFile);
|
|
10
|
-
|
|
11
|
-
// Path to the pushwork CLI
|
|
12
|
-
const PUSHWORK_CLI = path.join(__dirname, "../../dist/cli.js");
|
|
13
|
-
|
|
14
|
-
describe("Pushwork Fuzzer", () => {
|
|
15
|
-
let tmpDir: string;
|
|
16
|
-
let cleanup: () => void;
|
|
17
|
-
|
|
18
|
-
beforeEach(() => {
|
|
19
|
-
const tmpObj = tmp.dirSync({ unsafeCleanup: true });
|
|
20
|
-
tmpDir = tmpObj.name;
|
|
21
|
-
cleanup = tmpObj.removeCallback;
|
|
22
|
-
});
|
|
23
|
-
|
|
24
|
-
afterEach(() => {
|
|
25
|
-
cleanup();
|
|
26
|
-
});
|
|
27
|
-
|
|
28
|
-
/**
|
|
29
|
-
* Helper: Wait for a short time (useful for allowing sync to complete)
|
|
30
|
-
*/
|
|
31
|
-
async function wait(ms: number): Promise<void> {
|
|
32
|
-
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
33
|
-
}
|
|
34
|
-
|
|
35
|
-
/**
|
|
36
|
-
* Helper: Execute pushwork CLI command with retry logic for transient errors
|
|
37
|
-
*/
|
|
38
|
-
async function pushwork(
|
|
39
|
-
args: string[],
|
|
40
|
-
cwd: string,
|
|
41
|
-
maxRetries: number = 3
|
|
42
|
-
): Promise<{ stdout: string; stderr: string }> {
|
|
43
|
-
let lastError: Error | null = null;
|
|
44
|
-
|
|
45
|
-
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
|
46
|
-
try {
|
|
47
|
-
const result = await execFilePromise("node", [PUSHWORK_CLI, ...args], {
|
|
48
|
-
cwd,
|
|
49
|
-
env: { ...process.env, FORCE_COLOR: "0" }, // Disable color codes for cleaner output
|
|
50
|
-
});
|
|
51
|
-
return result;
|
|
52
|
-
} catch (error: any) {
|
|
53
|
-
lastError = error;
|
|
54
|
-
const errorMessage = error.message + (error.stderr || "");
|
|
55
|
-
|
|
56
|
-
// Retry on transient server errors (502, 503, connection refused, unavailable)
|
|
57
|
-
const isTransient =
|
|
58
|
-
errorMessage.includes("502") ||
|
|
59
|
-
errorMessage.includes("503") ||
|
|
60
|
-
errorMessage.includes("ECONNREFUSED") ||
|
|
61
|
-
errorMessage.includes("ETIMEDOUT") ||
|
|
62
|
-
errorMessage.includes("unavailable");
|
|
63
|
-
|
|
64
|
-
if (isTransient && attempt < maxRetries) {
|
|
65
|
-
// Exponential backoff: 1s, 2s, 4s
|
|
66
|
-
const delay = Math.pow(2, attempt - 1) * 1000;
|
|
67
|
-
await wait(delay);
|
|
68
|
-
continue;
|
|
69
|
-
}
|
|
70
|
-
|
|
71
|
-
// Non-transient error or exhausted retries
|
|
72
|
-
throw new Error(
|
|
73
|
-
`pushwork ${args.join(" ")} failed: ${error.message}\nstdout: ${
|
|
74
|
-
error.stdout
|
|
75
|
-
}\nstderr: ${error.stderr}`
|
|
76
|
-
);
|
|
77
|
-
}
|
|
78
|
-
}
|
|
79
|
-
|
|
80
|
-
// Should never reach here, but TypeScript needs this
|
|
81
|
-
throw lastError;
|
|
82
|
-
}
|
|
83
|
-
|
|
84
|
-
/**
|
|
85
|
-
* Helper: Compute hash of all files in a directory (excluding .pushwork)
|
|
86
|
-
*/
|
|
87
|
-
async function hashDirectory(dirPath: string): Promise<string> {
|
|
88
|
-
const files = await getAllFiles(dirPath);
|
|
89
|
-
const hash = crypto.createHash("sha256");
|
|
90
|
-
|
|
91
|
-
// Sort files for consistent hashing
|
|
92
|
-
files.sort();
|
|
93
|
-
|
|
94
|
-
for (const file of files) {
|
|
95
|
-
// Skip .pushwork directory
|
|
96
|
-
if (file.includes(".pushwork")) {
|
|
97
|
-
continue;
|
|
98
|
-
}
|
|
99
|
-
|
|
100
|
-
const fullPath = path.join(dirPath, file);
|
|
101
|
-
const content = await fs.readFile(fullPath);
|
|
102
|
-
|
|
103
|
-
// Include relative path in hash to catch renames/moves
|
|
104
|
-
hash.update(file);
|
|
105
|
-
hash.update(content);
|
|
106
|
-
}
|
|
107
|
-
|
|
108
|
-
return hash.digest("hex");
|
|
109
|
-
}
|
|
110
|
-
|
|
111
|
-
/**
|
|
112
|
-
* Helper: Recursively get all files in a directory
|
|
113
|
-
*/
|
|
114
|
-
async function getAllFiles(
|
|
115
|
-
dirPath: string,
|
|
116
|
-
basePath: string = dirPath
|
|
117
|
-
): Promise<string[]> {
|
|
118
|
-
const entries = await fs.readdir(dirPath, { withFileTypes: true });
|
|
119
|
-
const files: string[] = [];
|
|
120
|
-
|
|
121
|
-
for (const entry of entries) {
|
|
122
|
-
const fullPath = path.join(dirPath, entry.name);
|
|
123
|
-
const relativePath = path.relative(basePath, fullPath);
|
|
124
|
-
|
|
125
|
-
if (entry.isDirectory()) {
|
|
126
|
-
// Skip .pushwork directory
|
|
127
|
-
if (entry.name === ".pushwork") {
|
|
128
|
-
continue;
|
|
129
|
-
}
|
|
130
|
-
const subFiles = await getAllFiles(fullPath, basePath);
|
|
131
|
-
files.push(...subFiles);
|
|
132
|
-
} else if (entry.isFile()) {
|
|
133
|
-
files.push(relativePath);
|
|
134
|
-
}
|
|
135
|
-
}
|
|
136
|
-
|
|
137
|
-
return files;
|
|
138
|
-
}
|
|
139
|
-
|
|
140
|
-
describe("Basic Setup and Clone", () => {
|
|
141
|
-
it("should initialize a repo with a single file and clone it successfully", async () => {
|
|
142
|
-
// Create two directories for testing
|
|
143
|
-
const repoA = path.join(tmpDir, "repo-a");
|
|
144
|
-
const repoB = path.join(tmpDir, "repo-b");
|
|
145
|
-
await fs.mkdir(repoA);
|
|
146
|
-
await fs.mkdir(repoB);
|
|
147
|
-
|
|
148
|
-
// Step 1: Create a file in repo A
|
|
149
|
-
const testFile = path.join(repoA, "test.txt");
|
|
150
|
-
await fs.writeFile(testFile, "Hello, Pushwork!");
|
|
151
|
-
|
|
152
|
-
// Step 2: Initialize repo A
|
|
153
|
-
await pushwork(["init", "."], repoA);
|
|
154
|
-
|
|
155
|
-
// Wait a moment for initialization to complete
|
|
156
|
-
await wait(1000);
|
|
157
|
-
|
|
158
|
-
// Step 3: Get the root URL from repo A
|
|
159
|
-
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
160
|
-
const cleanRootUrl = rootUrl.trim();
|
|
161
|
-
|
|
162
|
-
expect(cleanRootUrl).toMatch(/^automerge:/);
|
|
163
|
-
|
|
164
|
-
// Step 4: Clone repo A to repo B
|
|
165
|
-
await pushwork(["clone", cleanRootUrl, repoB], tmpDir);
|
|
166
|
-
|
|
167
|
-
// Wait a moment for clone to complete
|
|
168
|
-
await wait(1000);
|
|
169
|
-
|
|
170
|
-
// Step 5: Verify both repos have the same content
|
|
171
|
-
const hashA = await hashDirectory(repoA);
|
|
172
|
-
const hashB = await hashDirectory(repoB);
|
|
173
|
-
|
|
174
|
-
expect(hashA).toBe(hashB);
|
|
175
|
-
|
|
176
|
-
// Step 6: Verify the file exists in both repos
|
|
177
|
-
const fileAExists = await pathExists(path.join(repoA, "test.txt"));
|
|
178
|
-
const fileBExists = await pathExists(path.join(repoB, "test.txt"));
|
|
179
|
-
|
|
180
|
-
expect(fileAExists).toBe(true);
|
|
181
|
-
expect(fileBExists).toBe(true);
|
|
182
|
-
|
|
183
|
-
// Step 7: Verify the content is the same
|
|
184
|
-
const contentA = await fs.readFile(path.join(repoA, "test.txt"), "utf-8");
|
|
185
|
-
const contentB = await fs.readFile(path.join(repoB, "test.txt"), "utf-8");
|
|
186
|
-
|
|
187
|
-
expect(contentA).toBe("Hello, Pushwork!");
|
|
188
|
-
expect(contentB).toBe("Hello, Pushwork!");
|
|
189
|
-
expect(contentA).toBe(contentB);
|
|
190
|
-
}, 30000); // 30 second timeout for this test
|
|
191
|
-
});
|
|
192
|
-
|
|
193
|
-
describe("Manual Fuzzing Tests", () => {
|
|
194
|
-
it.concurrent(
|
|
195
|
-
"should handle a simple edit on one side",
|
|
196
|
-
async () => {
|
|
197
|
-
const tmpObj = tmp.dirSync({ unsafeCleanup: true });
|
|
198
|
-
const testRoot = path.join(
|
|
199
|
-
tmpObj.name,
|
|
200
|
-
`test-manual-a-${Date.now()}-${Math.random()}`
|
|
201
|
-
);
|
|
202
|
-
await fs.mkdir(testRoot, { recursive: true });
|
|
203
|
-
const repoA = path.join(testRoot, "manual-a");
|
|
204
|
-
const repoB = path.join(testRoot, "manual-b");
|
|
205
|
-
await fs.mkdir(repoA);
|
|
206
|
-
await fs.mkdir(repoB);
|
|
207
|
-
|
|
208
|
-
// Initialize repo A with a file
|
|
209
|
-
await fs.writeFile(path.join(repoA, "test.txt"), "initial content");
|
|
210
|
-
await pushwork(["init", "."], repoA);
|
|
211
|
-
await wait(500);
|
|
212
|
-
|
|
213
|
-
// Clone to B
|
|
214
|
-
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
215
|
-
await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
|
|
216
|
-
await wait(500);
|
|
217
|
-
|
|
218
|
-
// Edit file on A
|
|
219
|
-
await fs.writeFile(path.join(repoA, "test.txt"), "modified content");
|
|
220
|
-
|
|
221
|
-
// Sync A
|
|
222
|
-
await pushwork(["sync", "--gentle"], repoA);
|
|
223
|
-
await wait(1000);
|
|
224
|
-
|
|
225
|
-
// Sync B to pull changes
|
|
226
|
-
await pushwork(["sync", "--gentle"], repoB);
|
|
227
|
-
await wait(1000);
|
|
228
|
-
|
|
229
|
-
// Verify they match
|
|
230
|
-
const contentA = await fs.readFile(
|
|
231
|
-
path.join(repoA, "test.txt"),
|
|
232
|
-
"utf-8"
|
|
233
|
-
);
|
|
234
|
-
const contentB = await fs.readFile(
|
|
235
|
-
path.join(repoB, "test.txt"),
|
|
236
|
-
"utf-8"
|
|
237
|
-
);
|
|
238
|
-
|
|
239
|
-
expect(contentA).toBe("modified content");
|
|
240
|
-
expect(contentB).toBe("modified content");
|
|
241
|
-
|
|
242
|
-
// Cleanup
|
|
243
|
-
tmpObj.removeCallback();
|
|
244
|
-
},
|
|
245
|
-
30000
|
|
246
|
-
);
|
|
247
|
-
|
|
248
|
-
it.concurrent(
|
|
249
|
-
"should handle edit + rename on one side",
|
|
250
|
-
async () => {
|
|
251
|
-
const tmpObj = tmp.dirSync({ unsafeCleanup: true });
|
|
252
|
-
const testRoot = path.join(
|
|
253
|
-
tmpObj.name,
|
|
254
|
-
`test-rename-${Date.now()}-${Math.random()}`
|
|
255
|
-
);
|
|
256
|
-
await fs.mkdir(testRoot, { recursive: true });
|
|
257
|
-
const repoA = path.join(testRoot, "rename-a");
|
|
258
|
-
const repoB = path.join(testRoot, "rename-b");
|
|
259
|
-
await fs.mkdir(repoA);
|
|
260
|
-
await fs.mkdir(repoB);
|
|
261
|
-
|
|
262
|
-
// Initialize repo A with a file
|
|
263
|
-
await fs.writeFile(
|
|
264
|
-
path.join(repoA, "original.txt"),
|
|
265
|
-
"original content"
|
|
266
|
-
);
|
|
267
|
-
await pushwork(["init", "."], repoA);
|
|
268
|
-
await wait(500);
|
|
269
|
-
|
|
270
|
-
// Clone to B
|
|
271
|
-
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
272
|
-
await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
|
|
273
|
-
await wait(500);
|
|
274
|
-
|
|
275
|
-
// Edit AND rename file on A (the suspicious operation!)
|
|
276
|
-
await fs.writeFile(path.join(repoA, "original.txt"), "edited content");
|
|
277
|
-
await fs.rename(
|
|
278
|
-
path.join(repoA, "original.txt"),
|
|
279
|
-
path.join(repoA, "renamed.txt")
|
|
280
|
-
);
|
|
281
|
-
|
|
282
|
-
// Sync both sides
|
|
283
|
-
await pushwork(["sync", "--gentle"], repoA);
|
|
284
|
-
await wait(1000);
|
|
285
|
-
await pushwork(["sync", "--gentle"], repoB);
|
|
286
|
-
await wait(1000);
|
|
287
|
-
|
|
288
|
-
// One more round for convergence
|
|
289
|
-
await pushwork(["sync", "--gentle"], repoA);
|
|
290
|
-
await wait(1000);
|
|
291
|
-
await pushwork(["sync", "--gentle"], repoB);
|
|
292
|
-
await wait(1000);
|
|
293
|
-
|
|
294
|
-
// Verify: original.txt should not exist, renamed.txt should exist with edited content
|
|
295
|
-
const originalExistsA = await pathExists(
|
|
296
|
-
path.join(repoA, "original.txt")
|
|
297
|
-
);
|
|
298
|
-
const originalExistsB = await pathExists(
|
|
299
|
-
path.join(repoB, "original.txt")
|
|
300
|
-
);
|
|
301
|
-
const renamedExistsA = await pathExists(
|
|
302
|
-
path.join(repoA, "renamed.txt")
|
|
303
|
-
);
|
|
304
|
-
const renamedExistsB = await pathExists(
|
|
305
|
-
path.join(repoB, "renamed.txt")
|
|
306
|
-
);
|
|
307
|
-
|
|
308
|
-
expect(originalExistsA).toBe(false);
|
|
309
|
-
expect(originalExistsB).toBe(false);
|
|
310
|
-
expect(renamedExistsA).toBe(true);
|
|
311
|
-
expect(renamedExistsB).toBe(true);
|
|
312
|
-
|
|
313
|
-
const contentA = await fs.readFile(
|
|
314
|
-
path.join(repoA, "renamed.txt"),
|
|
315
|
-
"utf-8"
|
|
316
|
-
);
|
|
317
|
-
const contentB = await fs.readFile(
|
|
318
|
-
path.join(repoB, "renamed.txt"),
|
|
319
|
-
"utf-8"
|
|
320
|
-
);
|
|
321
|
-
|
|
322
|
-
expect(contentA).toBe("edited content");
|
|
323
|
-
expect(contentB).toBe("edited content");
|
|
324
|
-
|
|
325
|
-
// Cleanup
|
|
326
|
-
tmpObj.removeCallback();
|
|
327
|
-
},
|
|
328
|
-
120000
|
|
329
|
-
); // 2 minute timeout
|
|
330
|
-
|
|
331
|
-
it.concurrent(
|
|
332
|
-
"should handle simplest case: clone then add file",
|
|
333
|
-
async () => {
|
|
334
|
-
const tmpObj = tmp.dirSync({ unsafeCleanup: true });
|
|
335
|
-
const testRoot = path.join(
|
|
336
|
-
tmpObj.name,
|
|
337
|
-
`test-simple-${Date.now()}-${Math.random()}`
|
|
338
|
-
);
|
|
339
|
-
await fs.mkdir(testRoot, { recursive: true });
|
|
340
|
-
const repoA = path.join(testRoot, "simple-a");
|
|
341
|
-
const repoB = path.join(testRoot, "simple-b");
|
|
342
|
-
await fs.mkdir(repoA);
|
|
343
|
-
await fs.mkdir(repoB);
|
|
344
|
-
|
|
345
|
-
// Initialize repo A
|
|
346
|
-
await fs.writeFile(path.join(repoA, "initial.txt"), "initial");
|
|
347
|
-
await pushwork(["init", "."], repoA);
|
|
348
|
-
await wait(1000);
|
|
349
|
-
|
|
350
|
-
// Clone to B
|
|
351
|
-
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
352
|
-
await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
|
|
353
|
-
await wait(1000);
|
|
354
|
-
|
|
355
|
-
// B: Create a new file (nothing else happens)
|
|
356
|
-
await fs.writeFile(path.join(repoB, "aaa.txt"), "");
|
|
357
|
-
|
|
358
|
-
// B syncs
|
|
359
|
-
await pushwork(["sync", "--gentle"], repoB);
|
|
360
|
-
await wait(1000);
|
|
361
|
-
|
|
362
|
-
// A syncs
|
|
363
|
-
await pushwork(["sync", "--gentle"], repoA);
|
|
364
|
-
await wait(1000);
|
|
365
|
-
|
|
366
|
-
// Check convergence
|
|
367
|
-
const filesA = await fs.readdir(repoA);
|
|
368
|
-
const filesB = await fs.readdir(repoB);
|
|
369
|
-
const filteredFilesA = filesA.filter((f) => !f.startsWith("."));
|
|
370
|
-
const filteredFilesB = filesB.filter((f) => !f.startsWith("."));
|
|
371
|
-
expect(filteredFilesA).toEqual(filteredFilesB);
|
|
372
|
-
|
|
373
|
-
expect(await pathExists(path.join(repoA, "aaa.txt"))).toBe(true);
|
|
374
|
-
expect(await pathExists(path.join(repoB, "aaa.txt"))).toBe(true);
|
|
375
|
-
|
|
376
|
-
// Cleanup
|
|
377
|
-
tmpObj.removeCallback();
|
|
378
|
-
},
|
|
379
|
-
20000
|
|
380
|
-
);
|
|
381
|
-
|
|
382
|
-
it.concurrent(
|
|
383
|
-
"should handle minimal shrunk case: editAndRename non-existent + add same file",
|
|
384
|
-
async () => {
|
|
385
|
-
const tmpObj = tmp.dirSync({ unsafeCleanup: true });
|
|
386
|
-
const testRoot = path.join(
|
|
387
|
-
tmpObj.name,
|
|
388
|
-
`test-shrunk-${Date.now()}-${Math.random()}`
|
|
389
|
-
);
|
|
390
|
-
await fs.mkdir(testRoot, { recursive: true });
|
|
391
|
-
const repoA = path.join(testRoot, "shrunk-a");
|
|
392
|
-
const repoB = path.join(testRoot, "shrunk-b");
|
|
393
|
-
await fs.mkdir(repoA);
|
|
394
|
-
await fs.mkdir(repoB);
|
|
395
|
-
|
|
396
|
-
// Initialize repo A
|
|
397
|
-
await fs.writeFile(path.join(repoA, "initial.txt"), "initial");
|
|
398
|
-
await pushwork(["init", "."], repoA);
|
|
399
|
-
await wait(1000); // Match manual test timing
|
|
400
|
-
|
|
401
|
-
// Clone to B
|
|
402
|
-
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
403
|
-
await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
|
|
404
|
-
await wait(1000); // Match manual test timing
|
|
405
|
-
|
|
406
|
-
// A: Try to editAndRename a non-existent file (this is from the shrunk test case)
|
|
407
|
-
// This operation should be a no-op since aaa.txt doesn't exist
|
|
408
|
-
const fromPath = path.join(repoA, "aaa.txt");
|
|
409
|
-
const toPath = path.join(repoA, "aa/aa/aaa.txt");
|
|
410
|
-
if ((await pathExists(fromPath)) && !(await pathExists(toPath))) {
|
|
411
|
-
await fs.writeFile(fromPath, "");
|
|
412
|
-
await fs.mkdir(path.dirname(toPath), { recursive: true });
|
|
413
|
-
await fs.rename(fromPath, toPath);
|
|
414
|
-
}
|
|
415
|
-
|
|
416
|
-
// B: Create the same file that A tried to operate on
|
|
417
|
-
await fs.writeFile(path.join(repoB, "aaa.txt"), "");
|
|
418
|
-
|
|
419
|
-
// Sync multiple rounds (use 1s waits for reliable network propagation)
|
|
420
|
-
// Pattern: A, B, A (like manual test that worked)
|
|
421
|
-
await pushwork(["sync", "--gentle"], repoA);
|
|
422
|
-
await wait(1000);
|
|
423
|
-
|
|
424
|
-
// Check what B sees before sync
|
|
425
|
-
await pushwork(["diff", "--name-only"], repoB);
|
|
426
|
-
|
|
427
|
-
await pushwork(["sync", "--gentle"], repoB);
|
|
428
|
-
await wait(1000);
|
|
429
|
-
|
|
430
|
-
await pushwork(["sync", "--gentle"], repoA);
|
|
431
|
-
await wait(1000);
|
|
432
|
-
|
|
433
|
-
// Debug: Check what files exist
|
|
434
|
-
const filesA = await fs.readdir(repoA);
|
|
435
|
-
const filesB = await fs.readdir(repoB);
|
|
436
|
-
const filteredFilesA = filesA.filter((f) => !f.startsWith("."));
|
|
437
|
-
const filteredFilesB = filesB.filter((f) => !f.startsWith("."));
|
|
438
|
-
expect(filteredFilesA).toEqual(filteredFilesB);
|
|
439
|
-
|
|
440
|
-
// Verify convergence
|
|
441
|
-
const hashA = await hashDirectory(repoA);
|
|
442
|
-
const hashB = await hashDirectory(repoB);
|
|
443
|
-
|
|
444
|
-
expect(hashA).toBe(hashB);
|
|
445
|
-
|
|
446
|
-
// Both should have aaa.txt
|
|
447
|
-
expect(await pathExists(path.join(repoA, "aaa.txt"))).toBe(true);
|
|
448
|
-
expect(await pathExists(path.join(repoB, "aaa.txt"))).toBe(true);
|
|
449
|
-
|
|
450
|
-
// Cleanup
|
|
451
|
-
tmpObj.removeCallback();
|
|
452
|
-
},
|
|
453
|
-
20000
|
|
454
|
-
);
|
|
455
|
-
|
|
456
|
-
it.concurrent(
|
|
457
|
-
"should handle files in subdirectories and moves between directories",
|
|
458
|
-
async () => {
|
|
459
|
-
const tmpObj = tmp.dirSync({ unsafeCleanup: true });
|
|
460
|
-
const testRoot = path.join(
|
|
461
|
-
tmpObj.name,
|
|
462
|
-
`test-subdir-${Date.now()}-${Math.random()}`
|
|
463
|
-
);
|
|
464
|
-
await fs.mkdir(testRoot, { recursive: true });
|
|
465
|
-
const repoA = path.join(testRoot, "subdir-a");
|
|
466
|
-
const repoB = path.join(testRoot, "subdir-b");
|
|
467
|
-
await fs.mkdir(repoA);
|
|
468
|
-
await fs.mkdir(repoB);
|
|
469
|
-
|
|
470
|
-
// Initialize repo A with a file in a subdirectory
|
|
471
|
-
await fs.mkdir(path.join(repoA, "dir1"), { recursive: true });
|
|
472
|
-
await fs.writeFile(path.join(repoA, "dir1", "file1.txt"), "in dir1");
|
|
473
|
-
|
|
474
|
-
await pushwork(["init", "."], repoA);
|
|
475
|
-
await wait(500);
|
|
476
|
-
|
|
477
|
-
// Clone to B
|
|
478
|
-
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
479
|
-
await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
|
|
480
|
-
await wait(500);
|
|
481
|
-
|
|
482
|
-
// Verify B got the subdirectory and file
|
|
483
|
-
expect(await pathExists(path.join(repoB, "dir1", "file1.txt"))).toBe(
|
|
484
|
-
true
|
|
485
|
-
);
|
|
486
|
-
const initialContentB = await fs.readFile(
|
|
487
|
-
path.join(repoB, "dir1", "file1.txt"),
|
|
488
|
-
"utf-8"
|
|
489
|
-
);
|
|
490
|
-
expect(initialContentB).toBe("in dir1");
|
|
491
|
-
|
|
492
|
-
// On A: Create another file in a different subdirectory
|
|
493
|
-
await fs.mkdir(path.join(repoA, "dir2"), { recursive: true });
|
|
494
|
-
await fs.writeFile(path.join(repoA, "dir2", "file2.txt"), "in dir2");
|
|
495
|
-
|
|
496
|
-
// Sync both sides
|
|
497
|
-
await pushwork(["sync", "--gentle"], repoA);
|
|
498
|
-
await wait(1000);
|
|
499
|
-
await pushwork(["sync", "--gentle"], repoB);
|
|
500
|
-
await wait(1000);
|
|
501
|
-
|
|
502
|
-
// Verify B got the new subdirectory and file
|
|
503
|
-
expect(await pathExists(path.join(repoB, "dir2", "file2.txt"))).toBe(
|
|
504
|
-
true
|
|
505
|
-
);
|
|
506
|
-
const file2ContentB = await fs.readFile(
|
|
507
|
-
path.join(repoB, "dir2", "file2.txt"),
|
|
508
|
-
"utf-8"
|
|
509
|
-
);
|
|
510
|
-
expect(file2ContentB).toBe("in dir2");
|
|
511
|
-
|
|
512
|
-
// Cleanup
|
|
513
|
-
tmpObj.removeCallback();
|
|
514
|
-
},
|
|
515
|
-
30000
|
|
516
|
-
);
|
|
517
|
-
});
|
|
518
|
-
|
|
519
|
-
describe("Property-Based Fuzzing with fast-check", () => {
  // Define operation types
  // Discriminated union of the filesystem mutations the fuzzer can apply
  // to a working tree; `type` is the discriminant tag.
  type FileOperation =
    | { type: "add"; path: string; content: string }
    | { type: "edit"; path: string; content: string }
    | { type: "delete"; path: string }
    | { type: "rename"; fromPath: string; toPath: string }
    | {
        type: "editAndRename";
        fromPath: string;
        toPath: string;
        content: string;
      };

  /**
   * Arbitrary: Generate a directory name (2-6 lowercase ASCII letters).
   */
  const dirNameArbitrary = fc.stringMatching(/^[a-z]{2,6}$/);

  /**
   * Arbitrary: Generate a simple filename (basename + extension)
   */
  const baseNameArbitrary = fc
    .tuple(
      fc.stringMatching(/^[a-z]{3,8}$/), // basename
      fc.constantFrom("txt", "md", "json", "ts") // extension
    )
    .map(([name, ext]) => `${name}.${ext}`);

  /**
   * Arbitrary: Generate a file path (can be in root or in subdirectories)
   * Examples: "file.txt", "dir1/file.txt", "dir1/dir2/file.txt"
   *
   * NOTE(review): fc.oneof selects each alternative with equal weight by
   * default, so the percentage labels below describe intent, not actual
   * behavior — confirm, or supply weighted arbitraries if the skew matters.
   */
  const filePathArbitrary = fc.oneof(
    // File in root directory (60% probability)
    baseNameArbitrary,
    // File in single subdirectory (30% probability)
    fc
      .tuple(dirNameArbitrary, baseNameArbitrary)
      .map(([dir, file]) => `${dir}/${file}`),
    // File in nested subdirectory (10% probability)
    fc
      .tuple(dirNameArbitrary, dirNameArbitrary, baseNameArbitrary)
      .map(([dir1, dir2, file]) => `${dir1}/${dir2}/${file}`)
  );

  /**
   * Arbitrary: Generate file content (small strings for now)
   */
  const fileContentArbitrary = fc.string({ minLength: 0, maxLength: 100 });

  /**
   * Arbitrary: Generate a file operation.
   * Uniform choice across the five FileOperation variants; record shapes
   * mirror the FileOperation union above.
   */
  const fileOperationArbitrary: fc.Arbitrary<FileOperation> = fc.oneof(
    // Add file (can be in subdirectories)
    fc.record({
      type: fc.constant("add" as const),
      path: filePathArbitrary,
      content: fileContentArbitrary,
    }),
    // Edit file
    fc.record({
      type: fc.constant("edit" as const),
      path: filePathArbitrary,
      content: fileContentArbitrary,
    }),
    // Delete file
    fc.record({
      type: fc.constant("delete" as const),
      path: filePathArbitrary,
    }),
    // Rename file (can move between directories)
    fc.record({
      type: fc.constant("rename" as const),
      fromPath: filePathArbitrary,
      toPath: filePathArbitrary,
    }),
    // Edit and rename (can move between directories)
    fc.record({
      type: fc.constant("editAndRename" as const),
      fromPath: filePathArbitrary,
      toPath: filePathArbitrary,
      content: fileContentArbitrary,
    })
  );
|
|
605
|
-
|
|
606
|
-
/**
|
|
607
|
-
* Helper: Ensure parent directory exists
|
|
608
|
-
*/
|
|
609
|
-
async function ensureParentDir(filePath: string): Promise<void> {
|
|
610
|
-
const dir = path.dirname(filePath);
|
|
611
|
-
await fs.mkdir(dir, { recursive: true });
|
|
612
|
-
}
|
|
613
|
-
|
|
614
|
-
/**
|
|
615
|
-
* Helper: Apply a file operation to a directory
|
|
616
|
-
*/
|
|
617
|
-
async function applyOperation(
|
|
618
|
-
repoPath: string,
|
|
619
|
-
op: FileOperation
|
|
620
|
-
): Promise<void> {
|
|
621
|
-
try {
|
|
622
|
-
switch (op.type) {
|
|
623
|
-
case "add": {
|
|
624
|
-
const filePath = path.join(repoPath, op.path);
|
|
625
|
-
await ensureParentDir(filePath);
|
|
626
|
-
await fs.writeFile(filePath, op.content);
|
|
627
|
-
break;
|
|
628
|
-
}
|
|
629
|
-
case "edit": {
|
|
630
|
-
const filePath = path.join(repoPath, op.path);
|
|
631
|
-
// Only edit if file exists, otherwise create it
|
|
632
|
-
if (await pathExists(filePath)) {
|
|
633
|
-
await fs.writeFile(filePath, op.content);
|
|
634
|
-
} else {
|
|
635
|
-
await ensureParentDir(filePath);
|
|
636
|
-
await fs.writeFile(filePath, op.content);
|
|
637
|
-
}
|
|
638
|
-
break;
|
|
639
|
-
}
|
|
640
|
-
case "delete": {
|
|
641
|
-
const filePath = path.join(repoPath, op.path);
|
|
642
|
-
// Only delete if file exists
|
|
643
|
-
if (await pathExists(filePath)) {
|
|
644
|
-
await fs.unlink(filePath);
|
|
645
|
-
}
|
|
646
|
-
break;
|
|
647
|
-
}
|
|
648
|
-
case "rename": {
|
|
649
|
-
const fromPath = path.join(repoPath, op.fromPath);
|
|
650
|
-
const toPath = path.join(repoPath, op.toPath);
|
|
651
|
-
// Only rename if source exists and target doesn't
|
|
652
|
-
if ((await pathExists(fromPath)) && !(await pathExists(toPath))) {
|
|
653
|
-
await ensureParentDir(toPath);
|
|
654
|
-
await fs.rename(fromPath, toPath);
|
|
655
|
-
}
|
|
656
|
-
break;
|
|
657
|
-
}
|
|
658
|
-
case "editAndRename": {
|
|
659
|
-
const fromPath = path.join(repoPath, op.fromPath);
|
|
660
|
-
const toPath = path.join(repoPath, op.toPath);
|
|
661
|
-
// Edit then rename: only if source exists and target doesn't
|
|
662
|
-
if ((await pathExists(fromPath)) && !(await pathExists(toPath))) {
|
|
663
|
-
await fs.writeFile(fromPath, op.content);
|
|
664
|
-
await ensureParentDir(toPath);
|
|
665
|
-
await fs.rename(fromPath, toPath);
|
|
666
|
-
}
|
|
667
|
-
break;
|
|
668
|
-
}
|
|
669
|
-
}
|
|
670
|
-
} catch (error) {
|
|
671
|
-
// Ignore operation errors (e.g., deleting non-existent file)
|
|
672
|
-
// This is expected in fuzzing
|
|
673
|
-
}
|
|
674
|
-
}
|
|
675
|
-
|
|
676
|
-
/**
|
|
677
|
-
* Helper: Apply multiple operations
|
|
678
|
-
*/
|
|
679
|
-
async function applyOperations(
|
|
680
|
-
repoPath: string,
|
|
681
|
-
operations: FileOperation[]
|
|
682
|
-
): Promise<void> {
|
|
683
|
-
for (const op of operations) {
|
|
684
|
-
await applyOperation(repoPath, op);
|
|
685
|
-
}
|
|
686
|
-
}
|
|
687
|
-
|
|
688
|
-
  it("should converge after random operations on both sides", async () => {
    await fc.assert(
      fc.asyncProperty(
        fc.array(fileOperationArbitrary, { minLength: 1, maxLength: 10 }), // Operations on repo A (1-10 ops)
        fc.array(fileOperationArbitrary, { minLength: 1, maxLength: 10 }), // Operations on repo B (1-10 ops)
        async (opsA, opsB) => {
          // Create two directories for testing.
          // Timestamp + Math.random() keeps successive property runs from
          // colliding inside the shared tmpDir.
          const testRoot = path.join(
            tmpDir,
            `test-${Date.now()}-${Math.random()}`
          );
          await fs.mkdir(testRoot, { recursive: true });

          const repoA = path.join(testRoot, "repo-a");
          const repoB = path.join(testRoot, "repo-b");
          await fs.mkdir(repoA);
          await fs.mkdir(repoB);

          try {
            // Initialize repo A with an initial file
            await fs.writeFile(path.join(repoA, "initial.txt"), "initial");
            await pushwork(["init", "."], repoA);
            // Give sync server time to store and propagate the document
            await wait(2000);

            // Get root URL and clone to B
            const { stdout: rootUrl } = await pushwork(["url"], repoA);
            const cleanRootUrl = rootUrl.trim();
            // Clone with extra retries - document availability can be delayed
            // (the trailing 5 is presumably a retry count — confirm against
            // the pushwork test helper's signature)
            await pushwork(["clone", cleanRootUrl, repoB], testRoot, 5);
            await wait(1000);

            // Verify initial state matches before fuzzing starts; fail fast
            // with a detailed message since convergence checks later would
            // otherwise mask a broken clone.
            const filesA = await getAllFiles(repoA);
            const filesB = await getAllFiles(repoB);
            const hashBeforeOps = await hashDirectory(repoA);
            const hashB1 = await hashDirectory(repoB);
            if (hashBeforeOps !== hashB1) {
              throw new Error(
                `Initial hash mismatch!\n` +
                  `  repoA (${repoA}):\n  files: ${JSON.stringify(filesA)}\n  hash: ${hashBeforeOps}\n` +
                  `  repoB (${repoB}):\n  files: ${JSON.stringify(filesB)}\n  hash: ${hashB1}`
              );
            }

            // Apply operations to both sides
            await applyOperations(repoA, opsA);

            await applyOperations(repoB, opsB);

            // Multiple sync rounds for convergence
            // Need enough time for network propagation between CLI invocations
            // Round 1: A pushes changes
            await pushwork(["sync", "--gentle"], repoA);
            await wait(1000);

            // Round 2: B pushes changes and pulls A's changes
            await pushwork(["sync", "--gentle"], repoB);
            await wait(1000);

            // Round 3: A pulls B's changes
            await pushwork(["sync", "--gentle"], repoA);
            await wait(1000);

            // Round 4: B confirms convergence
            await pushwork(["sync", "--gentle"], repoB);
            await wait(1000);

            // Round 5: Final convergence check
            await pushwork(["sync", "--gentle"], repoA);
            await wait(1000);

            // Round 6: Extra convergence check (for aggressive fuzzing)
            await pushwork(["sync", "--gentle"], repoB);
            await wait(5000);

            // Verify final state matches

            const hashAfterA = await hashDirectory(repoA);
            const hashAfterB = await hashDirectory(repoB);

            expect(hashAfterA).toBe(hashAfterB);

            // Verify diff shows no changes
            const { stdout: diffOutput } = await pushwork(
              ["diff", "--name-only"],
              repoA
            );
            // Filter out status messages, only check for actual file differences.
            // NOTE(review): this filter is heuristic — it assumes status lines
            // always contain "✓", "Local-only", or "Root URL"; confirm against
            // the CLI's diff output format.
            const diffLines = diffOutput
              .split("\n")
              .filter(
                (line) =>
                  line.trim() &&
                  !line.includes("✓") &&
                  !line.includes("Local-only") &&
                  !line.includes("Root URL")
              );
            expect(diffLines.length).toBe(0);

            // Cleanup
            await fs.rm(testRoot, { recursive: true, force: true });
          } catch (error) {
            // Cleanup on error, then rethrow so fast-check records the failure
            await fs
              .rm(testRoot, { recursive: true, force: true })
              .catch(() => {});
            throw error;
          }
        }
      ),
      {
        numRuns: 5, // INTENSE MODE (was 20, then cranked to 50)
        // NOTE(review): the comment above is stale — numRuns is now 5, lower
        // than the historical values it cites; presumably reduced for CI time.
        timeout: 120000, // 2 minute timeout per run
        verbose: true, // Verbose output
        endOnFailure: true, // Stop on first failure to debug
      }
    );
  }, 600000); // 10 minute timeout for the whole test
|
|
807
|
-
});
|
|
808
|
-
});
|
|
809
|
-
|
|
810
|
-
// Helper function
|
|
811
|
-
async function pathExists(filePath: string): Promise<boolean> {
|
|
812
|
-
try {
|
|
813
|
-
await fs.access(filePath);
|
|
814
|
-
return true;
|
|
815
|
-
} catch {
|
|
816
|
-
return false;
|
|
817
|
-
}
|
|
818
|
-
}
|