pushwork 1.0.0 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +23 -21
- package/dist/cli/commands.d.ts +6 -0
- package/dist/cli/commands.d.ts.map +1 -1
- package/dist/cli/commands.js +114 -4
- package/dist/cli/commands.js.map +1 -1
- package/dist/cli.js +27 -0
- package/dist/cli.js.map +1 -1
- package/dist/core/change-detection.d.ts.map +1 -1
- package/dist/core/change-detection.js +27 -9
- package/dist/core/change-detection.js.map +1 -1
- package/dist/core/move-detection.d.ts.map +1 -1
- package/dist/core/move-detection.js +8 -2
- package/dist/core/move-detection.js.map +1 -1
- package/dist/core/sync-engine.d.ts +4 -0
- package/dist/core/sync-engine.d.ts.map +1 -1
- package/dist/core/sync-engine.js +263 -7
- package/dist/core/sync-engine.js.map +1 -1
- package/dist/types/documents.d.ts +2 -0
- package/dist/types/documents.d.ts.map +1 -1
- package/dist/types/documents.js.map +1 -1
- package/dist/utils/fs.d.ts.map +1 -1
- package/dist/utils/fs.js +7 -1
- package/dist/utils/fs.js.map +1 -1
- package/dist/utils/network-sync.d.ts.map +1 -1
- package/dist/utils/network-sync.js +16 -3
- package/dist/utils/network-sync.js.map +1 -1
- package/package.json +30 -30
- package/src/cli/commands.ts +162 -8
- package/src/cli.ts +40 -0
- package/src/core/change-detection.ts +25 -12
- package/src/core/move-detection.ts +8 -2
- package/src/core/sync-engine.ts +270 -7
- package/src/types/documents.ts +2 -0
- package/src/utils/fs.ts +7 -3
- package/src/utils/network-sync.ts +19 -3
- package/test/integration/clone-test.sh +0 -0
- package/test/integration/conflict-resolution-test.sh +0 -0
- package/test/integration/debug-both-nested.sh +74 -0
- package/test/integration/debug-concurrent-nested.sh +87 -0
- package/test/integration/debug-nested.sh +73 -0
- package/test/integration/deletion-behavior-test.sh +0 -0
- package/test/integration/deletion-sync-test-simple.sh +0 -0
- package/test/integration/deletion-sync-test.sh +0 -0
- package/test/integration/full-integration-test.sh +0 -0
- package/test/integration/fuzzer.test.ts +865 -0
- package/test/integration/manual-sync-test.sh +84 -0
- package/test/run-tests.sh +0 -0
- package/test/unit/sync-convergence.test.ts +493 -0
- package/tools/browser-sync/README.md +0 -116
- package/tools/browser-sync/package.json +0 -44
- package/tools/browser-sync/patchwork.json +0 -1
- package/tools/browser-sync/pnpm-lock.yaml +0 -4202
- package/tools/browser-sync/src/components/BrowserSyncTool.tsx +0 -599
- package/tools/browser-sync/src/index.ts +0 -20
- package/tools/browser-sync/src/polyfills.ts +0 -31
- package/tools/browser-sync/src/styles.css +0 -290
- package/tools/browser-sync/src/types.ts +0 -27
- package/tools/browser-sync/vite.config.ts +0 -25
|
@@ -0,0 +1,865 @@
|
|
|
1
|
+
import * as fs from "fs/promises";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
import * as tmp from "tmp";
|
|
4
|
+
import { execFile } from "child_process";
|
|
5
|
+
import { promisify } from "util";
|
|
6
|
+
import * as crypto from "crypto";
|
|
7
|
+
import * as fc from "fast-check";
|
|
8
|
+
|
|
9
|
+
// Promise-returning wrapper so CLI invocations below can be awaited.
const execFilePromise = promisify(execFile);

// Path to the pushwork CLI
// NOTE(review): resolved relative to the compiled test file; assumes the
// built CLI lives at dist/cli.js two levels up — confirm if tests move.
const PUSHWORK_CLI = path.join(__dirname, "../../dist/cli.js");
|
|
13
|
+
|
|
14
|
+
describe("Pushwork Fuzzer", () => {
  // Fresh temporary directory per test case; removed in afterEach.
  let tmpDir: string;
  let cleanup: () => void;

  beforeEach(() => {
    // unsafeCleanup lets tmp delete the directory even when it is non-empty.
    const tmpObj = tmp.dirSync({ unsafeCleanup: true });
    tmpDir = tmpObj.name;
    cleanup = tmpObj.removeCallback;
  });

  afterEach(() => {
    cleanup();
  });
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Helper: Execute pushwork CLI command
|
|
30
|
+
*/
|
|
31
|
+
async function pushwork(
|
|
32
|
+
args: string[],
|
|
33
|
+
cwd: string
|
|
34
|
+
): Promise<{ stdout: string; stderr: string }> {
|
|
35
|
+
try {
|
|
36
|
+
const result = await execFilePromise("node", [PUSHWORK_CLI, ...args], {
|
|
37
|
+
cwd,
|
|
38
|
+
env: { ...process.env, FORCE_COLOR: "0" }, // Disable color codes for cleaner output
|
|
39
|
+
});
|
|
40
|
+
return result;
|
|
41
|
+
} catch (error: any) {
|
|
42
|
+
// execFile throws on non-zero exit code, but we still want stdout/stderr
|
|
43
|
+
throw new Error(
|
|
44
|
+
`pushwork ${args.join(" ")} failed: ${error.message}\nstdout: ${
|
|
45
|
+
error.stdout
|
|
46
|
+
}\nstderr: ${error.stderr}`
|
|
47
|
+
);
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* Helper: Compute hash of all files in a directory (excluding .pushwork)
|
|
53
|
+
*/
|
|
54
|
+
async function hashDirectory(dirPath: string): Promise<string> {
|
|
55
|
+
const files = await getAllFiles(dirPath);
|
|
56
|
+
const hash = crypto.createHash("sha256");
|
|
57
|
+
|
|
58
|
+
// Sort files for consistent hashing
|
|
59
|
+
files.sort();
|
|
60
|
+
|
|
61
|
+
for (const file of files) {
|
|
62
|
+
// Skip .pushwork directory
|
|
63
|
+
if (file.includes(".pushwork")) {
|
|
64
|
+
continue;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
const fullPath = path.join(dirPath, file);
|
|
68
|
+
const content = await fs.readFile(fullPath);
|
|
69
|
+
|
|
70
|
+
// Include relative path in hash to catch renames/moves
|
|
71
|
+
hash.update(file);
|
|
72
|
+
hash.update(content);
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
return hash.digest("hex");
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
/**
|
|
79
|
+
* Helper: Recursively get all files in a directory
|
|
80
|
+
*/
|
|
81
|
+
async function getAllFiles(
|
|
82
|
+
dirPath: string,
|
|
83
|
+
basePath: string = dirPath
|
|
84
|
+
): Promise<string[]> {
|
|
85
|
+
const entries = await fs.readdir(dirPath, { withFileTypes: true });
|
|
86
|
+
const files: string[] = [];
|
|
87
|
+
|
|
88
|
+
for (const entry of entries) {
|
|
89
|
+
const fullPath = path.join(dirPath, entry.name);
|
|
90
|
+
const relativePath = path.relative(basePath, fullPath);
|
|
91
|
+
|
|
92
|
+
if (entry.isDirectory()) {
|
|
93
|
+
// Skip .pushwork directory
|
|
94
|
+
if (entry.name === ".pushwork") {
|
|
95
|
+
continue;
|
|
96
|
+
}
|
|
97
|
+
const subFiles = await getAllFiles(fullPath, basePath);
|
|
98
|
+
files.push(...subFiles);
|
|
99
|
+
} else if (entry.isFile()) {
|
|
100
|
+
files.push(relativePath);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
return files;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
/**
|
|
108
|
+
* Helper: Wait for a short time (useful for allowing sync to complete)
|
|
109
|
+
*/
|
|
110
|
+
async function wait(ms: number): Promise<void> {
|
|
111
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
// End-to-end smoke test: init → url → clone, then byte-level comparison.
describe("Basic Setup and Clone", () => {
  it("should initialize a repo with a single file and clone it successfully", async () => {
    // Create two directories for testing
    const repoA = path.join(tmpDir, "repo-a");
    const repoB = path.join(tmpDir, "repo-b");
    await fs.mkdir(repoA);
    await fs.mkdir(repoB);

    console.log(`Test directories created:`);
    console.log(`  Repo A: ${repoA}`);
    console.log(`  Repo B: ${repoB}`);

    // Step 1: Create a file in repo A
    const testFile = path.join(repoA, "test.txt");
    await fs.writeFile(testFile, "Hello, Pushwork!");
    console.log(`Created test file: ${testFile}`);

    // Step 2: Initialize repo A
    console.log(`Initializing repo A...`);
    await pushwork(["init", "."], repoA);
    console.log(`Repo A initialized successfully`);

    // Wait a moment for initialization to complete
    await wait(1000);

    // Step 3: Get the root URL from repo A
    console.log(`Getting root URL from repo A...`);
    const { stdout: rootUrl } = await pushwork(["url"], repoA);
    const cleanRootUrl = rootUrl.trim();
    console.log(`Root URL: ${cleanRootUrl}`);

    expect(cleanRootUrl).toMatch(/^automerge:/);

    // Step 4: Clone repo A to repo B
    console.log(`Cloning repo A to repo B...`);
    await pushwork(["clone", cleanRootUrl, repoB], tmpDir);
    console.log(`Repo B cloned successfully`);

    // Wait a moment for clone to complete
    await wait(1000);

    // Step 5: Verify both repos have the same content
    console.log(`Computing hashes...`);
    const hashA = await hashDirectory(repoA);
    const hashB = await hashDirectory(repoB);

    console.log(`Hash A: ${hashA}`);
    console.log(`Hash B: ${hashB}`);

    expect(hashA).toBe(hashB);

    // Step 6: Verify the file exists in both repos
    // NOTE(review): pathExists is neither imported nor defined in the visible
    // part of this file — presumably declared elsewhere in the module; confirm.
    const fileAExists = await pathExists(path.join(repoA, "test.txt"));
    const fileBExists = await pathExists(path.join(repoB, "test.txt"));

    expect(fileAExists).toBe(true);
    expect(fileBExists).toBe(true);

    // Step 7: Verify the content is the same
    const contentA = await fs.readFile(path.join(repoA, "test.txt"), "utf-8");
    const contentB = await fs.readFile(path.join(repoB, "test.txt"), "utf-8");

    expect(contentA).toBe("Hello, Pushwork!");
    expect(contentB).toBe("Hello, Pushwork!");
    expect(contentA).toBe(contentB);

    console.log(`✅ Test passed! Both repos are identical.`);
  }, 30000); // 30 second timeout for this test
});
|
|
183
|
+
|
|
184
|
+
// Hand-written scenarios reproducing cases originally found by the fuzzer.
describe("Manual Fuzzing Tests", () => {
  // One-sided edit: A modifies a file; after a sync on each side B converges.
  it("should handle a simple edit on one side", async () => {
    const repoA = path.join(tmpDir, "manual-a");
    const repoB = path.join(tmpDir, "manual-b");
    await fs.mkdir(repoA);
    await fs.mkdir(repoB);

    // Initialize repo A with a file
    await fs.writeFile(path.join(repoA, "test.txt"), "initial content");
    await pushwork(["init", "."], repoA);
    await wait(500);

    // Clone to B
    const { stdout: rootUrl } = await pushwork(["url"], repoA);
    await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
    await wait(500);

    // Edit file on A
    await fs.writeFile(path.join(repoA, "test.txt"), "modified content");

    // Sync A
    await pushwork(["sync"], repoA);
    await wait(1000);

    // Sync B to pull changes
    await pushwork(["sync"], repoB);
    await wait(1000);

    // Verify they match
    const contentA = await fs.readFile(path.join(repoA, "test.txt"), "utf-8");
    const contentB = await fs.readFile(path.join(repoB, "test.txt"), "utf-8");

    expect(contentA).toBe("modified content");
    expect(contentB).toBe("modified content");
  }, 30000);
|
|
219
|
+
|
|
220
|
+
  // Combined edit+rename on one side: the renamed file must carry the edited
  // content on both replicas and the old name must disappear everywhere.
  it("should handle edit + rename on one side", async () => {
    const repoA = path.join(tmpDir, "rename-a");
    const repoB = path.join(tmpDir, "rename-b");
    await fs.mkdir(repoA);
    await fs.mkdir(repoB);

    // Initialize repo A with a file
    await fs.writeFile(path.join(repoA, "original.txt"), "original content");
    await pushwork(["init", "."], repoA);
    await wait(500);

    // Clone to B
    const { stdout: rootUrl } = await pushwork(["url"], repoA);
    await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
    await wait(500);

    // Edit AND rename file on A (the suspicious operation!)
    await fs.writeFile(path.join(repoA, "original.txt"), "edited content");
    await fs.rename(
      path.join(repoA, "original.txt"),
      path.join(repoA, "renamed.txt")
    );

    // Sync both sides
    await pushwork(["sync"], repoA);
    await wait(1000);
    await pushwork(["sync"], repoB);
    await wait(1000);

    // One more round for convergence
    await pushwork(["sync"], repoA);
    await wait(1000);
    await pushwork(["sync"], repoB);
    await wait(1000);

    // Verify: original.txt should not exist, renamed.txt should exist with edited content
    const originalExistsA = await pathExists(
      path.join(repoA, "original.txt")
    );
    const originalExistsB = await pathExists(
      path.join(repoB, "original.txt")
    );
    const renamedExistsA = await pathExists(path.join(repoA, "renamed.txt"));
    const renamedExistsB = await pathExists(path.join(repoB, "renamed.txt"));

    expect(originalExistsA).toBe(false);
    expect(originalExistsB).toBe(false);
    expect(renamedExistsA).toBe(true);
    expect(renamedExistsB).toBe(true);

    const contentA = await fs.readFile(
      path.join(repoA, "renamed.txt"),
      "utf-8"
    );
    const contentB = await fs.readFile(
      path.join(repoB, "renamed.txt"),
      "utf-8"
    );

    expect(contentA).toBe("edited content");
    expect(contentB).toBe("edited content");
  }, 120000); // 2 minute timeout
|
|
282
|
+
|
|
283
|
+
  // Minimal one-way propagation: after clone, a file created only on B must
  // appear on A after each side syncs once.
  it("should handle simplest case: clone then add file", async () => {
    const repoA = path.join(tmpDir, "simple-a");
    const repoB = path.join(tmpDir, "simple-b");
    await fs.mkdir(repoA);
    await fs.mkdir(repoB);

    // Initialize repo A
    await fs.writeFile(path.join(repoA, "initial.txt"), "initial");
    await pushwork(["init", "."], repoA);
    await wait(1000);

    // Clone to B
    const { stdout: rootUrl } = await pushwork(["url"], repoA);
    await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
    await wait(1000);

    // B: Create a new file (nothing else happens)
    await fs.writeFile(path.join(repoB, "aaa.txt"), "");
    console.log("Created aaa.txt in B");

    // B syncs
    console.log("B sync...");
    const syncB = await pushwork(["sync"], repoB);
    console.log("B pushed aaa.txt?", syncB.stdout.includes("aaa.txt"));
    console.log("B full output:\n", syncB.stdout);
    await wait(1000);

    // A syncs
    console.log("A sync...");
    const syncA = await pushwork(["sync"], repoA);
    console.log("A pulled aaa.txt?", syncA.stdout.includes("aaa.txt"));
    await wait(1000);

    // Check convergence
    const filesA = await fs.readdir(repoA);
    const filesB = await fs.readdir(repoB);
    console.log(
      "Files in A:",
      filesA.filter((f) => !f.startsWith("."))
    );
    console.log(
      "Files in B:",
      filesB.filter((f) => !f.startsWith("."))
    );

    expect(await pathExists(path.join(repoA, "aaa.txt"))).toBe(true);
    expect(await pathExists(path.join(repoB, "aaa.txt"))).toBe(true);
  }, 30000);
|
|
331
|
+
|
|
332
|
+
  // Replays a fast-check shrunk counterexample: A attempts an editAndRename
  // of a file that doesn't exist (no-op), while B creates a file with the
  // same name; all replicas must still converge. Heavy logging is kept for
  // diagnosing future regressions of this case.
  it("should handle minimal shrunk case: editAndRename non-existent + add same file", async () => {
    const repoA = path.join(tmpDir, "shrunk-a");
    const repoB = path.join(tmpDir, "shrunk-b");
    await fs.mkdir(repoA);
    await fs.mkdir(repoB);

    // Initialize repo A
    await fs.writeFile(path.join(repoA, "initial.txt"), "initial");
    await pushwork(["init", "."], repoA);
    await wait(1000); // Match manual test timing

    // Clone to B
    const { stdout: rootUrl } = await pushwork(["url"], repoA);
    await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
    await wait(1000); // Match manual test timing

    // A: Try to editAndRename a non-existent file (this is from the shrunk test case)
    // This operation should be a no-op since aaa.txt doesn't exist
    const fromPath = path.join(repoA, "aaa.txt");
    const toPath = path.join(repoA, "aa/aa/aaa.txt");
    if ((await pathExists(fromPath)) && !(await pathExists(toPath))) {
      await fs.writeFile(fromPath, "");
      await fs.mkdir(path.dirname(toPath), { recursive: true });
      await fs.rename(fromPath, toPath);
      console.log("Applied editAndRename to A");
    } else {
      console.log("Skipped editAndRename to A (file doesn't exist)");
    }

    // B: Create the same file that A tried to operate on
    await fs.writeFile(path.join(repoB, "aaa.txt"), "");
    console.log("Created aaa.txt in B");

    // Sync multiple rounds (use 1s waits for reliable network propagation)
    // Pattern: A, B, A (like manual test that worked)
    console.log("Round 1: A sync...");
    const sync1 = await pushwork(["sync"], repoA);
    console.log(
      "  A result:",
      sync1.stdout.includes("already in sync") ? "no changes" : "had changes"
    );
    await wait(1000);

    console.log("Round 2: B sync (should push aaa.txt)...");

    // Check what B sees before sync
    const bDiffBefore = await pushwork(["diff", "--name-only"], repoB);
    console.log(
      "  B diff before sync:",
      bDiffBefore.stdout
        .split("\n")
        .filter((l) => !l.includes("✓") && l.trim())
    );

    // Check B's snapshot
    // NOTE(review): peeks at .pushwork/snapshot.json internals — this couples
    // the test to the on-disk snapshot format; confirm it is stable.
    const bSnapshotPath = path.join(repoB, ".pushwork", "snapshot.json");
    if (await pathExists(bSnapshotPath)) {
      const bSnapshot = JSON.parse(await fs.readFile(bSnapshotPath, "utf8"));
      console.log(
        "  B snapshot files:",
        Array.from(Object.keys(bSnapshot.files || {}))
      );
      console.log(
        "  B snapshot has aaa.txt?",
        bSnapshot.files && bSnapshot.files["aaa.txt"] ? "YES" : "NO"
      );
    }

    const sync2 = await pushwork(["sync"], repoB);
    console.log("  B pushed?", sync2.stdout.includes("aaa.txt"));
    console.log(
      "  B result:",
      sync2.stdout.includes("already in sync") ? "no changes" : "had changes"
    );
    console.log("  B full output:\n", sync2.stdout);
    await wait(1000);

    console.log("Round 3: A sync (should pull aaa.txt)...");
    const sync3 = await pushwork(["sync"], repoA);
    console.log("  A pulled?", sync3.stdout.includes("aaa.txt"));
    console.log(
      "  A result:",
      sync3.stdout.includes("already in sync") ? "no changes" : "had changes"
    );
    await wait(1000);

    // Debug: Check what files exist
    const filesA = await fs.readdir(repoA);
    const filesB = await fs.readdir(repoB);
    console.log(
      "Files in A after sync:",
      filesA.filter((f) => !f.startsWith("."))
    );
    console.log(
      "Files in B after sync:",
      filesB.filter((f) => !f.startsWith("."))
    );

    // Check diff
    const { stdout: diffA } = await pushwork(["diff", "--name-only"], repoA);
    const { stdout: diffB } = await pushwork(["diff", "--name-only"], repoB);
    console.log("Diff A:", diffA.trim());
    console.log("Diff B:", diffB.trim());

    // Verify convergence
    const hashA = await hashDirectory(repoA);
    const hashB = await hashDirectory(repoB);

    console.log("Hash A:", hashA);
    console.log("Hash B:", hashB);

    expect(hashA).toBe(hashB);

    // Both should have aaa.txt
    expect(await pathExists(path.join(repoA, "aaa.txt"))).toBe(true);
    expect(await pathExists(path.join(repoB, "aaa.txt"))).toBe(true);
  }, 30000);
|
|
449
|
+
|
|
450
|
+
  // Subdirectory handling: nested files survive clone, and a new directory
  // created after clone propagates on the next sync round.
  it("should handle files in subdirectories and moves between directories", async () => {
    const repoA = path.join(tmpDir, "subdir-a");
    const repoB = path.join(tmpDir, "subdir-b");
    await fs.mkdir(repoA);
    await fs.mkdir(repoB);

    // Initialize repo A with a file in a subdirectory
    await fs.mkdir(path.join(repoA, "dir1"), { recursive: true });
    await fs.writeFile(path.join(repoA, "dir1", "file1.txt"), "in dir1");

    await pushwork(["init", "."], repoA);
    await wait(500);

    // Clone to B
    const { stdout: rootUrl } = await pushwork(["url"], repoA);
    await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
    await wait(500);

    // Verify B got the subdirectory and file
    expect(await pathExists(path.join(repoB, "dir1", "file1.txt"))).toBe(
      true
    );
    const initialContentB = await fs.readFile(
      path.join(repoB, "dir1", "file1.txt"),
      "utf-8"
    );
    expect(initialContentB).toBe("in dir1");

    // On A: Create another file in a different subdirectory
    await fs.mkdir(path.join(repoA, "dir2"), { recursive: true });
    await fs.writeFile(path.join(repoA, "dir2", "file2.txt"), "in dir2");

    // Sync both sides
    await pushwork(["sync"], repoA);
    await wait(1000);
    await pushwork(["sync"], repoB);
    await wait(1000);

    // Verify B got the new subdirectory and file
    expect(await pathExists(path.join(repoB, "dir2", "file2.txt"))).toBe(
      true
    );
    const file2ContentB = await fs.readFile(
      path.join(repoB, "dir2", "file2.txt"),
      "utf-8"
    );
    expect(file2ContentB).toBe("in dir2");
  }, 30000);
});
|
|
499
|
+
|
|
500
|
+
describe("Property-Based Fuzzing with fast-check", () => {
  // Define operation types: the mutation vocabulary the fuzzer applies to a
  // working tree. Discriminated on "type".
  type FileOperation =
    | { type: "add"; path: string; content: string }
    | { type: "edit"; path: string; content: string }
    | { type: "delete"; path: string }
    | { type: "rename"; fromPath: string; toPath: string }
    | {
        type: "editAndRename";
        fromPath: string;
        toPath: string;
        content: string;
      };

  /**
   * Arbitrary: Generate a directory name
   */
  const dirNameArbitrary = fc.stringMatching(/^[a-z]{2,6}$/);

  /**
   * Arbitrary: Generate a simple filename (basename + extension)
   */
  const baseNameArbitrary = fc
    .tuple(
      fc.stringMatching(/^[a-z]{3,8}$/), // basename
      fc.constantFrom("txt", "md", "json", "ts") // extension
    )
    .map(([name, ext]) => `${name}.${ext}`);

  /**
   * Arbitrary: Generate a file path (can be in root or in subdirectories)
   * Examples: "file.txt", "dir1/file.txt", "dir1/dir2/file.txt"
   *
   * NOTE(review): fc.oneof without weights picks each arm with roughly equal
   * probability, so the three cases below are ~1/3 each — not the 60/30/10
   * split the original comments claimed. Use fc.oneof with weighted arms
   * ({ arbitrary, weight }) if a biased distribution is actually wanted.
   */
  const filePathArbitrary = fc.oneof(
    // File in root directory
    baseNameArbitrary,
    // File in single subdirectory
    fc
      .tuple(dirNameArbitrary, baseNameArbitrary)
      .map(([dir, file]) => `${dir}/${file}`),
    // File in nested subdirectory
    fc
      .tuple(dirNameArbitrary, dirNameArbitrary, baseNameArbitrary)
      .map(([dir1, dir2, file]) => `${dir1}/${dir2}/${file}`)
  );

  /**
   * Arbitrary: Generate file content (small strings for now)
   */
  const fileContentArbitrary = fc.string({ minLength: 0, maxLength: 100 });

  /**
   * Arbitrary: Generate a file operation
   * Picks uniformly among the five FileOperation variants.
   */
  const fileOperationArbitrary: fc.Arbitrary<FileOperation> = fc.oneof(
    // Add file (can be in subdirectories)
    fc.record({
      type: fc.constant("add" as const),
      path: filePathArbitrary,
      content: fileContentArbitrary,
    }),
    // Edit file
    fc.record({
      type: fc.constant("edit" as const),
      path: filePathArbitrary,
      content: fileContentArbitrary,
    }),
    // Delete file
    fc.record({
      type: fc.constant("delete" as const),
      path: filePathArbitrary,
    }),
    // Rename file (can move between directories)
    fc.record({
      type: fc.constant("rename" as const),
      fromPath: filePathArbitrary,
      toPath: filePathArbitrary,
    }),
    // Edit and rename (can move between directories)
    fc.record({
      type: fc.constant("editAndRename" as const),
      fromPath: filePathArbitrary,
      toPath: filePathArbitrary,
      content: fileContentArbitrary,
    })
  );
|
|
586
|
+
|
|
587
|
+
/**
|
|
588
|
+
* Helper: Ensure parent directory exists
|
|
589
|
+
*/
|
|
590
|
+
async function ensureParentDir(filePath: string): Promise<void> {
|
|
591
|
+
const dir = path.dirname(filePath);
|
|
592
|
+
await fs.mkdir(dir, { recursive: true });
|
|
593
|
+
}
|
|
594
|
+
|
|
595
|
+
/**
|
|
596
|
+
* Helper: Apply a file operation to a directory
|
|
597
|
+
*/
|
|
598
|
+
async function applyOperation(
|
|
599
|
+
repoPath: string,
|
|
600
|
+
op: FileOperation
|
|
601
|
+
): Promise<void> {
|
|
602
|
+
try {
|
|
603
|
+
switch (op.type) {
|
|
604
|
+
case "add": {
|
|
605
|
+
const filePath = path.join(repoPath, op.path);
|
|
606
|
+
await ensureParentDir(filePath);
|
|
607
|
+
await fs.writeFile(filePath, op.content);
|
|
608
|
+
break;
|
|
609
|
+
}
|
|
610
|
+
case "edit": {
|
|
611
|
+
const filePath = path.join(repoPath, op.path);
|
|
612
|
+
// Only edit if file exists, otherwise create it
|
|
613
|
+
if (await pathExists(filePath)) {
|
|
614
|
+
await fs.writeFile(filePath, op.content);
|
|
615
|
+
} else {
|
|
616
|
+
await ensureParentDir(filePath);
|
|
617
|
+
await fs.writeFile(filePath, op.content);
|
|
618
|
+
}
|
|
619
|
+
break;
|
|
620
|
+
}
|
|
621
|
+
case "delete": {
|
|
622
|
+
const filePath = path.join(repoPath, op.path);
|
|
623
|
+
// Only delete if file exists
|
|
624
|
+
if (await pathExists(filePath)) {
|
|
625
|
+
await fs.unlink(filePath);
|
|
626
|
+
}
|
|
627
|
+
break;
|
|
628
|
+
}
|
|
629
|
+
case "rename": {
|
|
630
|
+
const fromPath = path.join(repoPath, op.fromPath);
|
|
631
|
+
const toPath = path.join(repoPath, op.toPath);
|
|
632
|
+
// Only rename if source exists and target doesn't
|
|
633
|
+
if ((await pathExists(fromPath)) && !(await pathExists(toPath))) {
|
|
634
|
+
await ensureParentDir(toPath);
|
|
635
|
+
await fs.rename(fromPath, toPath);
|
|
636
|
+
}
|
|
637
|
+
break;
|
|
638
|
+
}
|
|
639
|
+
case "editAndRename": {
|
|
640
|
+
const fromPath = path.join(repoPath, op.fromPath);
|
|
641
|
+
const toPath = path.join(repoPath, op.toPath);
|
|
642
|
+
// Edit then rename: only if source exists and target doesn't
|
|
643
|
+
if ((await pathExists(fromPath)) && !(await pathExists(toPath))) {
|
|
644
|
+
await fs.writeFile(fromPath, op.content);
|
|
645
|
+
await ensureParentDir(toPath);
|
|
646
|
+
await fs.rename(fromPath, toPath);
|
|
647
|
+
}
|
|
648
|
+
break;
|
|
649
|
+
}
|
|
650
|
+
}
|
|
651
|
+
} catch (error) {
|
|
652
|
+
// Ignore operation errors (e.g., deleting non-existent file)
|
|
653
|
+
// This is expected in fuzzing
|
|
654
|
+
}
|
|
655
|
+
}
|
|
656
|
+
|
|
657
|
+
/**
|
|
658
|
+
* Helper: Apply multiple operations
|
|
659
|
+
*/
|
|
660
|
+
async function applyOperations(
|
|
661
|
+
repoPath: string,
|
|
662
|
+
operations: FileOperation[]
|
|
663
|
+
): Promise<void> {
|
|
664
|
+
for (const op of operations) {
|
|
665
|
+
await applyOperation(repoPath, op);
|
|
666
|
+
}
|
|
667
|
+
}
|
|
668
|
+
|
|
669
|
+
it("should converge after random operations on both sides", async () => {
|
|
670
|
+
await fc.assert(
|
|
671
|
+
fc.asyncProperty(
|
|
672
|
+
fc.array(fileOperationArbitrary, { minLength: 1, maxLength: 10 }), // Operations on repo A (1-10 ops)
|
|
673
|
+
fc.array(fileOperationArbitrary, { minLength: 1, maxLength: 10 }), // Operations on repo B (1-10 ops)
|
|
674
|
+
async (opsA, opsB) => {
|
|
675
|
+
// Create two directories for testing
|
|
676
|
+
const testRoot = path.join(
|
|
677
|
+
tmpDir,
|
|
678
|
+
`test-${Date.now()}-${Math.random()}`
|
|
679
|
+
);
|
|
680
|
+
await fs.mkdir(testRoot, { recursive: true });
|
|
681
|
+
|
|
682
|
+
const repoA = path.join(testRoot, "repo-a");
|
|
683
|
+
const repoB = path.join(testRoot, "repo-b");
|
|
684
|
+
await fs.mkdir(repoA);
|
|
685
|
+
await fs.mkdir(repoB);
|
|
686
|
+
|
|
687
|
+
const testStart = Date.now();
|
|
688
|
+
console.log(
|
|
689
|
+
`\n🔬 Testing: ${opsA.length} ops on A, ${opsB.length} ops on B`
|
|
690
|
+
);
|
|
691
|
+
|
|
692
|
+
try {
|
|
693
|
+
// Initialize repo A with an initial file
|
|
694
|
+
console.log(
|
|
695
|
+
` ⏱️ [${Date.now() - testStart}ms] Initializing repo A...`
|
|
696
|
+
);
|
|
697
|
+
await fs.writeFile(path.join(repoA, "initial.txt"), "initial");
|
|
698
|
+
await pushwork(["init", "."], repoA);
|
|
699
|
+
await wait(500);
|
|
700
|
+
|
|
701
|
+
// Get root URL and clone to B
|
|
702
|
+
console.log(
|
|
703
|
+
` ⏱️ [${Date.now() - testStart}ms] Cloning to repo B...`
|
|
704
|
+
);
|
|
705
|
+
const { stdout: rootUrl } = await pushwork(["url"], repoA);
|
|
706
|
+
const cleanRootUrl = rootUrl.trim();
|
|
707
|
+
await pushwork(["clone", cleanRootUrl, repoB], testRoot);
|
|
708
|
+
await wait(500);
|
|
709
|
+
|
|
710
|
+
// Verify initial state matches
|
|
711
|
+
const hashBeforeOps = await hashDirectory(repoA);
|
|
712
|
+
const hashB1 = await hashDirectory(repoB);
|
|
713
|
+
expect(hashBeforeOps).toBe(hashB1);
|
|
714
|
+
console.log(
|
|
715
|
+
` ⏱️ [${Date.now() - testStart}ms] Initial state verified`
|
|
716
|
+
);
|
|
717
|
+
|
|
718
|
+
// Apply operations to both sides
|
|
719
|
+
console.log(
|
|
720
|
+
` ⏱️ [${Date.now() - testStart}ms] Applying ${
|
|
721
|
+
opsA.length
|
|
722
|
+
} operations to repo A...`
|
|
723
|
+
);
|
|
724
|
+
console.log(` Operations A: ${JSON.stringify(opsA)}`);
|
|
725
|
+
await applyOperations(repoA, opsA);
|
|
726
|
+
|
|
727
|
+
console.log(
|
|
728
|
+
` ⏱️ [${Date.now() - testStart}ms] Applying ${
|
|
729
|
+
opsB.length
|
|
730
|
+
} operations to repo B...`
|
|
731
|
+
);
|
|
732
|
+
console.log(` Operations B: ${JSON.stringify(opsB)}`);
|
|
733
|
+
await applyOperations(repoB, opsB);
|
|
734
|
+
|
|
735
|
+
// Multiple sync rounds for convergence
|
|
736
|
+
// Need enough time for network propagation between CLI invocations
|
|
737
|
+
// Round 1: A pushes changes
|
|
738
|
+
console.log(
|
|
739
|
+
` ⏱️ [${Date.now() - testStart}ms] Sync round 1: A...`
|
|
740
|
+
);
|
|
741
|
+
await pushwork(["sync"], repoA);
|
|
742
|
+
await wait(500);
|
|
743
|
+
|
|
744
|
+
// Round 2: B pushes changes and pulls A's changes
|
|
745
|
+
console.log(
|
|
746
|
+
` ⏱️ [${Date.now() - testStart}ms] Sync round 1: B...`
|
|
747
|
+
);
|
|
748
|
+
await pushwork(["sync"], repoB);
|
|
749
|
+
await wait(500);
|
|
750
|
+
|
|
751
|
+
// Round 3: A pulls B's changes
|
|
752
|
+
console.log(
|
|
753
|
+
` ⏱️ [${Date.now() - testStart}ms] Sync round 2: A...`
|
|
754
|
+
);
|
|
755
|
+
await pushwork(["sync"], repoA);
|
|
756
|
+
await wait(500);
|
|
757
|
+
|
|
758
|
+
// Round 4: B confirms convergence
|
|
759
|
+
console.log(
|
|
760
|
+
` ⏱️ [${Date.now() - testStart}ms] Sync round 2: B...`
|
|
761
|
+
);
|
|
762
|
+
await pushwork(["sync"], repoB);
|
|
763
|
+
await wait(500);
|
|
764
|
+
|
|
765
|
+
// Round 5: Final convergence check
|
|
766
|
+
console.log(
|
|
767
|
+
` ⏱️ [${Date.now() - testStart}ms] Sync round 3: A (final)...`
|
|
768
|
+
);
|
|
769
|
+
await pushwork(["sync"], repoA);
|
|
770
|
+
await wait(500);
|
|
771
|
+
|
|
772
|
+
// Round 6: Extra convergence check (for aggressive fuzzing)
|
|
773
|
+
console.log(
|
|
774
|
+
` ⏱️ [${Date.now() - testStart}ms] Sync round 3: B (final)...`
|
|
775
|
+
);
|
|
776
|
+
await pushwork(["sync"], repoB);
|
|
777
|
+
await wait(500);
|
|
778
|
+
|
|
779
|
+
// Verify final state matches
|
|
780
|
+
console.log(
|
|
781
|
+
` ⏱️ [${Date.now() - testStart}ms] Verifying convergence...`
|
|
782
|
+
);
|
|
783
|
+
|
|
784
|
+
const hashAfterA = await hashDirectory(repoA);
|
|
785
|
+
const hashAfterB = await hashDirectory(repoB);
|
|
786
|
+
|
|
787
|
+
console.log(` Hash A: ${hashAfterA.substring(0, 16)}...`);
|
|
788
|
+
console.log(` Hash B: ${hashAfterB.substring(0, 16)}...`);
|
|
789
|
+
|
|
790
|
+
// Both sides should converge to the same state
|
|
791
|
+
if (hashAfterA !== hashAfterB) {
|
|
792
|
+
// Show what files are different
|
|
793
|
+
const filesA = await getAllFiles(repoA);
|
|
794
|
+
const filesB = await getAllFiles(repoB);
|
|
795
|
+
console.log(` ❌ CONVERGENCE FAILURE!`);
|
|
796
|
+
console.log(
|
|
797
|
+
` Files in A: ${filesA
|
|
798
|
+
.filter((f) => !f.includes(".pushwork"))
|
|
799
|
+
.join(", ")}`
|
|
800
|
+
);
|
|
801
|
+
console.log(
|
|
802
|
+
` Files in B: ${filesB
|
|
803
|
+
.filter((f) => !f.includes(".pushwork"))
|
|
804
|
+
.join(", ")}`
|
|
805
|
+
);
|
|
806
|
+
console.log(
|
|
807
|
+
` Operations applied to A: ${JSON.stringify(opsA)}`
|
|
808
|
+
);
|
|
809
|
+
console.log(
|
|
810
|
+
` Operations applied to B: ${JSON.stringify(opsB)}`
|
|
811
|
+
);
|
|
812
|
+
}
|
|
813
|
+
expect(hashAfterA).toBe(hashAfterB);
|
|
814
|
+
|
|
815
|
+
// Verify diff shows no changes
|
|
816
|
+
const { stdout: diffOutput } = await pushwork(
|
|
817
|
+
["diff", "--name-only"],
|
|
818
|
+
repoA
|
|
819
|
+
);
|
|
820
|
+
// Filter out status messages, only check for actual file differences
|
|
821
|
+
const diffLines = diffOutput
|
|
822
|
+
.split("\n")
|
|
823
|
+
.filter(
|
|
824
|
+
(line) =>
|
|
825
|
+
line.trim() &&
|
|
826
|
+
!line.includes("✓") &&
|
|
827
|
+
!line.includes("Local-only") &&
|
|
828
|
+
!line.includes("Root URL")
|
|
829
|
+
);
|
|
830
|
+
expect(diffLines.length).toBe(0);
|
|
831
|
+
|
|
832
|
+
const totalTime = Date.now() - testStart;
|
|
833
|
+
console.log(` ✅ Converged successfully! (took ${totalTime}ms)`);
|
|
834
|
+
|
|
835
|
+
// Cleanup
|
|
836
|
+
await fs.rm(testRoot, { recursive: true, force: true });
|
|
837
|
+
} catch (error) {
|
|
838
|
+
// Cleanup on error
|
|
839
|
+
await fs
|
|
840
|
+
.rm(testRoot, { recursive: true, force: true })
|
|
841
|
+
.catch(() => {});
|
|
842
|
+
throw error;
|
|
843
|
+
}
|
|
844
|
+
}
|
|
845
|
+
),
|
|
846
|
+
{
|
|
847
|
+
numRuns: 50, // INTENSE MODE (was 20, then cranked to 50)
|
|
848
|
+
timeout: 180000, // 3 minute timeout per run
|
|
849
|
+
verbose: true, // Verbose output
|
|
850
|
+
endOnFailure: true, // Stop on first failure to debug
|
|
851
|
+
}
|
|
852
|
+
);
|
|
853
|
+
}, 1200000); // 20 minute timeout for the whole test
|
|
854
|
+
});
|
|
855
|
+
});
|
|
856
|
+
|
|
857
|
+
// Helper function
|
|
858
|
+
async function pathExists(filePath: string): Promise<boolean> {
|
|
859
|
+
try {
|
|
860
|
+
await fs.access(filePath);
|
|
861
|
+
return true;
|
|
862
|
+
} catch {
|
|
863
|
+
return false;
|
|
864
|
+
}
|
|
865
|
+
}
|