pushwork 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +460 -0
- package/dist/browser/browser-sync-engine.d.ts +64 -0
- package/dist/browser/browser-sync-engine.d.ts.map +1 -0
- package/dist/browser/browser-sync-engine.js +303 -0
- package/dist/browser/browser-sync-engine.js.map +1 -0
- package/dist/browser/filesystem-adapter.d.ts +84 -0
- package/dist/browser/filesystem-adapter.d.ts.map +1 -0
- package/dist/browser/filesystem-adapter.js +413 -0
- package/dist/browser/filesystem-adapter.js.map +1 -0
- package/dist/browser/index.d.ts +36 -0
- package/dist/browser/index.d.ts.map +1 -0
- package/dist/browser/index.js +90 -0
- package/dist/browser/index.js.map +1 -0
- package/dist/browser/types.d.ts +70 -0
- package/dist/browser/types.d.ts.map +1 -0
- package/dist/browser/types.js +6 -0
- package/dist/browser/types.js.map +1 -0
- package/dist/cli/commands.d.ts +71 -0
- package/dist/cli/commands.d.ts.map +1 -0
- package/dist/cli/commands.js +794 -0
- package/dist/cli/commands.js.map +1 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +19 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +199 -0
- package/dist/cli.js.map +1 -0
- package/dist/config/index.d.ts +71 -0
- package/dist/config/index.d.ts.map +1 -0
- package/dist/config/index.js +314 -0
- package/dist/config/index.js.map +1 -0
- package/dist/core/change-detection.d.ts +78 -0
- package/dist/core/change-detection.d.ts.map +1 -0
- package/dist/core/change-detection.js +370 -0
- package/dist/core/change-detection.js.map +1 -0
- package/dist/core/index.d.ts +5 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +22 -0
- package/dist/core/index.js.map +1 -0
- package/dist/core/isomorphic-snapshot.d.ts +58 -0
- package/dist/core/isomorphic-snapshot.d.ts.map +1 -0
- package/dist/core/isomorphic-snapshot.js +204 -0
- package/dist/core/isomorphic-snapshot.js.map +1 -0
- package/dist/core/move-detection.d.ts +72 -0
- package/dist/core/move-detection.d.ts.map +1 -0
- package/dist/core/move-detection.js +200 -0
- package/dist/core/move-detection.js.map +1 -0
- package/dist/core/snapshot.d.ts +109 -0
- package/dist/core/snapshot.d.ts.map +1 -0
- package/dist/core/snapshot.js +263 -0
- package/dist/core/snapshot.js.map +1 -0
- package/dist/core/sync-engine.d.ts +110 -0
- package/dist/core/sync-engine.d.ts.map +1 -0
- package/dist/core/sync-engine.js +817 -0
- package/dist/core/sync-engine.js.map +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +27 -0
- package/dist/index.js.map +1 -0
- package/dist/platform/browser-filesystem.d.ts +26 -0
- package/dist/platform/browser-filesystem.d.ts.map +1 -0
- package/dist/platform/browser-filesystem.js +91 -0
- package/dist/platform/browser-filesystem.js.map +1 -0
- package/dist/platform/filesystem.d.ts +29 -0
- package/dist/platform/filesystem.d.ts.map +1 -0
- package/dist/platform/filesystem.js +65 -0
- package/dist/platform/filesystem.js.map +1 -0
- package/dist/platform/node-filesystem.d.ts +21 -0
- package/dist/platform/node-filesystem.d.ts.map +1 -0
- package/dist/platform/node-filesystem.js +93 -0
- package/dist/platform/node-filesystem.js.map +1 -0
- package/dist/types/config.d.ts +119 -0
- package/dist/types/config.d.ts.map +1 -0
- package/dist/types/config.js +3 -0
- package/dist/types/config.js.map +1 -0
- package/dist/types/documents.d.ts +70 -0
- package/dist/types/documents.d.ts.map +1 -0
- package/dist/types/documents.js +23 -0
- package/dist/types/documents.js.map +1 -0
- package/dist/types/index.d.ts +4 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +23 -0
- package/dist/types/index.js.map +1 -0
- package/dist/types/snapshot.d.ts +81 -0
- package/dist/types/snapshot.d.ts.map +1 -0
- package/dist/types/snapshot.js +17 -0
- package/dist/types/snapshot.js.map +1 -0
- package/dist/utils/content-similarity.d.ts +53 -0
- package/dist/utils/content-similarity.d.ts.map +1 -0
- package/dist/utils/content-similarity.js +155 -0
- package/dist/utils/content-similarity.js.map +1 -0
- package/dist/utils/content.d.ts +5 -0
- package/dist/utils/content.d.ts.map +1 -0
- package/dist/utils/content.js +30 -0
- package/dist/utils/content.js.map +1 -0
- package/dist/utils/fs-browser.d.ts +57 -0
- package/dist/utils/fs-browser.d.ts.map +1 -0
- package/dist/utils/fs-browser.js +311 -0
- package/dist/utils/fs-browser.js.map +1 -0
- package/dist/utils/fs-node.d.ts +53 -0
- package/dist/utils/fs-node.d.ts.map +1 -0
- package/dist/utils/fs-node.js +220 -0
- package/dist/utils/fs-node.js.map +1 -0
- package/dist/utils/fs.d.ts +62 -0
- package/dist/utils/fs.d.ts.map +1 -0
- package/dist/utils/fs.js +293 -0
- package/dist/utils/fs.js.map +1 -0
- package/dist/utils/index.d.ts +4 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +23 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/isomorphic.d.ts +29 -0
- package/dist/utils/isomorphic.d.ts.map +1 -0
- package/dist/utils/isomorphic.js +139 -0
- package/dist/utils/isomorphic.js.map +1 -0
- package/dist/utils/mime-types.d.ts +13 -0
- package/dist/utils/mime-types.d.ts.map +1 -0
- package/dist/utils/mime-types.js +240 -0
- package/dist/utils/mime-types.js.map +1 -0
- package/dist/utils/network-sync.d.ts +12 -0
- package/dist/utils/network-sync.d.ts.map +1 -0
- package/dist/utils/network-sync.js +149 -0
- package/dist/utils/network-sync.js.map +1 -0
- package/dist/utils/pure.d.ts +25 -0
- package/dist/utils/pure.d.ts.map +1 -0
- package/dist/utils/pure.js +112 -0
- package/dist/utils/pure.js.map +1 -0
- package/dist/utils/repo-factory.d.ts +11 -0
- package/dist/utils/repo-factory.d.ts.map +1 -0
- package/dist/utils/repo-factory.js +77 -0
- package/dist/utils/repo-factory.js.map +1 -0
- package/package.json +83 -0
- package/src/cli/commands.ts +1053 -0
- package/src/cli/index.ts +2 -0
- package/src/cli.ts +287 -0
- package/src/config/index.ts +334 -0
- package/src/core/change-detection.ts +484 -0
- package/src/core/index.ts +5 -0
- package/src/core/move-detection.ts +269 -0
- package/src/core/snapshot.ts +285 -0
- package/src/core/sync-engine.ts +1167 -0
- package/src/index.ts +14 -0
- package/src/types/config.ts +130 -0
- package/src/types/documents.ts +72 -0
- package/src/types/index.ts +8 -0
- package/src/types/snapshot.ts +88 -0
- package/src/utils/content-similarity.ts +194 -0
- package/src/utils/content.ts +28 -0
- package/src/utils/fs.ts +289 -0
- package/src/utils/index.ts +8 -0
- package/src/utils/mime-types.ts +236 -0
- package/src/utils/network-sync.ts +153 -0
- package/src/utils/repo-factory.ts +58 -0
- package/test/README-TESTING-GAPS.md +174 -0
- package/test/integration/README.md +328 -0
- package/test/integration/clone-test.sh +310 -0
- package/test/integration/conflict-resolution-test.sh +309 -0
- package/test/integration/deletion-behavior-test.sh +487 -0
- package/test/integration/deletion-sync-test-simple.sh +193 -0
- package/test/integration/deletion-sync-test.sh +297 -0
- package/test/integration/exclude-patterns.test.ts +152 -0
- package/test/integration/full-integration-test.sh +363 -0
- package/test/integration/sync-deletion.test.ts +339 -0
- package/test/integration/sync-flow.test.ts +309 -0
- package/test/run-tests.sh +225 -0
- package/test/unit/content-similarity.test.ts +236 -0
- package/test/unit/deletion-behavior.test.ts +260 -0
- package/test/unit/enhanced-mime-detection.test.ts +266 -0
- package/test/unit/snapshot.test.ts +431 -0
- package/test/unit/sync-timing.test.ts +178 -0
- package/test/unit/utils.test.ts +368 -0
- package/tools/browser-sync/README.md +116 -0
- package/tools/browser-sync/package.json +44 -0
- package/tools/browser-sync/patchwork.json +1 -0
- package/tools/browser-sync/pnpm-lock.yaml +4202 -0
- package/tools/browser-sync/src/components/BrowserSyncTool.tsx +599 -0
- package/tools/browser-sync/src/index.ts +20 -0
- package/tools/browser-sync/src/polyfills.ts +31 -0
- package/tools/browser-sync/src/styles.css +290 -0
- package/tools/browser-sync/src/types.ts +27 -0
- package/tools/browser-sync/vite.config.ts +25 -0
- package/tsconfig.json +22 -0

@@ -0,0 +1,178 @@ package/test/unit/sync-timing.test.ts

import * as fs from "fs/promises";
import * as path from "path";
import { tmpdir } from "os";

describe("Sync Timing Analysis", () => {
  let testDir: string;

  beforeEach(async () => {
    testDir = await fs.mkdtemp(path.join(tmpdir(), "sync-timing-"));
  });

  afterEach(async () => {
    await fs.rm(testDir, { recursive: true, force: true });
  });

  describe("File Operation Timing", () => {
    it("should measure rapid file operations timing", async () => {
      const startTime = Date.now();

      // Simulate rapid file operations similar to sync
      const promises: Promise<void>[] = [];
      for (let i = 0; i < 10; i++) {
        promises.push(
          fs.writeFile(path.join(testDir, `file${i}.txt`), `content${i}`)
        );
      }

      await Promise.all(promises);
      const totalTime = Date.now() - startTime;

      console.log(`Created 10 files in ${totalTime}ms`);

      // Verify all files exist
      const files = await fs.readdir(testDir);
      expect(files).toHaveLength(10);

      // This test shows us baseline file operation timing
      expect(totalTime).toBeLessThan(1000); // Should be fast for local operations
    });

    it("should measure sequential vs parallel file operations", async () => {
      // Sequential operations
      const sequentialStart = Date.now();
      for (let i = 0; i < 5; i++) {
        await fs.writeFile(path.join(testDir, `seq${i}.txt`), `content${i}`);
      }
      const sequentialTime = Date.now() - sequentialStart;

      // Parallel operations
      const parallelStart = Date.now();
      const promises: Promise<void>[] = [];
      for (let i = 0; i < 5; i++) {
        promises.push(
          fs.writeFile(path.join(testDir, `par${i}.txt`), `content${i}`)
        );
      }
      await Promise.all(promises);
      const parallelTime = Date.now() - parallelStart;

      console.log(
        `Sequential: ${sequentialTime}ms, Parallel: ${parallelTime}ms`
      );

      // Parallel should generally be faster
      expect(parallelTime).toBeLessThanOrEqual(sequentialTime);

      // Verify all files exist
      const files = await fs.readdir(testDir);
      expect(files).toHaveLength(10);
    });

    it("should test file operation atomicity", async () => {
      const filePath = path.join(testDir, "test.txt");

      // Write initial content
      await fs.writeFile(filePath, "initial content");

      // Rapid successive writes to same file
      const writes: Promise<void>[] = [];
      for (let i = 0; i < 5; i++) {
        writes.push(fs.writeFile(filePath, `updated content ${i}`));
      }

      await Promise.all(writes);

      // Check final content (should be one of the updates)
      const finalContent = await fs.readFile(filePath, "utf8");
      expect(finalContent).toMatch(/updated content \d/);

      console.log(`Final content after rapid writes: "${finalContent}"`);
    });
  });

  describe("Sync Completion Scenarios", () => {
    it("should simulate the need for sync completion detection", async () => {
      // This test simulates what might happen with network sync
      // where we need to wait for operations to complete

      const results: { operation: string; time: number }[] = [];

      // Simulate "local" operations (fast)
      const localStart = Date.now();
      await fs.writeFile(path.join(testDir, "local.txt"), "local content");
      const localTime = Date.now() - localStart;
      results.push({ operation: "local write", time: localTime });

      // Simulate "network" operations (slower with artificial delay)
      const networkStart = Date.now();
      await new Promise((resolve) => setTimeout(resolve, 50)); // 50ms delay
      await fs.writeFile(path.join(testDir, "network.txt"), "network content");
      const networkTime = Date.now() - networkStart;
      results.push({ operation: "network write", time: networkTime });

      console.log("Operation timing:");
      results.forEach((r) => {
        console.log(`  ${r.operation}: ${r.time}ms`);
      });

      // This demonstrates why we might need to wait for slower operations
      expect(networkTime).toBeGreaterThan(localTime);
      expect(networkTime).toBeGreaterThan(40); // Should include our delay
    });

    it("should test what happens without proper completion waiting", async () => {
      // Simulate starting an operation but not waiting for it
      const promises: Promise<void>[] = [];

      // Start operations without awaiting
      for (let i = 0; i < 3; i++) {
        promises.push(
          (async () => {
            await new Promise((resolve) => setTimeout(resolve, 10 * i)); // Varying delays
            await fs.writeFile(
              path.join(testDir, `async${i}.txt`),
              `content${i}`
            );
          })()
        );
      }

      // Check immediately (before operations complete)
      const filesImmediate = await fs.readdir(testDir);
      console.log(`Files immediately: ${filesImmediate.length}`);

      // Now wait for operations to complete
      await Promise.all(promises);

      // Check after completion
      const filesAfter = await fs.readdir(testDir);
      console.log(`Files after completion: ${filesAfter.length}`);

      // This shows the difference between checking immediately vs waiting
      expect(filesAfter.length).toBeGreaterThanOrEqual(filesImmediate.length);
      expect(filesAfter).toHaveLength(3);
    });
  });

  describe("Potential Race Conditions", () => {
    it("should test for potential race conditions in file operations", async () => {
      const sharedFile = path.join(testDir, "shared.txt");

      // Multiple operations on the same file
      const operations = [
        fs.writeFile(sharedFile, "operation1"),
        fs.writeFile(sharedFile, "operation2"),
        fs.writeFile(sharedFile, "operation3"),
      ];

      await Promise.all(operations);

      // Only one operation should "win"
      const content = await fs.readFile(sharedFile, "utf8");
      expect(["operation1", "operation2", "operation3"]).toContain(content);

      console.log(`Final content from race condition: "${content}"`);
    });
  });
});

@@ -0,0 +1,368 @@ package/test/unit/utils.test.ts

import * as fs from "fs/promises";
import * as path from "path";
import * as tmp from "tmp";
import {
  pathExists,
  getFileSystemEntry,
  isTextFile,
  readFileContent,
  writeFileContent,
  ensureDirectoryExists,
  removePath,
  listDirectory,
  copyFile,
  movePath,
  calculateContentHash,
  getMimeType,
  getFileExtension,
  normalizePath,
  getRelativePath,
} from "../../src/utils/fs";
import { FileType } from "../../src/types";

describe("File System Utilities", () => {
  let tmpDir: string;
  let cleanup: () => void;

  beforeEach(() => {
    const tmpObj = tmp.dirSync({ unsafeCleanup: true });
    tmpDir = tmpObj.name;
    cleanup = tmpObj.removeCallback;
  });

  afterEach(() => {
    cleanup();
  });

  describe("pathExists", () => {
    it("should return true for existing files", async () => {
      const filePath = path.join(tmpDir, "test.txt");
      await fs.writeFile(filePath, "test content");

      expect(await pathExists(filePath)).toBe(true);
    });

    it("should return false for non-existing files", async () => {
      const filePath = path.join(tmpDir, "nonexistent.txt");

      expect(await pathExists(filePath)).toBe(false);
    });

    it("should return true for existing directories", async () => {
      expect(await pathExists(tmpDir)).toBe(true);
    });
  });

  describe("getFileSystemEntry", () => {
    it("should return metadata for files", async () => {
      const filePath = path.join(tmpDir, "test.txt");
      await fs.writeFile(filePath, "test content");

      const entry = await getFileSystemEntry(filePath);

      expect(entry).not.toBeNull();
      expect(entry?.path).toBe(filePath);
      expect(entry?.type).toBe(FileType.TEXT);
      expect(entry?.size).toBe(12); // 'test content'.length
      expect(entry?.mtime).toBeDefined();
      expect(entry?.mtime.getTime()).toBeGreaterThan(0);
      expect(typeof entry?.mtime.getTime()).toBe("number");
    });

    it("should return metadata for directories", async () => {
      const dirPath = path.join(tmpDir, "subdir");
      await fs.mkdir(dirPath);

      const entry = await getFileSystemEntry(dirPath);

      expect(entry).not.toBeNull();
      expect(entry?.path).toBe(dirPath);
      expect(entry?.type).toBe(FileType.DIRECTORY);
    });

    it("should return null for non-existing paths", async () => {
      const entry = await getFileSystemEntry(path.join(tmpDir, "nonexistent"));
      expect(entry).toBeNull();
    });
  });

  describe("isTextFile", () => {
    it("should detect text files by extension", async () => {
      const filePath = path.join(tmpDir, "test.txt");
      await fs.writeFile(filePath, "text content");

      expect(await isTextFile(filePath)).toBe(true);
    });

    it("should detect JSON files as text", async () => {
      const filePath = path.join(tmpDir, "test.json");
      await fs.writeFile(filePath, '{"key": "value"}');

      expect(await isTextFile(filePath)).toBe(true);
    });

    it("should detect binary files by content", async () => {
      const filePath = path.join(tmpDir, "test.bin");
      const binaryContent = Buffer.from([0x00, 0x01, 0x02, 0x03]);
      await fs.writeFile(filePath, binaryContent);

      expect(await isTextFile(filePath)).toBe(false);
    });
  });

  describe("readFileContent", () => {
    it("should read text files as strings", async () => {
      const filePath = path.join(tmpDir, "test.txt");
      const content = "Hello, world!";
      await fs.writeFile(filePath, content);

      const result = await readFileContent(filePath);

      expect(typeof result).toBe("string");
      expect(result).toBe(content);
    });

    it("should read TypeScript files as strings", async () => {
      const filePath = path.join(tmpDir, "component.ts");
      const content = "interface User { name: string; age: number; }";
      await fs.writeFile(filePath, content);

      const result = await readFileContent(filePath);

      expect(typeof result).toBe("string");
      expect(result).toBe(content);
    });

    it("should read TSX files as strings", async () => {
      const filePath = path.join(tmpDir, "Component.tsx");
      const content = "export const App = () => <div>Hello World</div>;";
      await fs.writeFile(filePath, content);

      const result = await readFileContent(filePath);

      expect(typeof result).toBe("string");
      expect(result).toBe(content);
    });

    it("should read Vue files as strings", async () => {
      const filePath = path.join(tmpDir, "App.vue");
      const content = "<template><div>{{ message }}</div></template>";
      await fs.writeFile(filePath, content);

      const result = await readFileContent(filePath);

      expect(typeof result).toBe("string");
      expect(result).toBe(content);
    });

    it("should read SCSS files as strings", async () => {
      const filePath = path.join(tmpDir, "styles.scss");
      const content = "$primary: #007bff; .btn { color: $primary; }";
      await fs.writeFile(filePath, content);

      const result = await readFileContent(filePath);

      expect(typeof result).toBe("string");
      expect(result).toBe(content);
    });

    it("should read binary files as Uint8Array", async () => {
      const filePath = path.join(tmpDir, "test.bin");
      const binaryContent = Buffer.from([0x00, 0x01, 0x02, 0x03]);
      await fs.writeFile(filePath, binaryContent);

      const result = await readFileContent(filePath);

      expect(result).toBeInstanceOf(Uint8Array);
      expect(Array.from(result as Uint8Array)).toEqual([
        0x00, 0x01, 0x02, 0x03,
      ]);
    });
  });

  describe("writeFileContent", () => {
    it("should write string content to files", async () => {
      const filePath = path.join(tmpDir, "output.txt");
      const content = "Test content";

      await writeFileContent(filePath, content);

      const written = await fs.readFile(filePath, "utf8");
      expect(written).toBe(content);
    });

    it("should write binary content to files", async () => {
      const filePath = path.join(tmpDir, "output.bin");
      const content = new Uint8Array([0x00, 0x01, 0x02, 0x03]);

      await writeFileContent(filePath, content);

      const written = await fs.readFile(filePath);
      expect(Array.from(written)).toEqual([0x00, 0x01, 0x02, 0x03]);
    });

    it("should create directories if they don't exist", async () => {
      const filePath = path.join(tmpDir, "nested", "deep", "file.txt");

      await writeFileContent(filePath, "content");

      expect(await pathExists(filePath)).toBe(true);
      expect(await fs.readFile(filePath, "utf8")).toBe("content");
    });
  });

  describe("ensureDirectoryExists", () => {
    it("should create directories recursively", async () => {
      const dirPath = path.join(tmpDir, "nested", "deep", "directory");

      await ensureDirectoryExists(dirPath);

      expect(await pathExists(dirPath)).toBe(true);
      const stats = await fs.stat(dirPath);
      expect(stats.isDirectory()).toBe(true);
    });

    it("should not fail if directory already exists", async () => {
      await ensureDirectoryExists(tmpDir);
      await ensureDirectoryExists(tmpDir); // Should not throw

      expect(await pathExists(tmpDir)).toBe(true);
    });
  });

  describe("removePath", () => {
    it("should remove files", async () => {
      const filePath = path.join(tmpDir, "toremove.txt");
      await fs.writeFile(filePath, "content");

      await removePath(filePath);

      expect(await pathExists(filePath)).toBe(false);
    });

    it("should remove directories recursively", async () => {
      const dirPath = path.join(tmpDir, "toremove");
      const filePath = path.join(dirPath, "file.txt");
      await fs.mkdir(dirPath);
      await fs.writeFile(filePath, "content");

      await removePath(dirPath);

      expect(await pathExists(dirPath)).toBe(false);
    });

    it("should not fail if path doesn't exist", async () => {
      const nonExistentPath = path.join(tmpDir, "nonexistent");

      await removePath(nonExistentPath); // Should not throw

      expect(await pathExists(nonExistentPath)).toBe(false);
    });
  });

  describe("listDirectory", () => {
    beforeEach(async () => {
      // Create test directory structure
      await fs.mkdir(path.join(tmpDir, "subdir"));
      await fs.writeFile(path.join(tmpDir, "file1.txt"), "content1");
      await fs.writeFile(path.join(tmpDir, "file2.txt"), "content2");
      await fs.writeFile(path.join(tmpDir, "subdir", "file3.txt"), "content3");
    });

    it("should list directory contents non-recursively", async () => {
      const entries = await listDirectory(tmpDir, false);

      const names = entries.map((e) => path.basename(e.path)).sort();
      expect(names).toEqual(["file1.txt", "file2.txt", "subdir"]);
    });

    it("should list directory contents recursively", async () => {
      const entries = await listDirectory(tmpDir, true);

      const relativePaths = entries
        .map((e) => path.relative(tmpDir, e.path))
        .sort();

      expect(relativePaths).toContain("file1.txt");
      expect(relativePaths).toContain("file2.txt");
      expect(relativePaths).toContain("subdir");
      expect(relativePaths).toContain(path.join("subdir", "file3.txt"));
    });
  });

  describe("calculateContentHash", () => {
    it("should generate consistent hashes for string content", async () => {
      const content = "test content";

      const hash1 = await calculateContentHash(content);
      const hash2 = await calculateContentHash(content);

      expect(hash1).toBe(hash2);
      expect(hash1).toHaveLength(64); // SHA-256 hex string
    });

    it("should generate consistent hashes for binary content", async () => {
      const content = new Uint8Array([0x00, 0x01, 0x02, 0x03]);

      const hash1 = await calculateContentHash(content);
      const hash2 = await calculateContentHash(content);

      expect(hash1).toBe(hash2);
      expect(hash1).toHaveLength(64);
    });

    it("should generate different hashes for different content", async () => {
      const content1 = "content1";
      const content2 = "content2";

      const hash1 = await calculateContentHash(content1);
      const hash2 = await calculateContentHash(content2);

      expect(hash1).not.toBe(hash2);
    });
  });

  describe("getMimeType", () => {
    it("should return correct MIME type for text files", () => {
      expect(getMimeType("test.txt")).toBe("text/plain");
      expect(getMimeType("test.json")).toBe("application/json");
      expect(getMimeType("test.html")).toBe("text/html");
    });

    it("should return default MIME type for unknown extensions", () => {
      expect(getMimeType("test.unknown")).toBe("application/octet-stream");
    });
  });

  describe("getFileExtension", () => {
    it("should extract file extensions", () => {
      expect(getFileExtension("test.txt")).toBe("txt");
      expect(getFileExtension("archive.tar.gz")).toBe("gz");
      expect(getFileExtension("noextension")).toBe("");
    });
  });

  describe("normalizePath", () => {
    it("should normalize path separators", () => {
      expect(normalizePath("path\\to\\file")).toBe("path/to/file");
      expect(normalizePath("path/to/file")).toBe("path/to/file");
      expect(normalizePath("path//to//file")).toBe("path/to/file");
    });
  });

  describe("getRelativePath", () => {
    it("should return relative paths", () => {
      const base = "/home/user/project";
      const target = "/home/user/project/src/file.txt";

      expect(getRelativePath(base, target)).toBe("src/file.txt");
    });

    it("should handle same directory", () => {
      const base = "/home/user/project";
      const target = "/home/user/project";

      expect(getRelativePath(base, target)).toBe(".");
    });
  });
});

@@ -0,0 +1,116 @@ package/tools/browser-sync/README.md

# Browser Folder Sync - Patchwork Tool

A patchwork tool that enables synchronizing local folders with Patchwork documents using the Chrome File System Access API.

## Features

- **📂 Folder Selection**: Native browser folder picker with persistent access
- **🔄 Real-time Sync**: Manual sync with visual progress indicators
- **📋 File Listing**: Browse files and folders with size information
- **⚙️ Configurable Settings**: Auto-sync, exclude patterns, sync intervals
- **🛡️ Permission Management**: Proper handling of File System Access API permissions
- **🎨 Modern UI**: Clean, responsive interface with status indicators

## Browser Support

This tool requires browsers that support the File System Access API:

- ✅ Chrome 86+
- ✅ Edge 86+
- ✅ Safari 15.2+ (limited support)
- ❌ Firefox (not supported yet)
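
Support can be feature-detected at runtime before offering the folder picker. A minimal sketch, using a helper name chosen here for illustration (`isFileSystemAccessSupported` is not part of this tool's API):

```typescript
// Detect the File System Access API; `showDirectoryPicker` is the entry
// point the folder picker relies on.
export function isFileSystemAccessSupported(): boolean {
  return typeof window !== "undefined" && "showDirectoryPicker" in window;
}
```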

## Usage

1. **Load the Tool**: The tool appears as "Browser Sync" for folder documents in Patchwork
2. **Select Folder**: Click "Select Folder" to choose a local directory
3. **Grant Permissions**: Allow the browser to access the selected folder
4. **Sync Files**: Use "Sync Now" for manual synchronization
5. **Configure Settings**: Toggle auto-sync and adjust settings as needed
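
Steps 2–3 above map onto the browser's directory picker and permission prompts. A hedged sketch of that flow (the helper names are illustrative, and TypeScript may additionally need File System Access typings such as `@types/wicg-file-system-access`):

```typescript
// Ask the user for a directory and ensure read/write access to it.
async function selectFolder(): Promise<FileSystemDirectoryHandle> {
  const dirHandle = await window.showDirectoryPicker();

  // Permission may need to be (re)requested, e.g. after a page reload.
  const status = await dirHandle.requestPermission({ mode: "readwrite" });
  if (status !== "granted") {
    throw new Error("Folder access was not granted");
  }
  return dirHandle;
}

// Top-level listing, similar to the tool's file list view.
async function listEntries(dirHandle: FileSystemDirectoryHandle) {
  const entries: { name: string; kind: "file" | "directory" }[] = [];
  for await (const [name, handle] of dirHandle.entries()) {
    entries.push({ name, kind: handle.kind });
  }
  return entries;
}
```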

## Technical Implementation

### Architecture

```
Browser Sync Tool
├── SimpleBrowserSyncTool.tsx   # Main React component
├── polyfills.ts                # Node.js browser compatibility
├── types.ts                    # TypeScript interfaces
└── styles.css                  # Tool styling
```

### Key Technologies

- **File System Access API**: Native browser folder access
- **React**: Component-based UI framework
- **Patchwork SDK**: Integration with Patchwork platform
- **Automerge**: CRDT-based document synchronization
- **Vite**: Modern build tooling with browser optimization
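
The Automerge piece is what ultimately holds the synced folder state. A rough sketch of the `@automerge/automerge-repo` primitives involved (the document shape and repo options here are illustrative; the package's actual document types live under `src/types/`):

```typescript
import { Repo } from "@automerge/automerge-repo";

// Illustrative document shape, not the real pushwork schema.
type FolderDoc = { files: Record<string, string> };

const repo = new Repo({ network: [] }); // in-memory repo, no sync peers
const handle = repo.create<FolderDoc>();

// All edits go through change(); Automerge records them as CRDT operations.
handle.change((doc) => {
  doc.files = {};
  doc.files["README.md"] = "# Hello";
});

console.log(handle.url); // stable automerge: URL identifying this document
```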

### Browser Polyfills

The tool includes comprehensive polyfills for Node.js globals:

- `process` object with environment variables
- `Buffer` minimal implementation
- `global` reference for compatibility
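
A minimal sketch of what such a polyfill module might look like (illustrative only; the package's actual `polyfills.ts` may differ):

```typescript
// Minimal stand-ins for Node.js globals that Node-oriented dependencies
// may touch when bundled for the browser. Not full implementations.
const g = globalThis as any;

if (typeof g.process === "undefined") {
  g.process = { env: {} };
}

if (typeof g.global === "undefined") {
  g.global = globalThis;
}

if (typeof g.Buffer === "undefined") {
  // Only the helpers the bundled code actually calls need to exist.
  g.Buffer = {
    from: (data: string | ArrayLike<number>) =>
      typeof data === "string"
        ? new TextEncoder().encode(data)
        : Uint8Array.from(data),
    isBuffer: () => false,
  };
}
```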

### Build Configuration

Vite configuration excludes Node.js modules and provides browser-safe aliases:

- Disabled: `fs`, `path`, `crypto`, `glob`, and other Node.js modules
- Polyfilled: `process`, `Buffer`, `global`
- Optimized: ES2022 target with tree-shaking
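
A hedged sketch of the kind of `vite.config.ts` this describes (the empty-stub file and exact option values are assumptions, not copied from the package):

```typescript
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";
import { fileURLToPath } from "node:url";

// Empty stub substituted for Node-only modules (hypothetical file).
const emptyStub = fileURLToPath(
  new URL("./src/empty-module.ts", import.meta.url)
);

export default defineConfig({
  plugins: [react()],
  build: {
    target: "es2022", // the ES2022 target mentioned above
  },
  define: {
    // Browser-safe stand-ins for Node globals.
    "process.env": {},
    global: "globalThis",
  },
  resolve: {
    alias: {
      // Point Node-only modules at an empty stub so they tree-shake away.
      fs: emptyStub,
      path: emptyStub,
      crypto: emptyStub,
    },
  },
});
```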

## Current Limitations

1. **Demo Implementation**: Sync functionality is currently simulated
2. **Basic File Listing**: No recursive directory traversal
3. **No Real Persistence**: Changes aren't actually synced to Automerge docs
4. **Simple UI**: Basic interface without advanced features
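
Limitation 2 could be addressed with the same directory-handle API used for the top-level listing. A possible sketch, not code from this package:

```typescript
// Recursively walk a directory handle, yielding relative path + file handle.
async function* walk(
  dir: FileSystemDirectoryHandle,
  prefix = ""
): AsyncGenerator<{ path: string; file: FileSystemFileHandle }> {
  for await (const [name, handle] of dir.entries()) {
    const entryPath = prefix ? `${prefix}/${name}` : name;
    if (handle.kind === "file") {
      yield { path: entryPath, file: handle as FileSystemFileHandle };
    } else {
      yield* walk(handle as FileSystemDirectoryHandle, entryPath);
    }
  }
}
```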

## Future Enhancements

To complete the full pushwork integration:

1. **Real Sync Engine**: Integrate with pushwork's browser sync engine
2. **Change Detection**: Implement file modification monitoring
3. **Conflict Resolution**: Add CRDT-based merge capabilities
4. **Performance**: Optimize for large directories
5. **Advanced UI**: Add progress bars, conflict indicators, etc.

## Development

### Build Commands

```bash
# Install dependencies
pnpm install

# Build for production
pnpm run build

# Watch mode (with auto-push to Patchwork)
pnpm run watch
```

### Project Structure

```
tools/browser-sync/
├── dist/                # Built output
├── src/
│   ├── components/
│   │   └── SimpleBrowserSyncTool.tsx
│   ├── polyfills.ts
│   ├── types.ts
│   ├── index.ts
│   └── styles.css
├── package.json
├── vite.config.ts
└── patchwork.json
```

This tool demonstrates the foundation for browser-based file synchronization and can be extended to provide full pushwork compatibility for web-based collaborative editing.

@@ -0,0 +1,44 @@ package/tools/browser-sync/package.json

{
  "name": "@pushwork/browser-sync",
  "version": "0.1.0",
  "description": "Browser folder sync tool for Patchwork",
  "type": "module",
  "main": "src/index.ts",
  "exports": {
    ".": "./dist/index.js"
  },
  "scripts": {
    "build": "vite build",
    "dev": "vite build --watch",
    "push": "pnpm build && patchwork push",
    "watch": "nodemon --watch src -e js,tsx,ts,css,json --exec 'pnpm build && pnpm push'"
  },
  "keywords": [
    "patchwork",
    "sync",
    "filesystem",
    "browser",
    "automerge"
  ],
  "author": "Pushwork Team",
  "dependencies": {
    "@automerge/automerge-repo": "2.0.0-alpha.23",
    "@automerge/automerge-repo-react-hooks": "2.0.0-alpha.23",
    "@patchwork/sdk": "file:../../../patchwork/sdk",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
    "pushwork": "file:../../",
    "lucide-react": "^0.460.0"
  },
  "devDependencies": {
    "@types/react": "^18.3.3",
    "@vitejs/plugin-react": "^4.3.1",
    "nodemon": "^3.1.9",
    "typescript": "^5.2.0",
    "vite": "^5.3.4",
    "vite-plugin-css-injected-by-js": "^3.5.2",
    "vite-plugin-top-level-await": "^1.4.2",
    "vite-plugin-wasm": "^3.3.0"
  },
  "license": "MIT"
}