@storacha/clawracha 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +134 -0
- package/dist/blockstore/disk.d.ts +15 -0
- package/dist/blockstore/disk.d.ts.map +1 -0
- package/dist/blockstore/disk.js +38 -0
- package/dist/blockstore/gateway.d.ts +12 -0
- package/dist/blockstore/gateway.d.ts.map +1 -0
- package/dist/blockstore/gateway.js +28 -0
- package/dist/blockstore/index.d.ts +13 -0
- package/dist/blockstore/index.d.ts.map +1 -0
- package/dist/blockstore/index.js +12 -0
- package/dist/blockstore/memory.d.ts +16 -0
- package/dist/blockstore/memory.d.ts.map +1 -0
- package/dist/blockstore/memory.js +23 -0
- package/dist/blockstore/tiered.d.ts +29 -0
- package/dist/blockstore/tiered.d.ts.map +1 -0
- package/dist/blockstore/tiered.js +65 -0
- package/dist/blockstore/workspace.d.ts +15 -0
- package/dist/blockstore/workspace.d.ts.map +1 -0
- package/dist/blockstore/workspace.js +20 -0
- package/dist/differ.d.ts +33 -0
- package/dist/differ.d.ts.map +1 -0
- package/dist/differ.js +57 -0
- package/dist/encoder.d.ts +16 -0
- package/dist/encoder.d.ts.map +1 -0
- package/dist/encoder.js +52 -0
- package/dist/fs.d.ts +17 -0
- package/dist/fs.d.ts.map +1 -0
- package/dist/fs.js +27 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +11 -0
- package/dist/plugin.d.ts +14 -0
- package/dist/plugin.d.ts.map +1 -0
- package/dist/plugin.js +220 -0
- package/dist/sync.d.ts +51 -0
- package/dist/sync.d.ts.map +1 -0
- package/dist/sync.js +290 -0
- package/dist/types.d.ts +54 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +4 -0
- package/dist/watcher.d.ts +42 -0
- package/dist/watcher.d.ts.map +1 -0
- package/dist/watcher.js +101 -0
- package/openclaw.plugin.json +30 -0
- package/package.json +54 -0
- package/src/blockstore/disk.ts +57 -0
- package/src/blockstore/index.ts +23 -0
- package/src/blockstore/workspace.ts +41 -0
- package/src/handlers/apply.ts +79 -0
- package/src/handlers/process.ts +118 -0
- package/src/handlers/remote.ts +61 -0
- package/src/index.ts +13 -0
- package/src/mdsync/index.ts +557 -0
- package/src/plugin.ts +481 -0
- package/src/sync.ts +258 -0
- package/src/types/index.ts +64 -0
- package/src/utils/client.ts +51 -0
- package/src/utils/differ.ts +67 -0
- package/src/utils/encoder.ts +64 -0
- package/src/utils/tempcar.ts +79 -0
- package/src/watcher.ts +128 -0
- package/test/blockstore/blockstore.test.ts +113 -0
- package/test/handlers/apply.test.ts +276 -0
- package/test/handlers/process.test.ts +301 -0
- package/test/handlers/remote.test.ts +182 -0
- package/test/mdsync/mdsync.test.ts +120 -0
- package/test/utils/differ.test.ts +94 -0
- package/tsconfig.json +18 -0
package/dist/fs.js
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
 * Graceful filesystem wrapper — a drop-in substitute for node:fs/promises
 * that tolerates EMFILE/ENFILE by queueing and retrying file opens on
 * platforms with small file-descriptor limits.
 */
import * as rawFs from "node:fs";
import gracefulFs from "graceful-fs";

// Attempt to patch the global fs module in place. In some runtimes
// (e.g. vitest) fs properties are non-configurable and gracefulify throws;
// the promises API keeps working unpatched, so that failure is deliberately
// ignored.
try {
  gracefulFs.gracefulify(rawFs);
} catch {
  // Intentionally swallowed — see note above.
}

// Surface the promises API both as the default export and as named bindings.
const promisesApi = rawFs.promises;
export default promisesApi;
export const {
  readFile,
  writeFile,
  mkdir,
  readdir,
  stat,
  unlink,
  access,
  rm,
  mkdtemp,
} = promisesApi;
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
 * @storacha/clawracha - OpenClaw plugin for Storacha workspace sync
 */
// Shared type declarations and blockstore implementations.
export * from "./types.js";
export * from "./blockstore/index.js";
// File → block/CID encoding helpers.
export { encodeWorkspaceFile, encodeFiles } from "./encoder.js";
// Diffing between local entries, encoded files and remote pail state.
export { diffEntries, encodedToEntries, diffRemoteChanges } from "./differ.js";
// Core sync loop and filesystem watcher.
export { SyncEngine } from "./sync.js";
export { FileWatcher } from "./watcher.js";
// OpenClaw plugin entry point (default export of ./plugin.js).
export { default as plugin } from "./plugin.js";
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,cAAc,YAAY,CAAC;AAC3B,cAAc,uBAAuB,CAAC;AACtC,OAAO,EAAE,mBAAmB,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAChE,OAAO,EAAE,WAAW,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAC/E,OAAO,EAAE,UAAU,EAAE,MAAM,WAAW,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAG3C,OAAO,EAAE,OAAO,IAAI,MAAM,EAAE,MAAM,aAAa,CAAC"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
 * @storacha/clawracha - OpenClaw plugin for Storacha workspace sync
 */
// Shared type declarations and blockstore implementations.
export * from "./types.js";
export * from "./blockstore/index.js";
// File → block/CID encoding helpers.
export { encodeWorkspaceFile, encodeFiles } from "./encoder.js";
// Diffing between local entries, encoded files and remote pail state.
export { diffEntries, encodedToEntries, diffRemoteChanges } from "./differ.js";
// Core sync loop and filesystem watcher.
export { SyncEngine } from "./sync.js";
export { FileWatcher } from "./watcher.js";
// Re-export plugin definition (default export)
export { default as plugin } from "./plugin.js";
|
package/dist/plugin.d.ts
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/**
 * OpenClaw Plugin Entry Point
 *
 * Registers:
 * - Background service for file watching and sync
 * - Agent tools for manual sync control
 * - Slash commands for setup
 */
import type { OpenClawPluginApi } from "openclaw/plugin-sdk";
/**
 * Plugin entry — called by OpenClaw when the plugin is loaded.
 *
 * @param api - The OpenClaw plugin API used to register the background
 *   sync service, agent tools and slash commands. Registration is
 *   synchronous; the service's own start/stop hooks are async.
 */
export default function plugin(api: OpenClawPluginApi): void;
//# sourceMappingURL=plugin.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"plugin.d.ts","sourceRoot":"","sources":["../src/plugin.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAIH,OAAO,KAAK,EACV,iBAAiB,EAGlB,MAAM,qBAAqB,CAAC;AAuC7B;;GAEG;AACH,MAAM,CAAC,OAAO,UAAU,MAAM,CAAC,GAAG,EAAE,iBAAiB,QAyMpD"}
|
package/dist/plugin.js
ADDED
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenClaw Plugin Entry Point
|
|
3
|
+
*
|
|
4
|
+
* Registers:
|
|
5
|
+
* - Background service for file watching and sync
|
|
6
|
+
* - Agent tools for manual sync control
|
|
7
|
+
* - Slash commands for setup
|
|
8
|
+
*/
|
|
9
|
+
import * as fs from "node:fs/promises";
|
|
10
|
+
import * as path from "node:path";
|
|
11
|
+
import { SyncEngine } from "./sync.js";
|
|
12
|
+
import { FileWatcher } from "./watcher.js";
|
|
13
|
+
// Global state
// Module-level singletons shared by the service lifecycle hooks, the agent
// tools and the slash commands registered in plugin() below. The
// "storacha-sync" service start() hook populates them; stop() clears the
// engine and watcher (workspaceDir is left set so slash commands still work).
let syncEngine = null;
let fileWatcher = null;
let workspaceDir;
|
|
17
|
+
/**
 * Read the device configuration stored at `<workspace>/.storacha/config.json`.
 *
 * @param {string} workspace - Absolute path of the workspace root.
 * @returns {Promise<object|null>} Parsed config object, or null when the
 *   config file does not exist yet.
 * @throws Rethrows any filesystem error other than a missing file, and any
 *   JSON syntax error from a corrupt config.
 */
async function loadDeviceConfig(workspace) {
  let raw;
  try {
    raw = await fs.readFile(path.join(workspace, ".storacha", "config.json"), "utf-8");
  } catch (err) {
    // A missing config simply means the workspace was never initialized.
    if (err.code === "ENOENT") return null;
    throw err;
  }
  return JSON.parse(raw);
}
|
|
32
|
+
/**
 * Persist the device configuration to `<workspace>/.storacha/config.json`,
 * creating the `.storacha` directory if necessary.
 *
 * @param {string} workspace - Absolute path of the workspace root.
 * @param {object} config - JSON-serializable configuration object.
 * @returns {Promise<void>}
 */
async function saveDeviceConfig(workspace, config) {
  const dir = path.join(workspace, ".storacha");
  await fs.mkdir(dir, { recursive: true });
  // Pretty-print (2-space indent) so the file stays human-editable.
  await fs.writeFile(path.join(dir, "config.json"), JSON.stringify(config, null, 2));
}
|
|
41
|
+
/**
 * Plugin entry — called by OpenClaw when the plugin is loaded.
 *
 * Registers (synchronously):
 *  - a background service ("storacha-sync") that watches the workspace and
 *    pushes changes through the SyncEngine,
 *  - two agent tools (status / sync-now),
 *  - three slash commands (/storacha-init, /storacha-delegate,
 *    /storacha-status).
 */
export default function plugin(api) {
    // Register background service
    api.registerService({
        id: "storacha-sync",
        // Service start: resolve workspace + config, then wire the watcher
        // to the sync engine. Bails out early (with a log line, no error)
        // when the workspace or the device config is missing.
        async start(ctx) {
            // Capture the workspace path in the module-level global so the
            // slash commands registered below can reuse it.
            workspaceDir = ctx.workspaceDir;
            const workspace = workspaceDir;
            if (!workspace) {
                ctx.logger.warn("No workspace directory configured");
                return;
            }
            const deviceConfig = await loadDeviceConfig(workspace);
            if (!deviceConfig) {
                ctx.logger.info("No device config found. Run /storacha-init first.");
                return;
            }
            syncEngine = new SyncEngine(workspace);
            await syncEngine.init(deviceConfig);
            fileWatcher = new FileWatcher({
                workspace,
                config: {
                    enabled: true,
                    watchPatterns: ["**/*"],
                    // Internal state, deps, VCS metadata and build output are
                    // never synced.
                    ignorePatterns: [
                        ".storacha/**",
                        "node_modules/**",
                        ".git/**",
                        "dist/**",
                    ],
                },
                // Debounced change batches flow: encode → sync → persist the
                // updated name archive so the merkle clock survives restarts.
                onChanges: async (changes) => {
                    if (!syncEngine)
                        return;
                    await syncEngine.processChanges(changes);
                    await syncEngine.sync();
                    const nameArchive = await syncEngine.exportNameArchive();
                    // NOTE(review): `deviceConfig` is the config captured at
                    // start(); a delegation imported later via
                    // /storacha-delegate would be overwritten here with the
                    // stale snapshot — confirm intended.
                    const updatedConfig = { ...deviceConfig, nameArchive };
                    await saveDeviceConfig(workspace, updatedConfig);
                },
            });
            fileWatcher.start();
            ctx.logger.info("Started watching workspace");
        },
        // Service stop: tear down the watcher first so no more change
        // callbacks fire, then drop the engine. workspaceDir is intentionally
        // left set (slash commands remain usable).
        async stop(ctx) {
            if (fileWatcher) {
                await fileWatcher.stop();
                fileWatcher = null;
            }
            syncEngine = null;
            ctx.logger.info("Stopped");
        },
    });
    // Register agent tools
    // Tool: read-only status snapshot of the running sync engine.
    api.registerTool({
        name: "storacha_sync_status",
        label: "Storacha Sync Status",
        description: "Get the current Storacha workspace sync status",
        parameters: { type: "object", properties: {} },
        execute: async () => {
            // Graceful degradation: report (not throw) when the service
            // never initialized.
            if (!syncEngine) {
                return {
                    content: [
                        {
                            type: "text",
                            text: "Sync not initialized. Run /storacha-init first.",
                        },
                    ],
                    details: null,
                };
            }
            const status = await syncEngine.status();
            return {
                content: [
                    { type: "text", text: JSON.stringify(status, null, 2) },
                ],
                details: status,
            };
        },
    });
    // Tool: force an immediate sync cycle and return the resulting status.
    api.registerTool({
        name: "storacha_sync_now",
        label: "Storacha Sync Now",
        description: "Trigger an immediate workspace sync to Storacha",
        parameters: { type: "object", properties: {} },
        execute: async () => {
            if (!syncEngine) {
                return {
                    content: [
                        {
                            type: "text",
                            text: "Sync not initialized. Run /storacha-init first.",
                        },
                    ],
                    details: null,
                };
            }
            await syncEngine.sync();
            const status = await syncEngine.status();
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({ success: true, status }, null, 2),
                    },
                ],
                details: status,
            };
        },
    });
    // Register slash commands
    // /storacha-init: generate a fresh agent keypair and write the initial
    // device config. Does NOT start syncing by itself — a delegation and a
    // service restart are still required.
    api.registerCommand({
        name: "storacha-init",
        description: "Initialize Storacha sync for this workspace",
        handler: async (ctx) => {
            const workspace = workspaceDir;
            if (!workspace)
                return { text: "No workspace configured." };
            // Lazy import keeps plugin load cheap when the command is unused.
            const { Agent } = await import("@storacha/ucn/pail");
            const agent = await Agent.generate();
            const agentKey = Agent.format(agent);
            const config = { agentKey };
            await saveDeviceConfig(workspace, config);
            return {
                text: [
                    "🔥 Storacha sync initialized!",
                    `Agent DID: \`${agent.did()}\``,
                    "",
                    "Next: get a delegation from a space owner, then run /storacha-delegate",
                ].join("\n"),
            };
        },
    });
    // /storacha-delegate <base64>: store an externally-issued delegation in
    // the device config. The delegation is stored verbatim, not validated
    // here; it takes effect on the next service start.
    api.registerCommand({
        name: "storacha-delegate",
        description: "Import a delegation to sync with a Storacha space",
        acceptsArgs: true,
        handler: async (ctx) => {
            const workspace = workspaceDir;
            if (!workspace)
                return { text: "No workspace configured." };
            const delegationB64 = ctx.args?.trim();
            if (!delegationB64)
                return { text: "Usage: /storacha-delegate <base64-delegation>" };
            const config = await loadDeviceConfig(workspace);
            if (!config)
                return { text: "Not initialized. Run /storacha-init first." };
            config.delegation = delegationB64;
            await saveDeviceConfig(workspace, config);
            return {
                text: "✅ Delegation imported! Restart the gateway to start syncing.",
            };
        },
    });
    // /storacha-status: human-readable summary combining the on-disk config
    // with live engine state (if the service is running).
    api.registerCommand({
        name: "storacha-status",
        description: "Show Storacha sync status",
        handler: async (ctx) => {
            const workspace = workspaceDir;
            if (!workspace)
                return { text: "No workspace configured." };
            const config = await loadDeviceConfig(workspace);
            if (!config)
                return { text: "Not initialized. Run /storacha-init first." };
            const lines = [
                "🔥 Storacha Sync Status",
                `Agent: configured`,
                `Delegation: ${config.delegation ? "imported" : "not set"}`,
                `Name Archive: ${config.nameArchive ? "saved" : "not created"}`,
            ];
            // Live engine details only when the background service is up.
            if (syncEngine) {
                const status = await syncEngine.status();
                lines.push(`Running: ${status.running}`, `Last Sync: ${status.lastSync ? new Date(status.lastSync).toISOString() : "never"}`, `Entries: ${status.entryCount}`, `Pending: ${status.pendingChanges}`);
            }
            return { text: lines.join("\n") };
        },
    });
}
|
package/dist/sync.d.ts
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
/**
 * Sync engine - orchestrates the full sync loop
 *
 * 1. Watch for file changes
 * 2. Encode changed files to root CIDs
 * 3. Diff local vs pail entries → put/del ops
 * 4. Generate UCN revision via batch (puts) + individual ops (dels)
 * 5. Upload all blocks as CAR
 * 6. Apply remote changes to local filesystem
 */
import type { SyncState, FileChange, DeviceConfig } from "./types.js";
import { type PailEntries } from "./differ.js";
export declare class SyncEngine {
    // Absolute path of the workspace being synced.
    private workspace;
    // Local blockstore backing all pail/revision reads and writes.
    private blocks;
    // UCN name (mutable pointer identity); null until init() runs.
    private name;
    // Currently-resolved value of the name; null when no revision exists yet.
    private current;
    // Queued put/del operations awaiting the next sync().
    private pendingOps;
    // File blocks accumulated for the next CAR upload.
    private allBlocks;
    // Liveness flag surfaced through status().
    private running;
    // Epoch-ms timestamp of the last completed sync(), or null.
    private lastSync;
    constructor(workspace: string);
    /**
     * Initialize sync engine with device config
     */
    init(config: DeviceConfig): Promise<void>;
    /**
     * Process a batch of file changes
     */
    processChanges(changes: FileChange[]): Promise<void>;
    /**
     * Execute sync: generate revision, publish, upload, apply remote changes
     */
    sync(): Promise<void>;
    /**
     * Create CAR and upload to Storacha
     */
    private uploadCAR;
    /**
     * Get current pail entries as map
     */
    getPailEntries(): Promise<PailEntries>;
    /**
     * Apply remote changes to local filesystem
     */
    private applyRemoteChanges;
    // Snapshot of engine state for tools/commands.
    status(): Promise<SyncState>;
    // Base64-encoded archive of the UCN name for persistence across restarts.
    exportNameArchive(): Promise<string>;
    private storeBlocks;
}
//# sourceMappingURL=sync.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sync.d.ts","sourceRoot":"","sources":["../src/sync.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAYH,OAAO,KAAK,EAAE,SAAS,EAAE,UAAU,EAAU,YAAY,EAAE,MAAM,YAAY,CAAC;AAO9E,OAAO,EAAqB,KAAK,WAAW,EAAE,MAAM,aAAa,CAAC;AAiBlE,qBAAa,UAAU;IACrB,OAAO,CAAC,SAAS,CAAS;IAC1B,OAAO,CAAC,MAAM,CAAsB;IACpC,OAAO,CAAC,IAAI,CAAyB;IACrC,OAAO,CAAC,OAAO,CAA0B;IACzC,OAAO,CAAC,UAAU,CAAgB;IAClC,OAAO,CAAC,SAAS,CAAe;IAChC,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,QAAQ,CAAuB;gBAE3B,SAAS,EAAE,MAAM;IAK7B;;OAEG;IACG,IAAI,CAAC,MAAM,EAAE,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC;IAuB/C;;OAEG;IACG,cAAc,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAqC1D;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IA4H3B;;OAEG;YACW,SAAS;IAwCvB;;OAEG;IACG,cAAc,IAAI,OAAO,CAAC,WAAW,CAAC;IAc5C;;OAEG;YACW,kBAAkB;IAsB1B,MAAM,IAAI,OAAO,CAAC,SAAS,CAAC;IAW5B,iBAAiB,IAAI,OAAO,CAAC,MAAM,CAAC;YAM5B,WAAW;CAK1B"}
|
package/dist/sync.js
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Sync engine - orchestrates the full sync loop
|
|
3
|
+
*
|
|
4
|
+
* 1. Watch for file changes
|
|
5
|
+
* 2. Encode changed files to root CIDs
|
|
6
|
+
* 3. Diff local vs pail entries → put/del ops
|
|
7
|
+
* 4. Generate UCN revision via batch (puts) + individual ops (dels)
|
|
8
|
+
* 5. Upload all blocks as CAR
|
|
9
|
+
* 6. Apply remote changes to local filesystem
|
|
10
|
+
*/
|
|
11
|
+
import * as fs from "node:fs/promises";
|
|
12
|
+
import * as path from "node:path";
|
|
13
|
+
import { CarWriter } from "@ipld/car/writer";
|
|
14
|
+
// UCN Pail imports
|
|
15
|
+
import { Agent, Name, Revision, Value } from "@storacha/ucn/pail";
|
|
16
|
+
import * as Batch from "@storacha/ucn/pail/batch";
|
|
17
|
+
import { createWorkspaceBlockstore, } from "./blockstore/index.js";
|
|
18
|
+
import { encodeFiles } from "./encoder.js";
|
|
19
|
+
import { diffRemoteChanges } from "./differ.js";
|
|
20
|
+
/**
 * Orchestrates the full sync loop for one workspace: accumulates file
 * changes as pail put/del operations, commits them as UCN revisions,
 * publishes, uploads the resulting blocks as a CAR, and mirrors remote
 * deletions back onto the local filesystem.
 */
export class SyncEngine {
    // Absolute path of the workspace being synced.
    workspace;
    // Local blockstore backing all pail/revision reads and writes.
    blocks;
    // UCN name; null until init().
    name = null;
    // Currently-resolved value of the name; null when no revision exists yet.
    current = null;
    // Queued { type: 'put'|'del', key, value? } ops awaiting the next sync().
    pendingOps = [];
    allBlocks = []; // Accumulate blocks for CAR upload
    // NOTE(review): never set to true anywhere in this class, so status()
    // always reports running: false — confirm whether a caller is expected
    // to flip it.
    running = false;
    // Epoch-ms timestamp of the last completed sync(), or null.
    lastSync = null;
    constructor(workspace) {
        this.workspace = workspace;
        this.blocks = createWorkspaceBlockstore(workspace);
    }
    /**
     * Initialize sync engine with device config.
     *
     * Restores the UCN name from config.nameArchive when present (so the
     * merkle clock survives restarts), otherwise creates a fresh name.
     * Then resolves the current value; a resolution failure with code
     * ERR_NO_VALUE simply means nothing has been published yet.
     */
    async init(config) {
        const agent = Agent.parse(config.agentKey);
        if (config.nameArchive) {
            const archiveBytes = Buffer.from(config.nameArchive, "base64");
            this.name = await Name.extract(agent, archiveBytes);
        }
        else {
            this.name = await Name.create(agent);
        }
        try {
            const result = await Revision.resolve(this.blocks, this.name);
            this.current = result.value;
            await this.storeBlocks(result.additions);
        }
        catch (err) {
            if (err.code === "ERR_NO_VALUE") {
                // No revision published for this name yet — start empty.
                this.current = null;
            }
            else {
                throw err;
            }
        }
    }
    /**
     * Process a batch of file changes: encode added/modified files, queue
     * put/del ops for the next sync(). No-ops (unchanged CID, delete of a
     * key not in the pail) are filtered out here.
     */
    async processChanges(changes) {
        // Everything except unlink (add/change) gets re-encoded.
        const toEncode = changes
            .filter((c) => c.type !== "unlink")
            .map((c) => c.path);
        const toDelete = changes
            .filter((c) => c.type === "unlink")
            .map((c) => c.path);
        // Encode changed files
        const encoded = await encodeFiles(this.workspace, toEncode);
        // Accumulate file blocks for CAR upload only (not persisted to blockstore)
        // TODO: spill to temporary CAR file if memory becomes an issue
        for (const file of encoded) {
            this.allBlocks.push(...file.blocks);
        }
        // Check current pail state to skip no-ops
        const currentEntries = await this.getPailEntries();
        // Generate puts for changed files (skip if CID unchanged)
        for (const file of encoded) {
            const existing = currentEntries.get(file.path);
            if (!existing || !existing.equals(file.rootCID)) {
                this.pendingOps.push({ type: 'put', key: file.path, value: file.rootCID });
            }
        }
        // Generate dels for removed files (skip if not in pail)
        for (const deletePath of toDelete) {
            if (currentEntries.has(deletePath)) {
                this.pendingOps.push({ type: 'del', key: deletePath });
            }
        }
    }
    /**
     * Execute sync: generate revision, publish, upload, apply remote changes.
     *
     * Puts are committed via the batch API (one revision for the lot); dels
     * are applied one revision each since batch only supports puts. With no
     * local revisions to publish, it falls back to pulling the remote value.
     * Finally all accumulated file + revision blocks are uploaded as a CAR
     * and remote-only changes are mirrored locally.
     *
     * @throws Error when init() has not been called.
     */
    async sync() {
        if (!this.name) {
            throw new Error("Sync engine not initialized");
        }
        // Snapshot entries before mutating so remote changes can be diffed
        // at the end.
        const beforeEntries = await this.getPailEntries();
        const revisionBlocks = [];
        // Separate puts and dels (batch only supports puts)
        const puts = this.pendingOps.filter((op) => op.type === "put" && op.value);
        const dels = this.pendingOps.filter((op) => op.type === "del");
        // Process puts with batch
        if (puts.length > 0) {
            if (this.current) {
                const batcher = await Batch.create(this.blocks, this.current);
                for (const op of puts) {
                    if (op.value)
                        await batcher.put(op.key, op.value);
                }
                const result = await batcher.commit();
                revisionBlocks.push(...result.additions);
                await this.storeBlocks(result.additions);
                const opResult = result.revision.operation;
                // Advance the local head to the freshly committed revision.
                this.current = Value.create(this.name, opResult.root, [
                    result.revision,
                ]);
            }
            else {
                // First revision - v0Put
                // Batch.create needs an existing value, so bootstrap with a
                // single v0Put, then batch the rest on top of it.
                const firstPut = puts[0];
                if (firstPut.value) {
                    const result = await Revision.v0Put(this.blocks, firstPut.key, firstPut.value);
                    revisionBlocks.push(...result.additions);
                    await this.storeBlocks(result.additions);
                    const opResult = result.revision.operation;
                    this.current = Value.create(this.name, opResult.root, [
                        result.revision,
                    ]);
                    // Batch remaining puts
                    if (puts.length > 1) {
                        const batcher = await Batch.create(this.blocks, this.current);
                        for (const op of puts.slice(1)) {
                            if (op.value)
                                await batcher.put(op.key, op.value);
                        }
                        const batchResult = await batcher.commit();
                        revisionBlocks.push(...batchResult.additions);
                        await this.storeBlocks(batchResult.additions);
                        const batchOpResult = batchResult.revision.operation;
                        this.current = Value.create(this.name, batchOpResult.root, [
                            batchResult.revision,
                        ]);
                    }
                }
            }
        }
        // Process dels individually (batch doesn't support del)
        // NOTE(review): dels with no current value are silently dropped —
        // can only happen if the pail is empty, where a del is a no-op anyway.
        if (dels.length > 0 && this.current) {
            for (const op of dels) {
                const result = await Revision.del(this.blocks, this.current, op.key);
                revisionBlocks.push(...result.additions);
                await this.storeBlocks(result.additions);
                const opResult = result.revision.operation;
                this.current = Value.create(this.name, opResult.root, [
                    result.revision,
                ]);
            }
        }
        this.pendingOps = [];
        // Publish to network
        if (this.current && this.current.revision.length > 0) {
            const latestRevision = this.current.revision[this.current.revision.length - 1];
            const pubResult = await Revision.publish(this.blocks, this.name, latestRevision);
            revisionBlocks.push(...pubResult.additions);
            await this.storeBlocks(pubResult.additions);
            // Publish may merge in concurrent remote revisions.
            this.current = pubResult.value;
        }
        else {
            // Just pull remote
            try {
                const result = await Revision.resolve(this.blocks, this.name, {
                    base: this.current ?? undefined,
                });
                await this.storeBlocks(result.additions);
                this.current = result.value;
            }
            catch (err) {
                // ERR_NO_VALUE: nothing published remotely either — fine.
                if (err.code !== "ERR_NO_VALUE")
                    throw err;
            }
        }
        // Combine all blocks and upload as CAR
        const allUploadBlocks = [...this.allBlocks, ...revisionBlocks];
        if (allUploadBlocks.length > 0) {
            await this.uploadCAR(allUploadBlocks);
            this.allBlocks = []; // Clear accumulated blocks
        }
        // Apply remote changes
        const afterEntries = await this.getPailEntries();
        const remoteChanges = diffRemoteChanges(beforeEntries, afterEntries);
        if (remoteChanges.length > 0) {
            await this.applyRemoteChanges(remoteChanges, afterEntries);
        }
        this.lastSync = Date.now();
    }
    /**
     * Create CAR and upload to Storacha.
     * Currently a stub: builds the CAR bytes in memory and only logs what
     * would be uploaded.
     */
    async uploadCAR(blocks) {
        if (blocks.length === 0)
            return;
        // Find root CID (last block is typically the root)
        // NOTE(review): this relies on the caller appending revision blocks
        // last and the final one being the root — fragile; confirm ordering
        // guarantee before wiring up the real upload.
        const rootCID = blocks[blocks.length - 1].cid;
        // Create CAR
        const { writer, out } = CarWriter.create([rootCID]);
        // Collect CAR bytes
        // Start draining `out` before writing; CarWriter streams, so reading
        // and writing must proceed concurrently.
        const chunks = [];
        const collectPromise = (async () => {
            for await (const chunk of out) {
                chunks.push(chunk);
            }
        })();
        // Write blocks
        for (const block of blocks) {
            await writer.put(block);
        }
        await writer.close();
        await collectPromise;
        // Flatten the chunk list into one contiguous byte array.
        const carBytes = new Uint8Array(chunks.reduce((acc, c) => acc + c.length, 0));
        let offset = 0;
        for (const chunk of chunks) {
            carBytes.set(chunk, offset);
            offset += chunk.length;
        }
        // TODO: Upload to Storacha using client.uploadCAR
        // For now, just log
        console.log(`[storacha-sync] Would upload CAR: ${blocks.length} blocks, ${carBytes.length} bytes, root: ${rootCID}`);
    }
    /**
     * Get current pail entries as a path → CID map. Empty map when no
     * revision has been resolved yet.
     */
    async getPailEntries() {
        const entries = new Map();
        if (!this.current)
            return entries;
        for await (const [key, value] of Revision.entries(this.blocks, this.current)) {
            const cid = value;
            if (cid)
                entries.set(key, cid);
        }
        return entries;
    }
    /**
     * Apply remote changes to local filesystem.
     * Remote deletions are mirrored locally (missing file tolerated);
     * remote adds/updates are only logged — fetching and reconstructing
     * file content is still TODO.
     */
    async applyRemoteChanges(changedPaths, entries) {
        for (const relativePath of changedPaths) {
            const cid = entries.get(relativePath);
            if (!cid) {
                // Deleted remotely
                const fullPath = path.join(this.workspace, relativePath);
                try {
                    await fs.unlink(fullPath);
                    console.log(`[storacha-sync] Deleted: ${relativePath}`);
                }
                catch (err) {
                    // Already gone locally — nothing to do.
                    if (err.code !== "ENOENT")
                        throw err;
                }
            }
            else {
                // Changed remotely - TODO: fetch and reconstruct
                console.log(`[storacha-sync] Remote change: ${relativePath} → ${cid}`);
            }
        }
    }
    /**
     * Snapshot of engine state for tools/commands: liveness, last sync
     * time, current root CID, entry count and queued-op count.
     */
    async status() {
        const entries = await this.getPailEntries();
        return {
            running: this.running,
            lastSync: this.lastSync,
            root: this.current?.root ?? null,
            entryCount: entries.size,
            pendingChanges: this.pendingOps.length,
        };
    }
    /**
     * Export the UCN name as a base64 archive string for persistence in the
     * device config.
     * @throws Error when init() has not been called.
     */
    async exportNameArchive() {
        if (!this.name)
            throw new Error("Sync engine not initialized");
        const bytes = await this.name.archive();
        return Buffer.from(bytes).toString("base64");
    }
    // Persist a list of blocks into the local blockstore, one at a time.
    async storeBlocks(blocks) {
        for (const block of blocks) {
            await this.blocks.put(block);
        }
    }
}
|