@storacha/clawracha 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/README.md +134 -0
  2. package/dist/blockstore/disk.d.ts +15 -0
  3. package/dist/blockstore/disk.d.ts.map +1 -0
  4. package/dist/blockstore/disk.js +38 -0
  5. package/dist/blockstore/gateway.d.ts +12 -0
  6. package/dist/blockstore/gateway.d.ts.map +1 -0
  7. package/dist/blockstore/gateway.js +28 -0
  8. package/dist/blockstore/index.d.ts +13 -0
  9. package/dist/blockstore/index.d.ts.map +1 -0
  10. package/dist/blockstore/index.js +12 -0
  11. package/dist/blockstore/memory.d.ts +16 -0
  12. package/dist/blockstore/memory.d.ts.map +1 -0
  13. package/dist/blockstore/memory.js +23 -0
  14. package/dist/blockstore/tiered.d.ts +29 -0
  15. package/dist/blockstore/tiered.d.ts.map +1 -0
  16. package/dist/blockstore/tiered.js +65 -0
  17. package/dist/blockstore/workspace.d.ts +15 -0
  18. package/dist/blockstore/workspace.d.ts.map +1 -0
  19. package/dist/blockstore/workspace.js +20 -0
  20. package/dist/differ.d.ts +33 -0
  21. package/dist/differ.d.ts.map +1 -0
  22. package/dist/differ.js +57 -0
  23. package/dist/encoder.d.ts +16 -0
  24. package/dist/encoder.d.ts.map +1 -0
  25. package/dist/encoder.js +52 -0
  26. package/dist/fs.d.ts +17 -0
  27. package/dist/fs.d.ts.map +1 -0
  28. package/dist/fs.js +27 -0
  29. package/dist/index.d.ts +11 -0
  30. package/dist/index.d.ts.map +1 -0
  31. package/dist/index.js +11 -0
  32. package/dist/plugin.d.ts +14 -0
  33. package/dist/plugin.d.ts.map +1 -0
  34. package/dist/plugin.js +220 -0
  35. package/dist/sync.d.ts +51 -0
  36. package/dist/sync.d.ts.map +1 -0
  37. package/dist/sync.js +290 -0
  38. package/dist/types.d.ts +54 -0
  39. package/dist/types.d.ts.map +1 -0
  40. package/dist/types.js +4 -0
  41. package/dist/watcher.d.ts +42 -0
  42. package/dist/watcher.d.ts.map +1 -0
  43. package/dist/watcher.js +101 -0
  44. package/openclaw.plugin.json +30 -0
  45. package/package.json +54 -0
  46. package/src/blockstore/disk.ts +57 -0
  47. package/src/blockstore/index.ts +23 -0
  48. package/src/blockstore/workspace.ts +41 -0
  49. package/src/handlers/apply.ts +79 -0
  50. package/src/handlers/process.ts +118 -0
  51. package/src/handlers/remote.ts +61 -0
  52. package/src/index.ts +13 -0
  53. package/src/mdsync/index.ts +557 -0
  54. package/src/plugin.ts +481 -0
  55. package/src/sync.ts +258 -0
  56. package/src/types/index.ts +64 -0
  57. package/src/utils/client.ts +51 -0
  58. package/src/utils/differ.ts +67 -0
  59. package/src/utils/encoder.ts +64 -0
  60. package/src/utils/tempcar.ts +79 -0
  61. package/src/watcher.ts +128 -0
  62. package/test/blockstore/blockstore.test.ts +113 -0
  63. package/test/handlers/apply.test.ts +276 -0
  64. package/test/handlers/process.test.ts +301 -0
  65. package/test/handlers/remote.test.ts +182 -0
  66. package/test/mdsync/mdsync.test.ts +120 -0
  67. package/test/utils/differ.test.ts +94 -0
  68. package/tsconfig.json +18 -0
package/src/watcher.ts ADDED
@@ -0,0 +1,128 @@
1
+ /**
2
+ * File watcher - monitors workspace for changes
3
+ *
4
+ * Uses chokidar to watch for file changes and batches them
5
+ * before triggering sync.
6
+ */
7
+
8
+ import chokidar from "chokidar";
9
+ import * as path from "node:path";
10
+ import type { FileChange, SyncPluginConfig } from "./types/index.js";
11
+
12
/** Options for constructing a {@link FileWatcher}. */
export interface WatcherOptions {
  /** Root directory of the workspace to watch (watch/ignore patterns are relative to it). */
  workspace: string;
  /** Plugin configuration supplying `watchPatterns` and `ignorePatterns` globs. */
  config: SyncPluginConfig;
  /** Called with a deduped batch of file changes once watcher activity settles. */
  onChanges: (changes: FileChange[]) => Promise<void>;
  /** Debounce window in milliseconds before flushing a batch (default: 500). */
  debounceMs?: number;
}
18
+
19
+ export class FileWatcher {
20
+ private watcher: chokidar.FSWatcher | null = null;
21
+ private pendingChanges: Map<string, FileChange> = new Map();
22
+ private debounceTimer: NodeJS.Timeout | null = null;
23
+ private options: WatcherOptions;
24
+ private debounceMs: number;
25
+
26
+ constructor(options: WatcherOptions) {
27
+ this.options = options;
28
+ this.debounceMs = options.debounceMs ?? 500;
29
+ }
30
+
31
+ /**
32
+ * Start watching the workspace
33
+ */
34
+ start(): void {
35
+ if (this.watcher) return;
36
+
37
+ const { workspace, config } = this.options;
38
+
39
+ // Build watch patterns
40
+ const watchPaths = config.watchPatterns.map((p) => path.join(workspace, p));
41
+
42
+ // Build ignore patterns
43
+ const ignored = [
44
+ ...config.ignorePatterns,
45
+ ".storacha/**", // Always ignore our own data
46
+ ];
47
+
48
+ this.watcher = chokidar.watch(watchPaths, {
49
+ ignored,
50
+ persistent: true,
51
+ ignoreInitial: true,
52
+ awaitWriteFinish: {
53
+ stabilityThreshold: 200,
54
+ pollInterval: 100,
55
+ },
56
+ });
57
+
58
+ this.watcher
59
+ .on("add", (filePath) => this.handleChange("add", filePath))
60
+ .on("change", (filePath) => this.handleChange("change", filePath))
61
+ .on("unlink", (filePath) => this.handleChange("unlink", filePath))
62
+ .on("error", (err) => console.error("Watcher error:", err));
63
+ }
64
+
65
+ /**
66
+ * Stop watching
67
+ */
68
+ async stop(): Promise<void> {
69
+ if (this.debounceTimer) {
70
+ clearTimeout(this.debounceTimer);
71
+ this.debounceTimer = null;
72
+ }
73
+
74
+ if (this.watcher) {
75
+ await this.watcher.close();
76
+ this.watcher = null;
77
+ }
78
+ }
79
+
80
+ /**
81
+ * Handle a file change event
82
+ */
83
+ private handleChange(
84
+ type: "add" | "change" | "unlink",
85
+ filePath: string
86
+ ): void {
87
+ const relativePath = path.relative(this.options.workspace, filePath);
88
+
89
+ // Dedupe: later events for same path replace earlier ones
90
+ this.pendingChanges.set(relativePath, { type, path: relativePath });
91
+
92
+ // Debounce: wait for activity to settle before processing
93
+ if (this.debounceTimer) {
94
+ clearTimeout(this.debounceTimer);
95
+ }
96
+
97
+ this.debounceTimer = setTimeout(() => {
98
+ this.flush();
99
+ }, this.debounceMs);
100
+ }
101
+
102
+ /**
103
+ * Flush pending changes to callback
104
+ */
105
+ private async flush(): Promise<void> {
106
+ if (this.pendingChanges.size === 0) return;
107
+
108
+ const changes = Array.from(this.pendingChanges.values());
109
+ this.pendingChanges.clear();
110
+
111
+ try {
112
+ await this.options.onChanges(changes);
113
+ } catch (err) {
114
+ console.error("Error processing file changes:", err);
115
+ }
116
+ }
117
+
118
+ /**
119
+ * Force immediate flush (for testing)
120
+ */
121
+ async forceFlush(): Promise<void> {
122
+ if (this.debounceTimer) {
123
+ clearTimeout(this.debounceTimer);
124
+ this.debounceTimer = null;
125
+ }
126
+ await this.flush();
127
+ }
128
+ }
@@ -0,0 +1,113 @@
1
+ import { describe, it, expect, beforeEach, afterEach } from "vitest";
2
+ import * as fs from "node:fs/promises";
3
+ import * as path from "node:path";
4
+ import * as os from "node:os";
5
+ import { CID } from "multiformats/cid";
6
+ import { sha256 } from "multiformats/hashes/sha2";
7
+ import * as raw from "multiformats/codecs/raw";
8
+ import {
9
+ MemoryBlockstore,
10
+ DiskBlockstore,
11
+ createWorkspaceBlockstore,
12
+ } from "../../src/blockstore/index.js";
13
+
14
+ const createTestBlock = async (content: string) => {
15
+ const bytes = new TextEncoder().encode(content);
16
+ const hash = await sha256.digest(bytes);
17
+ const cid = CID.create(1, raw.code, hash);
18
+ return { cid, bytes };
19
+ };
20
+
21
+ describe("MemoryBlockstore (from UCN)", () => {
22
+ it("should store and retrieve blocks", async () => {
23
+ const store = new MemoryBlockstore();
24
+ const block = await createTestBlock("hello");
25
+
26
+ await store.put(block);
27
+ const retrieved = await store.get(block.cid);
28
+
29
+ expect(retrieved).toBeDefined();
30
+ expect(new Uint8Array(retrieved!.bytes)).toEqual(block.bytes);
31
+ });
32
+
33
+ it("should return undefined for missing blocks", async () => {
34
+ const store = new MemoryBlockstore();
35
+ const block = await createTestBlock("missing");
36
+
37
+ const retrieved = await store.get(block.cid);
38
+ expect(retrieved).toBeUndefined();
39
+ });
40
+ });
41
+
42
+ describe("DiskBlockstore", () => {
43
+ let tmpDir: string;
44
+ let store: DiskBlockstore;
45
+
46
+ beforeEach(async () => {
47
+ tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawracha-test-"));
48
+ store = new DiskBlockstore(tmpDir);
49
+ });
50
+
51
+ afterEach(async () => {
52
+ await fs.rm(tmpDir, { recursive: true, force: true });
53
+ });
54
+
55
+ it("should store and retrieve blocks", async () => {
56
+ const block = await createTestBlock("disk-test");
57
+
58
+ await store.put(block);
59
+ const retrieved = await store.get(block.cid);
60
+
61
+ expect(retrieved).toBeDefined();
62
+ expect(new Uint8Array(retrieved!.bytes)).toEqual(block.bytes);
63
+ });
64
+
65
+ it("should persist blocks to disk", async () => {
66
+ const block = await createTestBlock("persist-test");
67
+
68
+ await store.put(block);
69
+
70
+ const store2 = new DiskBlockstore(tmpDir);
71
+ const retrieved = await store2.get(block.cid);
72
+
73
+ expect(retrieved).toBeDefined();
74
+ expect(new Uint8Array(retrieved!.bytes)).toEqual(block.bytes);
75
+ });
76
+ });
77
+
78
+ describe("createWorkspaceBlockstore", () => {
79
+ let tmpDir: string;
80
+
81
+ beforeEach(async () => {
82
+ tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "clawracha-ws-"));
83
+ });
84
+
85
+ afterEach(async () => {
86
+ await fs.rm(tmpDir, { recursive: true, force: true });
87
+ });
88
+
89
+ it("should store and retrieve via tiered blockstore", async () => {
90
+ const store = createWorkspaceBlockstore(tmpDir);
91
+ const block = await createTestBlock("workspace-test");
92
+
93
+ await store.put(block);
94
+ const retrieved = await store.get(block.cid);
95
+
96
+ expect(retrieved).toBeDefined();
97
+ expect(new Uint8Array(retrieved!.bytes)).toEqual(block.bytes);
98
+ });
99
+
100
+ it("should persist to disk", async () => {
101
+ const store = createWorkspaceBlockstore(tmpDir);
102
+ const block = await createTestBlock("persist-ws-test");
103
+
104
+ await store.put(block);
105
+
106
+ // Verify on disk directly
107
+ const disk = new DiskBlockstore(tmpDir);
108
+ const retrieved = await disk.get(block.cid);
109
+
110
+ expect(retrieved).toBeDefined();
111
+ expect(new Uint8Array(retrieved!.bytes)).toEqual(block.bytes);
112
+ });
113
+ });
@@ -0,0 +1,276 @@
1
+ import { describe, it, expect, beforeEach } from "vitest";
2
+ import { CID } from "multiformats/cid";
3
+ import { sha256 } from "multiformats/hashes/sha2";
4
+ import * as raw from "multiformats/codecs/raw";
5
+ import { connect } from "@ucanto/client";
6
+ import * as CAR from "@ucanto/transport/car";
7
+ import * as ed25519 from "@ucanto/principal/ed25519";
8
+
9
+ import { Agent, Name, Revision } from "@storacha/ucn/pail";
10
+ import type { ClockConnection, ValueView } from "@storacha/ucn/pail/api";
11
+ import { MemoryBlockstore } from "@storacha/ucn/block";
12
+ import { createServer, createService } from "@storacha/ucn/server";
13
+ import type { HeadEvent } from "@storacha/ucn/server/api";
14
+ import { applyPendingOps } from "../../src/handlers/apply.js";
15
+ import type { PailOp } from "../../src/types/index.js";
16
+ import { Block } from "@ucanto/interface";
17
+
18
+ // --- Helpers ---
19
+
20
+ const createTestCID = async (content: string) => {
21
+ const bytes = new TextEncoder().encode(content);
22
+ const hash = await sha256.digest(bytes);
23
+ return CID.create(1, raw.code, hash);
24
+ };
25
+
26
+ class MemoryHeadStorage {
27
+ heads: Record<string, HeadEvent[]> = {};
28
+
29
+ async get(clock: string) {
30
+ return this.heads[clock]
31
+ ? { ok: this.heads[clock] }
32
+ : { error: { name: "NotFound" as const, message: "Clock not found" } };
33
+ }
34
+
35
+ async put(clock: string, head: HeadEvent[]) {
36
+ this.heads[clock] = head;
37
+ return { ok: {} };
38
+ }
39
+ }
40
+
41
+ const storeBlocks = async (store: MemoryBlockstore, blocks: Array<Block>) => {
42
+ for (const block of blocks) {
43
+ await store.put(block);
44
+ }
45
+ };
46
+
47
+ /**
48
+ * Create a mock clock server and return a ClockConnection for it.
49
+ * Shared headStore + blockCache allow multiple agents to sync.
50
+ */
51
+ const createTestEnv = async (sharedBlocks?: MemoryBlockstore) => {
52
+ const headStore = new MemoryHeadStorage();
53
+ const blockCache = sharedBlocks ?? new MemoryBlockstore();
54
+ const serviceId = await ed25519.generate();
55
+
56
+ const service = createService({
57
+ headStore: headStore,
58
+ blockFetcher: blockCache,
59
+ blockCache,
60
+ });
61
+
62
+ const server = createServer(serviceId, service);
63
+ const remote = connect({
64
+ id: serviceId,
65
+ codec: CAR.outbound,
66
+ channel: server,
67
+ }) as unknown as ClockConnection;
68
+
69
+ return { remote, headStore, blockCache };
70
+ };
71
+
72
+ /**
73
+ * Read all entries from a pail value.
74
+ */
75
+ const getEntries = async (
76
+ blocks: MemoryBlockstore,
77
+ value: ValueView,
78
+ ): Promise<Map<string, string>> => {
79
+ const entries = new Map<string, string>();
80
+ for await (const [key, val] of Revision.entries(blocks, value)) {
81
+ entries.set(key, (val as CID).toString());
82
+ }
83
+ return entries;
84
+ };
85
+
86
+ // --- Tests ---
87
+
88
+ describe("applyPendingOps", () => {
89
+ let blocks: MemoryBlockstore;
90
+ let remote: ClockConnection;
91
+
92
+ beforeEach(async () => {
93
+ blocks = new MemoryBlockstore();
94
+ const env = await createTestEnv();
95
+ remote = env.remote;
96
+ });
97
+
98
+ it("should handle a single put on an empty pail", async () => {
99
+ const agent = await Agent.generate();
100
+ const name = await Name.create(agent);
101
+ const cid = await createTestCID("file-a");
102
+
103
+ const ops: PailOp[] = [{ type: "put", key: "docs/readme.md", value: cid }];
104
+
105
+ const result = await applyPendingOps(blocks, name, null, ops, {
106
+ remotes: [remote],
107
+ });
108
+
109
+ expect(result.current).not.toBeNull();
110
+ expect(result.revisionBlocks.length).toBeGreaterThan(0);
111
+
112
+ // Store blocks so we can read entries
113
+ await storeBlocks(blocks, result.revisionBlocks);
114
+
115
+ const entries = await getEntries(blocks, result.current!);
116
+ expect(entries.size).toBe(1);
117
+ expect(entries.get("docs/readme.md")).toBe(cid.toString());
118
+ });
119
+
120
+ it("should handle multiple puts on an empty pail", async () => {
121
+ const agent = await Agent.generate();
122
+ const name = await Name.create(agent);
123
+ const cidA = await createTestCID("file-a");
124
+ const cidB = await createTestCID("file-b");
125
+ const cidC = await createTestCID("file-c");
126
+
127
+ const ops: PailOp[] = [
128
+ { type: "put", key: "a.txt", value: cidA },
129
+ { type: "put", key: "b.txt", value: cidB },
130
+ { type: "put", key: "c.txt", value: cidC },
131
+ ];
132
+
133
+ const result = await applyPendingOps(blocks, name, null, ops, {
134
+ remotes: [remote],
135
+ });
136
+
137
+ expect(result.current).not.toBeNull();
138
+ await storeBlocks(blocks, result.revisionBlocks);
139
+
140
+ const entries = await getEntries(blocks, result.current!);
141
+ expect(entries.size).toBe(3);
142
+ expect(entries.get("a.txt")).toBe(cidA.toString());
143
+ expect(entries.get("b.txt")).toBe(cidB.toString());
144
+ expect(entries.get("c.txt")).toBe(cidC.toString());
145
+ });
146
+
147
+ it("should handle puts and dels on an existing value", async () => {
148
+ const agent = await Agent.generate();
149
+ const name = await Name.create(agent);
150
+
151
+ // Bootstrap with initial entries
152
+ const cidA = await createTestCID("file-a");
153
+ const cidB = await createTestCID("file-b");
154
+ const init = await applyPendingOps(
155
+ blocks,
156
+ name,
157
+ null,
158
+ [
159
+ { type: "put", key: "a.txt", value: cidA },
160
+ { type: "put", key: "b.txt", value: cidB },
161
+ ],
162
+ { remotes: [remote] },
163
+ );
164
+ await storeBlocks(blocks, init.revisionBlocks);
165
+
166
+ // Now apply mixed ops: add c.txt, delete a.txt
167
+ const cidC = await createTestCID("file-c");
168
+ const result = await applyPendingOps(
169
+ blocks,
170
+ name,
171
+ init.current,
172
+ [
173
+ { type: "put", key: "c.txt", value: cidC },
174
+ { type: "del", key: "a.txt" },
175
+ ],
176
+ { remotes: [remote] },
177
+ );
178
+
179
+ expect(result.current).not.toBeNull();
180
+ await storeBlocks(blocks, result.revisionBlocks);
181
+
182
+ const entries = await getEntries(blocks, result.current!);
183
+ expect(entries.size).toBe(2);
184
+ expect(entries.has("a.txt")).toBe(false);
185
+ expect(entries.get("b.txt")).toBe(cidB.toString());
186
+ expect(entries.get("c.txt")).toBe(cidC.toString());
187
+ });
188
+
189
+ it("should return null current when no ops and no current", async () => {
190
+ const agent = await Agent.generate();
191
+ const name = await Name.create(agent);
192
+
193
+ const result = await applyPendingOps(blocks, name, null, [], {
194
+ remotes: [remote],
195
+ });
196
+
197
+ expect(result.current).toBeNull();
198
+ expect(result.revisionBlocks).toEqual([]);
199
+ });
200
+
201
+ it("should converge when two agents publish concurrently", async () => {
202
+ const sharedBlocks = new MemoryBlockstore();
203
+ const env = await createTestEnv(sharedBlocks);
204
+
205
+ const agentA = await Agent.generate();
206
+ const nameA = await Name.create(agentA);
207
+
208
+ const agentB = await Agent.generate();
209
+ const proof = await Name.grant(nameA, agentB.did());
210
+ const nameB = Name.from(agentB, [proof]);
211
+
212
+ const cid1 = await createTestCID("file-1");
213
+ const cid2 = await createTestCID("file-2");
214
+ const cid3 = await createTestCID("file-3");
215
+
216
+ // Step 1: Agent A puts first key
217
+ const resultA1 = await applyPendingOps(
218
+ sharedBlocks,
219
+ nameA,
220
+ null,
221
+ [{ type: "put", key: "one.txt", value: cid1 }],
222
+ { remotes: [env.remote] },
223
+ );
224
+ await storeBlocks(sharedBlocks, resultA1.revisionBlocks);
225
+
226
+ // Step 2: Agent B resolves to get A's value
227
+ const resolveB1 = await Revision.resolve(sharedBlocks, nameB, {
228
+ remotes: [env.remote],
229
+ });
230
+ await storeBlocks(sharedBlocks, resolveB1.additions);
231
+ let currentB = resolveB1.value;
232
+
233
+ // Step 3: Agent A puts second key
234
+ const resultA2 = await applyPendingOps(
235
+ sharedBlocks,
236
+ nameA,
237
+ resultA1.current,
238
+ [{ type: "put", key: "two.txt", value: cid2 }],
239
+ { remotes: [env.remote] },
240
+ );
241
+ await storeBlocks(sharedBlocks, resultA2.revisionBlocks);
242
+
243
+ // Step 4: Agent B (still on old value) puts third key and applies
244
+ const resultB = await applyPendingOps(
245
+ sharedBlocks,
246
+ nameB,
247
+ currentB,
248
+ [{ type: "put", key: "three.txt", value: cid3 }],
249
+ { remotes: [env.remote] },
250
+ );
251
+ await storeBlocks(sharedBlocks, resultB.revisionBlocks);
252
+
253
+ // Agent B should see all three keys
254
+ const entriesB = await getEntries(sharedBlocks, resultB.current!);
255
+ expect(entriesB.size).toBe(3);
256
+ expect(entriesB.get("one.txt")).toBe(cid1.toString());
257
+ expect(entriesB.get("two.txt")).toBe(cid2.toString());
258
+ expect(entriesB.get("three.txt")).toBe(cid3.toString());
259
+
260
+ // Step 5: Agent A resolves — should also see all three keys
261
+ const resolveA = await Revision.resolve(sharedBlocks, nameA, {
262
+ base: resultA2.current ?? undefined,
263
+ remotes: [env.remote],
264
+ });
265
+ await storeBlocks(sharedBlocks, resolveA.additions);
266
+
267
+ const entriesA = await getEntries(sharedBlocks, resolveA.value);
268
+ expect(entriesA.size).toBe(3);
269
+ expect(entriesA.get("one.txt")).toBe(cid1.toString());
270
+ expect(entriesA.get("two.txt")).toBe(cid2.toString());
271
+ expect(entriesA.get("three.txt")).toBe(cid3.toString());
272
+
273
+ // Both agents should have converged to the same root
274
+ expect(resolveA.value.root.toString()).toBe(resultB.current!.root.toString());
275
+ });
276
+ });