@storacha/clawracha 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +134 -0
- package/dist/blockstore/disk.d.ts +15 -0
- package/dist/blockstore/disk.d.ts.map +1 -0
- package/dist/blockstore/disk.js +38 -0
- package/dist/blockstore/gateway.d.ts +12 -0
- package/dist/blockstore/gateway.d.ts.map +1 -0
- package/dist/blockstore/gateway.js +28 -0
- package/dist/blockstore/index.d.ts +13 -0
- package/dist/blockstore/index.d.ts.map +1 -0
- package/dist/blockstore/index.js +12 -0
- package/dist/blockstore/memory.d.ts +16 -0
- package/dist/blockstore/memory.d.ts.map +1 -0
- package/dist/blockstore/memory.js +23 -0
- package/dist/blockstore/tiered.d.ts +29 -0
- package/dist/blockstore/tiered.d.ts.map +1 -0
- package/dist/blockstore/tiered.js +65 -0
- package/dist/blockstore/workspace.d.ts +15 -0
- package/dist/blockstore/workspace.d.ts.map +1 -0
- package/dist/blockstore/workspace.js +20 -0
- package/dist/differ.d.ts +33 -0
- package/dist/differ.d.ts.map +1 -0
- package/dist/differ.js +57 -0
- package/dist/encoder.d.ts +16 -0
- package/dist/encoder.d.ts.map +1 -0
- package/dist/encoder.js +52 -0
- package/dist/fs.d.ts +17 -0
- package/dist/fs.d.ts.map +1 -0
- package/dist/fs.js +27 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +11 -0
- package/dist/plugin.d.ts +14 -0
- package/dist/plugin.d.ts.map +1 -0
- package/dist/plugin.js +220 -0
- package/dist/sync.d.ts +51 -0
- package/dist/sync.d.ts.map +1 -0
- package/dist/sync.js +290 -0
- package/dist/types.d.ts +54 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +4 -0
- package/dist/watcher.d.ts +42 -0
- package/dist/watcher.d.ts.map +1 -0
- package/dist/watcher.js +101 -0
- package/openclaw.plugin.json +30 -0
- package/package.json +54 -0
- package/src/blockstore/disk.ts +57 -0
- package/src/blockstore/index.ts +23 -0
- package/src/blockstore/workspace.ts +41 -0
- package/src/handlers/apply.ts +79 -0
- package/src/handlers/process.ts +118 -0
- package/src/handlers/remote.ts +61 -0
- package/src/index.ts +13 -0
- package/src/mdsync/index.ts +557 -0
- package/src/plugin.ts +481 -0
- package/src/sync.ts +258 -0
- package/src/types/index.ts +64 -0
- package/src/utils/client.ts +51 -0
- package/src/utils/differ.ts +67 -0
- package/src/utils/encoder.ts +64 -0
- package/src/utils/tempcar.ts +79 -0
- package/src/watcher.ts +128 -0
- package/test/blockstore/blockstore.test.ts +113 -0
- package/test/handlers/apply.test.ts +276 -0
- package/test/handlers/process.test.ts +301 -0
- package/test/handlers/remote.test.ts +182 -0
- package/test/mdsync/mdsync.test.ts +120 -0
- package/test/utils/differ.test.ts +94 -0
- package/tsconfig.json +18 -0
package/README.md
ADDED
@@ -0,0 +1,134 @@
# @storacha/clawracha

OpenClaw plugin for Storacha workspace sync via UCN Pail.

## Overview

Clawracha syncs OpenClaw agent workspaces to Storacha using UCN (User Controlled Names) with Pail — a CRDT-based KV store backed by merkle clocks.

**Features:**

- 🔄 Live sync of workspace files (`.md` by default)
- 🌐 Multi-device, multi-user via UCAN delegation
- 🔀 CRDT-based conflict resolution (merkle clock)
- 📦 Local-first with network sync

## Installation

```bash
openclaw plugins install @storacha/clawracha
```

## Setup

```bash
# Initialize sync for this workspace
openclaw storacha init

# Import a delegation from a space owner
openclaw storacha delegate <base64-delegation>

# Restart gateway to start syncing
openclaw gateway restart
```

## How It Works

```
Workspace Files              UCN Pail KV Store
================             ================
/AGENTS.md            ────►  "AGENTS.md" → bafk...xyz
/SOUL.md              ────►  "SOUL.md" → bafk...abc
/memory/2026-02-10.md ────►  "memory/2026-02-10.md" → bafk...123
```

The sync loop (a short code sketch follows the list):

1. **Watch** - File watcher detects changes
2. **Encode** - Files → UnixFS DAG → root CID
3. **Diff** - Compare local vs pail entries
4. **Batch** - Generate UCN revision with all changes
5. **Upload** - All blocks → CAR → Storacha
6. **Apply** - Remote changes → local filesystem
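To make the pipeline concrete, here is a rough TypeScript sketch of one pass. `encodeFiles`, `diffEntries`, and `encodedToEntries` are real helpers from this package's `dist/`; the watcher, pail, upload, and apply dependencies are hypothetical stand-ins for the plugin's internals, and the relative imports assume the snippet sits at the package root.

```ts
// Illustrative sketch only — not the plugin's actual API.
import type { CID } from "multiformats/cid";
import { encodeFiles } from "./dist/encoder.js";
import { diffEntries, encodedToEntries } from "./dist/differ.js";

interface SyncDeps {
  changedPaths(): Promise<string[]>;                                  // 1. Watch (hypothetical)
  pailEntries(): Promise<Map<string, CID>>;                           // current pail state (hypothetical)
  publishBatch(ops: ReturnType<typeof diffEntries>): Promise<void>;   // 4. Batch → UCN revision (hypothetical)
  uploadBlocks(): Promise<void>;                                      // 5. Upload blocks as a CAR (hypothetical)
  applyRemote(): Promise<void>;                                       // 6. Apply remote changes locally (hypothetical)
}

async function syncOnce(workspacePath: string, deps: SyncDeps): Promise<void> {
  const changed = await deps.changedPaths();
  const encoded = await encodeFiles(workspacePath, changed);                    // 2. Encode → UnixFS roots
  const ops = diffEntries(encodedToEntries(encoded), await deps.pailEntries()); // 3. Diff
  if (ops.length === 0) return;                                                 // nothing to publish
  await deps.publishBatch(ops);
  await deps.uploadBlocks();
  await deps.applyRemote();
}
```
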
## Configuration

In your OpenClaw config:

```yaml
plugins:
  entries:
    storacha-sync:
      enabled: true
      config:
        watchPatterns:
          - "**/*.md"
        ignorePatterns:
          - ".git"
          - "node_modules"
          - ".storacha"
```

## Agent Tools

The plugin provides tools for manual sync control:

- `storacha_sync_status` - Get current sync status
- `storacha_sync_now` - Trigger immediate sync

## Architecture

```
┌─────────────────────────────────────────────────────────┐
│              OpenClaw Agent Workspace                    │
│  ┌─────────────────┐     ┌─────────────────┐            │
│  │   Workspace     │◄───►│  File Watcher   │            │
│  │   *.md files    │     │   (chokidar)    │            │
│  └─────────────────┘     └────────┬────────┘            │
│           │                       │                     │
│           ▼                       ▼                     │
│  ┌─────────────────┐     ┌─────────────────┐            │
│  │  .storacha/     │     │   Sync Engine   │            │
│  │  ├─ config.json │◄───►│   (UCN Pail)    │            │
│  │  └─ blocks/     │     └────────┬────────┘            │
│  └─────────────────┘              │                     │
└───────────────────────────────────┼─────────────────────┘
                                    │
                                    ▼ publish/resolve
┌───────────────────────────────────────────────────────┐
│                   Storacha Network                     │
│  ┌───────────────────┐  ┌───────────────────────────┐ │
│  │  UCN Rendezvous   │  │      Storage Nodes        │ │
│  │  (clock/head)     │  │      (blob storage)       │ │
│  └───────────────────┘  └───────────────────────────┘ │
└───────────────────────────────────────────────────────┘
```

## Local Data

The plugin stores data in `.storacha/` within the workspace:

- `config.json` - Agent key, delegation, name archive (NOT synced)
- `blocks/` - Local block cache (NOT synced)

Add `.storacha/` to your `.gitignore`.

## Status

🚧 **Work in Progress**

- [x] Plugin scaffold
- [x] Tiered blockstore (memory → disk → gateway)
- [x] File encoder (UnixFS)
- [x] Differ (local ↔ pail)
- [x] Sync engine (UCN Pail batch)
- [x] File watcher
- [x] OpenClaw plugin integration
- [ ] CAR upload to Storacha
- [ ] Remote file download/apply
- [ ] Encryption (encrypt-upload-client)
- [ ] Full test coverage

## License

Apache-2.0 OR MIT
package/dist/blockstore/disk.d.ts
ADDED
@@ -0,0 +1,15 @@
/**
 * Filesystem-backed blockstore — the one thing UCN doesn't ship.
 * Persists blocks to .storacha/blocks/ in workspace.
 */
import type { Link, Block, Version } from "multiformats";
export declare class DiskBlockstore {
    private dir;
    private initialized;
    constructor(workspacePath: string);
    private ensureDir;
    private cidPath;
    get<T = unknown, C extends number = number, A extends number = number, V extends Version = 1>(link: Link<T, C, A, V>): Promise<Block<T, C, A, V> | undefined>;
    put(block: Block): Promise<void>;
}
//# sourceMappingURL=disk.d.ts.map
package/dist/blockstore/disk.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"disk.d.ts","sourceRoot":"","sources":["../../src/blockstore/disk.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH,OAAO,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AAKzD,qBAAa,cAAc;IACzB,OAAO,CAAC,GAAG,CAAS;IACpB,OAAO,CAAC,WAAW,CAAS;gBAEhB,aAAa,EAAE,MAAM;YAInB,SAAS;IAOvB,OAAO,CAAC,OAAO;IAIT,GAAG,CACP,CAAC,GAAG,OAAO,EACX,CAAC,SAAS,MAAM,GAAG,MAAM,EACzB,CAAC,SAAS,MAAM,GAAG,MAAM,EACzB,CAAC,SAAS,OAAO,GAAG,CAAC,EACrB,IAAI,EAAE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC;IAgB3D,GAAG,CAAC,KAAK,EAAE,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC;CAIvC"}
package/dist/blockstore/disk.js
ADDED
@@ -0,0 +1,38 @@
/**
 * Filesystem-backed blockstore — the one thing UCN doesn't ship.
 * Persists blocks to .storacha/blocks/ in workspace.
 */
import * as fs from "node:fs/promises";
import * as path from "node:path";
export class DiskBlockstore {
    dir;
    initialized = false;
    constructor(workspacePath) {
        this.dir = path.join(workspacePath, ".storacha", "blocks");
    }
    async ensureDir() {
        if (!this.initialized) {
            await fs.mkdir(this.dir, { recursive: true });
            this.initialized = true;
        }
    }
    cidPath(cid) {
        return path.join(this.dir, cid.toString());
    }
    async get(link) {
        try {
            const bytes = new Uint8Array(await fs.readFile(this.cidPath(link)));
            // Return a minimal block — decoder doesn't matter for storage, the CID is the truth
            return { cid: link, bytes, links: () => [] };
        }
        catch (err) {
            if (err.code === "ENOENT")
                return undefined;
            throw err;
        }
    }
    async put(block) {
        await this.ensureDir();
        await fs.writeFile(this.cidPath(block.cid), block.bytes);
    }
}
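For orientation, a minimal usage sketch of `DiskBlockstore` above. The workspace path is made up, `multiformats` is assumed to be installed, and the relative `dist/` import assumes the snippet lives at the package root.

```ts
// Hypothetical round trip: encode bytes as a raw block, persist it, read it back.
import * as Block from "multiformats/block";
import * as raw from "multiformats/codecs/raw";
import { sha256 } from "multiformats/hashes/sha2";
import { DiskBlockstore } from "./dist/blockstore/disk.js";

const block = await Block.encode({
  value: new TextEncoder().encode("# AGENTS\n"), // any file bytes
  codec: raw,
  hasher: sha256,
});

const store = new DiskBlockstore("/tmp/demo-workspace"); // writes under <workspace>/.storacha/blocks/
await store.put(block);
const got = await store.get(block.cid); // the same bytes back, or undefined if the file is missing
```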
package/dist/blockstore/gateway.d.ts
ADDED
@@ -0,0 +1,12 @@
/**
 * Gateway block fetcher
 * Fetches blocks from Storacha gateway as last resort
 */
import type { CID } from "multiformats/cid";
import type { BlockFetcher } from "../types.js";
export declare class GatewayBlockFetcher implements BlockFetcher {
    private gateway;
    constructor(gateway?: string);
    get(cid: CID): Promise<Uint8Array | undefined>;
}
//# sourceMappingURL=gateway.d.ts.map
package/dist/blockstore/gateway.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"gateway.d.ts","sourceRoot":"","sources":["../../src/blockstore/gateway.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AAC3C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAA;AAI/C,qBAAa,mBAAoB,YAAW,YAAY;IACtD,OAAO,CAAC,OAAO,CAAQ;gBAEX,OAAO,GAAE,MAAwB;IAIvC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;CAmBrD"}
package/dist/blockstore/gateway.js
ADDED
@@ -0,0 +1,28 @@
/**
 * Gateway block fetcher
 * Fetches blocks from Storacha gateway as last resort
 */
const DEFAULT_GATEWAY = "https://w3s.link";
export class GatewayBlockFetcher {
    gateway;
    constructor(gateway = DEFAULT_GATEWAY) {
        this.gateway = gateway;
    }
    async get(cid) {
        try {
            const url = `${this.gateway}/ipfs/${cid.toString()}?format=raw`;
            const response = await fetch(url, {
                headers: { Accept: "application/vnd.ipld.raw" },
            });
            if (!response.ok) {
                if (response.status === 404) return undefined;
                throw new Error(`Gateway error: ${response.status}`);
            }
            return new Uint8Array(await response.arrayBuffer());
        } catch (err) {
            // Network errors → treat as not found
            console.warn(`Gateway fetch failed for ${cid}:`, err);
            return undefined;
        }
    }
}
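A quick usage sketch of the gateway fetcher above: it resolves a CID to raw block bytes over HTTP, returning `undefined` on a 404 or network failure. The CID is supplied on the command line; the relative import assumes the snippet sits at the package root.

```ts
import { CID } from "multiformats/cid";
import { GatewayBlockFetcher } from "./dist/blockstore/gateway.js";

const cidArg = process.argv[2];
if (!cidArg) throw new Error("usage: node fetch-block.js <cid>");

const fetcher = new GatewayBlockFetcher(); // defaults to https://w3s.link
const bytes = await fetcher.get(CID.parse(cidArg));
console.log(bytes ? `${bytes.length} bytes` : "not found (404 or network error)");
```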
package/dist/blockstore/index.d.ts
ADDED
@@ -0,0 +1,13 @@
/**
 * Blockstore layer — thin wrapper around @storacha/ucn/block
 *
 * UCN provides: MemoryBlockstore, LRUBlockstore, GatewayBlockFetcher,
 * TieredBlockFetcher, withCache.
 *
 * We add: DiskBlockstore (filesystem persistence) and a pre-configured
 * tiered setup for workspace sync.
 */
export { MemoryBlockstore, LRUBlockstore, GatewayBlockFetcher, TieredBlockFetcher, withCache, } from "@storacha/ucn/block";
export { DiskBlockstore } from "./disk.js";
export { createWorkspaceBlockstore, type WorkspaceBlockstore, } from "./workspace.js";
//# sourceMappingURL=index.d.ts.map
package/dist/blockstore/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/blockstore/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAEH,OAAO,EACL,gBAAgB,EAChB,aAAa,EACb,mBAAmB,EACnB,kBAAkB,EAClB,SAAS,GACV,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EAAE,cAAc,EAAE,MAAM,WAAW,CAAC;AAC3C,OAAO,EACL,yBAAyB,EACzB,KAAK,mBAAmB,GACzB,MAAM,gBAAgB,CAAC"}
package/dist/blockstore/index.js
ADDED
@@ -0,0 +1,12 @@
/**
 * Blockstore layer — thin wrapper around @storacha/ucn/block
 *
 * UCN provides: MemoryBlockstore, LRUBlockstore, GatewayBlockFetcher,
 * TieredBlockFetcher, withCache.
 *
 * We add: DiskBlockstore (filesystem persistence) and a pre-configured
 * tiered setup for workspace sync.
 */
export { MemoryBlockstore, LRUBlockstore, GatewayBlockFetcher, TieredBlockFetcher, withCache, } from "@storacha/ucn/block";
export { DiskBlockstore } from "./disk.js";
export { createWorkspaceBlockstore, } from "./workspace.js";
package/dist/blockstore/memory.d.ts
ADDED
@@ -0,0 +1,16 @@
/**
 * In-memory block cache
 */
import type { CID } from "multiformats/cid";
import type { Block, BlockStore } from "../types.js";
export declare class MemoryBlockstore implements BlockStore {
    private blocks;
    get(cid: CID): Promise<Uint8Array | undefined>;
    put(block: Block): Promise<void>;
    has(cid: CID): Promise<boolean>;
    /** Clear all blocks */
    clear(): void;
    /** Get number of blocks */
    get size(): number;
}
//# sourceMappingURL=memory.d.ts.map
package/dist/blockstore/memory.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../../src/blockstore/memory.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AAC3C,OAAO,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEpD,qBAAa,gBAAiB,YAAW,UAAU;IACjD,OAAO,CAAC,MAAM,CAAgC;IAExC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAI9C,GAAG,CAAC,KAAK,EAAE,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC;IAIhC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,OAAO,CAAC,OAAO,CAAC;IAIrC,uBAAuB;IACvB,KAAK,IAAI,IAAI;IAIb,2BAA2B;IAC3B,IAAI,IAAI,IAAI,MAAM,CAEjB;CACF"}
package/dist/blockstore/memory.js
ADDED
@@ -0,0 +1,23 @@
/**
 * In-memory block cache
 */
export class MemoryBlockstore {
    blocks = new Map();
    async get(cid) {
        return this.blocks.get(cid.toString());
    }
    async put(block) {
        this.blocks.set(block.cid.toString(), block.bytes);
    }
    async has(cid) {
        return this.blocks.has(cid.toString());
    }
    /** Clear all blocks */
    clear() {
        this.blocks.clear();
    }
    /** Get number of blocks */
    get size() {
        return this.blocks.size;
    }
}
package/dist/blockstore/tiered.d.ts
ADDED
@@ -0,0 +1,29 @@
/**
 * Tiered blockstore: memory → disk → gateway
 *
 * Reads check each tier in order.
 * Writes go to memory (and optionally disk).
 */
import type { CID } from "multiformats/cid";
import type { Block, BlockStore } from "../types.js";
export declare class TieredBlockstore implements BlockStore {
    private memory;
    private disk;
    private gateway;
    private persistToDisk;
    constructor(
      workspacePath: string,
      options?: {
        persistToDisk?: boolean;
        gateway?: string;
      }
    );
    get(cid: CID): Promise<Uint8Array | undefined>;
    put(block: Block): Promise<void>;
    has(cid: CID): Promise<boolean>;
    /** Flush memory blocks to disk */
    flush(): Promise<void>;
    /** Clear memory cache */
    clearMemory(): void;
}
//# sourceMappingURL=tiered.d.ts.map
package/dist/blockstore/tiered.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"tiered.d.ts","sourceRoot":"","sources":["../../src/blockstore/tiered.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,KAAK,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AAC3C,OAAO,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAKpD,qBAAa,gBAAiB,YAAW,UAAU;IACjD,OAAO,CAAC,MAAM,CAAkB;IAChC,OAAO,CAAC,IAAI,CAAgB;IAC5B,OAAO,CAAC,OAAO,CAAqB;IACpC,OAAO,CAAC,aAAa,CAAS;gBAElB,aAAa,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,aAAa,CAAC,EAAE,OAAO,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE;IAOpF,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IA4B9C,GAAG,CAAC,KAAK,EAAE,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC;IAUhC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,OAAO,CAAC,OAAO,CAAC;IAIrC,kCAAkC;IAC5B,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAK5B,yBAAyB;IACzB,WAAW,IAAI,IAAI;CAGpB"}
package/dist/blockstore/tiered.js
ADDED
@@ -0,0 +1,65 @@
/**
 * Tiered blockstore: memory → disk → gateway
 *
 * Reads check each tier in order.
 * Writes go to memory (and optionally disk).
 */
import { MemoryBlockstore } from "./memory.js";
import { DiskBlockstore } from "./disk.js";
import { GatewayBlockFetcher } from "./gateway.js";
export class TieredBlockstore {
    memory;
    disk;
    gateway;
    persistToDisk;
    constructor(workspacePath, options) {
        this.memory = new MemoryBlockstore();
        this.disk = new DiskBlockstore(workspacePath);
        this.gateway = new GatewayBlockFetcher(options?.gateway);
        this.persistToDisk = options?.persistToDisk ?? true;
    }
    async get(cid) {
        // Check memory first
        let bytes = await this.memory.get(cid);
        if (bytes) return bytes;
        // Check disk
        bytes = await this.disk.get(cid);
        if (bytes) {
            // Promote to memory cache
            await this.memory.put({ cid, bytes });
            return bytes;
        }
        // Fetch from gateway
        bytes = await this.gateway.get(cid);
        if (bytes) {
            // Cache in memory and disk
            const block = { cid, bytes };
            await this.memory.put(block);
            if (this.persistToDisk) {
                await this.disk.put(block);
            }
            return bytes;
        }
        return undefined;
    }
    async put(block) {
        // Always write to memory
        await this.memory.put(block);
        // Optionally persist to disk
        if (this.persistToDisk) {
            await this.disk.put(block);
        }
    }
    async has(cid) {
        return (await this.memory.has(cid)) || (await this.disk.has(cid));
    }
    /** Flush memory blocks to disk */
    async flush() {
        // Memory store doesn't expose iteration, so this is a no-op for now
        // In practice, we persist on put anyway
    }
    /** Clear memory cache */
    clearMemory() {
        this.memory.clear();
    }
}
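A read-through sketch for `TieredBlockstore` above: misses fall through memory → disk → gateway, and gateway hits are cached back into the faster tiers. The CID comes from the command line; the relative import assumes the snippet sits at the package root.

```ts
import { CID } from "multiformats/cid";
import { TieredBlockstore } from "./dist/blockstore/tiered.js";

const cidArg = process.argv[2];
if (!cidArg) throw new Error("usage: node read-through.js <cid>");

const store = new TieredBlockstore("/tmp/demo-workspace", { persistToDisk: true });
const bytes = await store.get(CID.parse(cidArg)); // memory, then disk, then the gateway
// A gateway hit is now cached in memory and persisted under .storacha/blocks/,
// so a second get() for the same CID never touches the network.
console.log(bytes ? `${bytes.length} bytes` : "not found anywhere");
```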
package/dist/blockstore/workspace.d.ts
ADDED
@@ -0,0 +1,15 @@
/**
 * Pre-configured blockstore for workspace sync:
 * Memory (LRU) → Disk → Gateway, with cache promotion.
 */
import { TieredBlockFetcher } from "@storacha/ucn/block";
import type { Block } from "multiformats";
export interface WorkspaceBlockstore {
    get: TieredBlockFetcher["get"];
    put: (block: Block) => Promise<void>;
}
export declare function createWorkspaceBlockstore(workspacePath: string, options?: {
    gateway?: string;
    lruMax?: number;
}): WorkspaceBlockstore;
//# sourceMappingURL=workspace.d.ts.map
package/dist/blockstore/workspace.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"workspace.d.ts","sourceRoot":"","sources":["../../src/blockstore/workspace.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAGL,kBAAkB,EAEnB,MAAM,qBAAqB,CAAC;AAE7B,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,cAAc,CAAC;AAE1C,MAAM,WAAW,mBAAmB;IAClC,GAAG,EAAE,kBAAkB,CAAC,KAAK,CAAC,CAAC;IAC/B,GAAG,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;CACtC;AAED,wBAAgB,yBAAyB,CACvC,aAAa,EAAE,MAAM,EACrB,OAAO,CAAC,EAAE;IAAE,OAAO,CAAC,EAAE,MAAM,CAAC;IAAC,MAAM,CAAC,EAAE,MAAM,CAAA;CAAE,GAC9C,mBAAmB,CAkBrB"}
package/dist/blockstore/workspace.js
ADDED
@@ -0,0 +1,20 @@
/**
 * Pre-configured blockstore for workspace sync:
 * Memory (LRU) → Disk → Gateway, with cache promotion.
 */
import { LRUBlockstore, GatewayBlockFetcher, TieredBlockFetcher, withCache, } from "@storacha/ucn/block";
import { DiskBlockstore } from "./disk.js";
export function createWorkspaceBlockstore(workspacePath, options) {
    const memory = new LRUBlockstore(options?.lruMax ?? 1024);
    const disk = new DiskBlockstore(workspacePath);
    const gateway = new GatewayBlockFetcher(options?.gateway);
    // Reads: memory → disk → gateway, with cache promotion to memory
    const fetcher = withCache(new TieredBlockFetcher(memory, disk, gateway), memory);
    return {
        get: fetcher.get.bind(fetcher),
        async put(block) {
            await memory.put(block);
            await disk.put(block);
        },
    };
}
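The factory above is the composed entry point: an LRU in front of the disk store, with the gateway as the network fallback. A minimal sketch of using it — the workspace path is made up, the relative imports assume the package root, and `multiformats` is assumed to be installed:

```ts
import * as Block from "multiformats/block";
import * as raw from "multiformats/codecs/raw";
import { sha256 } from "multiformats/hashes/sha2";
import { createWorkspaceBlockstore } from "./dist/blockstore/workspace.js";

const store = createWorkspaceBlockstore("/tmp/demo-workspace", { lruMax: 256 });

const block = await Block.encode({ value: new TextEncoder().encode("hello"), codec: raw, hasher: sha256 });
await store.put(block);                 // written to the LRU and to .storacha/blocks/
const got = await store.get(block.cid); // served from the local tiers, no gateway round trip needed
```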
package/dist/differ.d.ts
ADDED
@@ -0,0 +1,33 @@
/**
 * Differ - compares local directory tree with pail entries
 *
 * Generates put/del operations to sync local state to pail.
 */
import type { CID } from "multiformats/cid";
import type { PailOp, EncodedFile } from "./types.js";
/** Map of path → CID from pail entries */
export type PailEntries = Map<string, CID>;
/** Map of path → CID from local encoded files */
export type LocalEntries = Map<string, CID>;
/**
 * Compute diff between local files and pail entries
 *
 * @param local - Encoded local files (path → rootCID)
 * @param pail - Current pail entries (path → CID)
 * @returns Operations to apply to pail
 */
export declare function diffEntries(local: LocalEntries, pail: PailEntries): PailOp[];
/**
 * Convert encoded files to local entries map
 */
export declare function encodedToEntries(encoded: EncodedFile[]): LocalEntries;
/**
 * Diff two pail states to find files that changed remotely
 * (Used after publish to determine what to download)
 *
 * @param before - Pail entries before publish
 * @param after - Pail entries after publish (may include remote changes)
 * @returns Paths that changed remotely (need to download)
 */
export declare function diffRemoteChanges(before: PailEntries, after: PailEntries): string[];
//# sourceMappingURL=differ.d.ts.map
package/dist/differ.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"differ.d.ts","sourceRoot":"","sources":["../src/differ.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAC;AAC5C,OAAO,KAAK,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAEtD,0CAA0C;AAC1C,MAAM,MAAM,WAAW,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAE3C,iDAAiD;AACjD,MAAM,MAAM,YAAY,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAE5C;;;;;;GAMG;AACH,wBAAgB,WAAW,CAAC,KAAK,EAAE,YAAY,EAAE,IAAI,EAAE,WAAW,GAAG,MAAM,EAAE,CAmB5E;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,OAAO,EAAE,WAAW,EAAE,GAAG,YAAY,CAMrE;AAED;;;;;;;GAOG;AACH,wBAAgB,iBAAiB,CAC/B,MAAM,EAAE,WAAW,EACnB,KAAK,EAAE,WAAW,GACjB,MAAM,EAAE,CAWV"}
package/dist/differ.js
ADDED
@@ -0,0 +1,57 @@
/**
 * Differ - compares local directory tree with pail entries
 *
 * Generates put/del operations to sync local state to pail.
 */
/**
 * Compute diff between local files and pail entries
 *
 * @param local - Encoded local files (path → rootCID)
 * @param pail - Current pail entries (path → CID)
 * @returns Operations to apply to pail
 */
export function diffEntries(local, pail) {
    const ops = [];
    // Find puts: files in local that are new or changed
    for (const [path, localCID] of local) {
        const pailCID = pail.get(path);
        if (!pailCID || !localCID.equals(pailCID)) {
            ops.push({ type: "put", key: path, value: localCID });
        }
    }
    // Find deletes: files in pail that aren't in local
    for (const path of pail.keys()) {
        if (!local.has(path)) {
            ops.push({ type: "del", key: path });
        }
    }
    return ops;
}
/**
 * Convert encoded files to local entries map
 */
export function encodedToEntries(encoded) {
    const entries = new Map();
    for (const file of encoded) {
        entries.set(file.path, file.rootCID);
    }
    return entries;
}
/**
 * Diff two pail states to find files that changed remotely
 * (Used after publish to determine what to download)
 *
 * @param before - Pail entries before publish
 * @param after - Pail entries after publish (may include remote changes)
 * @returns Paths that changed remotely (need to download)
 */
export function diffRemoteChanges(before, after) {
    const changed = [];
    for (const [path, afterCID] of after) {
        const beforeCID = before.get(path);
        if (!beforeCID || !afterCID.equals(beforeCID)) {
            changed.push(path);
        }
    }
    return changed;
}
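A small worked example of `diffEntries()` above: `AGENTS.md` changed locally, so it yields a put; `SOUL.md` exists only in the pail, so it yields a del; an unchanged entry produces no op. The CIDs are derived inline purely for illustration, and the relative import assumes the snippet lives at the package root.

```ts
import { CID } from "multiformats/cid";
import * as raw from "multiformats/codecs/raw";
import { sha256 } from "multiformats/hashes/sha2";
import { diffEntries } from "./dist/differ.js";

// Helper: derive a CID from a string, just to have distinct values to compare.
const cidOf = async (s: string) =>
  CID.createV1(raw.code, await sha256.digest(new TextEncoder().encode(s)));

const readme = await cidOf("# readme");
const local = new Map([["AGENTS.md", await cidOf("new")], ["README.md", readme]]);
const pail = new Map([["AGENTS.md", await cidOf("old")], ["README.md", readme], ["SOUL.md", await cidOf("soul")]]);

console.log(diffEntries(local, pail));
// → [ { type: "put", key: "AGENTS.md", value: CID(...) }, { type: "del", key: "SOUL.md" } ]
```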
package/dist/encoder.d.ts
ADDED
@@ -0,0 +1,16 @@
/**
 * File encoder - converts files to UnixFS DAG with root CID
 *
 * Uses @storacha/upload-client's UnixFS encoding to generate
 * content-addressed blocks for each file.
 */
import type { EncodedFile } from "./types.js";
/**
 * Encode a single file to UnixFS blocks
 */
export declare function encodeWorkspaceFile(workspacePath: string, relativePath: string): Promise<EncodedFile>;
/**
 * Encode multiple files, returning all encoded results
 */
export declare function encodeFiles(workspacePath: string, relativePaths: string[]): Promise<EncodedFile[]>;
//# sourceMappingURL=encoder.d.ts.map
package/dist/encoder.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"encoder.d.ts","sourceRoot":"","sources":["../src/encoder.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAMH,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAG9C;;GAEG;AACH,wBAAsB,mBAAmB,CACvC,aAAa,EAAE,MAAM,EACrB,YAAY,EAAE,MAAM,GACnB,OAAO,CAAC,WAAW,CAAC,CAmBtB;AAED;;GAEG;AACH,wBAAsB,WAAW,CAC/B,aAAa,EAAE,MAAM,EACrB,aAAa,EAAE,MAAM,EAAE,GACtB,OAAO,CAAC,WAAW,EAAE,CAAC,CAkBxB"}
package/dist/encoder.js
ADDED
@@ -0,0 +1,52 @@
/**
 * File encoder - converts files to UnixFS DAG with root CID
 *
 * Uses @storacha/upload-client's UnixFS encoding to generate
 * content-addressed blocks for each file.
 */
import * as fs from "node:fs/promises";
import * as path from "node:path";
import * as stream from "node:stream";
import { encodeFile } from "@storacha/upload-client/unixfs";
/**
 * Encode a single file to UnixFS blocks
 */
export async function encodeWorkspaceFile(workspacePath, relativePath) {
    const fullPath = path.join(workspacePath, relativePath);
    const fileHandle = await fs.open(fullPath);
    const stat = await fs.stat(fullPath);
    // Encode to UnixFS - returns { cid, blocks }
    const result = await encodeFile({
        // @ts-expect-error node web stream not type compatible with web stream
        stream() {
            return stream.Readable.toWeb(fileHandle.createReadStream());
        },
    });
    return {
        path: relativePath,
        rootCID: result.cid,
        blocks: result.blocks, // Type mismatch between @storacha/upload-client and @ipld/unixfs - treat as unknown
        size: stat.size,
    };
}
/**
 * Encode multiple files, returning all encoded results
 */
export async function encodeFiles(workspacePath, relativePaths) {
    const results = [];
    for (const relativePath of relativePaths) {
        try {
            const encoded = await encodeWorkspaceFile(workspacePath, relativePath);
            results.push(encoded);
        }
        catch (err) {
            if (err.code === "ENOENT") {
                // File was deleted between detection and encoding - skip
                console.warn(`File not found during encoding: ${relativePath}`);
                continue;
            }
            throw err;
        }
    }
    return results;
}
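A usage sketch for the encoder above: encode a couple of workspace files to UnixFS and index them path → root CID, ready to hand to `diffEntries()`. The workspace path and file names are illustrative, and the relative imports assume the snippet lives at the package root.

```ts
import { encodeFiles } from "./dist/encoder.js";
import { encodedToEntries } from "./dist/differ.js";

const encoded = await encodeFiles("/tmp/demo-workspace", ["AGENTS.md", "memory/2026-02-10.md"]);
const local = encodedToEntries(encoded); // Map<path, rootCID> — the "local" side of the diff

for (const file of encoded) {
  console.log(`${file.path} → ${file.rootCID} (${file.size} bytes)`);
}
```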
package/dist/fs.d.ts
ADDED
@@ -0,0 +1,17 @@
/**
 * Graceful filesystem — drop-in replacement for node:fs/promises.
 * Handles EMFILE/ENFILE by queuing and retrying on platforms with low fd limits.
 */
import * as rawFs from "node:fs";
declare const fs: typeof rawFs.promises;
export default fs;
export declare const readFile: typeof rawFs.promises.readFile,
  writeFile: typeof rawFs.promises.writeFile,
  mkdir: typeof rawFs.promises.mkdir,
  readdir: typeof rawFs.promises.readdir,
  stat: typeof rawFs.promises.stat,
  unlink: typeof rawFs.promises.unlink,
  access: typeof rawFs.promises.access,
  rm: typeof rawFs.promises.rm,
  mkdtemp: typeof rawFs.promises.mkdtemp;
//# sourceMappingURL=fs.d.ts.map
package/dist/fs.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../src/fs.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,KAAK,MAAM,SAAS,CAAA;AAYhC,QAAA,MAAM,EAAE,uBAAiB,CAAA;AACzB,eAAe,EAAE,CAAA;AACjB,eAAO,MACL,QAAQ,kCACR,SAAS,mCACT,KAAK,+BACL,OAAO,iCACP,IAAI,8BACJ,MAAM,gCACN,MAAM,gCACN,EAAE,4BACF,OAAO,+BACH,CAAA"}