@noy-db/to-file 0.1.0-pre.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +35 -0
- package/dist/index.cjs +306 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +197 -0
- package/dist/index.d.ts +197 -0
- package/dist/index.js +282 -0
- package/dist/index.js.map +1 -0
- package/package.json +71 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 vLannaAi
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
# @noy-db/to-file
|
|
2
|
+
|
|
3
|
+
> JSON file adapter for [noy-db](https://github.com/vLannaAi/noy-db) — encrypted document store on local disk, USB sticks, or network drives.
|
|
4
|
+
|
|
5
|
+
[](https://www.npmjs.com/package/@noy-db/to-file)
|
|
6
|
+
|
|
7
|
+
## Install
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
pnpm add @noy-db/hub @noy-db/to-file
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Usage
|
|
14
|
+
|
|
15
|
+
```ts
|
|
16
|
+
import { createNoydb } from '@noy-db/hub'
|
|
17
|
+
import { jsonFile } from '@noy-db/to-file'
|
|
18
|
+
|
|
19
|
+
const db = await createNoydb({
|
|
20
|
+
adapter: jsonFile({ dir: '/Volumes/USB/firm-data' }),
|
|
21
|
+
userId: 'alice',
|
|
22
|
+
passphrase: process.env.NOYDB_PASSPHRASE!,
|
|
23
|
+
})
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
Each compartment is written as a set of JSON files containing only ciphertext envelopes — the adapter never sees plaintext. Perfect for:
|
|
27
|
+
|
|
28
|
+
- USB-stick workflows (air-gapped data portability)
|
|
29
|
+
- Local-first desktop apps
|
|
30
|
+
- Network drive sharing with per-user passphrases
|
|
31
|
+
- Backup-friendly storage
|
|
32
|
+
|
|
33
|
+
## License
|
|
34
|
+
|
|
35
|
+
MIT © vLannaAi — see the [noy-db repo](https://github.com/vLannaAi/noy-db) for full documentation.
|
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Bundler (esbuild) interop helpers for the CommonJS build.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;

// Install a live getter on `target` for every name in `all`.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};

// Copy own properties of `from` onto `to` as live getters, skipping
// `except` and anything `to` already owns; preserves enumerability of
// the source descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        desc = __getOwnPropDesc(from, key);
        __defProp(to, key, { get: () => from[key], enumerable: !desc || desc.enumerable });
      }
    }
  }
  return to;
};

// Wrap a module-namespace-like object as a CommonJS export object with
// a non-enumerable `__esModule` marker.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
|
|
21
|
+
// src/index.ts
// Public surface of the package: each export is registered as a live
// getter on the CommonJS export object via the bundler helpers above.
var index_exports = {};
__export(index_exports, {
  exportBlobsToDirectory: () => exportBlobsToDirectory,
  jsonFile: () => jsonFile,
  loadBundle: () => loadBundle,
  saveBundle: () => saveBundle
});
module.exports = __toCommonJS(index_exports);
// Node built-ins plus the core `@noy-db/hub` runtime used by jsonFile()
// and the bundle helpers.
var import_promises2 = require("fs/promises");
var import_node_path2 = require("path");
var import_hub2 = require("@noy-db/hub");

// src/export-blobs-to-directory.ts
// Separate import set for the blob-export module (bundled inline);
// `@noy-db/hub/util` supplies sanitizeFilename.
var import_promises = require("fs/promises");
var import_node_path = require("path");
var import_hub = require("@noy-db/hub");
var import_util = require("@noy-db/hub/util");
|
|
38
|
+
/**
 * Materialize every blob yielded by `vault.exportBlobs()` into `targetDir`.
 *
 * Sanitizes filenames per a target profile (default `'macos-smb'`), guards
 * against path escape after sanitization (throws `PathEscapeError`), resolves
 * filename collisions per `options.onCollision` (default `'suffix'`), and —
 * when the profile is `'opaque'` — writes a `manifest.json` sidecar mapping
 * opaque names back to the original record-supplied filenames.
 *
 * @param vault vault whose blobs are exported (capability check is done by
 *   `vault.exportBlobs()` itself)
 * @param targetDir destination directory; created recursively if missing
 * @param options filter/collision/profile options, all optional
 * @returns summary `{ written, bytes, entries, manifestPath }` suitable for
 *   logging / audit; `manifestPath` is null unless the profile is `'opaque'`
 */
async function exportBlobsToDirectory(vault, targetDir, options = {}) {
  const profile = options.filenameProfile ?? "macos-smb";
  const onCollision = options.onCollision ?? "suffix";
  const absTargetDir = (0, import_node_path.resolve)(targetDir);
  await (0, import_promises.mkdir)(absTargetDir, { recursive: true });
  // Any path that doesn't start with this prefix has escaped the target dir.
  const containmentPrefix = absTargetDir + import_node_path.sep;
  // Track filenames already used in this run so collision resolution
  // is deterministic and cheap (no extra stat() per attempt).
  const used = /* @__PURE__ */ new Set();
  const entries = [];
  const opaqueEntries = [];
  let totalBytes = 0;
  // Forward only the options the caller actually supplied.
  const handle = vault.exportBlobs({
    ...options.collections && { collections: options.collections },
    ...options.where && { where: options.where },
    ...options.afterBlobId && { afterBlobId: options.afterBlobId },
    ...options.signal && { signal: options.signal }
  });
  for await (const blob of handle) {
    const original = blob.meta.filename;
    // Opaque profile derives the on-disk name from the blob id instead of
    // the record-supplied filename.
    const sanitizeOpts = profile === "opaque" ? { profile, opaqueId: blob.blobId } : { profile };
    const candidate = (0, import_util.sanitizeFilename)(original, sanitizeOpts);
    const finalName = resolveCollision(candidate, used, onCollision);
    used.add(finalName);
    const absPath = (0, import_node_path.resolve)(absTargetDir, finalName);
    // Zip-Slip containment: reject any name that resolves outside targetDir.
    if (absPath !== absTargetDir && !absPath.startsWith(containmentPrefix)) {
      throw new import_hub.PathEscapeError({ attempted: finalName, targetDir: absTargetDir });
    }
    await (0, import_promises.mkdir)((0, import_node_path.dirname)(absPath), { recursive: true });
    await (0, import_promises.writeFile)(absPath, blob.bytes);
    entries.push({ blobId: blob.blobId, path: absPath });
    totalBytes += blob.bytes.byteLength;
    if (profile === "opaque") {
      // Remember the opaque→original mapping for the manifest sidecar.
      const entry = {
        opaqueName: finalName,
        originalName: original,
        collection: blob.recordRef.collection,
        recordId: blob.recordRef.id,
        slot: blob.recordRef.slot,
        blobId: blob.blobId,
        ...blob.meta.mimeType !== void 0 && { mimeType: blob.meta.mimeType }
      };
      opaqueEntries.push(entry);
    }
  }
  let manifestPath = null;
  if (profile === "opaque") {
    manifestPath = (0, import_node_path.resolve)(absTargetDir, "manifest.json");
    const json = JSON.stringify(
      {
        format: "noydb-opaque-export",
        version: 1,
        entries: opaqueEntries
      },
      null,
      2
    );
    await (0, import_promises.writeFile)(manifestPath, json);
  }
  return {
    written: entries.length,
    bytes: totalBytes,
    entries,
    manifestPath
  };
}
|
|
102
|
+
/**
 * Pick the final on-disk name for `candidate` given the names already
 * `used` in this export run and the configured collision `strategy`
 * ('suffix' | 'overwrite' | 'fail' | callback).
 */
function resolveCollision(candidate, used, strategy) {
  // No collision, or the caller explicitly wants overwrites — keep as-is.
  if (!used.has(candidate) || strategy === "overwrite") return candidate;
  if (strategy === "fail") {
    throw new Error(`exportBlobsToDirectory: filename collision on "${candidate}"`);
  }
  // 'suffix' and the callback path both generate a sequence of attempts
  // and take the first name not already used.
  let attempt = 0;
  while (++attempt < 1e4) {
    const next = typeof strategy === "function"
      ? strategy(candidate, attempt)
      : addSuffix(candidate, attempt);
    if (!used.has(next)) return next;
  }
  throw new Error(`exportBlobsToDirectory: collision suffix exhausted for "${candidate}"`);
}
|
|
114
|
+
/**
 * Insert `-{attempt}` before the extension of `name`, or append it when
 * there is no usable extension (none, or the whole name is the extension,
 * e.g. ".gitignore").
 */
function addSuffix(name, attempt) {
  const ext = (0, import_node_path.extname)(name);
  const hasStem = ext.length > 0 && ext.length < name.length;
  if (!hasStem) return `${name}-${attempt}`;
  return `${name.slice(0, -ext.length)}-${attempt}${ext}`;
}
|
|
122
|
+
|
|
123
|
+
// src/index.ts
|
|
124
|
+
/**
 * Create a JSON-file NOYDB store adapter.
 *
 * Layout on disk: `{dir}/{vault}/{collection}/{id}.json`, with internal
 * collections (names starting with `_`, e.g. `_keyring`) under the same
 * vault directory.
 *
 * @param options `{ dir, pretty = true }` — base directory and whether
 *   envelope JSON is pretty-printed
 * @returns a store object implementing get/put/delete/list/loadAll/
 *   saveAll/ping/listVaults/listPage
 */
function jsonFile(options) {
  const { dir, pretty = true } = options;
  // Path of a single record's JSON file.
  function recordPath(vault, collection, id) {
    return (0, import_node_path2.join)(dir, vault, collection, `${id}.json`);
  }
  // Directory holding all records of one collection.
  function collectionDir(vault, collection) {
    return (0, import_node_path2.join)(dir, vault, collection);
  }
  // Recursive mkdir — no-op when the directory already exists.
  async function ensureDir(path) {
    await (0, import_promises2.mkdir)(path, { recursive: true });
  }
  // stat-based existence probe; any stat failure counts as "absent".
  async function fileExists(path) {
    try {
      await (0, import_promises2.stat)(path);
      return true;
    } catch {
      return false;
    }
  }
  // Envelope → JSON string, honoring the `pretty` option.
  function serialize(envelope) {
    return pretty ? JSON.stringify(envelope, null, 2) : JSON.stringify(envelope);
  }
  return {
    name: "file",
    // Read one envelope; null when the file is missing or unreadable/unparsable.
    async get(vault, collection, id) {
      const path = recordPath(vault, collection, id);
      try {
        const content = await (0, import_promises2.readFile)(path, "utf-8");
        return JSON.parse(content);
      } catch {
        return null;
      }
    },
    // Write one envelope. When `expectedVersion` is given and a file exists,
    // performs a read-compare-write version check (NOT atomic at the FS
    // layer — concurrent writers can race between the read and the write).
    async put(vault, collection, id, envelope, expectedVersion) {
      const path = recordPath(vault, collection, id);
      if (expectedVersion !== void 0 && await fileExists(path)) {
        const existing = JSON.parse(await (0, import_promises2.readFile)(path, "utf-8"));
        if (existing._v !== expectedVersion) {
          throw new import_hub2.ConflictError(existing._v, `Version conflict: expected ${expectedVersion}, found ${existing._v}`);
        }
      }
      await ensureDir(collectionDir(vault, collection));
      await (0, import_promises2.writeFile)(path, serialize(envelope), "utf-8");
    },
    // Remove one record's file.
    async delete(vault, collection, id) {
      const path = recordPath(vault, collection, id);
      try {
        await (0, import_promises2.unlink)(path);
      } catch {
        // File doesn't exist — that's fine.
      }
    },
    // List record ids in a collection (filenames minus the .json extension);
    // missing collection directory yields an empty list.
    async list(vault, collection) {
      const dirPath = collectionDir(vault, collection);
      try {
        const entries = await (0, import_promises2.readdir)(dirPath);
        return entries.filter((f) => f.endsWith(".json")).map((f) => f.slice(0, -5));
      } catch {
        return [];
      }
    },
    // Load every envelope of every non-internal collection (collections whose
    // name starts with "_", e.g. _keyring/_sync, are skipped) into a
    // { collection: { id: envelope } } snapshot.
    async loadAll(vault) {
      const compDir = (0, import_node_path2.join)(dir, vault);
      const snapshot = {};
      try {
        const collections = await (0, import_promises2.readdir)(compDir);
        for (const collName of collections) {
          if (collName.startsWith("_")) continue;
          const collPath = (0, import_node_path2.join)(compDir, collName);
          const collStat = await (0, import_promises2.stat)(collPath);
          if (!collStat.isDirectory()) continue;
          const records = {};
          const files = await (0, import_promises2.readdir)(collPath);
          for (const file of files) {
            if (!file.endsWith(".json")) continue;
            const id = file.slice(0, -5);
            const content = await (0, import_promises2.readFile)((0, import_node_path2.join)(collPath, file), "utf-8");
            records[id] = JSON.parse(content);
          }
          snapshot[collName] = records;
        }
      } catch {
        // Directory doesn't exist — return empty snapshot.
      }
      return snapshot;
    },
    // Write a full snapshot ({ collection: { id: envelope } }) to disk.
    // Existing files not present in `data` are left untouched.
    async saveAll(vault, data) {
      for (const [collName, records] of Object.entries(data)) {
        const collDir = collectionDir(vault, collName);
        await ensureDir(collDir);
        for (const [id, envelope] of Object.entries(records)) {
          await (0, import_promises2.writeFile)((0, import_node_path2.join)(collDir, `${id}.json`), serialize(envelope), "utf-8");
        }
      }
    },
    // Health check: true iff the base directory is stat-able.
    async ping() {
      try {
        await (0, import_promises2.stat)(dir);
        return true;
      } catch {
        return false;
      }
    },
    /**
     * Enumerate every top-level vault subdirectory under the
     * configured base directory. Used by
     * `Noydb.listAccessibleVaults()`.
     *
     * The implementation is `readdir(dir)` filtered to entries that
     * are themselves directories — files at the top level (READMEs,
     * .DS_Store, etc.) are skipped, and missing base directory
     * returns an empty array rather than throwing. Result order is
     * filesystem-defined; consumers that want stable order should
     * sort themselves.
     */
    async listVaults() {
      let entries;
      try {
        entries = await (0, import_promises2.readdir)(dir);
      } catch {
        return [];
      }
      const compartments = [];
      for (const entry of entries) {
        try {
          const entryStat = await (0, import_promises2.stat)((0, import_node_path2.join)(dir, entry));
          if (entryStat.isDirectory()) compartments.push(entry);
        } catch {
          // Entry vanished between readdir and stat — skip silently.
        }
      }
      return compartments;
    },
    /**
     * Paginate over a collection. Cursor is a numeric offset (as a string)
     * into the sorted filename list. Files are sorted alphabetically so
     * pages are stable across runs and across processes that share the
     * same data directory.
     *
     * The default `limit` is 100. Each item carries its decoded envelope
     * so callers don't need an extra `get()` round-trip per id.
     */
    async listPage(vault, collection, cursor, limit = 100) {
      const dirPath = collectionDir(vault, collection);
      let files;
      try {
        files = await (0, import_promises2.readdir)(dirPath);
      } catch {
        return { items: [], nextCursor: null };
      }
      const ids = files.filter((f) => f.endsWith(".json")).map((f) => f.slice(0, -5)).sort();
      const start = cursor ? parseInt(cursor, 10) : 0;
      const end = Math.min(start + limit, ids.length);
      const items = [];
      for (let i = start; i < end; i++) {
        const id = ids[i];
        try {
          const content = await (0, import_promises2.readFile)((0, import_node_path2.join)(dirPath, `${id}.json`), "utf-8");
          items.push({ id, envelope: JSON.parse(content) });
        } catch {
          // File disappeared between readdir and readFile — skip silently.
        }
      }
      return {
        items,
        nextCursor: end < ids.length ? String(end) : null
      };
    }
  };
}
|
|
290
|
+
/**
 * Write a `.noydb` container for `vault` to a local file.
 *
 * Thin wrapper around `writeNoydbBundle`: serialize first, then make sure
 * the parent directory exists (writeFile does NOT create intermediate
 * directories; recursive mkdir is a no-op when it already exists), then
 * write the bytes.
 */
async function saveBundle(path, vault, opts = {}) {
  const bytes = await (0, import_hub2.writeNoydbBundle)(vault, opts);
  const parent = (0, import_node_path2.dirname)(path);
  await (0, import_promises2.mkdir)(parent, { recursive: true });
  await (0, import_promises2.writeFile)(path, bytes);
}
|
|
295
|
+
/**
 * Read and verify a `.noydb` container from a local file.
 *
 * Pure format layer — no passphrase involved; the Buffer returned by
 * readFile is a Uint8Array subclass, so it feeds `readNoydbBundle`
 * directly with no copy.
 */
async function loadBundle(path) {
  const raw = await (0, import_promises2.readFile)(path);
  return (0, import_hub2.readNoydbBundle)(raw);
}
|
|
299
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
300
|
+
0 && (module.exports = {
|
|
301
|
+
exportBlobsToDirectory,
|
|
302
|
+
jsonFile,
|
|
303
|
+
loadBundle,
|
|
304
|
+
saveBundle
|
|
305
|
+
});
|
|
306
|
+
//# sourceMappingURL=index.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/export-blobs-to-directory.ts"],"sourcesContent":["/**\n * **@noy-db/to-file** — JSON file store for NOYDB (USB / local disk).\n *\n * Maps the NOYDB hierarchy directly to the filesystem:\n *\n * ```\n * {dir}/\n * {vault}/\n * {collection}/\n * {id}.json ← EncryptedEnvelope, optionally pretty-printed\n * _keyring/\n * {userId}.json ← wrapped DEKs for this user\n * _sync/\n * meta.json ← sync metadata\n * ```\n *\n * ## When to use\n *\n * - **USB stick workflow** — the data directory lives on a removable drive.\n * Plug in, unlock, work offline, eject. No cloud dependency.\n * - **Local development** — simple, inspectable files; no Docker or cloud\n * credentials required.\n * - **Single-user desktop apps** — Electron, Tauri, or any Node.js app that\n * writes to a local directory.\n *\n * ## Capabilities\n *\n * | Capability | Value |\n * |---|---|\n * | `casAtomic` | `false` — no atomic compare-and-swap at the FS layer |\n * | `listVaults` | ✓ — enumerates subdirectories |\n * | `listPage` | ✓ — cursor-based pagination over sorted filenames |\n * | `ping` | ✓ — `stat(dir)` |\n *\n * ## Bundle helpers\n *\n * {@link saveBundle} and {@link loadBundle} are thin wrappers around the\n * core `writeNoydbBundle` / `readNoydbBundle` primitives that pipe bytes\n * to/from `node:fs`.\n *\n * @packageDocumentation\n */\n\nimport { readFile, writeFile, mkdir, readdir, unlink, stat } from 'node:fs/promises'\nimport { dirname, join } from 'node:path'\nimport type {\n NoydbStore,\n EncryptedEnvelope,\n VaultSnapshot,\n Vault,\n WriteNoydbBundleOptions,\n NoydbBundleReadResult,\n} from '@noy-db/hub'\nimport {\n ConflictError,\n writeNoydbBundle,\n readNoydbBundle,\n} from '@noy-db/hub'\n\n/**\n * Options for `jsonFile()`.\n *\n * Files are laid out as `{dir}/{vault}/{collection}/{id}.json`.\n * Internal collections (`_keyring`, `_sync`) follow the same pattern\n * under their vault directory.\n */\nexport interface 
JsonFileOptions {\n /** Base directory for NOYDB data. */\n dir: string\n /** Pretty-print JSON files. Default: true. */\n pretty?: boolean\n}\n\n/**\n * Create a JSON file adapter.\n * Maps the NOYDB hierarchy to the filesystem:\n *\n * ```\n * {dir}/{vault}/{collection}/{id}.json\n * {dir}/{vault}/_keyring/{userId}.json\n * ```\n */\nexport function jsonFile(options: JsonFileOptions): NoydbStore {\n const { dir, pretty = true } = options\n\n function recordPath(vault: string, collection: string, id: string): string {\n return join(dir, vault, collection, `${id}.json`)\n }\n\n function collectionDir(vault: string, collection: string): string {\n return join(dir, vault, collection)\n }\n\n async function ensureDir(path: string): Promise<void> {\n await mkdir(path, { recursive: true })\n }\n\n async function fileExists(path: string): Promise<boolean> {\n try {\n await stat(path)\n return true\n } catch {\n return false\n }\n }\n\n function serialize(envelope: EncryptedEnvelope): string {\n return pretty ? 
JSON.stringify(envelope, null, 2) : JSON.stringify(envelope)\n }\n\n return {\n name: 'file',\n\n async get(vault, collection, id) {\n const path = recordPath(vault, collection, id)\n try {\n const content = await readFile(path, 'utf-8')\n return JSON.parse(content) as EncryptedEnvelope\n } catch {\n return null\n }\n },\n\n async put(vault, collection, id, envelope, expectedVersion) {\n const path = recordPath(vault, collection, id)\n\n if (expectedVersion !== undefined && await fileExists(path)) {\n const existing = JSON.parse(await readFile(path, 'utf-8')) as EncryptedEnvelope\n if (existing._v !== expectedVersion) {\n throw new ConflictError(existing._v, `Version conflict: expected ${expectedVersion}, found ${existing._v}`)\n }\n }\n\n await ensureDir(collectionDir(vault, collection))\n await writeFile(path, serialize(envelope), 'utf-8')\n },\n\n async delete(vault, collection, id) {\n const path = recordPath(vault, collection, id)\n try {\n await unlink(path)\n } catch {\n // File doesn't exist — that's fine\n }\n },\n\n async list(vault, collection) {\n const dirPath = collectionDir(vault, collection)\n try {\n const entries = await readdir(dirPath)\n return entries\n .filter(f => f.endsWith('.json'))\n .map(f => f.slice(0, -5)) // remove .json extension\n } catch {\n return []\n }\n },\n\n async loadAll(vault) {\n const compDir = join(dir, vault)\n const snapshot: VaultSnapshot = {}\n\n try {\n const collections = await readdir(compDir)\n for (const collName of collections) {\n if (collName.startsWith('_')) continue // skip _keyring, _sync\n const collPath = join(compDir, collName)\n const collStat = await stat(collPath)\n if (!collStat.isDirectory()) continue\n\n const records: Record<string, EncryptedEnvelope> = {}\n const files = await readdir(collPath)\n for (const file of files) {\n if (!file.endsWith('.json')) continue\n const id = file.slice(0, -5)\n const content = await readFile(join(collPath, file), 'utf-8')\n records[id] = JSON.parse(content) as 
EncryptedEnvelope\n }\n snapshot[collName] = records\n }\n } catch {\n // Directory doesn't exist — return empty snapshot\n }\n\n return snapshot\n },\n\n async saveAll(vault, data) {\n for (const [collName, records] of Object.entries(data)) {\n const collDir = collectionDir(vault, collName)\n await ensureDir(collDir)\n for (const [id, envelope] of Object.entries(records)) {\n await writeFile(join(collDir, `${id}.json`), serialize(envelope), 'utf-8')\n }\n }\n },\n\n async ping() {\n try {\n await stat(dir)\n return true\n } catch {\n return false\n }\n },\n\n /**\n * Enumerate every top-level vault subdirectory under the\n * configured base directory. Used by\n * `Noydb.listAccessibleVaults()`.\n *\n * The implementation is `readdir(dir)` filtered to entries that\n * are themselves directories — files at the top level (READMEs,\n * .DS_Store, etc.) are skipped, and missing base directory\n * returns an empty array rather than throwing. Result order is\n * filesystem-defined; consumers that want stable order should\n * sort themselves.\n */\n async listVaults() {\n let entries: string[]\n try {\n entries = await readdir(dir)\n } catch {\n return []\n }\n const compartments: string[] = []\n for (const entry of entries) {\n try {\n const entryStat = await stat(join(dir, entry))\n if (entryStat.isDirectory()) compartments.push(entry)\n } catch {\n // Entry vanished between readdir and stat — skip silently.\n }\n }\n return compartments\n },\n\n /**\n * Paginate over a collection. Cursor is a numeric offset (as a string)\n * into the sorted filename list. Files are sorted alphabetically so\n * pages are stable across runs and across processes that share the\n * same data directory.\n *\n * The default `limit` is 100. 
Each item carries its decoded envelope\n * so callers don't need an extra `get()` round-trip per id.\n */\n async listPage(vault, collection, cursor, limit = 100) {\n const dirPath = collectionDir(vault, collection)\n let files: string[]\n try {\n files = await readdir(dirPath)\n } catch {\n return { items: [], nextCursor: null }\n }\n\n const ids = files\n .filter(f => f.endsWith('.json'))\n .map(f => f.slice(0, -5))\n .sort()\n\n const start = cursor ? parseInt(cursor, 10) : 0\n const end = Math.min(start + limit, ids.length)\n\n const items: Array<{ id: string; envelope: EncryptedEnvelope }> = []\n for (let i = start; i < end; i++) {\n const id = ids[i]!\n try {\n const content = await readFile(join(dirPath, `${id}.json`), 'utf-8')\n items.push({ id, envelope: JSON.parse(content) as EncryptedEnvelope })\n } catch {\n // File disappeared between readdir and readFile — skip silently.\n }\n }\n\n return {\n items,\n nextCursor: end < ids.length ? String(end) : null,\n }\n },\n }\n}\n\n// ─── .noydb bundle helpers ─────────────────────────────────\n\n/**\n * Write a `.noydb` container for a vault to a local file.\n *\n * Thin wrapper around `writeNoydbBundle` from `@noy-db/core` —\n * the core primitive returns a `Uint8Array`, this helper just\n * pipes it to `node:fs.writeFile` after ensuring the parent\n * directory exists. Use the same options as the core primitive.\n *\n * **Path convention** is up to the caller — `.noydb` is the\n * recommended extension. 
Consumers using cloud-sync folders\n * should name files by the bundle handle (available via\n * `vault.getBundleHandle()`) rather than the vault\n * name to avoid leaking metadata at the filesystem layer:\n *\n * ```ts\n * const handle = await company.getBundleHandle()\n * await saveBundle(`./bundles/${handle}.noydb`, company)\n * ```\n *\n * The full container is written atomically by `node:fs.writeFile`\n * (the platform's atomic-write semantics apply — POSIX `write()`\n * is atomic up to PIPE_BUF, larger files race with concurrent\n * readers; consumers writing into shared cloud folders should\n * pair this with their cloud sync's conflict resolution).\n */\nexport async function saveBundle(\n path: string,\n vault: Vault,\n opts: WriteNoydbBundleOptions = {},\n): Promise<void> {\n const bytes = await writeNoydbBundle(vault, opts)\n // Ensure the parent directory exists — `writeFile` does NOT\n // create intermediate directories on its own. Recursive mkdir\n // is a no-op when the directory already exists.\n await mkdir(dirname(path), { recursive: true })\n await writeFile(path, bytes)\n}\n\n/**\n * Read and verify a `.noydb` container from a local file.\n *\n * Returns the parsed header plus the unwrapped `dump()` JSON\n * string ready to feed to `vault.load(json, passphrase)`.\n * Throws `BundleIntegrityError` from `@noy-db/core` if the body\n * bytes don't match the integrity hash declared in the header\n * (the bundle was modified between write and read), or any\n * format error from the core reader if the bytes aren't a valid\n * bundle at all.\n *\n * Does NOT take a passphrase — the bundle reader is purely a\n * format layer. 
Restoring a vault from the returned dump\n * JSON requires a separate `vault.load()` call with the\n * passphrase, mirroring the split between\n * `readNoydbBundle()` and `vault.load()` in core.\n */\nexport async function loadBundle(path: string): Promise<NoydbBundleReadResult> {\n const bytes = await readFile(path)\n // node:fs.readFile returns a Buffer, which is a Uint8Array\n // subclass — `readNoydbBundle` accepts Uint8Array directly,\n // no copy needed.\n return readNoydbBundle(bytes)\n}\n\n// Export-blobs FS materializer — wraps `vault.exportBlobs()` with\n// target-profile filename sanitization, Zip-Slip path containment, and\n// collision policy. Lives in `to-file` (not core) because hub stays\n// portable across browser/Node and shouldn't import `node:fs`.\nexport {\n exportBlobsToDirectory,\n} from './export-blobs-to-directory.js'\nexport type {\n ExportBlobsToDirectoryOptions,\n ExportBlobsToDirectoryResult,\n CollisionStrategy,\n} from './export-blobs-to-directory.js'\n","/**\n * `exportBlobsToDirectory(vault, targetDir, opts)` — bulk blob\n * extraction into a real filesystem directory, with target-profile\n * filename sanitization and Zip-Slip path containment built in\n *.\n *\n * Wraps `vault.exportBlobs()` (the framework-agnostic async iterable\n * in core) with the FS-write concerns that don't belong in core:\n *\n * - sanitize filenames per a target profile (`posix`, `windows`,\n * `macos-smb`, `zip`, `url-path`, `s3-key`, `opaque`),\n * - guard against path-escape after sanitization (`PathEscapeError`),\n * - resolve filename collisions (`suffix` / `overwrite` / `fail` /\n * custom callback),\n * - emit a sidecar `manifest.json` when the profile is `'opaque'`,\n * mapping opaque ids back to the original record-supplied\n * filenames.\n *\n * @module\n */\n\nimport { mkdir, writeFile } from 'node:fs/promises'\nimport { resolve, sep, dirname, extname } from 'node:path'\nimport type { Vault } from '@noy-db/hub'\nimport { PathEscapeError } from 
'@noy-db/hub'\nimport { sanitizeFilename, type FilenameProfile } from '@noy-db/hub/util'\n\n/** Strategy for resolving two records that sanitize to the same name. */\nexport type CollisionStrategy =\n | 'suffix'\n | 'overwrite'\n | 'fail'\n | ((existing: string, attempt: number) => string)\n\nexport interface ExportBlobsToDirectoryOptions {\n /**\n * Filename profile to sanitize against. Default: `'macos-smb'` —\n * the most restrictive intersection of the rules adopters\n * typically hit. Pick a more specific profile when you know the\n * exact destination.\n */\n readonly filenameProfile?: FilenameProfile\n /**\n * How to handle two blobs whose sanitized filenames collide.\n * Default: `'suffix'`.\n */\n readonly onCollision?: CollisionStrategy\n /**\n * Optional collection allowlist forwarded to `vault.exportBlobs`.\n */\n readonly collections?: readonly string[]\n /**\n * Optional record predicate forwarded to `vault.exportBlobs`.\n */\n readonly where?: (\n record: unknown,\n context: { collection: string; id: string },\n ) => boolean\n /**\n * Optional resume cursor forwarded to `vault.exportBlobs`.\n */\n readonly afterBlobId?: string\n /**\n * External abort signal forwarded to `vault.exportBlobs`.\n */\n readonly signal?: AbortSignal\n}\n\nexport interface ExportBlobsToDirectoryResult {\n /** Total blobs written. */\n readonly written: number\n /** Total bytes written across all blobs. */\n readonly bytes: number\n /** Pairs of `{ blobId, path }` for every blob that landed on disk. */\n readonly entries: ReadonlyArray<{ blobId: string; path: string }>\n /**\n * When `filenameProfile === 'opaque'`, the absolute path of the\n * `manifest.json` sidecar. 
`null` for every other profile.\n */\n readonly manifestPath: string | null\n}\n\ninterface OpaqueManifestEntry {\n readonly opaqueName: string\n readonly originalName: string\n readonly collection: string\n readonly recordId: string\n readonly slot: string\n readonly blobId: string\n readonly mimeType?: string\n}\n\n/**\n * Materialize every blob in the vault into `targetDir`. Returns a\n * summary suitable for logging / audit.\n *\n * Caller MUST already hold whatever capability the vault demands\n * (`canExportPlaintext['blob']`) — this function delegates to\n * `vault.exportBlobs()`, which performs the capability check itself.\n */\nexport async function exportBlobsToDirectory(\n vault: Vault,\n targetDir: string,\n options: ExportBlobsToDirectoryOptions = {},\n): Promise<ExportBlobsToDirectoryResult> {\n const profile: FilenameProfile = options.filenameProfile ?? 'macos-smb'\n const onCollision: CollisionStrategy = options.onCollision ?? 'suffix'\n\n const absTargetDir = resolve(targetDir)\n await mkdir(absTargetDir, { recursive: true })\n const containmentPrefix = absTargetDir + sep\n\n // Track filenames already used in this run so collision resolution\n // is deterministic and cheap (no extra stat() per attempt).\n const used = new Set<string>()\n const entries: { blobId: string; path: string }[] = []\n const opaqueEntries: OpaqueManifestEntry[] = []\n let totalBytes = 0\n\n const handle = vault.exportBlobs({\n ...(options.collections && { collections: options.collections }),\n ...(options.where && { where: options.where }),\n ...(options.afterBlobId && { afterBlobId: options.afterBlobId }),\n ...(options.signal && { signal: options.signal }),\n })\n\n for await (const blob of handle) {\n const original = blob.meta.filename\n const sanitizeOpts =\n profile === 'opaque'\n ? 
{ profile, opaqueId: blob.blobId } as const\n : { profile } as const\n const candidate = sanitizeFilename(original, sanitizeOpts)\n const finalName = resolveCollision(candidate, used, onCollision)\n used.add(finalName)\n\n const absPath = resolve(absTargetDir, finalName)\n if (absPath !== absTargetDir && !absPath.startsWith(containmentPrefix)) {\n throw new PathEscapeError({ attempted: finalName, targetDir: absTargetDir })\n }\n\n await mkdir(dirname(absPath), { recursive: true })\n await writeFile(absPath, blob.bytes)\n entries.push({ blobId: blob.blobId, path: absPath })\n totalBytes += blob.bytes.byteLength\n\n if (profile === 'opaque') {\n const entry: OpaqueManifestEntry = {\n opaqueName: finalName,\n originalName: original,\n collection: blob.recordRef.collection,\n recordId: blob.recordRef.id,\n slot: blob.recordRef.slot,\n blobId: blob.blobId,\n ...(blob.meta.mimeType !== undefined && { mimeType: blob.meta.mimeType }),\n }\n opaqueEntries.push(entry)\n }\n }\n\n let manifestPath: string | null = null\n if (profile === 'opaque') {\n manifestPath = resolve(absTargetDir, 'manifest.json')\n const json = JSON.stringify(\n {\n format: 'noydb-opaque-export',\n version: 1,\n entries: opaqueEntries,\n },\n null,\n 2,\n )\n await writeFile(manifestPath, json)\n }\n\n return {\n written: entries.length,\n bytes: totalBytes,\n entries,\n manifestPath,\n }\n}\n\nfunction resolveCollision(\n candidate: string,\n used: Set<string>,\n strategy: CollisionStrategy,\n): string {\n if (!used.has(candidate)) return candidate\n if (strategy === 'overwrite') return candidate\n if (strategy === 'fail') {\n throw new Error(`exportBlobsToDirectory: filename collision on \"${candidate}\"`)\n }\n // `'suffix'` and the function-callback path both build a sequence\n // of attempts and pick the first non-colliding one.\n for (let attempt = 1; attempt < 10_000; attempt++) {\n const next =\n typeof strategy === 'function'\n ? 
strategy(candidate, attempt)\n : addSuffix(candidate, attempt)\n if (!used.has(next)) return next\n }\n throw new Error(`exportBlobsToDirectory: collision suffix exhausted for \"${candidate}\"`)\n}\n\nfunction addSuffix(name: string, attempt: number): string {\n const ext = extname(name)\n if (ext.length > 0 && ext.length < name.length) {\n const stem = name.slice(0, name.length - ext.length)\n return `${stem}-${attempt}${ext}`\n }\n return `${name}-${attempt}`\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA2CA,IAAAA,mBAAkE;AAClE,IAAAC,oBAA8B;AAS9B,IAAAC,cAIO;;;ACpCP,sBAAiC;AACjC,uBAA+C;AAE/C,iBAAgC;AAChC,kBAAuD;AA2EvD,eAAsB,uBACpB,OACA,WACA,UAAyC,CAAC,GACH;AACvC,QAAM,UAA2B,QAAQ,mBAAmB;AAC5D,QAAM,cAAiC,QAAQ,eAAe;AAE9D,QAAM,mBAAe,0BAAQ,SAAS;AACtC,YAAM,uBAAM,cAAc,EAAE,WAAW,KAAK,CAAC;AAC7C,QAAM,oBAAoB,eAAe;AAIzC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,UAA8C,CAAC;AACrD,QAAM,gBAAuC,CAAC;AAC9C,MAAI,aAAa;AAEjB,QAAM,SAAS,MAAM,YAAY;AAAA,IAC/B,GAAI,QAAQ,eAAe,EAAE,aAAa,QAAQ,YAAY;AAAA,IAC9D,GAAI,QAAQ,SAAS,EAAE,OAAO,QAAQ,MAAM;AAAA,IAC5C,GAAI,QAAQ,eAAe,EAAE,aAAa,QAAQ,YAAY;AAAA,IAC9D,GAAI,QAAQ,UAAU,EAAE,QAAQ,QAAQ,OAAO;AAAA,EACjD,CAAC;AAED,mBAAiB,QAAQ,QAAQ;AAC/B,UAAM,WAAW,KAAK,KAAK;AAC3B,UAAM,eACJ,YAAY,WACR,EAAE,SAAS,UAAU,KAAK,OAAO,IACjC,EAAE,QAAQ;AAChB,UAAM,gBAAY,8BAAiB,UAAU,YAAY;AACzD,UAAM,YAAY,iBAAiB,WAAW,MAAM,WAAW;AAC/D,SAAK,IAAI,SAAS;AAElB,UAAM,cAAU,0BAAQ,cAAc,SAAS;AAC/C,QAAI,YAAY,gBAAgB,CAAC,QAAQ,WAAW,iBAAiB,GAAG;AACtE,YAAM,IAAI,2BAAgB,EAAE,WAAW,WAAW,WAAW,aAAa,CAAC;AAAA,IAC7E;AAEA,cAAM,2BAAM,0BAAQ,OAAO,GAAG,EAAE,WAAW,KAAK,CAAC;AACjD,cAAM,2BAAU,SAAS,KAAK,KAAK;AACnC,YAAQ,KAAK,EAAE,QAAQ,KAAK,QAAQ,MAAM,QAAQ,CAAC;AACnD,kBAAc,KAAK,MAAM;AAEzB,QAAI,YAAY,UAAU;AACxB,YAAM,QAA6B;AAAA,QACjC,YAAY;AAAA,QACZ,cAAc;AAAA,QACd,YAAY,KAAK,UAAU;AAAA,QAC3B,UAAU,KAAK,UAAU;AAAA,QACzB,MAAM,KAAK,UAAU;AAAA,QACrB,QAAQ,KAAK;AAAA,QACb,GAAI,KAAK,KAAK,aAAa,UAAa,EAAE,UAAU,KAAK,KAAK,SAAS;AAAA,MACzE;AACA,oBAAc,KAAK,KAAK;AAAA,IAC1B;AAAA,EACF;AAEA,MAAI,eAA8B;AAClC,MAAI,YAAY,UAAU;AACxB,uBA
Ae,0BAAQ,cAAc,eAAe;AACpD,UAAM,OAAO,KAAK;AAAA,MAChB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,SAAS;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,cAAM,2BAAU,cAAc,IAAI;AAAA,EACpC;AAEA,SAAO;AAAA,IACL,SAAS,QAAQ;AAAA,IACjB,OAAO;AAAA,IACP;AAAA,IACA;AAAA,EACF;AACF;AAEA,SAAS,iBACP,WACA,MACA,UACQ;AACR,MAAI,CAAC,KAAK,IAAI,SAAS,EAAG,QAAO;AACjC,MAAI,aAAa,YAAa,QAAO;AACrC,MAAI,aAAa,QAAQ;AACvB,UAAM,IAAI,MAAM,kDAAkD,SAAS,GAAG;AAAA,EAChF;AAGA,WAAS,UAAU,GAAG,UAAU,KAAQ,WAAW;AACjD,UAAM,OACJ,OAAO,aAAa,aAChB,SAAS,WAAW,OAAO,IAC3B,UAAU,WAAW,OAAO;AAClC,QAAI,CAAC,KAAK,IAAI,IAAI,EAAG,QAAO;AAAA,EAC9B;AACA,QAAM,IAAI,MAAM,2DAA2D,SAAS,GAAG;AACzF;AAEA,SAAS,UAAU,MAAc,SAAyB;AACxD,QAAM,UAAM,0BAAQ,IAAI;AACxB,MAAI,IAAI,SAAS,KAAK,IAAI,SAAS,KAAK,QAAQ;AAC9C,UAAM,OAAO,KAAK,MAAM,GAAG,KAAK,SAAS,IAAI,MAAM;AACnD,WAAO,GAAG,IAAI,IAAI,OAAO,GAAG,GAAG;AAAA,EACjC;AACA,SAAO,GAAG,IAAI,IAAI,OAAO;AAC3B;;;ADlIO,SAAS,SAAS,SAAsC;AAC7D,QAAM,EAAE,KAAK,SAAS,KAAK,IAAI;AAE/B,WAAS,WAAW,OAAe,YAAoB,IAAoB;AACzE,eAAO,wBAAK,KAAK,OAAO,YAAY,GAAG,EAAE,OAAO;AAAA,EAClD;AAEA,WAAS,cAAc,OAAe,YAA4B;AAChE,eAAO,wBAAK,KAAK,OAAO,UAAU;AAAA,EACpC;AAEA,iBAAe,UAAU,MAA6B;AACpD,cAAM,wBAAM,MAAM,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AAEA,iBAAe,WAAW,MAAgC;AACxD,QAAI;AACF,gBAAM,uBAAK,IAAI;AACf,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,WAAS,UAAU,UAAqC;AACtD,WAAO,SAAS,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI,KAAK,UAAU,QAAQ;AAAA,EAC7E;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IAEN,MAAM,IAAI,OAAO,YAAY,IAAI;AAC/B,YAAM,OAAO,WAAW,OAAO,YAAY,EAAE;AAC7C,UAAI;AACF,cAAM,UAAU,UAAM,2BAAS,MAAM,OAAO;AAC5C,eAAO,KAAK,MAAM,OAAO;AAAA,MAC3B,QAAQ;AACN,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IAEA,MAAM,IAAI,OAAO,YAAY,IAAI,UAAU,iBAAiB;AAC1D,YAAM,OAAO,WAAW,OAAO,YAAY,EAAE;AAE7C,UAAI,oBAAoB,UAAa,MAAM,WAAW,IAAI,GAAG;AAC3D,cAAM,WAAW,KAAK,MAAM,UAAM,2BAAS,MAAM,OAAO,CAAC;AACzD,YAAI,SAAS,OAAO,iBAAiB;AACnC,gBAAM,IAAI,0BAAc,SAAS,IAAI,8BAA8B,eAAe,WAAW,SAAS,EAAE,EAAE;AAAA,QAC5G;AAAA,MACF;AAEA,YAAM,UAAU,cAAc,OAAO,UAAU,CAAC;AAChD,gBAAM,4BAAU,MAAM,UAAU,QAAQ,GAAG,OAAO;AAAA,IACpD;AAAA,IAEA,MAAM,OAAO,OAAO,YAAY,IAAI;AAClC,YAAM,OAA
O,WAAW,OAAO,YAAY,EAAE;AAC7C,UAAI;AACF,kBAAM,yBAAO,IAAI;AAAA,MACnB,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,IAEA,MAAM,KAAK,OAAO,YAAY;AAC5B,YAAM,UAAU,cAAc,OAAO,UAAU;AAC/C,UAAI;AACF,cAAM,UAAU,UAAM,0BAAQ,OAAO;AACrC,eAAO,QACJ,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,EAC/B,IAAI,OAAK,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,MAC5B,QAAQ;AACN,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,QAAQ,OAAO;AACnB,YAAM,cAAU,wBAAK,KAAK,KAAK;AAC/B,YAAM,WAA0B,CAAC;AAEjC,UAAI;AACF,cAAM,cAAc,UAAM,0BAAQ,OAAO;AACzC,mBAAW,YAAY,aAAa;AAClC,cAAI,SAAS,WAAW,GAAG,EAAG;AAC9B,gBAAM,eAAW,wBAAK,SAAS,QAAQ;AACvC,gBAAM,WAAW,UAAM,uBAAK,QAAQ;AACpC,cAAI,CAAC,SAAS,YAAY,EAAG;AAE7B,gBAAM,UAA6C,CAAC;AACpD,gBAAM,QAAQ,UAAM,0BAAQ,QAAQ;AACpC,qBAAW,QAAQ,OAAO;AACxB,gBAAI,CAAC,KAAK,SAAS,OAAO,EAAG;AAC7B,kBAAM,KAAK,KAAK,MAAM,GAAG,EAAE;AAC3B,kBAAM,UAAU,UAAM,+BAAS,wBAAK,UAAU,IAAI,GAAG,OAAO;AAC5D,oBAAQ,EAAE,IAAI,KAAK,MAAM,OAAO;AAAA,UAClC;AACA,mBAAS,QAAQ,IAAI;AAAA,QACvB;AAAA,MACF,QAAQ;AAAA,MAER;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,QAAQ,OAAO,MAAM;AACzB,iBAAW,CAAC,UAAU,OAAO,KAAK,OAAO,QAAQ,IAAI,GAAG;AACtD,cAAM,UAAU,cAAc,OAAO,QAAQ;AAC7C,cAAM,UAAU,OAAO;AACvB,mBAAW,CAAC,IAAI,QAAQ,KAAK,OAAO,QAAQ,OAAO,GAAG;AACpD,oBAAM,gCAAU,wBAAK,SAAS,GAAG,EAAE,OAAO,GAAG,UAAU,QAAQ,GAAG,OAAO;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AAAA,IAEA,MAAM,OAAO;AACX,UAAI;AACF,kBAAM,uBAAK,GAAG;AACd,eAAO;AAAA,MACT,QAAQ;AACN,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAcA,MAAM,aAAa;AACjB,UAAI;AACJ,UAAI;AACF,kBAAU,UAAM,0BAAQ,GAAG;AAAA,MAC7B,QAAQ;AACN,eAAO,CAAC;AAAA,MACV;AACA,YAAM,eAAyB,CAAC;AAChC,iBAAW,SAAS,SAAS;AAC3B,YAAI;AACF,gBAAM,YAAY,UAAM,2BAAK,wBAAK,KAAK,KAAK,CAAC;AAC7C,cAAI,UAAU,YAAY,EAAG,cAAa,KAAK,KAAK;AAAA,QACtD,QAAQ;AAAA,QAER;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAWA,MAAM,SAAS,OAAO,YAAY,QAAQ,QAAQ,KAAK;AACrD,YAAM,UAAU,cAAc,OAAO,UAAU;AAC/C,UAAI;AACJ,UAAI;AACF,gBAAQ,UAAM,0BAAQ,OAAO;AAAA,MAC/B,QAAQ;AACN,eAAO,EAAE,OAAO,CAAC,GAAG,YAAY,KAAK;AAAA,MACvC;AAEA,YAAM,MAAM,MACT,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,EAC/B,IAAI,OAAK,EAA
E,MAAM,GAAG,EAAE,CAAC,EACvB,KAAK;AAER,YAAM,QAAQ,SAAS,SAAS,QAAQ,EAAE,IAAI;AAC9C,YAAM,MAAM,KAAK,IAAI,QAAQ,OAAO,IAAI,MAAM;AAE9C,YAAM,QAA4D,CAAC;AACnE,eAAS,IAAI,OAAO,IAAI,KAAK,KAAK;AAChC,cAAM,KAAK,IAAI,CAAC;AAChB,YAAI;AACF,gBAAM,UAAU,UAAM,+BAAS,wBAAK,SAAS,GAAG,EAAE,OAAO,GAAG,OAAO;AACnE,gBAAM,KAAK,EAAE,IAAI,UAAU,KAAK,MAAM,OAAO,EAAuB,CAAC;AAAA,QACvE,QAAQ;AAAA,QAER;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA,YAAY,MAAM,IAAI,SAAS,OAAO,GAAG,IAAI;AAAA,MAC/C;AAAA,IACF;AAAA,EACF;AACF;AA6BA,eAAsB,WACpB,MACA,OACA,OAAgC,CAAC,GAClB;AACf,QAAM,QAAQ,UAAM,8BAAiB,OAAO,IAAI;AAIhD,YAAM,4BAAM,2BAAQ,IAAI,GAAG,EAAE,WAAW,KAAK,CAAC;AAC9C,YAAM,4BAAU,MAAM,KAAK;AAC7B;AAmBA,eAAsB,WAAW,MAA8C;AAC7E,QAAM,QAAQ,UAAM,2BAAS,IAAI;AAIjC,aAAO,6BAAgB,KAAK;AAC9B;","names":["import_promises","import_node_path","import_hub"]}
|
package/dist/index.d.cts
ADDED
|
@@ -0,0 +1,197 @@
|
|
|
1
|
+
import { Vault, NoydbStore, NoydbBundleReadResult, WriteNoydbBundleOptions } from '@noy-db/hub';
|
|
2
|
+
import { FilenameProfile } from '@noy-db/hub/util';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* `exportBlobsToDirectory(vault, targetDir, opts)` — bulk blob
|
|
6
|
+
* extraction into a real filesystem directory, with target-profile
|
|
7
|
+
* filename sanitization and Zip-Slip path containment built in
|
|
8
|
+
*.
|
|
9
|
+
*
|
|
10
|
+
* Wraps `vault.exportBlobs()` (the framework-agnostic async iterable
|
|
11
|
+
* in core) with the FS-write concerns that don't belong in core:
|
|
12
|
+
*
|
|
13
|
+
* - sanitize filenames per a target profile (`posix`, `windows`,
|
|
14
|
+
* `macos-smb`, `zip`, `url-path`, `s3-key`, `opaque`),
|
|
15
|
+
* - guard against path-escape after sanitization (`PathEscapeError`),
|
|
16
|
+
* - resolve filename collisions (`suffix` / `overwrite` / `fail` /
|
|
17
|
+
* custom callback),
|
|
18
|
+
* - emit a sidecar `manifest.json` when the profile is `'opaque'`,
|
|
19
|
+
* mapping opaque ids back to the original record-supplied
|
|
20
|
+
* filenames.
|
|
21
|
+
*
|
|
22
|
+
* @module
|
|
23
|
+
*/
|
|
24
|
+
|
|
25
|
+
/** Strategy for resolving two records that sanitize to the same name. */
|
|
26
|
+
type CollisionStrategy = 'suffix' | 'overwrite' | 'fail' | ((existing: string, attempt: number) => string);
|
|
27
|
+
interface ExportBlobsToDirectoryOptions {
|
|
28
|
+
/**
|
|
29
|
+
* Filename profile to sanitize against. Default: `'macos-smb'` —
|
|
30
|
+
* the most restrictive intersection of the rules adopters
|
|
31
|
+
* typically hit. Pick a more specific profile when you know the
|
|
32
|
+
* exact destination.
|
|
33
|
+
*/
|
|
34
|
+
readonly filenameProfile?: FilenameProfile;
|
|
35
|
+
/**
|
|
36
|
+
* How to handle two blobs whose sanitized filenames collide.
|
|
37
|
+
* Default: `'suffix'`.
|
|
38
|
+
*/
|
|
39
|
+
readonly onCollision?: CollisionStrategy;
|
|
40
|
+
/**
|
|
41
|
+
* Optional collection allowlist forwarded to `vault.exportBlobs`.
|
|
42
|
+
*/
|
|
43
|
+
readonly collections?: readonly string[];
|
|
44
|
+
/**
|
|
45
|
+
* Optional record predicate forwarded to `vault.exportBlobs`.
|
|
46
|
+
*/
|
|
47
|
+
readonly where?: (record: unknown, context: {
|
|
48
|
+
collection: string;
|
|
49
|
+
id: string;
|
|
50
|
+
}) => boolean;
|
|
51
|
+
/**
|
|
52
|
+
* Optional resume cursor forwarded to `vault.exportBlobs`.
|
|
53
|
+
*/
|
|
54
|
+
readonly afterBlobId?: string;
|
|
55
|
+
/**
|
|
56
|
+
* External abort signal forwarded to `vault.exportBlobs`.
|
|
57
|
+
*/
|
|
58
|
+
readonly signal?: AbortSignal;
|
|
59
|
+
}
|
|
60
|
+
interface ExportBlobsToDirectoryResult {
|
|
61
|
+
/** Total blobs written. */
|
|
62
|
+
readonly written: number;
|
|
63
|
+
/** Total bytes written across all blobs. */
|
|
64
|
+
readonly bytes: number;
|
|
65
|
+
/** Pairs of `{ blobId, path }` for every blob that landed on disk. */
|
|
66
|
+
readonly entries: ReadonlyArray<{
|
|
67
|
+
blobId: string;
|
|
68
|
+
path: string;
|
|
69
|
+
}>;
|
|
70
|
+
/**
|
|
71
|
+
* When `filenameProfile === 'opaque'`, the absolute path of the
|
|
72
|
+
* `manifest.json` sidecar. `null` for every other profile.
|
|
73
|
+
*/
|
|
74
|
+
readonly manifestPath: string | null;
|
|
75
|
+
}
|
|
76
|
+
/**
|
|
77
|
+
* Materialize every blob in the vault into `targetDir`. Returns a
|
|
78
|
+
* summary suitable for logging / audit.
|
|
79
|
+
*
|
|
80
|
+
* Caller MUST already hold whatever capability the vault demands
|
|
81
|
+
* (`canExportPlaintext['blob']`) — this function delegates to
|
|
82
|
+
* `vault.exportBlobs()`, which performs the capability check itself.
|
|
83
|
+
*/
|
|
84
|
+
declare function exportBlobsToDirectory(vault: Vault, targetDir: string, options?: ExportBlobsToDirectoryOptions): Promise<ExportBlobsToDirectoryResult>;
|
|
85
|
+
|
|
86
|
+
/**
|
|
87
|
+
* **@noy-db/to-file** — JSON file store for NOYDB (USB / local disk).
|
|
88
|
+
*
|
|
89
|
+
* Maps the NOYDB hierarchy directly to the filesystem:
|
|
90
|
+
*
|
|
91
|
+
* ```
|
|
92
|
+
* {dir}/
|
|
93
|
+
* {vault}/
|
|
94
|
+
* {collection}/
|
|
95
|
+
* {id}.json ← EncryptedEnvelope, optionally pretty-printed
|
|
96
|
+
* _keyring/
|
|
97
|
+
* {userId}.json ← wrapped DEKs for this user
|
|
98
|
+
* _sync/
|
|
99
|
+
* meta.json ← sync metadata
|
|
100
|
+
* ```
|
|
101
|
+
*
|
|
102
|
+
* ## When to use
|
|
103
|
+
*
|
|
104
|
+
* - **USB stick workflow** — the data directory lives on a removable drive.
|
|
105
|
+
* Plug in, unlock, work offline, eject. No cloud dependency.
|
|
106
|
+
* - **Local development** — simple, inspectable files; no Docker or cloud
|
|
107
|
+
* credentials required.
|
|
108
|
+
* - **Single-user desktop apps** — Electron, Tauri, or any Node.js app that
|
|
109
|
+
* writes to a local directory.
|
|
110
|
+
*
|
|
111
|
+
* ## Capabilities
|
|
112
|
+
*
|
|
113
|
+
* | Capability | Value |
|
|
114
|
+
* |---|---|
|
|
115
|
+
* | `casAtomic` | `false` — no atomic compare-and-swap at the FS layer |
|
|
116
|
+
* | `listVaults` | ✓ — enumerates subdirectories |
|
|
117
|
+
* | `listPage` | ✓ — cursor-based pagination over sorted filenames |
|
|
118
|
+
* | `ping` | ✓ — `stat(dir)` |
|
|
119
|
+
*
|
|
120
|
+
* ## Bundle helpers
|
|
121
|
+
*
|
|
122
|
+
* {@link saveBundle} and {@link loadBundle} are thin wrappers around the
|
|
123
|
+
* core `writeNoydbBundle` / `readNoydbBundle` primitives that pipe bytes
|
|
124
|
+
* to/from `node:fs`.
|
|
125
|
+
*
|
|
126
|
+
* @packageDocumentation
|
|
127
|
+
*/
|
|
128
|
+
|
|
129
|
+
/**
|
|
130
|
+
* Options for `jsonFile()`.
|
|
131
|
+
*
|
|
132
|
+
* Files are laid out as `{dir}/{vault}/{collection}/{id}.json`.
|
|
133
|
+
* Internal collections (`_keyring`, `_sync`) follow the same pattern
|
|
134
|
+
* under their vault directory.
|
|
135
|
+
*/
|
|
136
|
+
interface JsonFileOptions {
|
|
137
|
+
/** Base directory for NOYDB data. */
|
|
138
|
+
dir: string;
|
|
139
|
+
/** Pretty-print JSON files. Default: true. */
|
|
140
|
+
pretty?: boolean;
|
|
141
|
+
}
|
|
142
|
+
/**
|
|
143
|
+
* Create a JSON file adapter.
|
|
144
|
+
* Maps the NOYDB hierarchy to the filesystem:
|
|
145
|
+
*
|
|
146
|
+
* ```
|
|
147
|
+
* {dir}/{vault}/{collection}/{id}.json
|
|
148
|
+
* {dir}/{vault}/_keyring/{userId}.json
|
|
149
|
+
* ```
|
|
150
|
+
*/
|
|
151
|
+
declare function jsonFile(options: JsonFileOptions): NoydbStore;
|
|
152
|
+
/**
|
|
153
|
+
* Write a `.noydb` container for a vault to a local file.
|
|
154
|
+
*
|
|
155
|
+
* Thin wrapper around `writeNoydbBundle` from `@noy-db/core` —
|
|
156
|
+
* the core primitive returns a `Uint8Array`, this helper just
|
|
157
|
+
* pipes it to `node:fs.writeFile` after ensuring the parent
|
|
158
|
+
* directory exists. Use the same options as the core primitive.
|
|
159
|
+
*
|
|
160
|
+
* **Path convention** is up to the caller — `.noydb` is the
|
|
161
|
+
* recommended extension. Consumers using cloud-sync folders
|
|
162
|
+
* should name files by the bundle handle (available via
|
|
163
|
+
* `vault.getBundleHandle()`) rather than the vault
|
|
164
|
+
* name to avoid leaking metadata at the filesystem layer:
|
|
165
|
+
*
|
|
166
|
+
* ```ts
|
|
167
|
+
* const handle = await company.getBundleHandle()
|
|
168
|
+
* await saveBundle(`./bundles/${handle}.noydb`, company)
|
|
169
|
+
* ```
|
|
170
|
+
*
|
|
171
|
+
* The full container is written atomically by `node:fs.writeFile`
|
|
172
|
+
* (the platform's atomic-write semantics apply — POSIX `write()`
|
|
173
|
+
* is atomic up to PIPE_BUF, larger files race with concurrent
|
|
174
|
+
* readers; consumers writing into shared cloud folders should
|
|
175
|
+
* pair this with their cloud sync's conflict resolution).
|
|
176
|
+
*/
|
|
177
|
+
declare function saveBundle(path: string, vault: Vault, opts?: WriteNoydbBundleOptions): Promise<void>;
|
|
178
|
+
/**
|
|
179
|
+
* Read and verify a `.noydb` container from a local file.
|
|
180
|
+
*
|
|
181
|
+
* Returns the parsed header plus the unwrapped `dump()` JSON
|
|
182
|
+
* string ready to feed to `vault.load(json, passphrase)`.
|
|
183
|
+
* Throws `BundleIntegrityError` from `@noy-db/core` if the body
|
|
184
|
+
* bytes don't match the integrity hash declared in the header
|
|
185
|
+
* (the bundle was modified between write and read), or any
|
|
186
|
+
* format error from the core reader if the bytes aren't a valid
|
|
187
|
+
* bundle at all.
|
|
188
|
+
*
|
|
189
|
+
* Does NOT take a passphrase — the bundle reader is purely a
|
|
190
|
+
* format layer. Restoring a vault from the returned dump
|
|
191
|
+
* JSON requires a separate `vault.load()` call with the
|
|
192
|
+
* passphrase, mirroring the split between
|
|
193
|
+
* `readNoydbBundle()` and `vault.load()` in core.
|
|
194
|
+
*/
|
|
195
|
+
declare function loadBundle(path: string): Promise<NoydbBundleReadResult>;
|
|
196
|
+
|
|
197
|
+
export { type CollisionStrategy, type ExportBlobsToDirectoryOptions, type ExportBlobsToDirectoryResult, type JsonFileOptions, exportBlobsToDirectory, jsonFile, loadBundle, saveBundle };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,197 @@
|
|
|
1
|
+
import { Vault, NoydbStore, NoydbBundleReadResult, WriteNoydbBundleOptions } from '@noy-db/hub';
|
|
2
|
+
import { FilenameProfile } from '@noy-db/hub/util';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* `exportBlobsToDirectory(vault, targetDir, opts)` — bulk blob
|
|
6
|
+
* extraction into a real filesystem directory, with target-profile
|
|
7
|
+
* filename sanitization and Zip-Slip path containment built in
|
|
8
|
+
*.
|
|
9
|
+
*
|
|
10
|
+
* Wraps `vault.exportBlobs()` (the framework-agnostic async iterable
|
|
11
|
+
* in core) with the FS-write concerns that don't belong in core:
|
|
12
|
+
*
|
|
13
|
+
* - sanitize filenames per a target profile (`posix`, `windows`,
|
|
14
|
+
* `macos-smb`, `zip`, `url-path`, `s3-key`, `opaque`),
|
|
15
|
+
* - guard against path-escape after sanitization (`PathEscapeError`),
|
|
16
|
+
* - resolve filename collisions (`suffix` / `overwrite` / `fail` /
|
|
17
|
+
* custom callback),
|
|
18
|
+
* - emit a sidecar `manifest.json` when the profile is `'opaque'`,
|
|
19
|
+
* mapping opaque ids back to the original record-supplied
|
|
20
|
+
* filenames.
|
|
21
|
+
*
|
|
22
|
+
* @module
|
|
23
|
+
*/
|
|
24
|
+
|
|
25
|
+
/** Strategy for resolving two records that sanitize to the same name. */
|
|
26
|
+
type CollisionStrategy = 'suffix' | 'overwrite' | 'fail' | ((existing: string, attempt: number) => string);
|
|
27
|
+
interface ExportBlobsToDirectoryOptions {
|
|
28
|
+
/**
|
|
29
|
+
* Filename profile to sanitize against. Default: `'macos-smb'` —
|
|
30
|
+
* the most restrictive intersection of the rules adopters
|
|
31
|
+
* typically hit. Pick a more specific profile when you know the
|
|
32
|
+
* exact destination.
|
|
33
|
+
*/
|
|
34
|
+
readonly filenameProfile?: FilenameProfile;
|
|
35
|
+
/**
|
|
36
|
+
* How to handle two blobs whose sanitized filenames collide.
|
|
37
|
+
* Default: `'suffix'`.
|
|
38
|
+
*/
|
|
39
|
+
readonly onCollision?: CollisionStrategy;
|
|
40
|
+
/**
|
|
41
|
+
* Optional collection allowlist forwarded to `vault.exportBlobs`.
|
|
42
|
+
*/
|
|
43
|
+
readonly collections?: readonly string[];
|
|
44
|
+
/**
|
|
45
|
+
* Optional record predicate forwarded to `vault.exportBlobs`.
|
|
46
|
+
*/
|
|
47
|
+
readonly where?: (record: unknown, context: {
|
|
48
|
+
collection: string;
|
|
49
|
+
id: string;
|
|
50
|
+
}) => boolean;
|
|
51
|
+
/**
|
|
52
|
+
* Optional resume cursor forwarded to `vault.exportBlobs`.
|
|
53
|
+
*/
|
|
54
|
+
readonly afterBlobId?: string;
|
|
55
|
+
/**
|
|
56
|
+
* External abort signal forwarded to `vault.exportBlobs`.
|
|
57
|
+
*/
|
|
58
|
+
readonly signal?: AbortSignal;
|
|
59
|
+
}
|
|
60
|
+
interface ExportBlobsToDirectoryResult {
|
|
61
|
+
/** Total blobs written. */
|
|
62
|
+
readonly written: number;
|
|
63
|
+
/** Total bytes written across all blobs. */
|
|
64
|
+
readonly bytes: number;
|
|
65
|
+
/** Pairs of `{ blobId, path }` for every blob that landed on disk. */
|
|
66
|
+
readonly entries: ReadonlyArray<{
|
|
67
|
+
blobId: string;
|
|
68
|
+
path: string;
|
|
69
|
+
}>;
|
|
70
|
+
/**
|
|
71
|
+
* When `filenameProfile === 'opaque'`, the absolute path of the
|
|
72
|
+
* `manifest.json` sidecar. `null` for every other profile.
|
|
73
|
+
*/
|
|
74
|
+
readonly manifestPath: string | null;
|
|
75
|
+
}
|
|
76
|
+
/**
|
|
77
|
+
* Materialize every blob in the vault into `targetDir`. Returns a
|
|
78
|
+
* summary suitable for logging / audit.
|
|
79
|
+
*
|
|
80
|
+
* Caller MUST already hold whatever capability the vault demands
|
|
81
|
+
* (`canExportPlaintext['blob']`) — this function delegates to
|
|
82
|
+
* `vault.exportBlobs()`, which performs the capability check itself.
|
|
83
|
+
*/
|
|
84
|
+
declare function exportBlobsToDirectory(vault: Vault, targetDir: string, options?: ExportBlobsToDirectoryOptions): Promise<ExportBlobsToDirectoryResult>;
|
|
85
|
+
|
|
86
|
+
/**
|
|
87
|
+
* **@noy-db/to-file** — JSON file store for NOYDB (USB / local disk).
|
|
88
|
+
*
|
|
89
|
+
* Maps the NOYDB hierarchy directly to the filesystem:
|
|
90
|
+
*
|
|
91
|
+
* ```
|
|
92
|
+
* {dir}/
|
|
93
|
+
* {vault}/
|
|
94
|
+
* {collection}/
|
|
95
|
+
* {id}.json ← EncryptedEnvelope, optionally pretty-printed
|
|
96
|
+
* _keyring/
|
|
97
|
+
* {userId}.json ← wrapped DEKs for this user
|
|
98
|
+
* _sync/
|
|
99
|
+
* meta.json ← sync metadata
|
|
100
|
+
* ```
|
|
101
|
+
*
|
|
102
|
+
* ## When to use
|
|
103
|
+
*
|
|
104
|
+
* - **USB stick workflow** — the data directory lives on a removable drive.
|
|
105
|
+
* Plug in, unlock, work offline, eject. No cloud dependency.
|
|
106
|
+
* - **Local development** — simple, inspectable files; no Docker or cloud
|
|
107
|
+
* credentials required.
|
|
108
|
+
* - **Single-user desktop apps** — Electron, Tauri, or any Node.js app that
|
|
109
|
+
* writes to a local directory.
|
|
110
|
+
*
|
|
111
|
+
* ## Capabilities
|
|
112
|
+
*
|
|
113
|
+
* | Capability | Value |
|
|
114
|
+
* |---|---|
|
|
115
|
+
* | `casAtomic` | `false` — no atomic compare-and-swap at the FS layer |
|
|
116
|
+
* | `listVaults` | ✓ — enumerates subdirectories |
|
|
117
|
+
* | `listPage` | ✓ — cursor-based pagination over sorted filenames |
|
|
118
|
+
* | `ping` | ✓ — `stat(dir)` |
|
|
119
|
+
*
|
|
120
|
+
* ## Bundle helpers
|
|
121
|
+
*
|
|
122
|
+
* {@link saveBundle} and {@link loadBundle} are thin wrappers around the
|
|
123
|
+
* core `writeNoydbBundle` / `readNoydbBundle` primitives that pipe bytes
|
|
124
|
+
* to/from `node:fs`.
|
|
125
|
+
*
|
|
126
|
+
* @packageDocumentation
|
|
127
|
+
*/
|
|
128
|
+
|
|
129
|
+
/**
|
|
130
|
+
* Options for `jsonFile()`.
|
|
131
|
+
*
|
|
132
|
+
* Files are laid out as `{dir}/{vault}/{collection}/{id}.json`.
|
|
133
|
+
* Internal collections (`_keyring`, `_sync`) follow the same pattern
|
|
134
|
+
* under their vault directory.
|
|
135
|
+
*/
|
|
136
|
+
interface JsonFileOptions {
|
|
137
|
+
/** Base directory for NOYDB data. */
|
|
138
|
+
dir: string;
|
|
139
|
+
/** Pretty-print JSON files. Default: true. */
|
|
140
|
+
pretty?: boolean;
|
|
141
|
+
}
|
|
142
|
+
/**
|
|
143
|
+
* Create a JSON file adapter.
|
|
144
|
+
* Maps the NOYDB hierarchy to the filesystem:
|
|
145
|
+
*
|
|
146
|
+
* ```
|
|
147
|
+
* {dir}/{vault}/{collection}/{id}.json
|
|
148
|
+
* {dir}/{vault}/_keyring/{userId}.json
|
|
149
|
+
* ```
|
|
150
|
+
*/
|
|
151
|
+
declare function jsonFile(options: JsonFileOptions): NoydbStore;
|
|
152
|
+
/**
|
|
153
|
+
* Write a `.noydb` container for a vault to a local file.
|
|
154
|
+
*
|
|
155
|
+
* Thin wrapper around `writeNoydbBundle` from `@noy-db/core` —
|
|
156
|
+
* the core primitive returns a `Uint8Array`, this helper just
|
|
157
|
+
* pipes it to `node:fs.writeFile` after ensuring the parent
|
|
158
|
+
* directory exists. Use the same options as the core primitive.
|
|
159
|
+
*
|
|
160
|
+
* **Path convention** is up to the caller — `.noydb` is the
|
|
161
|
+
* recommended extension. Consumers using cloud-sync folders
|
|
162
|
+
* should name files by the bundle handle (available via
|
|
163
|
+
* `vault.getBundleHandle()`) rather than the vault
|
|
164
|
+
* name to avoid leaking metadata at the filesystem layer:
|
|
165
|
+
*
|
|
166
|
+
* ```ts
|
|
167
|
+
* const handle = await company.getBundleHandle()
|
|
168
|
+
* await saveBundle(`./bundles/${handle}.noydb`, company)
|
|
169
|
+
* ```
|
|
170
|
+
*
|
|
171
|
+
* The full container is written atomically by `node:fs.writeFile`
|
|
172
|
+
* (the platform's atomic-write semantics apply — POSIX `write()`
|
|
173
|
+
* is atomic up to PIPE_BUF, larger files race with concurrent
|
|
174
|
+
* readers; consumers writing into shared cloud folders should
|
|
175
|
+
* pair this with their cloud sync's conflict resolution).
|
|
176
|
+
*/
|
|
177
|
+
declare function saveBundle(path: string, vault: Vault, opts?: WriteNoydbBundleOptions): Promise<void>;
|
|
178
|
+
/**
|
|
179
|
+
* Read and verify a `.noydb` container from a local file.
|
|
180
|
+
*
|
|
181
|
+
* Returns the parsed header plus the unwrapped `dump()` JSON
|
|
182
|
+
* string ready to feed to `vault.load(json, passphrase)`.
|
|
183
|
+
* Throws `BundleIntegrityError` from `@noy-db/core` if the body
|
|
184
|
+
* bytes don't match the integrity hash declared in the header
|
|
185
|
+
* (the bundle was modified between write and read), or any
|
|
186
|
+
* format error from the core reader if the bytes aren't a valid
|
|
187
|
+
* bundle at all.
|
|
188
|
+
*
|
|
189
|
+
* Does NOT take a passphrase — the bundle reader is purely a
|
|
190
|
+
* format layer. Restoring a vault from the returned dump
|
|
191
|
+
* JSON requires a separate `vault.load()` call with the
|
|
192
|
+
* passphrase, mirroring the split between
|
|
193
|
+
* `readNoydbBundle()` and `vault.load()` in core.
|
|
194
|
+
*/
|
|
195
|
+
declare function loadBundle(path: string): Promise<NoydbBundleReadResult>;
|
|
196
|
+
|
|
197
|
+
export { type CollisionStrategy, type ExportBlobsToDirectoryOptions, type ExportBlobsToDirectoryResult, type JsonFileOptions, exportBlobsToDirectory, jsonFile, loadBundle, saveBundle };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
1
|
+
// src/index.ts
|
|
2
|
+
import { readFile, writeFile as writeFile2, mkdir as mkdir2, readdir, unlink, stat } from "fs/promises";
|
|
3
|
+
import { dirname as dirname2, join } from "path";
|
|
4
|
+
import {
|
|
5
|
+
ConflictError,
|
|
6
|
+
writeNoydbBundle,
|
|
7
|
+
readNoydbBundle
|
|
8
|
+
} from "@noy-db/hub";
|
|
9
|
+
|
|
10
|
+
// src/export-blobs-to-directory.ts
|
|
11
|
+
import { mkdir, writeFile } from "fs/promises";
|
|
12
|
+
import { resolve, sep, dirname, extname } from "path";
|
|
13
|
+
import { PathEscapeError } from "@noy-db/hub";
|
|
14
|
+
import { sanitizeFilename } from "@noy-db/hub/util";
|
|
15
|
+
async function exportBlobsToDirectory(vault, targetDir, options = {}) {
|
|
16
|
+
const profile = options.filenameProfile ?? "macos-smb";
|
|
17
|
+
const onCollision = options.onCollision ?? "suffix";
|
|
18
|
+
const absTargetDir = resolve(targetDir);
|
|
19
|
+
await mkdir(absTargetDir, { recursive: true });
|
|
20
|
+
const containmentPrefix = absTargetDir + sep;
|
|
21
|
+
const used = /* @__PURE__ */ new Set();
|
|
22
|
+
const entries = [];
|
|
23
|
+
const opaqueEntries = [];
|
|
24
|
+
let totalBytes = 0;
|
|
25
|
+
const handle = vault.exportBlobs({
|
|
26
|
+
...options.collections && { collections: options.collections },
|
|
27
|
+
...options.where && { where: options.where },
|
|
28
|
+
...options.afterBlobId && { afterBlobId: options.afterBlobId },
|
|
29
|
+
...options.signal && { signal: options.signal }
|
|
30
|
+
});
|
|
31
|
+
for await (const blob of handle) {
|
|
32
|
+
const original = blob.meta.filename;
|
|
33
|
+
const sanitizeOpts = profile === "opaque" ? { profile, opaqueId: blob.blobId } : { profile };
|
|
34
|
+
const candidate = sanitizeFilename(original, sanitizeOpts);
|
|
35
|
+
const finalName = resolveCollision(candidate, used, onCollision);
|
|
36
|
+
used.add(finalName);
|
|
37
|
+
const absPath = resolve(absTargetDir, finalName);
|
|
38
|
+
if (absPath !== absTargetDir && !absPath.startsWith(containmentPrefix)) {
|
|
39
|
+
throw new PathEscapeError({ attempted: finalName, targetDir: absTargetDir });
|
|
40
|
+
}
|
|
41
|
+
await mkdir(dirname(absPath), { recursive: true });
|
|
42
|
+
await writeFile(absPath, blob.bytes);
|
|
43
|
+
entries.push({ blobId: blob.blobId, path: absPath });
|
|
44
|
+
totalBytes += blob.bytes.byteLength;
|
|
45
|
+
if (profile === "opaque") {
|
|
46
|
+
const entry = {
|
|
47
|
+
opaqueName: finalName,
|
|
48
|
+
originalName: original,
|
|
49
|
+
collection: blob.recordRef.collection,
|
|
50
|
+
recordId: blob.recordRef.id,
|
|
51
|
+
slot: blob.recordRef.slot,
|
|
52
|
+
blobId: blob.blobId,
|
|
53
|
+
...blob.meta.mimeType !== void 0 && { mimeType: blob.meta.mimeType }
|
|
54
|
+
};
|
|
55
|
+
opaqueEntries.push(entry);
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
let manifestPath = null;
|
|
59
|
+
if (profile === "opaque") {
|
|
60
|
+
manifestPath = resolve(absTargetDir, "manifest.json");
|
|
61
|
+
const json = JSON.stringify(
|
|
62
|
+
{
|
|
63
|
+
format: "noydb-opaque-export",
|
|
64
|
+
version: 1,
|
|
65
|
+
entries: opaqueEntries
|
|
66
|
+
},
|
|
67
|
+
null,
|
|
68
|
+
2
|
|
69
|
+
);
|
|
70
|
+
await writeFile(manifestPath, json);
|
|
71
|
+
}
|
|
72
|
+
return {
|
|
73
|
+
written: entries.length,
|
|
74
|
+
bytes: totalBytes,
|
|
75
|
+
entries,
|
|
76
|
+
manifestPath
|
|
77
|
+
};
|
|
78
|
+
}
|
|
79
|
+
function resolveCollision(candidate, used, strategy) {
|
|
80
|
+
if (!used.has(candidate)) return candidate;
|
|
81
|
+
if (strategy === "overwrite") return candidate;
|
|
82
|
+
if (strategy === "fail") {
|
|
83
|
+
throw new Error(`exportBlobsToDirectory: filename collision on "${candidate}"`);
|
|
84
|
+
}
|
|
85
|
+
for (let attempt = 1; attempt < 1e4; attempt++) {
|
|
86
|
+
const next = typeof strategy === "function" ? strategy(candidate, attempt) : addSuffix(candidate, attempt);
|
|
87
|
+
if (!used.has(next)) return next;
|
|
88
|
+
}
|
|
89
|
+
throw new Error(`exportBlobsToDirectory: collision suffix exhausted for "${candidate}"`);
|
|
90
|
+
}
|
|
91
|
+
function addSuffix(name, attempt) {
|
|
92
|
+
const ext = extname(name);
|
|
93
|
+
if (ext.length > 0 && ext.length < name.length) {
|
|
94
|
+
const stem = name.slice(0, name.length - ext.length);
|
|
95
|
+
return `${stem}-${attempt}${ext}`;
|
|
96
|
+
}
|
|
97
|
+
return `${name}-${attempt}`;
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
// src/index.ts
|
|
101
|
+
/**
 * Create a JSON file store adapter.
 *
 * Layout on disk: `{dir}/{vault}/{collection}/{id}.json` — one JSON
 * envelope per record, pretty-printed unless `pretty: false`.
 *
 * Fix vs. previous build: `delete()` used to swallow EVERY unlink error;
 * it now tolerates only ENOENT (idempotent delete) and rethrows real
 * I/O failures such as EACCES or EISDIR.
 *
 * @param {{ dir: string, pretty?: boolean }} options - base directory and formatting flag.
 * @returns store object exposing get/put/delete/list/loadAll/saveAll/ping/listVaults/listPage.
 */
function jsonFile(options) {
  const { dir, pretty = true } = options;

  // {dir}/{vault}/{collection}
  function collectionDir(vault, collection) {
    return join(dir, vault, collection);
  }
  // {dir}/{vault}/{collection}/{id}.json
  function recordPath(vault, collection, id) {
    return join(dir, vault, collection, `${id}.json`);
  }
  async function ensureDir(path) {
    await mkdir2(path, { recursive: true });
  }
  async function fileExists(path) {
    try {
      await stat(path);
      return true;
    } catch {
      return false;
    }
  }
  function serialize(envelope) {
    return pretty ? JSON.stringify(envelope, null, 2) : JSON.stringify(envelope);
  }

  return {
    name: "file",

    /** Read one record; a missing or unreadable file reads as `null`. */
    async get(vault, collection, id) {
      const path = recordPath(vault, collection, id);
      try {
        const content = await readFile(path, "utf-8");
        return JSON.parse(content);
      } catch {
        return null;
      }
    },

    /**
     * Write one record. When `expectedVersion` is given and the file
     * already exists, its `_v` must match or a ConflictError is thrown.
     * NOTE: read-then-write is not atomic at the FS layer (this adapter
     * advertises `casAtomic: false`); concurrent writers can race.
     */
    async put(vault, collection, id, envelope, expectedVersion) {
      const path = recordPath(vault, collection, id);
      if (expectedVersion !== void 0 && await fileExists(path)) {
        const existing = JSON.parse(await readFile(path, "utf-8"));
        if (existing._v !== expectedVersion) {
          throw new ConflictError(existing._v, `Version conflict: expected ${expectedVersion}, found ${existing._v}`);
        }
      }
      await ensureDir(collectionDir(vault, collection));
      await writeFile2(path, serialize(envelope), "utf-8");
    },

    /** Remove one record. Deleting a record that never existed is a no-op. */
    async delete(vault, collection, id) {
      const path = recordPath(vault, collection, id);
      try {
        await unlink(path);
      } catch (err) {
        // A missing file is fine (idempotent delete); any other failure
        // (EACCES, EISDIR, ...) is a real error and must surface.
        if (err?.code !== "ENOENT") throw err;
      }
    },

    /** List record ids in a collection (filenames minus `.json`). */
    async list(vault, collection) {
      const dirPath = collectionDir(vault, collection);
      try {
        const entries = await readdir(dirPath);
        return entries.filter((f) => f.endsWith(".json")).map((f) => f.slice(0, -5));
      } catch {
        // Collection directory doesn't exist yet.
        return [];
      }
    },

    /**
     * Load every record of every collection of a vault into a snapshot
     * `{ [collection]: { [id]: envelope } }`. Directories whose name
     * starts with "_" (e.g. _keyring, _sync) are internal and skipped;
     * a missing vault directory yields an empty snapshot.
     */
    async loadAll(vault) {
      const compDir = join(dir, vault);
      const snapshot = {};
      try {
        const collections = await readdir(compDir);
        for (const collName of collections) {
          if (collName.startsWith("_")) continue;
          const collPath = join(compDir, collName);
          const collStat = await stat(collPath);
          if (!collStat.isDirectory()) continue;
          const records = {};
          const files = await readdir(collPath);
          for (const file of files) {
            if (!file.endsWith(".json")) continue;
            const id = file.slice(0, -5);
            const content = await readFile(join(collPath, file), "utf-8");
            records[id] = JSON.parse(content);
          }
          snapshot[collName] = records;
        }
      } catch {
        // Vault directory doesn't exist — return whatever was gathered.
      }
      return snapshot;
    },

    /** Persist a full snapshot, one file per record. */
    async saveAll(vault, data) {
      for (const [collName, records] of Object.entries(data)) {
        const collDir = collectionDir(vault, collName);
        await ensureDir(collDir);
        for (const [id, envelope] of Object.entries(records)) {
          await writeFile2(join(collDir, `${id}.json`), serialize(envelope), "utf-8");
        }
      }
    },

    /** Health check: true iff the base directory is stat-able. */
    async ping() {
      try {
        await stat(dir);
        return true;
      } catch {
        return false;
      }
    },

    /**
     * Enumerate every top-level vault subdirectory under the
     * configured base directory. Used by
     * `Noydb.listAccessibleVaults()`.
     *
     * The implementation is `readdir(dir)` filtered to entries that
     * are themselves directories — files at the top level (READMEs,
     * .DS_Store, etc.) are skipped, and missing base directory
     * returns an empty array rather than throwing. Result order is
     * filesystem-defined; consumers that want stable order should
     * sort themselves.
     */
    async listVaults() {
      let entries;
      try {
        entries = await readdir(dir);
      } catch {
        return [];
      }
      const compartments = [];
      for (const entry of entries) {
        try {
          const entryStat = await stat(join(dir, entry));
          if (entryStat.isDirectory()) compartments.push(entry);
        } catch {
          // Entry vanished between readdir and stat — skip silently.
        }
      }
      return compartments;
    },

    /**
     * Paginate over a collection. Cursor is a numeric offset (as a string)
     * into the sorted filename list. Files are sorted alphabetically so
     * pages are stable across runs and across processes that share the
     * same data directory.
     *
     * The default `limit` is 100. Each item carries its decoded envelope
     * so callers don't need an extra `get()` round-trip per id.
     */
    async listPage(vault, collection, cursor, limit = 100) {
      const dirPath = collectionDir(vault, collection);
      let files;
      try {
        files = await readdir(dirPath);
      } catch {
        return { items: [], nextCursor: null };
      }
      const ids = files.filter((f) => f.endsWith(".json")).map((f) => f.slice(0, -5)).sort();
      const start = cursor ? parseInt(cursor, 10) : 0;
      const end = Math.min(start + limit, ids.length);
      const items = [];
      for (let i = start; i < end; i++) {
        const id = ids[i];
        try {
          const content = await readFile(join(dirPath, `${id}.json`), "utf-8");
          items.push({ id, envelope: JSON.parse(content) });
        } catch {
          // File disappeared between readdir and readFile — skip silently.
        }
      }
      return {
        items,
        nextCursor: end < ids.length ? String(end) : null
      };
    }
  };
}
|
|
267
|
+
/**
 * Serialize `vault` into a `.noydb` container and write it to `path`,
 * creating the parent directory when it does not exist yet.
 */
async function saveBundle(path, vault, opts = {}) {
  // Build the container bytes before touching the filesystem, so a
  // failing export does not create the target directory.
  const payload = await writeNoydbBundle(vault, opts);
  await mkdir2(dirname2(path), { recursive: true });
  await writeFile2(path, payload);
}
|
|
272
|
+
/**
 * Read a `.noydb` container from disk and hand the raw bytes to the
 * core reader for parsing and verification.
 */
async function loadBundle(path) {
  // readFile returns a Buffer, which is a Uint8Array subclass — the
  // core reader accepts it directly, no copy needed.
  return readNoydbBundle(await readFile(path));
}
|
|
276
|
+
export {
|
|
277
|
+
exportBlobsToDirectory,
|
|
278
|
+
jsonFile,
|
|
279
|
+
loadBundle,
|
|
280
|
+
saveBundle
|
|
281
|
+
};
|
|
282
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/export-blobs-to-directory.ts"],"sourcesContent":["/**\n * **@noy-db/to-file** — JSON file store for NOYDB (USB / local disk).\n *\n * Maps the NOYDB hierarchy directly to the filesystem:\n *\n * ```\n * {dir}/\n * {vault}/\n * {collection}/\n * {id}.json ← EncryptedEnvelope, optionally pretty-printed\n * _keyring/\n * {userId}.json ← wrapped DEKs for this user\n * _sync/\n * meta.json ← sync metadata\n * ```\n *\n * ## When to use\n *\n * - **USB stick workflow** — the data directory lives on a removable drive.\n * Plug in, unlock, work offline, eject. No cloud dependency.\n * - **Local development** — simple, inspectable files; no Docker or cloud\n * credentials required.\n * - **Single-user desktop apps** — Electron, Tauri, or any Node.js app that\n * writes to a local directory.\n *\n * ## Capabilities\n *\n * | Capability | Value |\n * |---|---|\n * | `casAtomic` | `false` — no atomic compare-and-swap at the FS layer |\n * | `listVaults` | ✓ — enumerates subdirectories |\n * | `listPage` | ✓ — cursor-based pagination over sorted filenames |\n * | `ping` | ✓ — `stat(dir)` |\n *\n * ## Bundle helpers\n *\n * {@link saveBundle} and {@link loadBundle} are thin wrappers around the\n * core `writeNoydbBundle` / `readNoydbBundle` primitives that pipe bytes\n * to/from `node:fs`.\n *\n * @packageDocumentation\n */\n\nimport { readFile, writeFile, mkdir, readdir, unlink, stat } from 'node:fs/promises'\nimport { dirname, join } from 'node:path'\nimport type {\n NoydbStore,\n EncryptedEnvelope,\n VaultSnapshot,\n Vault,\n WriteNoydbBundleOptions,\n NoydbBundleReadResult,\n} from '@noy-db/hub'\nimport {\n ConflictError,\n writeNoydbBundle,\n readNoydbBundle,\n} from '@noy-db/hub'\n\n/**\n * Options for `jsonFile()`.\n *\n * Files are laid out as `{dir}/{vault}/{collection}/{id}.json`.\n * Internal collections (`_keyring`, `_sync`) follow the same pattern\n * under their vault directory.\n */\nexport interface 
JsonFileOptions {\n /** Base directory for NOYDB data. */\n dir: string\n /** Pretty-print JSON files. Default: true. */\n pretty?: boolean\n}\n\n/**\n * Create a JSON file adapter.\n * Maps the NOYDB hierarchy to the filesystem:\n *\n * ```\n * {dir}/{vault}/{collection}/{id}.json\n * {dir}/{vault}/_keyring/{userId}.json\n * ```\n */\nexport function jsonFile(options: JsonFileOptions): NoydbStore {\n const { dir, pretty = true } = options\n\n function recordPath(vault: string, collection: string, id: string): string {\n return join(dir, vault, collection, `${id}.json`)\n }\n\n function collectionDir(vault: string, collection: string): string {\n return join(dir, vault, collection)\n }\n\n async function ensureDir(path: string): Promise<void> {\n await mkdir(path, { recursive: true })\n }\n\n async function fileExists(path: string): Promise<boolean> {\n try {\n await stat(path)\n return true\n } catch {\n return false\n }\n }\n\n function serialize(envelope: EncryptedEnvelope): string {\n return pretty ? 
JSON.stringify(envelope, null, 2) : JSON.stringify(envelope)\n }\n\n return {\n name: 'file',\n\n async get(vault, collection, id) {\n const path = recordPath(vault, collection, id)\n try {\n const content = await readFile(path, 'utf-8')\n return JSON.parse(content) as EncryptedEnvelope\n } catch {\n return null\n }\n },\n\n async put(vault, collection, id, envelope, expectedVersion) {\n const path = recordPath(vault, collection, id)\n\n if (expectedVersion !== undefined && await fileExists(path)) {\n const existing = JSON.parse(await readFile(path, 'utf-8')) as EncryptedEnvelope\n if (existing._v !== expectedVersion) {\n throw new ConflictError(existing._v, `Version conflict: expected ${expectedVersion}, found ${existing._v}`)\n }\n }\n\n await ensureDir(collectionDir(vault, collection))\n await writeFile(path, serialize(envelope), 'utf-8')\n },\n\n async delete(vault, collection, id) {\n const path = recordPath(vault, collection, id)\n try {\n await unlink(path)\n } catch {\n // File doesn't exist — that's fine\n }\n },\n\n async list(vault, collection) {\n const dirPath = collectionDir(vault, collection)\n try {\n const entries = await readdir(dirPath)\n return entries\n .filter(f => f.endsWith('.json'))\n .map(f => f.slice(0, -5)) // remove .json extension\n } catch {\n return []\n }\n },\n\n async loadAll(vault) {\n const compDir = join(dir, vault)\n const snapshot: VaultSnapshot = {}\n\n try {\n const collections = await readdir(compDir)\n for (const collName of collections) {\n if (collName.startsWith('_')) continue // skip _keyring, _sync\n const collPath = join(compDir, collName)\n const collStat = await stat(collPath)\n if (!collStat.isDirectory()) continue\n\n const records: Record<string, EncryptedEnvelope> = {}\n const files = await readdir(collPath)\n for (const file of files) {\n if (!file.endsWith('.json')) continue\n const id = file.slice(0, -5)\n const content = await readFile(join(collPath, file), 'utf-8')\n records[id] = JSON.parse(content) as 
EncryptedEnvelope\n }\n snapshot[collName] = records\n }\n } catch {\n // Directory doesn't exist — return empty snapshot\n }\n\n return snapshot\n },\n\n async saveAll(vault, data) {\n for (const [collName, records] of Object.entries(data)) {\n const collDir = collectionDir(vault, collName)\n await ensureDir(collDir)\n for (const [id, envelope] of Object.entries(records)) {\n await writeFile(join(collDir, `${id}.json`), serialize(envelope), 'utf-8')\n }\n }\n },\n\n async ping() {\n try {\n await stat(dir)\n return true\n } catch {\n return false\n }\n },\n\n /**\n * Enumerate every top-level vault subdirectory under the\n * configured base directory. Used by\n * `Noydb.listAccessibleVaults()`.\n *\n * The implementation is `readdir(dir)` filtered to entries that\n * are themselves directories — files at the top level (READMEs,\n * .DS_Store, etc.) are skipped, and missing base directory\n * returns an empty array rather than throwing. Result order is\n * filesystem-defined; consumers that want stable order should\n * sort themselves.\n */\n async listVaults() {\n let entries: string[]\n try {\n entries = await readdir(dir)\n } catch {\n return []\n }\n const compartments: string[] = []\n for (const entry of entries) {\n try {\n const entryStat = await stat(join(dir, entry))\n if (entryStat.isDirectory()) compartments.push(entry)\n } catch {\n // Entry vanished between readdir and stat — skip silently.\n }\n }\n return compartments\n },\n\n /**\n * Paginate over a collection. Cursor is a numeric offset (as a string)\n * into the sorted filename list. Files are sorted alphabetically so\n * pages are stable across runs and across processes that share the\n * same data directory.\n *\n * The default `limit` is 100. 
Each item carries its decoded envelope\n * so callers don't need an extra `get()` round-trip per id.\n */\n async listPage(vault, collection, cursor, limit = 100) {\n const dirPath = collectionDir(vault, collection)\n let files: string[]\n try {\n files = await readdir(dirPath)\n } catch {\n return { items: [], nextCursor: null }\n }\n\n const ids = files\n .filter(f => f.endsWith('.json'))\n .map(f => f.slice(0, -5))\n .sort()\n\n const start = cursor ? parseInt(cursor, 10) : 0\n const end = Math.min(start + limit, ids.length)\n\n const items: Array<{ id: string; envelope: EncryptedEnvelope }> = []\n for (let i = start; i < end; i++) {\n const id = ids[i]!\n try {\n const content = await readFile(join(dirPath, `${id}.json`), 'utf-8')\n items.push({ id, envelope: JSON.parse(content) as EncryptedEnvelope })\n } catch {\n // File disappeared between readdir and readFile — skip silently.\n }\n }\n\n return {\n items,\n nextCursor: end < ids.length ? String(end) : null,\n }\n },\n }\n}\n\n// ─── .noydb bundle helpers ─────────────────────────────────\n\n/**\n * Write a `.noydb` container for a vault to a local file.\n *\n * Thin wrapper around `writeNoydbBundle` from `@noy-db/core` —\n * the core primitive returns a `Uint8Array`, this helper just\n * pipes it to `node:fs.writeFile` after ensuring the parent\n * directory exists. Use the same options as the core primitive.\n *\n * **Path convention** is up to the caller — `.noydb` is the\n * recommended extension. 
Consumers using cloud-sync folders\n * should name files by the bundle handle (available via\n * `vault.getBundleHandle()`) rather than the vault\n * name to avoid leaking metadata at the filesystem layer:\n *\n * ```ts\n * const handle = await company.getBundleHandle()\n * await saveBundle(`./bundles/${handle}.noydb`, company)\n * ```\n *\n * The full container is written atomically by `node:fs.writeFile`\n * (the platform's atomic-write semantics apply — POSIX `write()`\n * is atomic up to PIPE_BUF, larger files race with concurrent\n * readers; consumers writing into shared cloud folders should\n * pair this with their cloud sync's conflict resolution).\n */\nexport async function saveBundle(\n path: string,\n vault: Vault,\n opts: WriteNoydbBundleOptions = {},\n): Promise<void> {\n const bytes = await writeNoydbBundle(vault, opts)\n // Ensure the parent directory exists — `writeFile` does NOT\n // create intermediate directories on its own. Recursive mkdir\n // is a no-op when the directory already exists.\n await mkdir(dirname(path), { recursive: true })\n await writeFile(path, bytes)\n}\n\n/**\n * Read and verify a `.noydb` container from a local file.\n *\n * Returns the parsed header plus the unwrapped `dump()` JSON\n * string ready to feed to `vault.load(json, passphrase)`.\n * Throws `BundleIntegrityError` from `@noy-db/core` if the body\n * bytes don't match the integrity hash declared in the header\n * (the bundle was modified between write and read), or any\n * format error from the core reader if the bytes aren't a valid\n * bundle at all.\n *\n * Does NOT take a passphrase — the bundle reader is purely a\n * format layer. 
Restoring a vault from the returned dump\n * JSON requires a separate `vault.load()` call with the\n * passphrase, mirroring the split between\n * `readNoydbBundle()` and `vault.load()` in core.\n */\nexport async function loadBundle(path: string): Promise<NoydbBundleReadResult> {\n const bytes = await readFile(path)\n // node:fs.readFile returns a Buffer, which is a Uint8Array\n // subclass — `readNoydbBundle` accepts Uint8Array directly,\n // no copy needed.\n return readNoydbBundle(bytes)\n}\n\n// Export-blobs FS materializer — wraps `vault.exportBlobs()` with\n// target-profile filename sanitization, Zip-Slip path containment, and\n// collision policy. Lives in `to-file` (not core) because hub stays\n// portable across browser/Node and shouldn't import `node:fs`.\nexport {\n exportBlobsToDirectory,\n} from './export-blobs-to-directory.js'\nexport type {\n ExportBlobsToDirectoryOptions,\n ExportBlobsToDirectoryResult,\n CollisionStrategy,\n} from './export-blobs-to-directory.js'\n","/**\n * `exportBlobsToDirectory(vault, targetDir, opts)` — bulk blob\n * extraction into a real filesystem directory, with target-profile\n * filename sanitization and Zip-Slip path containment built in\n *.\n *\n * Wraps `vault.exportBlobs()` (the framework-agnostic async iterable\n * in core) with the FS-write concerns that don't belong in core:\n *\n * - sanitize filenames per a target profile (`posix`, `windows`,\n * `macos-smb`, `zip`, `url-path`, `s3-key`, `opaque`),\n * - guard against path-escape after sanitization (`PathEscapeError`),\n * - resolve filename collisions (`suffix` / `overwrite` / `fail` /\n * custom callback),\n * - emit a sidecar `manifest.json` when the profile is `'opaque'`,\n * mapping opaque ids back to the original record-supplied\n * filenames.\n *\n * @module\n */\n\nimport { mkdir, writeFile } from 'node:fs/promises'\nimport { resolve, sep, dirname, extname } from 'node:path'\nimport type { Vault } from '@noy-db/hub'\nimport { PathEscapeError } from 
'@noy-db/hub'\nimport { sanitizeFilename, type FilenameProfile } from '@noy-db/hub/util'\n\n/** Strategy for resolving two records that sanitize to the same name. */\nexport type CollisionStrategy =\n | 'suffix'\n | 'overwrite'\n | 'fail'\n | ((existing: string, attempt: number) => string)\n\nexport interface ExportBlobsToDirectoryOptions {\n /**\n * Filename profile to sanitize against. Default: `'macos-smb'` —\n * the most restrictive intersection of the rules adopters\n * typically hit. Pick a more specific profile when you know the\n * exact destination.\n */\n readonly filenameProfile?: FilenameProfile\n /**\n * How to handle two blobs whose sanitized filenames collide.\n * Default: `'suffix'`.\n */\n readonly onCollision?: CollisionStrategy\n /**\n * Optional collection allowlist forwarded to `vault.exportBlobs`.\n */\n readonly collections?: readonly string[]\n /**\n * Optional record predicate forwarded to `vault.exportBlobs`.\n */\n readonly where?: (\n record: unknown,\n context: { collection: string; id: string },\n ) => boolean\n /**\n * Optional resume cursor forwarded to `vault.exportBlobs`.\n */\n readonly afterBlobId?: string\n /**\n * External abort signal forwarded to `vault.exportBlobs`.\n */\n readonly signal?: AbortSignal\n}\n\nexport interface ExportBlobsToDirectoryResult {\n /** Total blobs written. */\n readonly written: number\n /** Total bytes written across all blobs. */\n readonly bytes: number\n /** Pairs of `{ blobId, path }` for every blob that landed on disk. */\n readonly entries: ReadonlyArray<{ blobId: string; path: string }>\n /**\n * When `filenameProfile === 'opaque'`, the absolute path of the\n * `manifest.json` sidecar. 
`null` for every other profile.\n */\n readonly manifestPath: string | null\n}\n\ninterface OpaqueManifestEntry {\n readonly opaqueName: string\n readonly originalName: string\n readonly collection: string\n readonly recordId: string\n readonly slot: string\n readonly blobId: string\n readonly mimeType?: string\n}\n\n/**\n * Materialize every blob in the vault into `targetDir`. Returns a\n * summary suitable for logging / audit.\n *\n * Caller MUST already hold whatever capability the vault demands\n * (`canExportPlaintext['blob']`) — this function delegates to\n * `vault.exportBlobs()`, which performs the capability check itself.\n */\nexport async function exportBlobsToDirectory(\n vault: Vault,\n targetDir: string,\n options: ExportBlobsToDirectoryOptions = {},\n): Promise<ExportBlobsToDirectoryResult> {\n const profile: FilenameProfile = options.filenameProfile ?? 'macos-smb'\n const onCollision: CollisionStrategy = options.onCollision ?? 'suffix'\n\n const absTargetDir = resolve(targetDir)\n await mkdir(absTargetDir, { recursive: true })\n const containmentPrefix = absTargetDir + sep\n\n // Track filenames already used in this run so collision resolution\n // is deterministic and cheap (no extra stat() per attempt).\n const used = new Set<string>()\n const entries: { blobId: string; path: string }[] = []\n const opaqueEntries: OpaqueManifestEntry[] = []\n let totalBytes = 0\n\n const handle = vault.exportBlobs({\n ...(options.collections && { collections: options.collections }),\n ...(options.where && { where: options.where }),\n ...(options.afterBlobId && { afterBlobId: options.afterBlobId }),\n ...(options.signal && { signal: options.signal }),\n })\n\n for await (const blob of handle) {\n const original = blob.meta.filename\n const sanitizeOpts =\n profile === 'opaque'\n ? 
{ profile, opaqueId: blob.blobId } as const\n : { profile } as const\n const candidate = sanitizeFilename(original, sanitizeOpts)\n const finalName = resolveCollision(candidate, used, onCollision)\n used.add(finalName)\n\n const absPath = resolve(absTargetDir, finalName)\n if (absPath !== absTargetDir && !absPath.startsWith(containmentPrefix)) {\n throw new PathEscapeError({ attempted: finalName, targetDir: absTargetDir })\n }\n\n await mkdir(dirname(absPath), { recursive: true })\n await writeFile(absPath, blob.bytes)\n entries.push({ blobId: blob.blobId, path: absPath })\n totalBytes += blob.bytes.byteLength\n\n if (profile === 'opaque') {\n const entry: OpaqueManifestEntry = {\n opaqueName: finalName,\n originalName: original,\n collection: blob.recordRef.collection,\n recordId: blob.recordRef.id,\n slot: blob.recordRef.slot,\n blobId: blob.blobId,\n ...(blob.meta.mimeType !== undefined && { mimeType: blob.meta.mimeType }),\n }\n opaqueEntries.push(entry)\n }\n }\n\n let manifestPath: string | null = null\n if (profile === 'opaque') {\n manifestPath = resolve(absTargetDir, 'manifest.json')\n const json = JSON.stringify(\n {\n format: 'noydb-opaque-export',\n version: 1,\n entries: opaqueEntries,\n },\n null,\n 2,\n )\n await writeFile(manifestPath, json)\n }\n\n return {\n written: entries.length,\n bytes: totalBytes,\n entries,\n manifestPath,\n }\n}\n\nfunction resolveCollision(\n candidate: string,\n used: Set<string>,\n strategy: CollisionStrategy,\n): string {\n if (!used.has(candidate)) return candidate\n if (strategy === 'overwrite') return candidate\n if (strategy === 'fail') {\n throw new Error(`exportBlobsToDirectory: filename collision on \"${candidate}\"`)\n }\n // `'suffix'` and the function-callback path both build a sequence\n // of attempts and pick the first non-colliding one.\n for (let attempt = 1; attempt < 10_000; attempt++) {\n const next =\n typeof strategy === 'function'\n ? 
strategy(candidate, attempt)\n : addSuffix(candidate, attempt)\n if (!used.has(next)) return next\n }\n throw new Error(`exportBlobsToDirectory: collision suffix exhausted for \"${candidate}\"`)\n}\n\nfunction addSuffix(name: string, attempt: number): string {\n const ext = extname(name)\n if (ext.length > 0 && ext.length < name.length) {\n const stem = name.slice(0, name.length - ext.length)\n return `${stem}-${attempt}${ext}`\n }\n return `${name}-${attempt}`\n}\n"],"mappings":";AA2CA,SAAS,UAAU,aAAAA,YAAW,SAAAC,QAAO,SAAS,QAAQ,YAAY;AAClE,SAAS,WAAAC,UAAS,YAAY;AAS9B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;;;ACpCP,SAAS,OAAO,iBAAiB;AACjC,SAAS,SAAS,KAAK,SAAS,eAAe;AAE/C,SAAS,uBAAuB;AAChC,SAAS,wBAA8C;AA2EvD,eAAsB,uBACpB,OACA,WACA,UAAyC,CAAC,GACH;AACvC,QAAM,UAA2B,QAAQ,mBAAmB;AAC5D,QAAM,cAAiC,QAAQ,eAAe;AAE9D,QAAM,eAAe,QAAQ,SAAS;AACtC,QAAM,MAAM,cAAc,EAAE,WAAW,KAAK,CAAC;AAC7C,QAAM,oBAAoB,eAAe;AAIzC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,UAA8C,CAAC;AACrD,QAAM,gBAAuC,CAAC;AAC9C,MAAI,aAAa;AAEjB,QAAM,SAAS,MAAM,YAAY;AAAA,IAC/B,GAAI,QAAQ,eAAe,EAAE,aAAa,QAAQ,YAAY;AAAA,IAC9D,GAAI,QAAQ,SAAS,EAAE,OAAO,QAAQ,MAAM;AAAA,IAC5C,GAAI,QAAQ,eAAe,EAAE,aAAa,QAAQ,YAAY;AAAA,IAC9D,GAAI,QAAQ,UAAU,EAAE,QAAQ,QAAQ,OAAO;AAAA,EACjD,CAAC;AAED,mBAAiB,QAAQ,QAAQ;AAC/B,UAAM,WAAW,KAAK,KAAK;AAC3B,UAAM,eACJ,YAAY,WACR,EAAE,SAAS,UAAU,KAAK,OAAO,IACjC,EAAE,QAAQ;AAChB,UAAM,YAAY,iBAAiB,UAAU,YAAY;AACzD,UAAM,YAAY,iBAAiB,WAAW,MAAM,WAAW;AAC/D,SAAK,IAAI,SAAS;AAElB,UAAM,UAAU,QAAQ,cAAc,SAAS;AAC/C,QAAI,YAAY,gBAAgB,CAAC,QAAQ,WAAW,iBAAiB,GAAG;AACtE,YAAM,IAAI,gBAAgB,EAAE,WAAW,WAAW,WAAW,aAAa,CAAC;AAAA,IAC7E;AAEA,UAAM,MAAM,QAAQ,OAAO,GAAG,EAAE,WAAW,KAAK,CAAC;AACjD,UAAM,UAAU,SAAS,KAAK,KAAK;AACnC,YAAQ,KAAK,EAAE,QAAQ,KAAK,QAAQ,MAAM,QAAQ,CAAC;AACnD,kBAAc,KAAK,MAAM;AAEzB,QAAI,YAAY,UAAU;AACxB,YAAM,QAA6B;AAAA,QACjC,YAAY;AAAA,QACZ,cAAc;AAAA,QACd,YAAY,KAAK,UAAU;AAAA,QAC3B,UAAU,KAAK,UAAU;AAAA,QACzB,MAAM,KAAK,UAAU;AAAA,QACrB,QAAQ,KAAK;AAAA,QACb,GAAI,KAAK,KAAK,aAAa,UAAa,EAAE,UAAU,KAAK,KAAK,SAAS;AAAA,MACzE;AACA,oBAAc,KAAK,KAAK;AAAA,IAC1B;AAAA,EACF;AAEA
,MAAI,eAA8B;AAClC,MAAI,YAAY,UAAU;AACxB,mBAAe,QAAQ,cAAc,eAAe;AACpD,UAAM,OAAO,KAAK;AAAA,MAChB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,SAAS;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,UAAM,UAAU,cAAc,IAAI;AAAA,EACpC;AAEA,SAAO;AAAA,IACL,SAAS,QAAQ;AAAA,IACjB,OAAO;AAAA,IACP;AAAA,IACA;AAAA,EACF;AACF;AAEA,SAAS,iBACP,WACA,MACA,UACQ;AACR,MAAI,CAAC,KAAK,IAAI,SAAS,EAAG,QAAO;AACjC,MAAI,aAAa,YAAa,QAAO;AACrC,MAAI,aAAa,QAAQ;AACvB,UAAM,IAAI,MAAM,kDAAkD,SAAS,GAAG;AAAA,EAChF;AAGA,WAAS,UAAU,GAAG,UAAU,KAAQ,WAAW;AACjD,UAAM,OACJ,OAAO,aAAa,aAChB,SAAS,WAAW,OAAO,IAC3B,UAAU,WAAW,OAAO;AAClC,QAAI,CAAC,KAAK,IAAI,IAAI,EAAG,QAAO;AAAA,EAC9B;AACA,QAAM,IAAI,MAAM,2DAA2D,SAAS,GAAG;AACzF;AAEA,SAAS,UAAU,MAAc,SAAyB;AACxD,QAAM,MAAM,QAAQ,IAAI;AACxB,MAAI,IAAI,SAAS,KAAK,IAAI,SAAS,KAAK,QAAQ;AAC9C,UAAM,OAAO,KAAK,MAAM,GAAG,KAAK,SAAS,IAAI,MAAM;AACnD,WAAO,GAAG,IAAI,IAAI,OAAO,GAAG,GAAG;AAAA,EACjC;AACA,SAAO,GAAG,IAAI,IAAI,OAAO;AAC3B;;;ADlIO,SAAS,SAAS,SAAsC;AAC7D,QAAM,EAAE,KAAK,SAAS,KAAK,IAAI;AAE/B,WAAS,WAAW,OAAe,YAAoB,IAAoB;AACzE,WAAO,KAAK,KAAK,OAAO,YAAY,GAAG,EAAE,OAAO;AAAA,EAClD;AAEA,WAAS,cAAc,OAAe,YAA4B;AAChE,WAAO,KAAK,KAAK,OAAO,UAAU;AAAA,EACpC;AAEA,iBAAe,UAAU,MAA6B;AACpD,UAAMC,OAAM,MAAM,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AAEA,iBAAe,WAAW,MAAgC;AACxD,QAAI;AACF,YAAM,KAAK,IAAI;AACf,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,WAAS,UAAU,UAAqC;AACtD,WAAO,SAAS,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI,KAAK,UAAU,QAAQ;AAAA,EAC7E;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IAEN,MAAM,IAAI,OAAO,YAAY,IAAI;AAC/B,YAAM,OAAO,WAAW,OAAO,YAAY,EAAE;AAC7C,UAAI;AACF,cAAM,UAAU,MAAM,SAAS,MAAM,OAAO;AAC5C,eAAO,KAAK,MAAM,OAAO;AAAA,MAC3B,QAAQ;AACN,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IAEA,MAAM,IAAI,OAAO,YAAY,IAAI,UAAU,iBAAiB;AAC1D,YAAM,OAAO,WAAW,OAAO,YAAY,EAAE;AAE7C,UAAI,oBAAoB,UAAa,MAAM,WAAW,IAAI,GAAG;AAC3D,cAAM,WAAW,KAAK,MAAM,MAAM,SAAS,MAAM,OAAO,CAAC;AACzD,YAAI,SAAS,OAAO,iBAAiB;AACnC,gBAAM,IAAI,cAAc,SAAS,IAAI,8BAA8B,eAAe,WAAW,SAAS,EAAE,EAAE;AAAA,QAC5G;AAAA,MACF;AAEA,YAAM,UAAU,cAAc,OAAO,UAAU,CAAC;AAChD,YAAMC,WAAU,MAAM,UAAU,QAAQ,GAAG,OAAO;AAAA,IACpD;AAAA,IAEA,MAAM,OAA
O,OAAO,YAAY,IAAI;AAClC,YAAM,OAAO,WAAW,OAAO,YAAY,EAAE;AAC7C,UAAI;AACF,cAAM,OAAO,IAAI;AAAA,MACnB,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,IAEA,MAAM,KAAK,OAAO,YAAY;AAC5B,YAAM,UAAU,cAAc,OAAO,UAAU;AAC/C,UAAI;AACF,cAAM,UAAU,MAAM,QAAQ,OAAO;AACrC,eAAO,QACJ,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,EAC/B,IAAI,OAAK,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,MAC5B,QAAQ;AACN,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,QAAQ,OAAO;AACnB,YAAM,UAAU,KAAK,KAAK,KAAK;AAC/B,YAAM,WAA0B,CAAC;AAEjC,UAAI;AACF,cAAM,cAAc,MAAM,QAAQ,OAAO;AACzC,mBAAW,YAAY,aAAa;AAClC,cAAI,SAAS,WAAW,GAAG,EAAG;AAC9B,gBAAM,WAAW,KAAK,SAAS,QAAQ;AACvC,gBAAM,WAAW,MAAM,KAAK,QAAQ;AACpC,cAAI,CAAC,SAAS,YAAY,EAAG;AAE7B,gBAAM,UAA6C,CAAC;AACpD,gBAAM,QAAQ,MAAM,QAAQ,QAAQ;AACpC,qBAAW,QAAQ,OAAO;AACxB,gBAAI,CAAC,KAAK,SAAS,OAAO,EAAG;AAC7B,kBAAM,KAAK,KAAK,MAAM,GAAG,EAAE;AAC3B,kBAAM,UAAU,MAAM,SAAS,KAAK,UAAU,IAAI,GAAG,OAAO;AAC5D,oBAAQ,EAAE,IAAI,KAAK,MAAM,OAAO;AAAA,UAClC;AACA,mBAAS,QAAQ,IAAI;AAAA,QACvB;AAAA,MACF,QAAQ;AAAA,MAER;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,QAAQ,OAAO,MAAM;AACzB,iBAAW,CAAC,UAAU,OAAO,KAAK,OAAO,QAAQ,IAAI,GAAG;AACtD,cAAM,UAAU,cAAc,OAAO,QAAQ;AAC7C,cAAM,UAAU,OAAO;AACvB,mBAAW,CAAC,IAAI,QAAQ,KAAK,OAAO,QAAQ,OAAO,GAAG;AACpD,gBAAMA,WAAU,KAAK,SAAS,GAAG,EAAE,OAAO,GAAG,UAAU,QAAQ,GAAG,OAAO;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AAAA,IAEA,MAAM,OAAO;AACX,UAAI;AACF,cAAM,KAAK,GAAG;AACd,eAAO;AAAA,MACT,QAAQ;AACN,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAcA,MAAM,aAAa;AACjB,UAAI;AACJ,UAAI;AACF,kBAAU,MAAM,QAAQ,GAAG;AAAA,MAC7B,QAAQ;AACN,eAAO,CAAC;AAAA,MACV;AACA,YAAM,eAAyB,CAAC;AAChC,iBAAW,SAAS,SAAS;AAC3B,YAAI;AACF,gBAAM,YAAY,MAAM,KAAK,KAAK,KAAK,KAAK,CAAC;AAC7C,cAAI,UAAU,YAAY,EAAG,cAAa,KAAK,KAAK;AAAA,QACtD,QAAQ;AAAA,QAER;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAWA,MAAM,SAAS,OAAO,YAAY,QAAQ,QAAQ,KAAK;AACrD,YAAM,UAAU,cAAc,OAAO,UAAU;AAC/C,UAAI;AACJ,UAAI;AACF,gBAAQ,MAAM,QAAQ,OAAO;AAAA,MAC/B,QAAQ;AACN,eAAO,EAAE,OAAO,CAAC,GAAG,YAAY,KAAK;AAAA,MACvC;AAEA,YAAM,MAAM,MACT,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,EAC/B
,IAAI,OAAK,EAAE,MAAM,GAAG,EAAE,CAAC,EACvB,KAAK;AAER,YAAM,QAAQ,SAAS,SAAS,QAAQ,EAAE,IAAI;AAC9C,YAAM,MAAM,KAAK,IAAI,QAAQ,OAAO,IAAI,MAAM;AAE9C,YAAM,QAA4D,CAAC;AACnE,eAAS,IAAI,OAAO,IAAI,KAAK,KAAK;AAChC,cAAM,KAAK,IAAI,CAAC;AAChB,YAAI;AACF,gBAAM,UAAU,MAAM,SAAS,KAAK,SAAS,GAAG,EAAE,OAAO,GAAG,OAAO;AACnE,gBAAM,KAAK,EAAE,IAAI,UAAU,KAAK,MAAM,OAAO,EAAuB,CAAC;AAAA,QACvE,QAAQ;AAAA,QAER;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA,YAAY,MAAM,IAAI,SAAS,OAAO,GAAG,IAAI;AAAA,MAC/C;AAAA,IACF;AAAA,EACF;AACF;AA6BA,eAAsB,WACpB,MACA,OACA,OAAgC,CAAC,GAClB;AACf,QAAM,QAAQ,MAAM,iBAAiB,OAAO,IAAI;AAIhD,QAAMD,OAAME,SAAQ,IAAI,GAAG,EAAE,WAAW,KAAK,CAAC;AAC9C,QAAMD,WAAU,MAAM,KAAK;AAC7B;AAmBA,eAAsB,WAAW,MAA8C;AAC7E,QAAM,QAAQ,MAAM,SAAS,IAAI;AAIjC,SAAO,gBAAgB,KAAK;AAC9B;","names":["writeFile","mkdir","dirname","mkdir","writeFile","dirname"]}
|
package/package.json
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@noy-db/to-file",
|
|
3
|
+
"version": "0.1.0-pre.3",
|
|
4
|
+
"description": "JSON file adapter for noy-db — encrypted document store on local disk, USB sticks, or network drives",
|
|
5
|
+
"license": "MIT",
|
|
6
|
+
"author": "vLannaAi <vicio@lanna.ai>",
|
|
7
|
+
"homepage": "https://github.com/vLannaAi/noy-db/tree/main/packages/to-file#readme",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "git+https://github.com/vLannaAi/noy-db.git",
|
|
11
|
+
"directory": "packages/to-file"
|
|
12
|
+
},
|
|
13
|
+
"bugs": {
|
|
14
|
+
"url": "https://github.com/vLannaAi/noy-db/issues"
|
|
15
|
+
},
|
|
16
|
+
"type": "module",
|
|
17
|
+
"sideEffects": false,
|
|
18
|
+
"exports": {
|
|
19
|
+
".": {
|
|
20
|
+
"import": {
|
|
21
|
+
"types": "./dist/index.d.ts",
|
|
22
|
+
"default": "./dist/index.js"
|
|
23
|
+
},
|
|
24
|
+
"require": {
|
|
25
|
+
"types": "./dist/index.d.cts",
|
|
26
|
+
"default": "./dist/index.cjs"
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
},
|
|
30
|
+
"main": "./dist/index.cjs",
|
|
31
|
+
"module": "./dist/index.js",
|
|
32
|
+
"types": "./dist/index.d.ts",
|
|
33
|
+
"files": [
|
|
34
|
+
"dist",
|
|
35
|
+
"README.md",
|
|
36
|
+
"LICENSE"
|
|
37
|
+
],
|
|
38
|
+
"engines": {
|
|
39
|
+
"node": ">=18.0.0"
|
|
40
|
+
},
|
|
41
|
+
"peerDependencies": {
|
|
42
|
+
"@noy-db/hub": "0.1.0-pre.3"
|
|
43
|
+
},
|
|
44
|
+
"devDependencies": {
|
|
45
|
+
"@types/node": "^22.0.0",
|
|
46
|
+
"@noy-db/hub": "0.1.0-pre.3",
|
|
47
|
+
"@noy-db/test-adapter-conformance": "0.0.0"
|
|
48
|
+
},
|
|
49
|
+
"keywords": [
|
|
50
|
+
"noy-db",
|
|
51
|
+
"adapter",
|
|
52
|
+
"file",
|
|
53
|
+
"json",
|
|
54
|
+
"filesystem",
|
|
55
|
+
"local-disk",
|
|
56
|
+
"usb",
|
|
57
|
+
"storage",
|
|
58
|
+
"encryption",
|
|
59
|
+
"offline-first"
|
|
60
|
+
],
|
|
61
|
+
"publishConfig": {
|
|
62
|
+
"access": "public",
|
|
63
|
+
"tag": "latest"
|
|
64
|
+
},
|
|
65
|
+
"scripts": {
|
|
66
|
+
"build": "tsup",
|
|
67
|
+
"test": "vitest run",
|
|
68
|
+
"lint": "eslint src/",
|
|
69
|
+
"typecheck": "tsc --noEmit"
|
|
70
|
+
}
|
|
71
|
+
}
|