@noy-db/file 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 vLannaAi
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,35 @@
1
+ # @noy-db/file
2
+
3
+ > JSON file adapter for [noy-db](https://github.com/vLannaAi/noy-db) — encrypted document store on local disk, USB sticks, or network drives.
4
+
5
+ [![npm](https://img.shields.io/npm/v/@noy-db/file.svg)](https://www.npmjs.com/package/@noy-db/file)
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ pnpm add @noy-db/core @noy-db/file
11
+ ```
12
+
13
+ ## Usage
14
+
15
+ ```ts
16
+ import { createNoydb } from '@noy-db/core'
17
+ import { jsonFile } from '@noy-db/file'
18
+
19
+ const db = await createNoydb({
20
+ adapter: jsonFile({ dir: '/Volumes/USB/firm-data' }),
21
+ userId: 'alice',
22
+ passphrase: process.env.NOYDB_PASSPHRASE!,
23
+ })
24
+ ```
25
+
26
+ Each compartment is written as a set of JSON files containing only ciphertext envelopes — the adapter never sees plaintext. Perfect for:
27
+
28
+ - USB-stick workflows (air-gapped data portability)
29
+ - Local-first desktop apps
30
+ - Network drive sharing with per-user passphrases
31
+ - Backup-friendly storage
32
+
33
+ ## License
34
+
35
+ MIT © vLannaAi — see the [noy-db repo](https://github.com/vLannaAi/noy-db) for full documentation.
package/dist/index.cjs ADDED
@@ -0,0 +1,199 @@
1
"use strict";
// esbuild CommonJS interop helpers (generated bundle preamble).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Expose every entry of `all` on `target` as an enumerable lazy getter.
var __export = (target, all) => {
  for (var key in all) {
    __defProp(target, key, { get: all[key], enumerable: true });
  }
};
// Copy own properties of `from` onto `to` as getters, skipping `except`
// and keys already present on `to`, preserving enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (let key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) continue;
      desc = __getOwnPropDesc(from, key);
      __defProp(to, key, { get: () => from[key], enumerable: !desc || desc.enumerable });
    }
  }
  return to;
};
// Wrap a module namespace in a `{ __esModule: true }` carrier for CJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // src/index.ts
21
+ var index_exports = {};
22
+ __export(index_exports, {
23
+ jsonFile: () => jsonFile
24
+ });
25
+ module.exports = __toCommonJS(index_exports);
26
+ var import_promises = require("fs/promises");
27
+ var import_node_path = require("path");
28
+ var import_core = require("@noy-db/core");
29
/**
 * Create a JSON file adapter for noy-db (CommonJS bundle).
 *
 * Maps the noy-db hierarchy to the filesystem as
 * `{dir}/{compartment}/{collection}/{id}.json`; files hold only the
 * serialized envelope objects handed to the adapter.
 *
 * @param {{ dir: string, pretty?: boolean }} options - `dir`: base data
 *   directory; `pretty` (default true): pretty-print the JSON files.
 * @returns {object} NoydbAdapter implementation named "file".
 */
function jsonFile(options) {
  const { dir, pretty = true } = options;
  // {dir}/{compartment}/{collection}/{id}.json
  function recordPath(compartment, collection, id) {
    return (0, import_node_path.join)(dir, compartment, collection, `${id}.json`);
  }
  function collectionDir(compartment, collection) {
    return (0, import_node_path.join)(dir, compartment, collection);
  }
  async function ensureDir(path) {
    await (0, import_promises.mkdir)(path, { recursive: true });
  }
  function serialize(envelope) {
    return pretty ? JSON.stringify(envelope, null, 2) : JSON.stringify(envelope);
  }
  // Read + parse one record file; null when missing or not valid JSON
  // (e.g. a torn concurrent write).
  async function readEnvelope(path) {
    try {
      return JSON.parse(await (0, import_promises.readFile)(path, "utf-8"));
    } catch {
      return null;
    }
  }
  return {
    name: "file",
    // Fetch one envelope, or null when absent/corrupt.
    async get(compartment, collection, id) {
      return readEnvelope(recordPath(compartment, collection, id));
    },
    // Write one envelope; with `expectedVersion`, enforce optimistic locking.
    async put(compartment, collection, id, envelope, expectedVersion) {
      const path = recordPath(compartment, collection, id);
      if (expectedVersion !== void 0) {
        // FIX: single read replaces the old stat-then-read pair. The old
        // code had a TOCTOU window and threw a raw SyntaxError when the
        // existing file was corrupt; a corrupt/missing record is now
        // treated as "no record", consistent with get().
        const existing = await readEnvelope(path);
        if (existing !== null && existing._v !== expectedVersion) {
          throw new import_core.ConflictError(existing._v, `Version conflict: expected ${expectedVersion}, found ${existing._v}`);
        }
      }
      await ensureDir(collectionDir(compartment, collection));
      await (0, import_promises.writeFile)(path, serialize(envelope), "utf-8");
    },
    // Best-effort delete; a missing file is not an error.
    async delete(compartment, collection, id) {
      try {
        await (0, import_promises.unlink)(recordPath(compartment, collection, id));
      } catch {
        // ENOENT etc. — nothing to remove.
      }
    },
    // List record ids (filenames minus ".json") in a collection.
    async list(compartment, collection) {
      try {
        const entries = await (0, import_promises.readdir)(collectionDir(compartment, collection));
        return entries.filter((f) => f.endsWith(".json")).map((f) => f.slice(0, -5));
      } catch {
        return []; // collection directory does not exist yet
      }
    },
    // Load every record of every non-underscore collection in a compartment.
    async loadAll(compartment) {
      const compDir = (0, import_node_path.join)(dir, compartment);
      const snapshot = {};
      try {
        for (const collName of await (0, import_promises.readdir)(compDir)) {
          if (collName.startsWith("_")) continue; // skip _keyring, _sync
          const collPath = (0, import_node_path.join)(compDir, collName);
          const collStat = await (0, import_promises.stat)(collPath);
          if (!collStat.isDirectory()) continue;
          const records = {};
          for (const file of await (0, import_promises.readdir)(collPath)) {
            if (!file.endsWith(".json")) continue;
            // FIX: one corrupt file no longer aborts the whole load — it is
            // skipped (the old unguarded JSON.parse escaped to the outer
            // catch and silently returned a truncated snapshot).
            const envelope = await readEnvelope((0, import_node_path.join)(collPath, file));
            if (envelope !== null) records[file.slice(0, -5)] = envelope;
          }
          snapshot[collName] = records;
        }
      } catch {
        // compartment directory missing — empty snapshot
      }
      return snapshot;
    },
    // Persist a full snapshot; existing files are overwritten in place.
    async saveAll(compartment, data) {
      for (const [collName, records] of Object.entries(data)) {
        const collDir = collectionDir(compartment, collName);
        await ensureDir(collDir);
        for (const [id, envelope] of Object.entries(records)) {
          await (0, import_promises.writeFile)((0, import_node_path.join)(collDir, `${id}.json`), serialize(envelope), "utf-8");
        }
      }
    },
    // Health check: true iff the base directory is reachable.
    async ping() {
      try {
        await (0, import_promises.stat)(dir);
        return true;
      } catch {
        return false;
      }
    },
    // Enumerate top-level compartment subdirectories under `dir`.
    // Plain files are skipped; a missing base directory yields [].
    // Order is filesystem-defined; callers sort if they need stability.
    async listCompartments() {
      let entries;
      try {
        entries = await (0, import_promises.readdir)(dir);
      } catch {
        return [];
      }
      const compartments = [];
      for (const entry of entries) {
        try {
          const entryStat = await (0, import_promises.stat)((0, import_node_path.join)(dir, entry));
          if (entryStat.isDirectory()) compartments.push(entry);
        } catch {
          // entry vanished between readdir and stat — skip
        }
      }
      return compartments;
    },
    // Paginate a collection; the cursor is a numeric offset (as a string)
    // into the alphabetically sorted id list. Default page size: 100.
    async listPage(compartment, collection, cursor, limit = 100) {
      const dirPath = collectionDir(compartment, collection);
      let files;
      try {
        files = await (0, import_promises.readdir)(dirPath);
      } catch {
        return { items: [], nextCursor: null };
      }
      const ids = files.filter((f) => f.endsWith(".json")).map((f) => f.slice(0, -5)).sort();
      // FIX: sanitize inputs. A malformed/negative cursor used to produce a
      // NaN range (silently empty page); a non-positive limit produced an
      // empty page whose nextCursor equalled the cursor (infinite loop).
      const parsed = cursor ? Number.parseInt(cursor, 10) : 0;
      const start = Number.isInteger(parsed) && parsed >= 0 ? parsed : 0;
      const pageSize = Number.isInteger(limit) && limit > 0 ? limit : 100;
      const end = Math.min(start + pageSize, ids.length);
      const items = [];
      for (let i = start; i < end; i++) {
        const id = ids[i];
        // Skip files that vanished or were corrupted mid-page.
        const envelope = await readEnvelope((0, import_node_path.join)(dirPath, `${id}.json`));
        if (envelope !== null) items.push({ id, envelope });
      }
      return {
        items,
        nextCursor: end < ids.length ? String(end) : null
      };
    }
  };
}
195
+ // Annotate the CommonJS export names for ESM import in node:
196
+ 0 && (module.exports = {
197
+ jsonFile
198
+ });
199
+ //# sourceMappingURL=index.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import { readFile, writeFile, mkdir, readdir, unlink, stat } from 'node:fs/promises'\nimport { join } from 'node:path'\nimport type { NoydbAdapter, EncryptedEnvelope, CompartmentSnapshot } from '@noy-db/core'\nimport { ConflictError } from '@noy-db/core'\n\nexport interface JsonFileOptions {\n /** Base directory for NOYDB data. */\n dir: string\n /** Pretty-print JSON files. Default: true. */\n pretty?: boolean\n}\n\n/**\n * Create a JSON file adapter.\n * Maps the NOYDB hierarchy to the filesystem:\n *\n * ```\n * {dir}/{compartment}/{collection}/{id}.json\n * {dir}/{compartment}/_keyring/{userId}.json\n * ```\n */\nexport function jsonFile(options: JsonFileOptions): NoydbAdapter {\n const { dir, pretty = true } = options\n\n function recordPath(compartment: string, collection: string, id: string): string {\n return join(dir, compartment, collection, `${id}.json`)\n }\n\n function collectionDir(compartment: string, collection: string): string {\n return join(dir, compartment, collection)\n }\n\n async function ensureDir(path: string): Promise<void> {\n await mkdir(path, { recursive: true })\n }\n\n async function fileExists(path: string): Promise<boolean> {\n try {\n await stat(path)\n return true\n } catch {\n return false\n }\n }\n\n function serialize(envelope: EncryptedEnvelope): string {\n return pretty ? 
JSON.stringify(envelope, null, 2) : JSON.stringify(envelope)\n }\n\n return {\n name: 'file',\n\n async get(compartment, collection, id) {\n const path = recordPath(compartment, collection, id)\n try {\n const content = await readFile(path, 'utf-8')\n return JSON.parse(content) as EncryptedEnvelope\n } catch {\n return null\n }\n },\n\n async put(compartment, collection, id, envelope, expectedVersion) {\n const path = recordPath(compartment, collection, id)\n\n if (expectedVersion !== undefined && await fileExists(path)) {\n const existing = JSON.parse(await readFile(path, 'utf-8')) as EncryptedEnvelope\n if (existing._v !== expectedVersion) {\n throw new ConflictError(existing._v, `Version conflict: expected ${expectedVersion}, found ${existing._v}`)\n }\n }\n\n await ensureDir(collectionDir(compartment, collection))\n await writeFile(path, serialize(envelope), 'utf-8')\n },\n\n async delete(compartment, collection, id) {\n const path = recordPath(compartment, collection, id)\n try {\n await unlink(path)\n } catch {\n // File doesn't exist — that's fine\n }\n },\n\n async list(compartment, collection) {\n const dirPath = collectionDir(compartment, collection)\n try {\n const entries = await readdir(dirPath)\n return entries\n .filter(f => f.endsWith('.json'))\n .map(f => f.slice(0, -5)) // remove .json extension\n } catch {\n return []\n }\n },\n\n async loadAll(compartment) {\n const compDir = join(dir, compartment)\n const snapshot: CompartmentSnapshot = {}\n\n try {\n const collections = await readdir(compDir)\n for (const collName of collections) {\n if (collName.startsWith('_')) continue // skip _keyring, _sync\n const collPath = join(compDir, collName)\n const collStat = await stat(collPath)\n if (!collStat.isDirectory()) continue\n\n const records: Record<string, EncryptedEnvelope> = {}\n const files = await readdir(collPath)\n for (const file of files) {\n if (!file.endsWith('.json')) continue\n const id = file.slice(0, -5)\n const content = await 
readFile(join(collPath, file), 'utf-8')\n records[id] = JSON.parse(content) as EncryptedEnvelope\n }\n snapshot[collName] = records\n }\n } catch {\n // Directory doesn't exist — return empty snapshot\n }\n\n return snapshot\n },\n\n async saveAll(compartment, data) {\n for (const [collName, records] of Object.entries(data)) {\n const collDir = collectionDir(compartment, collName)\n await ensureDir(collDir)\n for (const [id, envelope] of Object.entries(records)) {\n await writeFile(join(collDir, `${id}.json`), serialize(envelope), 'utf-8')\n }\n }\n },\n\n async ping() {\n try {\n await stat(dir)\n return true\n } catch {\n return false\n }\n },\n\n /**\n * Enumerate every top-level compartment subdirectory under the\n * configured base directory. Used by\n * `Noydb.listAccessibleCompartments()` (v0.5 #63).\n *\n * The implementation is `readdir(dir)` filtered to entries that\n * are themselves directories — files at the top level (READMEs,\n * .DS_Store, etc.) are skipped, and missing base directory\n * returns an empty array rather than throwing. Result order is\n * filesystem-defined; consumers that want stable order should\n * sort themselves.\n */\n async listCompartments() {\n let entries: string[]\n try {\n entries = await readdir(dir)\n } catch {\n return []\n }\n const compartments: string[] = []\n for (const entry of entries) {\n try {\n const entryStat = await stat(join(dir, entry))\n if (entryStat.isDirectory()) compartments.push(entry)\n } catch {\n // Entry vanished between readdir and stat — skip silently.\n }\n }\n return compartments\n },\n\n /**\n * Paginate over a collection. Cursor is a numeric offset (as a string)\n * into the sorted filename list. Files are sorted alphabetically so\n * pages are stable across runs and across processes that share the\n * same data directory.\n *\n * The default `limit` is 100. 
Each item carries its decoded envelope\n * so callers don't need an extra `get()` round-trip per id.\n */\n async listPage(compartment, collection, cursor, limit = 100) {\n const dirPath = collectionDir(compartment, collection)\n let files: string[]\n try {\n files = await readdir(dirPath)\n } catch {\n return { items: [], nextCursor: null }\n }\n\n const ids = files\n .filter(f => f.endsWith('.json'))\n .map(f => f.slice(0, -5))\n .sort()\n\n const start = cursor ? parseInt(cursor, 10) : 0\n const end = Math.min(start + limit, ids.length)\n\n const items: Array<{ id: string; envelope: EncryptedEnvelope }> = []\n for (let i = start; i < end; i++) {\n const id = ids[i]!\n try {\n const content = await readFile(join(dirPath, `${id}.json`), 'utf-8')\n items.push({ id, envelope: JSON.parse(content) as EncryptedEnvelope })\n } catch {\n // File disappeared between readdir and readFile — skip silently.\n }\n }\n\n return {\n items,\n nextCursor: end < ids.length ? String(end) : null,\n }\n },\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAkE;AAClE,uBAAqB;AAErB,kBAA8B;AAkBvB,SAAS,SAAS,SAAwC;AAC/D,QAAM,EAAE,KAAK,SAAS,KAAK,IAAI;AAE/B,WAAS,WAAW,aAAqB,YAAoB,IAAoB;AAC/E,eAAO,uBAAK,KAAK,aAAa,YAAY,GAAG,EAAE,OAAO;AAAA,EACxD;AAEA,WAAS,cAAc,aAAqB,YAA4B;AACtE,eAAO,uBAAK,KAAK,aAAa,UAAU;AAAA,EAC1C;AAEA,iBAAe,UAAU,MAA6B;AACpD,cAAM,uBAAM,MAAM,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AAEA,iBAAe,WAAW,MAAgC;AACxD,QAAI;AACF,gBAAM,sBAAK,IAAI;AACf,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,WAAS,UAAU,UAAqC;AACtD,WAAO,SAAS,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI,KAAK,UAAU,QAAQ;AAAA,EAC7E;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IAEN,MAAM,IAAI,aAAa,YAAY,IAAI;AACrC,YAAM,OAAO,WAAW,aAAa,YAAY,EAAE;AACnD,UAAI;AACF,cAAM,UAAU,UAAM,0BAAS,MAAM,OAAO;AAC5C,eAAO,KAAK,MAAM,OAAO;AAAA,MAC3B,QAAQ;AACN,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IAEA,MAAM,IAAI,aAAa,YAAY,IAAI,UAAU,iBAAiB;AAChE,YAAM,OAAO,WAAW,aAAa,YAAY,EAAE;AAEnD,UAAI,oBAAoB,UAAa,MAAM,WAAW,IAAI,GAAG;AAC3D,cAAM,WAAW,KAAK,MAAM,UAAM,0BAAS,MAAM,OAAO,CAAC;AACzD,YAAI,SAAS,OAAO,iBAAiB;AACnC,gBAAM,IAAI,0BAAc,SAAS,IAAI,8BAA8B,eAAe,WAAW,SAAS,EAAE,EAAE;AAAA,QAC5G;AAAA,MACF;AAEA,YAAM,UAAU,cAAc,aAAa,UAAU,CAAC;AACtD,gBAAM,2BAAU,MAAM,UAAU,QAAQ,GAAG,OAAO;AAAA,IACpD;AAAA,IAEA,MAAM,OAAO,aAAa,YAAY,IAAI;AACxC,YAAM,OAAO,WAAW,aAAa,YAAY,EAAE;AACnD,UAAI;AACF,kBAAM,wBAAO,IAAI;AAAA,MACnB,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,IAEA,MAAM,KAAK,aAAa,YAAY;AAClC,YAAM,UAAU,cAAc,aAAa,UAAU;AACrD,UAAI;AACF,cAAM,UAAU,UAAM,yBAAQ,OAAO;AACrC,eAAO,QACJ,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,EAC/B,IAAI,OAAK,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,MAC5B,QAAQ;AACN,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,QAAQ,aAAa;AACzB,YAAM,cAAU,uBAAK,KAAK,WAAW;AACrC,YAAM,WAAgC,CAAC;AAEvC,UAAI;AACF,cAAM,cAAc,UAAM,yBAAQ,OAAO;AACzC,mBAAW,YAAY,aAAa;AAClC,cAAI,SAAS,WAAW,GAAG,EAAG;AAC9B,gBAAM,eAAW,uBAAK,SAAS,QAAQ;AACvC,gBAAM,WAAW,UAAM,sBAAK,QAAQ;AACpC,cAAI,CAAC,SAAS,YAAY,EAAG;AAE7B,gBAAM,UAA6C,CAAC;AACpD,gBAAM,QAAQ,UAAM,yBAAQ,QAAQ;AACpC,qBAAW,QAAQ,OAAO;AACxB,gBAAI,CAAC,KAAK,SAAS,OAAO,EAAG;AAC7B,kBAAM,KAAK,KAAK,MAAM,GAAG,EA
AE;AAC3B,kBAAM,UAAU,UAAM,8BAAS,uBAAK,UAAU,IAAI,GAAG,OAAO;AAC5D,oBAAQ,EAAE,IAAI,KAAK,MAAM,OAAO;AAAA,UAClC;AACA,mBAAS,QAAQ,IAAI;AAAA,QACvB;AAAA,MACF,QAAQ;AAAA,MAER;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,QAAQ,aAAa,MAAM;AAC/B,iBAAW,CAAC,UAAU,OAAO,KAAK,OAAO,QAAQ,IAAI,GAAG;AACtD,cAAM,UAAU,cAAc,aAAa,QAAQ;AACnD,cAAM,UAAU,OAAO;AACvB,mBAAW,CAAC,IAAI,QAAQ,KAAK,OAAO,QAAQ,OAAO,GAAG;AACpD,oBAAM,+BAAU,uBAAK,SAAS,GAAG,EAAE,OAAO,GAAG,UAAU,QAAQ,GAAG,OAAO;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AAAA,IAEA,MAAM,OAAO;AACX,UAAI;AACF,kBAAM,sBAAK,GAAG;AACd,eAAO;AAAA,MACT,QAAQ;AACN,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAcA,MAAM,mBAAmB;AACvB,UAAI;AACJ,UAAI;AACF,kBAAU,UAAM,yBAAQ,GAAG;AAAA,MAC7B,QAAQ;AACN,eAAO,CAAC;AAAA,MACV;AACA,YAAM,eAAyB,CAAC;AAChC,iBAAW,SAAS,SAAS;AAC3B,YAAI;AACF,gBAAM,YAAY,UAAM,0BAAK,uBAAK,KAAK,KAAK,CAAC;AAC7C,cAAI,UAAU,YAAY,EAAG,cAAa,KAAK,KAAK;AAAA,QACtD,QAAQ;AAAA,QAER;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAWA,MAAM,SAAS,aAAa,YAAY,QAAQ,QAAQ,KAAK;AAC3D,YAAM,UAAU,cAAc,aAAa,UAAU;AACrD,UAAI;AACJ,UAAI;AACF,gBAAQ,UAAM,yBAAQ,OAAO;AAAA,MAC/B,QAAQ;AACN,eAAO,EAAE,OAAO,CAAC,GAAG,YAAY,KAAK;AAAA,MACvC;AAEA,YAAM,MAAM,MACT,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,EAC/B,IAAI,OAAK,EAAE,MAAM,GAAG,EAAE,CAAC,EACvB,KAAK;AAER,YAAM,QAAQ,SAAS,SAAS,QAAQ,EAAE,IAAI;AAC9C,YAAM,MAAM,KAAK,IAAI,QAAQ,OAAO,IAAI,MAAM;AAE9C,YAAM,QAA4D,CAAC;AACnE,eAAS,IAAI,OAAO,IAAI,KAAK,KAAK;AAChC,cAAM,KAAK,IAAI,CAAC;AAChB,YAAI;AACF,gBAAM,UAAU,UAAM,8BAAS,uBAAK,SAAS,GAAG,EAAE,OAAO,GAAG,OAAO;AACnE,gBAAM,KAAK,EAAE,IAAI,UAAU,KAAK,MAAM,OAAO,EAAuB,CAAC;AAAA,QACvE,QAAQ;AAAA,QAER;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA,YAAY,MAAM,IAAI,SAAS,OAAO,GAAG,IAAI;AAAA,MAC/C;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
@@ -0,0 +1,20 @@
1
import { NoydbAdapter } from '@noy-db/core';

/** Configuration accepted by the `jsonFile` adapter factory. */
interface JsonFileOptions {
    /** Base directory for NOYDB data. */
    dir: string;
    /** Pretty-print JSON files. Default: true. */
    pretty?: boolean;
}
/**
 * Create a JSON file adapter.
 * Maps the NOYDB hierarchy to the filesystem:
 *
 * ```
 * {dir}/{compartment}/{collection}/{id}.json
 * {dir}/{compartment}/_keyring/{userId}.json
 * ```
 */
declare function jsonFile(options: JsonFileOptions): NoydbAdapter;

export { type JsonFileOptions, jsonFile };
@@ -0,0 +1,20 @@
1
import { NoydbAdapter } from '@noy-db/core';

/** Configuration accepted by the `jsonFile` adapter factory. */
interface JsonFileOptions {
    /** Base directory for NOYDB data. */
    dir: string;
    /** Pretty-print JSON files. Default: true. */
    pretty?: boolean;
}
/**
 * Create a JSON file adapter.
 * Maps the NOYDB hierarchy to the filesystem:
 *
 * ```
 * {dir}/{compartment}/{collection}/{id}.json
 * {dir}/{compartment}/_keyring/{userId}.json
 * ```
 */
declare function jsonFile(options: JsonFileOptions): NoydbAdapter;

export { type JsonFileOptions, jsonFile };
package/dist/index.js ADDED
@@ -0,0 +1,174 @@
1
+ // src/index.ts
2
+ import { readFile, writeFile, mkdir, readdir, unlink, stat } from "fs/promises";
3
+ import { join } from "path";
4
+ import { ConflictError } from "@noy-db/core";
5
/**
 * Create a JSON file adapter for noy-db (ESM bundle).
 *
 * Maps the noy-db hierarchy to the filesystem as
 * `{dir}/{compartment}/{collection}/{id}.json`; files hold only the
 * serialized envelope objects handed to the adapter.
 *
 * @param {{ dir: string, pretty?: boolean }} options - `dir`: base data
 *   directory; `pretty` (default true): pretty-print the JSON files.
 * @returns {object} NoydbAdapter implementation named "file".
 */
function jsonFile(options) {
  const { dir, pretty = true } = options;
  // {dir}/{compartment}/{collection}/{id}.json
  function recordPath(compartment, collection, id) {
    return join(dir, compartment, collection, `${id}.json`);
  }
  function collectionDir(compartment, collection) {
    return join(dir, compartment, collection);
  }
  async function ensureDir(path) {
    await mkdir(path, { recursive: true });
  }
  function serialize(envelope) {
    return pretty ? JSON.stringify(envelope, null, 2) : JSON.stringify(envelope);
  }
  // Read + parse one record file; null when missing or not valid JSON
  // (e.g. a torn concurrent write).
  async function readEnvelope(path) {
    try {
      return JSON.parse(await readFile(path, "utf-8"));
    } catch {
      return null;
    }
  }
  return {
    name: "file",
    // Fetch one envelope, or null when absent/corrupt.
    async get(compartment, collection, id) {
      return readEnvelope(recordPath(compartment, collection, id));
    },
    // Write one envelope; with `expectedVersion`, enforce optimistic locking.
    async put(compartment, collection, id, envelope, expectedVersion) {
      const path = recordPath(compartment, collection, id);
      if (expectedVersion !== void 0) {
        // FIX: single read replaces the old stat-then-read pair. The old
        // code had a TOCTOU window and threw a raw SyntaxError when the
        // existing file was corrupt; a corrupt/missing record is now
        // treated as "no record", consistent with get().
        const existing = await readEnvelope(path);
        if (existing !== null && existing._v !== expectedVersion) {
          throw new ConflictError(existing._v, `Version conflict: expected ${expectedVersion}, found ${existing._v}`);
        }
      }
      await ensureDir(collectionDir(compartment, collection));
      await writeFile(path, serialize(envelope), "utf-8");
    },
    // Best-effort delete; a missing file is not an error.
    async delete(compartment, collection, id) {
      try {
        await unlink(recordPath(compartment, collection, id));
      } catch {
        // ENOENT etc. — nothing to remove.
      }
    },
    // List record ids (filenames minus ".json") in a collection.
    async list(compartment, collection) {
      try {
        const entries = await readdir(collectionDir(compartment, collection));
        return entries.filter((f) => f.endsWith(".json")).map((f) => f.slice(0, -5));
      } catch {
        return []; // collection directory does not exist yet
      }
    },
    // Load every record of every non-underscore collection in a compartment.
    async loadAll(compartment) {
      const compDir = join(dir, compartment);
      const snapshot = {};
      try {
        for (const collName of await readdir(compDir)) {
          if (collName.startsWith("_")) continue; // skip _keyring, _sync
          const collPath = join(compDir, collName);
          const collStat = await stat(collPath);
          if (!collStat.isDirectory()) continue;
          const records = {};
          for (const file of await readdir(collPath)) {
            if (!file.endsWith(".json")) continue;
            // FIX: one corrupt file no longer aborts the whole load — it is
            // skipped (the old unguarded JSON.parse escaped to the outer
            // catch and silently returned a truncated snapshot).
            const envelope = await readEnvelope(join(collPath, file));
            if (envelope !== null) records[file.slice(0, -5)] = envelope;
          }
          snapshot[collName] = records;
        }
      } catch {
        // compartment directory missing — empty snapshot
      }
      return snapshot;
    },
    // Persist a full snapshot; existing files are overwritten in place.
    async saveAll(compartment, data) {
      for (const [collName, records] of Object.entries(data)) {
        const collDir = collectionDir(compartment, collName);
        await ensureDir(collDir);
        for (const [id, envelope] of Object.entries(records)) {
          await writeFile(join(collDir, `${id}.json`), serialize(envelope), "utf-8");
        }
      }
    },
    // Health check: true iff the base directory is reachable.
    async ping() {
      try {
        await stat(dir);
        return true;
      } catch {
        return false;
      }
    },
    // Enumerate top-level compartment subdirectories under `dir`.
    // Plain files are skipped; a missing base directory yields [].
    // Order is filesystem-defined; callers sort if they need stability.
    async listCompartments() {
      let entries;
      try {
        entries = await readdir(dir);
      } catch {
        return [];
      }
      const compartments = [];
      for (const entry of entries) {
        try {
          const entryStat = await stat(join(dir, entry));
          if (entryStat.isDirectory()) compartments.push(entry);
        } catch {
          // entry vanished between readdir and stat — skip
        }
      }
      return compartments;
    },
    // Paginate a collection; the cursor is a numeric offset (as a string)
    // into the alphabetically sorted id list. Default page size: 100.
    async listPage(compartment, collection, cursor, limit = 100) {
      const dirPath = collectionDir(compartment, collection);
      let files;
      try {
        files = await readdir(dirPath);
      } catch {
        return { items: [], nextCursor: null };
      }
      const ids = files.filter((f) => f.endsWith(".json")).map((f) => f.slice(0, -5)).sort();
      // FIX: sanitize inputs. A malformed/negative cursor used to produce a
      // NaN range (silently empty page); a non-positive limit produced an
      // empty page whose nextCursor equalled the cursor (infinite loop).
      const parsed = cursor ? Number.parseInt(cursor, 10) : 0;
      const start = Number.isInteger(parsed) && parsed >= 0 ? parsed : 0;
      const pageSize = Number.isInteger(limit) && limit > 0 ? limit : 100;
      const end = Math.min(start + pageSize, ids.length);
      const items = [];
      for (let i = start; i < end; i++) {
        const id = ids[i];
        // Skip files that vanished or were corrupted mid-page.
        const envelope = await readEnvelope(join(dirPath, `${id}.json`));
        if (envelope !== null) items.push({ id, envelope });
      }
      return {
        items,
        nextCursor: end < ids.length ? String(end) : null
      };
    }
  };
}
171
+ export {
172
+ jsonFile
173
+ };
174
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import { readFile, writeFile, mkdir, readdir, unlink, stat } from 'node:fs/promises'\nimport { join } from 'node:path'\nimport type { NoydbAdapter, EncryptedEnvelope, CompartmentSnapshot } from '@noy-db/core'\nimport { ConflictError } from '@noy-db/core'\n\nexport interface JsonFileOptions {\n /** Base directory for NOYDB data. */\n dir: string\n /** Pretty-print JSON files. Default: true. */\n pretty?: boolean\n}\n\n/**\n * Create a JSON file adapter.\n * Maps the NOYDB hierarchy to the filesystem:\n *\n * ```\n * {dir}/{compartment}/{collection}/{id}.json\n * {dir}/{compartment}/_keyring/{userId}.json\n * ```\n */\nexport function jsonFile(options: JsonFileOptions): NoydbAdapter {\n const { dir, pretty = true } = options\n\n function recordPath(compartment: string, collection: string, id: string): string {\n return join(dir, compartment, collection, `${id}.json`)\n }\n\n function collectionDir(compartment: string, collection: string): string {\n return join(dir, compartment, collection)\n }\n\n async function ensureDir(path: string): Promise<void> {\n await mkdir(path, { recursive: true })\n }\n\n async function fileExists(path: string): Promise<boolean> {\n try {\n await stat(path)\n return true\n } catch {\n return false\n }\n }\n\n function serialize(envelope: EncryptedEnvelope): string {\n return pretty ? 
JSON.stringify(envelope, null, 2) : JSON.stringify(envelope)\n }\n\n return {\n name: 'file',\n\n async get(compartment, collection, id) {\n const path = recordPath(compartment, collection, id)\n try {\n const content = await readFile(path, 'utf-8')\n return JSON.parse(content) as EncryptedEnvelope\n } catch {\n return null\n }\n },\n\n async put(compartment, collection, id, envelope, expectedVersion) {\n const path = recordPath(compartment, collection, id)\n\n if (expectedVersion !== undefined && await fileExists(path)) {\n const existing = JSON.parse(await readFile(path, 'utf-8')) as EncryptedEnvelope\n if (existing._v !== expectedVersion) {\n throw new ConflictError(existing._v, `Version conflict: expected ${expectedVersion}, found ${existing._v}`)\n }\n }\n\n await ensureDir(collectionDir(compartment, collection))\n await writeFile(path, serialize(envelope), 'utf-8')\n },\n\n async delete(compartment, collection, id) {\n const path = recordPath(compartment, collection, id)\n try {\n await unlink(path)\n } catch {\n // File doesn't exist — that's fine\n }\n },\n\n async list(compartment, collection) {\n const dirPath = collectionDir(compartment, collection)\n try {\n const entries = await readdir(dirPath)\n return entries\n .filter(f => f.endsWith('.json'))\n .map(f => f.slice(0, -5)) // remove .json extension\n } catch {\n return []\n }\n },\n\n async loadAll(compartment) {\n const compDir = join(dir, compartment)\n const snapshot: CompartmentSnapshot = {}\n\n try {\n const collections = await readdir(compDir)\n for (const collName of collections) {\n if (collName.startsWith('_')) continue // skip _keyring, _sync\n const collPath = join(compDir, collName)\n const collStat = await stat(collPath)\n if (!collStat.isDirectory()) continue\n\n const records: Record<string, EncryptedEnvelope> = {}\n const files = await readdir(collPath)\n for (const file of files) {\n if (!file.endsWith('.json')) continue\n const id = file.slice(0, -5)\n const content = await 
readFile(join(collPath, file), 'utf-8')\n records[id] = JSON.parse(content) as EncryptedEnvelope\n }\n snapshot[collName] = records\n }\n } catch {\n // Directory doesn't exist — return empty snapshot\n }\n\n return snapshot\n },\n\n async saveAll(compartment, data) {\n for (const [collName, records] of Object.entries(data)) {\n const collDir = collectionDir(compartment, collName)\n await ensureDir(collDir)\n for (const [id, envelope] of Object.entries(records)) {\n await writeFile(join(collDir, `${id}.json`), serialize(envelope), 'utf-8')\n }\n }\n },\n\n async ping() {\n try {\n await stat(dir)\n return true\n } catch {\n return false\n }\n },\n\n /**\n * Enumerate every top-level compartment subdirectory under the\n * configured base directory. Used by\n * `Noydb.listAccessibleCompartments()` (v0.5 #63).\n *\n * The implementation is `readdir(dir)` filtered to entries that\n * are themselves directories — files at the top level (READMEs,\n * .DS_Store, etc.) are skipped, and missing base directory\n * returns an empty array rather than throwing. Result order is\n * filesystem-defined; consumers that want stable order should\n * sort themselves.\n */\n async listCompartments() {\n let entries: string[]\n try {\n entries = await readdir(dir)\n } catch {\n return []\n }\n const compartments: string[] = []\n for (const entry of entries) {\n try {\n const entryStat = await stat(join(dir, entry))\n if (entryStat.isDirectory()) compartments.push(entry)\n } catch {\n // Entry vanished between readdir and stat — skip silently.\n }\n }\n return compartments\n },\n\n /**\n * Paginate over a collection. Cursor is a numeric offset (as a string)\n * into the sorted filename list. Files are sorted alphabetically so\n * pages are stable across runs and across processes that share the\n * same data directory.\n *\n * The default `limit` is 100. 
Each item carries its decoded envelope\n * so callers don't need an extra `get()` round-trip per id.\n */\n async listPage(compartment, collection, cursor, limit = 100) {\n const dirPath = collectionDir(compartment, collection)\n let files: string[]\n try {\n files = await readdir(dirPath)\n } catch {\n return { items: [], nextCursor: null }\n }\n\n const ids = files\n .filter(f => f.endsWith('.json'))\n .map(f => f.slice(0, -5))\n .sort()\n\n const start = cursor ? parseInt(cursor, 10) : 0\n const end = Math.min(start + limit, ids.length)\n\n const items: Array<{ id: string; envelope: EncryptedEnvelope }> = []\n for (let i = start; i < end; i++) {\n const id = ids[i]!\n try {\n const content = await readFile(join(dirPath, `${id}.json`), 'utf-8')\n items.push({ id, envelope: JSON.parse(content) as EncryptedEnvelope })\n } catch {\n // File disappeared between readdir and readFile — skip silently.\n }\n }\n\n return {\n items,\n nextCursor: end < ids.length ? String(end) : null,\n }\n },\n 
}\n}\n"],"mappings":";AAAA,SAAS,UAAU,WAAW,OAAO,SAAS,QAAQ,YAAY;AAClE,SAAS,YAAY;AAErB,SAAS,qBAAqB;AAkBvB,SAAS,SAAS,SAAwC;AAC/D,QAAM,EAAE,KAAK,SAAS,KAAK,IAAI;AAE/B,WAAS,WAAW,aAAqB,YAAoB,IAAoB;AAC/E,WAAO,KAAK,KAAK,aAAa,YAAY,GAAG,EAAE,OAAO;AAAA,EACxD;AAEA,WAAS,cAAc,aAAqB,YAA4B;AACtE,WAAO,KAAK,KAAK,aAAa,UAAU;AAAA,EAC1C;AAEA,iBAAe,UAAU,MAA6B;AACpD,UAAM,MAAM,MAAM,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AAEA,iBAAe,WAAW,MAAgC;AACxD,QAAI;AACF,YAAM,KAAK,IAAI;AACf,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,WAAS,UAAU,UAAqC;AACtD,WAAO,SAAS,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI,KAAK,UAAU,QAAQ;AAAA,EAC7E;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IAEN,MAAM,IAAI,aAAa,YAAY,IAAI;AACrC,YAAM,OAAO,WAAW,aAAa,YAAY,EAAE;AACnD,UAAI;AACF,cAAM,UAAU,MAAM,SAAS,MAAM,OAAO;AAC5C,eAAO,KAAK,MAAM,OAAO;AAAA,MAC3B,QAAQ;AACN,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IAEA,MAAM,IAAI,aAAa,YAAY,IAAI,UAAU,iBAAiB;AAChE,YAAM,OAAO,WAAW,aAAa,YAAY,EAAE;AAEnD,UAAI,oBAAoB,UAAa,MAAM,WAAW,IAAI,GAAG;AAC3D,cAAM,WAAW,KAAK,MAAM,MAAM,SAAS,MAAM,OAAO,CAAC;AACzD,YAAI,SAAS,OAAO,iBAAiB;AACnC,gBAAM,IAAI,cAAc,SAAS,IAAI,8BAA8B,eAAe,WAAW,SAAS,EAAE,EAAE;AAAA,QAC5G;AAAA,MACF;AAEA,YAAM,UAAU,cAAc,aAAa,UAAU,CAAC;AACtD,YAAM,UAAU,MAAM,UAAU,QAAQ,GAAG,OAAO;AAAA,IACpD;AAAA,IAEA,MAAM,OAAO,aAAa,YAAY,IAAI;AACxC,YAAM,OAAO,WAAW,aAAa,YAAY,EAAE;AACnD,UAAI;AACF,cAAM,OAAO,IAAI;AAAA,MACnB,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,IAEA,MAAM,KAAK,aAAa,YAAY;AAClC,YAAM,UAAU,cAAc,aAAa,UAAU;AACrD,UAAI;AACF,cAAM,UAAU,MAAM,QAAQ,OAAO;AACrC,eAAO,QACJ,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,EAC/B,IAAI,OAAK,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,MAC5B,QAAQ;AACN,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,QAAQ,aAAa;AACzB,YAAM,UAAU,KAAK,KAAK,WAAW;AACrC,YAAM,WAAgC,CAAC;AAEvC,UAAI;AACF,cAAM,cAAc,MAAM,QAAQ,OAAO;AACzC,mBAAW,YAAY,aAAa;AAClC,cAAI,SAAS,WAAW,GAAG,EAAG;AAC9B,gBAAM,WAAW,KAAK,SAAS,QAAQ;AACvC,gBAAM,WAAW,MAAM,KAAK,QAAQ;AACpC,cAAI,CAAC,SAAS,YAAY,EAAG;AAE7B,gBAAM,UAA6C,CAAC;AACpD,gBAAM,QAAQ,MAAM,QAAQ,QAAQ;AACpC,qBAAW,QAAQ,OAAO;AACxB,gBAAI,CAAC,KAAK,SAAS,OAAO,EAAG;AAC7B,kBAAM,KAAK,KAAK,MAAM,GAAG,EAAE;AAC3B,kBAAM,UAAU,MAAM,S
AAS,KAAK,UAAU,IAAI,GAAG,OAAO;AAC5D,oBAAQ,EAAE,IAAI,KAAK,MAAM,OAAO;AAAA,UAClC;AACA,mBAAS,QAAQ,IAAI;AAAA,QACvB;AAAA,MACF,QAAQ;AAAA,MAER;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,QAAQ,aAAa,MAAM;AAC/B,iBAAW,CAAC,UAAU,OAAO,KAAK,OAAO,QAAQ,IAAI,GAAG;AACtD,cAAM,UAAU,cAAc,aAAa,QAAQ;AACnD,cAAM,UAAU,OAAO;AACvB,mBAAW,CAAC,IAAI,QAAQ,KAAK,OAAO,QAAQ,OAAO,GAAG;AACpD,gBAAM,UAAU,KAAK,SAAS,GAAG,EAAE,OAAO,GAAG,UAAU,QAAQ,GAAG,OAAO;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AAAA,IAEA,MAAM,OAAO;AACX,UAAI;AACF,cAAM,KAAK,GAAG;AACd,eAAO;AAAA,MACT,QAAQ;AACN,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAcA,MAAM,mBAAmB;AACvB,UAAI;AACJ,UAAI;AACF,kBAAU,MAAM,QAAQ,GAAG;AAAA,MAC7B,QAAQ;AACN,eAAO,CAAC;AAAA,MACV;AACA,YAAM,eAAyB,CAAC;AAChC,iBAAW,SAAS,SAAS;AAC3B,YAAI;AACF,gBAAM,YAAY,MAAM,KAAK,KAAK,KAAK,KAAK,CAAC;AAC7C,cAAI,UAAU,YAAY,EAAG,cAAa,KAAK,KAAK;AAAA,QACtD,QAAQ;AAAA,QAER;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAWA,MAAM,SAAS,aAAa,YAAY,QAAQ,QAAQ,KAAK;AAC3D,YAAM,UAAU,cAAc,aAAa,UAAU;AACrD,UAAI;AACJ,UAAI;AACF,gBAAQ,MAAM,QAAQ,OAAO;AAAA,MAC/B,QAAQ;AACN,eAAO,EAAE,OAAO,CAAC,GAAG,YAAY,KAAK;AAAA,MACvC;AAEA,YAAM,MAAM,MACT,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,EAC/B,IAAI,OAAK,EAAE,MAAM,GAAG,EAAE,CAAC,EACvB,KAAK;AAER,YAAM,QAAQ,SAAS,SAAS,QAAQ,EAAE,IAAI;AAC9C,YAAM,MAAM,KAAK,IAAI,QAAQ,OAAO,IAAI,MAAM;AAE9C,YAAM,QAA4D,CAAC;AACnE,eAAS,IAAI,OAAO,IAAI,KAAK,KAAK;AAChC,cAAM,KAAK,IAAI,CAAC;AAChB,YAAI;AACF,gBAAM,UAAU,MAAM,SAAS,KAAK,SAAS,GAAG,EAAE,OAAO,GAAG,OAAO;AACnE,gBAAM,KAAK,EAAE,IAAI,UAAU,KAAK,MAAM,OAAO,EAAuB,CAAC;AAAA,QACvE,QAAQ;AAAA,QAER;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA,YAAY,MAAM,IAAI,SAAS,OAAO,GAAG,IAAI;AAAA,MAC/C;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
package/package.json ADDED
@@ -0,0 +1,67 @@
1
+ {
2
+ "name": "@noy-db/file",
3
+ "version": "0.5.0",
4
+ "description": "JSON file adapter for noy-db — encrypted document store on local disk, USB sticks, or network drives",
5
+ "license": "MIT",
6
+ "author": "vLannaAi <vicio@lanna.ai>",
7
+ "homepage": "https://github.com/vLannaAi/noy-db/tree/main/packages/file#readme",
8
+ "repository": {
9
+ "type": "git",
10
+ "url": "https://github.com/vLannaAi/noy-db.git",
11
+ "directory": "packages/file"
12
+ },
13
+ "bugs": {
14
+ "url": "https://github.com/vLannaAi/noy-db/issues"
15
+ },
16
+ "type": "module",
17
+ "sideEffects": false,
18
+ "exports": {
19
+ ".": {
20
+ "import": {
21
+ "types": "./dist/index.d.ts",
22
+ "default": "./dist/index.js"
23
+ },
24
+ "require": {
25
+ "types": "./dist/index.d.cts",
26
+ "default": "./dist/index.cjs"
27
+ }
28
+ }
29
+ },
30
+ "main": "./dist/index.cjs",
31
+ "module": "./dist/index.js",
32
+ "types": "./dist/index.d.ts",
33
+ "files": [
34
+ "dist",
35
+ "README.md",
36
+ "LICENSE"
37
+ ],
38
+ "engines": {
39
+ "node": ">=18.0.0"
40
+ },
41
+ "peerDependencies": {
42
+ "@noy-db/core": "^0.5.0"
43
+ },
44
+ "devDependencies": {
45
+ "@types/node": "^22.0.0",
46
+ "@noy-db/test-adapter-conformance": "0.0.0",
47
+ "@noy-db/core": "0.5.0"
48
+ },
49
+ "keywords": [
50
+ "noy-db",
51
+ "adapter",
52
+ "file",
53
+ "json",
54
+ "filesystem",
55
+ "local-disk",
56
+ "usb",
57
+ "storage",
58
+ "encryption",
59
+ "offline-first"
60
+ ],
61
+ "scripts": {
62
+ "build": "tsup",
63
+ "test": "vitest run",
64
+ "lint": "eslint src/",
65
+ "typecheck": "tsc --noEmit"
66
+ }
67
+ }