@noy-db/hub 0.1.0-pre.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (195) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +197 -0
  3. package/dist/aggregate/index.cjs +476 -0
  4. package/dist/aggregate/index.cjs.map +1 -0
  5. package/dist/aggregate/index.d.cts +38 -0
  6. package/dist/aggregate/index.d.ts +38 -0
  7. package/dist/aggregate/index.js +53 -0
  8. package/dist/aggregate/index.js.map +1 -0
  9. package/dist/blobs/index.cjs +1480 -0
  10. package/dist/blobs/index.cjs.map +1 -0
  11. package/dist/blobs/index.d.cts +45 -0
  12. package/dist/blobs/index.d.ts +45 -0
  13. package/dist/blobs/index.js +48 -0
  14. package/dist/blobs/index.js.map +1 -0
  15. package/dist/bundle/index.cjs +436 -0
  16. package/dist/bundle/index.cjs.map +1 -0
  17. package/dist/bundle/index.d.cts +7 -0
  18. package/dist/bundle/index.d.ts +7 -0
  19. package/dist/bundle/index.js +40 -0
  20. package/dist/bundle/index.js.map +1 -0
  21. package/dist/chunk-2QR2PQTT.js +217 -0
  22. package/dist/chunk-2QR2PQTT.js.map +1 -0
  23. package/dist/chunk-4OWFYIDQ.js +79 -0
  24. package/dist/chunk-4OWFYIDQ.js.map +1 -0
  25. package/dist/chunk-5AATM2M2.js +90 -0
  26. package/dist/chunk-5AATM2M2.js.map +1 -0
  27. package/dist/chunk-ACLDOTNQ.js +543 -0
  28. package/dist/chunk-ACLDOTNQ.js.map +1 -0
  29. package/dist/chunk-BTDCBVJW.js +160 -0
  30. package/dist/chunk-BTDCBVJW.js.map +1 -0
  31. package/dist/chunk-CIMZBAZB.js +72 -0
  32. package/dist/chunk-CIMZBAZB.js.map +1 -0
  33. package/dist/chunk-E445ICYI.js +365 -0
  34. package/dist/chunk-E445ICYI.js.map +1 -0
  35. package/dist/chunk-EXQRC2L4.js +722 -0
  36. package/dist/chunk-EXQRC2L4.js.map +1 -0
  37. package/dist/chunk-FZU343FL.js +32 -0
  38. package/dist/chunk-FZU343FL.js.map +1 -0
  39. package/dist/chunk-GJILMRPO.js +354 -0
  40. package/dist/chunk-GJILMRPO.js.map +1 -0
  41. package/dist/chunk-GOUT6DND.js +1285 -0
  42. package/dist/chunk-GOUT6DND.js.map +1 -0
  43. package/dist/chunk-J66GRPNH.js +111 -0
  44. package/dist/chunk-J66GRPNH.js.map +1 -0
  45. package/dist/chunk-M2F2JAWB.js +464 -0
  46. package/dist/chunk-M2F2JAWB.js.map +1 -0
  47. package/dist/chunk-M5INGEFC.js +84 -0
  48. package/dist/chunk-M5INGEFC.js.map +1 -0
  49. package/dist/chunk-M62XNWRA.js +72 -0
  50. package/dist/chunk-M62XNWRA.js.map +1 -0
  51. package/dist/chunk-MR4424N3.js +275 -0
  52. package/dist/chunk-MR4424N3.js.map +1 -0
  53. package/dist/chunk-NPC4LFV5.js +132 -0
  54. package/dist/chunk-NPC4LFV5.js.map +1 -0
  55. package/dist/chunk-NXFEYLVG.js +311 -0
  56. package/dist/chunk-NXFEYLVG.js.map +1 -0
  57. package/dist/chunk-R36SIKES.js +79 -0
  58. package/dist/chunk-R36SIKES.js.map +1 -0
  59. package/dist/chunk-TDR6T5CJ.js +381 -0
  60. package/dist/chunk-TDR6T5CJ.js.map +1 -0
  61. package/dist/chunk-UF3BUNQZ.js +1 -0
  62. package/dist/chunk-UF3BUNQZ.js.map +1 -0
  63. package/dist/chunk-UQFSPSWG.js +1109 -0
  64. package/dist/chunk-UQFSPSWG.js.map +1 -0
  65. package/dist/chunk-USKYUS74.js +793 -0
  66. package/dist/chunk-USKYUS74.js.map +1 -0
  67. package/dist/chunk-XCL3WP6J.js +121 -0
  68. package/dist/chunk-XCL3WP6J.js.map +1 -0
  69. package/dist/chunk-XHFOENR2.js +680 -0
  70. package/dist/chunk-XHFOENR2.js.map +1 -0
  71. package/dist/chunk-ZFKD4QMV.js +430 -0
  72. package/dist/chunk-ZFKD4QMV.js.map +1 -0
  73. package/dist/chunk-ZLMV3TUA.js +490 -0
  74. package/dist/chunk-ZLMV3TUA.js.map +1 -0
  75. package/dist/chunk-ZRG4V3F5.js +17 -0
  76. package/dist/chunk-ZRG4V3F5.js.map +1 -0
  77. package/dist/consent/index.cjs +204 -0
  78. package/dist/consent/index.cjs.map +1 -0
  79. package/dist/consent/index.d.cts +24 -0
  80. package/dist/consent/index.d.ts +24 -0
  81. package/dist/consent/index.js +23 -0
  82. package/dist/consent/index.js.map +1 -0
  83. package/dist/crdt/index.cjs +152 -0
  84. package/dist/crdt/index.cjs.map +1 -0
  85. package/dist/crdt/index.d.cts +30 -0
  86. package/dist/crdt/index.d.ts +30 -0
  87. package/dist/crdt/index.js +24 -0
  88. package/dist/crdt/index.js.map +1 -0
  89. package/dist/crypto-IVKU7YTT.js +44 -0
  90. package/dist/crypto-IVKU7YTT.js.map +1 -0
  91. package/dist/delegation-XDJCBTI2.js +16 -0
  92. package/dist/delegation-XDJCBTI2.js.map +1 -0
  93. package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
  94. package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
  95. package/dist/hash-9KO1BGxh.d.cts +63 -0
  96. package/dist/hash-ChfJjRjQ.d.ts +63 -0
  97. package/dist/history/index.cjs +1215 -0
  98. package/dist/history/index.cjs.map +1 -0
  99. package/dist/history/index.d.cts +62 -0
  100. package/dist/history/index.d.ts +62 -0
  101. package/dist/history/index.js +79 -0
  102. package/dist/history/index.js.map +1 -0
  103. package/dist/i18n/index.cjs +746 -0
  104. package/dist/i18n/index.cjs.map +1 -0
  105. package/dist/i18n/index.d.cts +38 -0
  106. package/dist/i18n/index.d.ts +38 -0
  107. package/dist/i18n/index.js +55 -0
  108. package/dist/i18n/index.js.map +1 -0
  109. package/dist/index-BRHBCmLt.d.ts +1940 -0
  110. package/dist/index-C8kQtmOk.d.ts +380 -0
  111. package/dist/index-DN-J-5wT.d.cts +1940 -0
  112. package/dist/index-DhjMjz7L.d.cts +380 -0
  113. package/dist/index.cjs +14756 -0
  114. package/dist/index.cjs.map +1 -0
  115. package/dist/index.d.cts +269 -0
  116. package/dist/index.d.ts +269 -0
  117. package/dist/index.js +6085 -0
  118. package/dist/index.js.map +1 -0
  119. package/dist/indexing/index.cjs +736 -0
  120. package/dist/indexing/index.cjs.map +1 -0
  121. package/dist/indexing/index.d.cts +36 -0
  122. package/dist/indexing/index.d.ts +36 -0
  123. package/dist/indexing/index.js +77 -0
  124. package/dist/indexing/index.js.map +1 -0
  125. package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
  126. package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
  127. package/dist/ledger-2NX4L7PN.js +33 -0
  128. package/dist/ledger-2NX4L7PN.js.map +1 -0
  129. package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
  130. package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
  131. package/dist/periods/index.cjs +1035 -0
  132. package/dist/periods/index.cjs.map +1 -0
  133. package/dist/periods/index.d.cts +21 -0
  134. package/dist/periods/index.d.ts +21 -0
  135. package/dist/periods/index.js +25 -0
  136. package/dist/periods/index.js.map +1 -0
  137. package/dist/predicate-SBHmi6D0.d.cts +161 -0
  138. package/dist/predicate-SBHmi6D0.d.ts +161 -0
  139. package/dist/query/index.cjs +1957 -0
  140. package/dist/query/index.cjs.map +1 -0
  141. package/dist/query/index.d.cts +3 -0
  142. package/dist/query/index.d.ts +3 -0
  143. package/dist/query/index.js +62 -0
  144. package/dist/query/index.js.map +1 -0
  145. package/dist/session/index.cjs +487 -0
  146. package/dist/session/index.cjs.map +1 -0
  147. package/dist/session/index.d.cts +45 -0
  148. package/dist/session/index.d.ts +45 -0
  149. package/dist/session/index.js +44 -0
  150. package/dist/session/index.js.map +1 -0
  151. package/dist/shadow/index.cjs +133 -0
  152. package/dist/shadow/index.cjs.map +1 -0
  153. package/dist/shadow/index.d.cts +16 -0
  154. package/dist/shadow/index.d.ts +16 -0
  155. package/dist/shadow/index.js +20 -0
  156. package/dist/shadow/index.js.map +1 -0
  157. package/dist/store/index.cjs +1069 -0
  158. package/dist/store/index.cjs.map +1 -0
  159. package/dist/store/index.d.cts +491 -0
  160. package/dist/store/index.d.ts +491 -0
  161. package/dist/store/index.js +34 -0
  162. package/dist/store/index.js.map +1 -0
  163. package/dist/strategy-BSxFXGzb.d.cts +110 -0
  164. package/dist/strategy-BSxFXGzb.d.ts +110 -0
  165. package/dist/strategy-D-SrOLCl.d.cts +548 -0
  166. package/dist/strategy-D-SrOLCl.d.ts +548 -0
  167. package/dist/sync/index.cjs +1062 -0
  168. package/dist/sync/index.cjs.map +1 -0
  169. package/dist/sync/index.d.cts +42 -0
  170. package/dist/sync/index.d.ts +42 -0
  171. package/dist/sync/index.js +28 -0
  172. package/dist/sync/index.js.map +1 -0
  173. package/dist/team/index.cjs +1233 -0
  174. package/dist/team/index.cjs.map +1 -0
  175. package/dist/team/index.d.cts +117 -0
  176. package/dist/team/index.d.ts +117 -0
  177. package/dist/team/index.js +39 -0
  178. package/dist/team/index.js.map +1 -0
  179. package/dist/tx/index.cjs +212 -0
  180. package/dist/tx/index.cjs.map +1 -0
  181. package/dist/tx/index.d.cts +20 -0
  182. package/dist/tx/index.d.ts +20 -0
  183. package/dist/tx/index.js +20 -0
  184. package/dist/tx/index.js.map +1 -0
  185. package/dist/types-BZpCZB8N.d.ts +7526 -0
  186. package/dist/types-Bfs0qr5F.d.cts +7526 -0
  187. package/dist/ulid-COREQ2RQ.js +9 -0
  188. package/dist/ulid-COREQ2RQ.js.map +1 -0
  189. package/dist/util/index.cjs +230 -0
  190. package/dist/util/index.cjs.map +1 -0
  191. package/dist/util/index.d.cts +77 -0
  192. package/dist/util/index.d.ts +77 -0
  193. package/dist/util/index.js +190 -0
  194. package/dist/util/index.js.map +1 -0
  195. package/package.json +244 -0
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 vLannaAi
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,197 @@
1
+ # @noy-db/hub
2
+
3
+ > Zero-knowledge, offline-first, encrypted document store — core library.
4
+
5
+ [![npm](https://img.shields.io/npm/v/@noy-db/hub.svg)](https://www.npmjs.com/package/@noy-db/hub)
6
+ [![license](https://img.shields.io/npm/l/@noy-db/hub.svg)](https://github.com/vLannaAi/noy-db/blob/main/LICENSE)
7
+
8
+ Part of [**noy-db**](https://github.com/vLannaAi/noy-db) — *"None Of Your Damn Business"*. This is the **core library** — install this first, then pair it with a storage backend.
9
+
10
+ ## Install
11
+
12
+ Every noy-db app needs `@noy-db/hub` plus at least one storage backend (`to-*`). Everything else is optional.
13
+
14
+ ```bash
15
+ pnpm add @noy-db/hub @noy-db/to-memory
16
+ ```
17
+
18
+ ### Pick a storage backend — [`@noy-db/to-*`](https://www.npmjs.com/search?q=%40noy-db%2Fto-)
19
+
20
+ | Package | Use for |
21
+ |---------|---------|
22
+ | [`to-memory`](https://www.npmjs.com/package/@noy-db/to-memory) | Tests, prototypes, ephemeral data |
23
+ | [`to-file`](https://www.npmjs.com/package/@noy-db/to-file) | Local disk, USB stick |
24
+ | [`to-browser-idb`](https://www.npmjs.com/package/@noy-db/to-browser-idb) | Browser IndexedDB (atomic CAS) |
25
+ | [`to-browser-local`](https://www.npmjs.com/package/@noy-db/to-browser-local) | Browser localStorage |
26
+ | [`to-aws-dynamo`](https://www.npmjs.com/package/@noy-db/to-aws-dynamo) | DynamoDB single-table |
27
+ | [`to-aws-s3`](https://www.npmjs.com/package/@noy-db/to-aws-s3) | S3 object store |
28
+ | [`to-cloudflare-d1`](https://www.npmjs.com/package/@noy-db/to-cloudflare-d1) | Edge SQLite via Workers |
29
+ | [`to-cloudflare-r2`](https://www.npmjs.com/package/@noy-db/to-cloudflare-r2) | Zero-egress object storage |
30
+ | [`to-postgres`](https://www.npmjs.com/package/@noy-db/to-postgres) | PostgreSQL `jsonb` |
31
+ | [`to-mysql`](https://www.npmjs.com/package/@noy-db/to-mysql) | MySQL / MariaDB `JSON` |
32
+ | [`to-sqlite`](https://www.npmjs.com/package/@noy-db/to-sqlite) | better-sqlite3 / node:sqlite / bun:sqlite |
33
+ | [`to-supabase`](https://www.npmjs.com/package/@noy-db/to-supabase) | Supabase Postgres + Storage |
34
+ | [`to-turso`](https://www.npmjs.com/package/@noy-db/to-turso) | Hosted libSQL (replicated SQLite) |
35
+ | [`to-webdav`](https://www.npmjs.com/package/@noy-db/to-webdav) | Nextcloud / ownCloud |
36
+ | [`to-ssh`](https://www.npmjs.com/package/@noy-db/to-ssh) | Remote SFTP backend |
37
+ | [`to-smb`](https://www.npmjs.com/package/@noy-db/to-smb) | Windows shares / NAS |
38
+ | [`to-nfs`](https://www.npmjs.com/package/@noy-db/to-nfs) | NFS mounts |
39
+ | [`to-icloud`](https://www.npmjs.com/package/@noy-db/to-icloud) | iCloud Drive (.icloud-aware) |
40
+ | [`to-drive`](https://www.npmjs.com/package/@noy-db/to-drive) | Google Drive bundle |
41
+ | [`to-meter`](https://www.npmjs.com/package/@noy-db/to-meter) | Wrap any store with metrics |
42
+ | [`to-probe`](https://www.npmjs.com/package/@noy-db/to-probe) | Diagnostic suitability test |
43
+
44
+ ### Optional ecosystem
45
+
46
+ - **Framework integrations** — [`@noy-db/in-*`](https://www.npmjs.com/search?q=%40noy-db%2Fin-): vue, pinia, nuxt, react, nextjs, svelte, solid, zustand, tanstack-query, tanstack-table, yjs, ai, rest.
47
+ - **Authentication paths** — [`@noy-db/on-*`](https://www.npmjs.com/search?q=%40noy-db%2Fon-): webauthn, oidc, totp, email-otp, magic-link, recovery, shamir, pin, threat.
48
+ - **Export formats** — [`@noy-db/as-*`](https://www.npmjs.com/search?q=%40noy-db%2Fas-): csv, json, ndjson, xml, sql, xlsx, blob, zip, noydb (encrypted bundle).
49
+ - **Session-share transports** — [`@noy-db/by-*`](https://www.npmjs.com/search?q=%40noy-db%2Fby-): tabs (BroadcastChannel multi-tab), peer (WebRTC).
50
+ - **CLI tooling** — [`@noy-db/cli`](https://www.npmjs.com/package/@noy-db/cli) (`noydb` binary; inspect/verify `.noydb` files), [`create-noy-db`](https://www.npmjs.com/package/create-noy-db) (`npm create noy-db` scaffolder).
51
+
52
+ ## Quick start
53
+
54
+ ```ts
55
+ import { createNoydb } from '@noy-db/hub'
56
+ import { memory } from '@noy-db/to-memory'
57
+
58
+ type Invoice = { id: string; amount: number; customer: string }
59
+
60
+ const db = await createNoydb({
61
+ store: memory(),
62
+ userId: 'alice',
63
+ passphrase: 'correct horse battery staple',
64
+ })
65
+
66
+ const acme = await db.openVault('acme')
67
+ const invoices = acme.collection<Invoice>('invoices')
68
+
69
+ await invoices.put('INV-001', { id: 'INV-001', amount: 8500, customer: 'ABC Trading' })
70
+ const all = await invoices.list()
71
+ ```
72
+
73
+ ## What it does
74
+
75
+ - **Zero-knowledge encryption** — AES-256-GCM + PBKDF2 (600K iterations) + AES-KW, all via Web Crypto API
76
+ - **Per-collection keys** — one DEK per collection, wrapped with a per-user KEK
77
+ - **Multi-user access control** — owner, admin, operator, viewer, client roles
78
+ - **Offline-first sync** — push/pull with optimistic concurrency on encrypted envelopes
79
+ - **Audit history** — full-copy snapshots with `history()`, `diff()`, `revert()`, `pruneHistory()`
80
+ - **Zero runtime dependencies**
81
+
82
+ ## Cross-vault queries
83
+
84
+ When a single principal holds grants across many vaults — multi-tenant apps, multi-project setups, multi-workspace tools — there are two APIs for enumerating and fanning out across them:
85
+
86
+ ### `db.listAccessibleVaults(options?)` — enumerate
87
+
88
+ Returns every vault the calling principal can unwrap, optionally filtered by minimum role. The walk is bounded by the local keyring index — vaults where the user has no keyring file or where the passphrase doesn't unwrap are silently dropped from the result.
89
+
90
+ ```ts
91
+ // All vaults I can unlock
92
+ const all = await db.listAccessibleVaults()
93
+ // → [{ id: 'T1', role: 'owner' }, { id: 'T7', role: 'admin' }, ...]
94
+
95
+ // Only vaults where I'm at least admin
96
+ const admin = await db.listAccessibleVaults({ minRole: 'admin' })
97
+ ```
98
+
99
+ **Existence-leak guarantee.** The return value never reveals the existence of a vault the caller cannot unwrap. The store sees the enumeration call (it owns the storage), but downstream consumers of `listAccessibleVaults()` only see the filtered list.
100
+
101
+ **Store capability.** Requires the optional `NoydbStore.listVaults()` method. The `@noy-db/to-memory` and `@noy-db/to-file` stores implement it; cloud stores (`@noy-db/to-aws-dynamo`, `@noy-db/to-aws-s3`) and `@noy-db/to-browser-idb` do not (cloud enumeration needs a GSI or list-bucket permission that has to be configured by the consumer). Calling `listAccessibleVaults()` against a store that doesn't implement `listVaults` throws `StoreCapabilityError`. Workaround: maintain the candidate list out of band and pass it directly to `queryAcross()`.
102
+
103
+ ### `db.queryAcross(ids, fn, options?)` — fan out
104
+
105
+ Runs a per-vault callback against a list of vault ids and collects the results, tagged by vault. Per-vault errors do not abort the others — each result slot carries either `result` or `error`.
106
+
107
+ ```ts
108
+ const accessible = await db.listAccessibleVaults({ minRole: 'admin' })
109
+
110
+ const results = await db.queryAcross(
111
+ accessible.map((v) => v.id),
112
+ async (vault) => {
113
+ return vault.collection<Invoice>('invoices').query()
114
+ .where('month', '==', '2026-03')
115
+ .toArray()
116
+ },
117
+ { concurrency: 4 }, // default 1 — bump for cloud stores
118
+ )
119
+ // results: Array<{ vault, result?: Invoice[], error?: Error }>
120
+ ```
121
+
122
+ **Composes with `exportStream()` for cross-vault plaintext export:**
123
+
124
+ ```ts
125
+ await db.queryAcross(accessible.map((v) => v.id), async (vault) => {
126
+ const out: unknown[] = []
127
+ for await (const chunk of vault.exportStream()) out.push(chunk)
128
+ return out
129
+ })
130
+ ```
131
+
132
+ ## Backup and export
133
+
134
+ noy-db ships two distinct paths for getting data out of a vault. They are not interchangeable — use the one that matches your goal.
135
+
136
+ ### `vault.dump()` — encrypted backup (the default)
137
+
138
+ Produces a tamper-evident encrypted JSON envelope. Records stay encrypted, the hash-chained ledger is included so the receiver can verify integrity end-to-end after `load()`, and the recipient must hold a valid keyring to read anything. **Use this for backup, transport between machines, or any scenario where the data must remain protected on disk.**
139
+
140
+ ```ts
141
+ const backup = await acme.dump() // string of encrypted JSON
142
+ await fs.writeFile('./acme-backup.json', backup) // safe to store anywhere
143
+ // later, on another machine:
144
+ await otherAcme.load(backup) // verifies + restores
145
+ ```
146
+
147
+ ### `vault.exportStream()` and `vault.exportJSON()` — plaintext export
148
+
149
+ ⚠ **These methods decrypt your records and produce plaintext.**
150
+
151
+ `exportStream()` is an authorization-aware async generator that yields per-collection chunks of decrypted records, with schema and ref metadata attached. `exportJSON()` is a five-line wrapper that serializes the stream to a single JSON string.
152
+
153
+ Both methods are **ACL-scoped**: collections the calling principal cannot read are silently skipped. An operator with `{ invoices: 'rw' }` permissions on a five-collection vault exports only `invoices`, with no error on the others.
154
+
155
+ ```ts
156
+ // Stream every collection the caller can read
157
+ for await (const chunk of acme.exportStream()) {
158
+ console.log(chunk.collection, chunk.records.length)
159
+ }
160
+
161
+ // Or get a single JSON string
162
+ const json = await acme.exportJSON()
163
+ await fs.writeFile('./backup.json', json)
164
+ ```
165
+
166
+ **Use only when:**
167
+ - You are the authorized owner of the data, **and**
168
+ - You have a legitimate downstream tool that requires plaintext, **and**
169
+ - You have a documented plan for how the resulting plaintext will be protected and eventually destroyed.
170
+
171
+ If your goal is encrypted backup or transport between noy-db instances, use **`vault.dump()`** instead.
172
+
173
+ #### Why no built-in file path support
174
+
175
+ Core has zero `node:` imports — it runs unchanged in browsers, Node, Bun, Deno, and edge runtimes. `exportJSON()` returns a `Promise<string>` so the consumer chooses any sink (`fs.writeFile`, `Blob` download, `fetch` upload, IndexedDB) and the destination decision stays explicit at the call site. This is also better for the security warning: there's no library function quietly writing plaintext somewhere.
176
+
177
+ #### Other plaintext formats
178
+
179
+ CSV, XML, xlsx, and the rest of the plaintext tier — plus encrypted `.noydb` bundles under the `as-noydb` encrypted tier — all live in the [`@noy-db/as-*`](https://www.npmjs.com/search?q=%40noy-db%2Fas-) family. Every invocation is gated by the two-tier authorization model (`canExportPlaintext` default off, `canExportBundle` default on for owner/admin) and lands in the audit ledger.
180
+
181
+ ## Status
182
+
183
+ **Pre-release** (`0.1.0-pre.3`). API may change before `1.0`. Install from the `next` dist-tag:
184
+
185
+ ```bash
186
+ pnpm add @noy-db/hub@next @noy-db/to-memory@next
187
+ ```
188
+
189
+ ## Documentation
190
+
191
+ - Full docs: <https://github.com/vLannaAi/noy-db#readme>
192
+ - Spec: <https://github.com/vLannaAi/noy-db/blob/main/SPEC.md>
193
+ - Roadmap: <https://github.com/vLannaAi/noy-db/blob/main/ROADMAP.md>
194
+
195
+ ## License
196
+
197
+ [MIT](./LICENSE) © vLannaAi
@@ -0,0 +1,476 @@
1
"use strict";
// esbuild CommonJS interop preamble: short aliases for the Object helpers
// used by the generated export wiring below.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install each entry of `all` on `target` as an enumerable lazy getter —
// this is how the original ESM named exports are exposed on the CJS side.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters, skipping `except` and
// anything `to` already owns. Enumerability is taken from the source
// descriptor; the `desc` parameter is deliberately reused as a scratch slot.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Build the final CJS export object: a fresh object tagged __esModule with
// all exports of `mod` mirrored onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
// src/aggregate/index.ts
// Public surface of the aggregate sub-module. `aggregate_exports` collects
// every exported name as a lazy getter (via __export), then __toCommonJS
// mirrors them onto module.exports with the __esModule interop flag.
// Note: avg/count/min/max/sum/withAggregate are defined elsewhere in the
// bundle (not in this chunk).
var aggregate_exports = {};
__export(aggregate_exports, {
  Aggregation: () => Aggregation,
  GROUPBY_MAX_CARDINALITY: () => GROUPBY_MAX_CARDINALITY,
  GROUPBY_WARN_CARDINALITY: () => GROUPBY_WARN_CARDINALITY,
  GroupedAggregation: () => GroupedAggregation,
  GroupedQuery: () => GroupedQuery,
  avg: () => avg,
  buildLiveAggregation: () => buildLiveAggregation,
  count: () => count,
  groupAndReduce: () => groupAndReduce,
  max: () => max,
  min: () => min,
  reduceRecords: () => reduceRecords,
  resetGroupByWarnings: () => resetGroupByWarnings,
  sum: () => sum,
  withAggregate: () => withAggregate
});
module.exports = __toCommonJS(aggregate_exports);
40
+
41
+ // src/aggregate/aggregation.ts
42
/**
 * Fold an array of records through a reducer spec in one pass.
 *
 * Each entry of `spec` is a reducer protocol object exposing `init()`,
 * `step(state, record)`, and `finalize(state)`. The return value has the
 * same keys as `spec`, each holding that reducer's finalized result.
 * An empty `records` array yields each reducer's finalized initial state.
 */
function reduceRecords(records, spec) {
  const fields = Object.keys(spec);
  // Seed one accumulator slot per spec key.
  const acc = {};
  for (const field of fields) {
    acc[field] = spec[field].init();
  }
  // Single pass: feed every record to every reducer.
  for (const record of records) {
    for (const field of fields) {
      acc[field] = spec[field].step(acc[field], record);
    }
  }
  // Finalize each accumulator into the result shape.
  const result = {};
  for (const field of fields) {
    result[field] = spec[field].finalize(acc[field]);
  }
  return result;
}
58
// Reactive wrapper around a synchronous recompute closure. Computes
// eagerly on construction, re-runs whenever any upstream source fires,
// and fans change notifications out to its own subscribers.
var LiveAggregationImpl = class {
  // The closure that produces the current value.
  recompute;
  // Latest successfully computed value; stays stale if a later recompute throws.
  value;
  // Error from the most recent compute, or undefined if it succeeded.
  error;
  // Downstream change listeners.
  listeners = new Set();
  // Unsubscribe callbacks for the upstream sources, torn down by stop().
  unsubscribes = [];
  // Once true, refresh() and subscribe() become inert.
  stopped = false;
  constructor(recompute, upstreams) {
    this.recompute = recompute;
    // Eager initial compute so `live.value` is readable immediately after
    // construction; a throw leaves value undefined and records the error.
    try {
      this.value = recompute();
      this.error = undefined;
    } catch (err) {
      this.value = undefined;
      this.error = err;
    }
    // Re-run on every upstream change; remember how to detach later.
    for (const source of upstreams) {
      this.unsubscribes.push(source.subscribe(() => this.refresh()));
    }
  }
  refresh() {
    if (this.stopped) return;
    try {
      this.value = this.recompute();
      this.error = undefined;
    } catch (err) {
      // Keep the last good value; surface the failure on `error`.
      this.error = err;
    }
    // Listener errors are isolated so one bad subscriber cannot break the rest.
    for (const listener of this.listeners) {
      try {
        listener();
      } catch (err) {
        console.warn("[noy-db] LiveAggregation listener threw:", err);
      }
    }
  }
  subscribe(cb) {
    // A stopped aggregation hands back a no-op unsubscriber.
    if (this.stopped) {
      return () => {
      };
    }
    this.listeners.add(cb);
    return () => {
      this.listeners.delete(cb);
    };
  }
  stop() {
    // Idempotent teardown: detach from every upstream, then drop all state.
    if (this.stopped) return;
    this.stopped = true;
    for (const unsub of this.unsubscribes) {
      try {
        unsub();
      } catch (err) {
        console.warn("[noy-db] LiveAggregation upstream unsubscribe threw:", err);
      }
    }
    this.unsubscribes.length = 0;
    this.listeners.clear();
  }
};
119
// Terminal produced by a query's `.aggregate(spec)`: pairs the record
// executor with a reducer spec and the reactive upstream sources, and
// offers a one-shot `run()` plus a reactive `live()`.
var Aggregation = class {
  executeRecords;
  spec;
  upstreams;
  constructor(executeRecords, spec, upstreams) {
    this.executeRecords = executeRecords;
    this.spec = spec;
    this.upstreams = upstreams;
  }
  /**
   * Execute the query and reduce the results synchronously.
   * The returned object mirrors the spec keys — e.g. a spec of
   * `{ total: sum('amount'), n: count() }` yields
   * `{ total: number, n: number }`.
   */
  run() {
    const records = this.executeRecords();
    return reduceRecords(records, this.spec);
  }
  /**
   * Build a reactive `LiveAggregation<R>` that re-runs the reduction
   * whenever any upstream source signals a change. The initial value is
   * computed eagerly inside the LiveAggregationImpl constructor, so
   * `live.value` is readable immediately after calling `.live()`.
   *
   * Always call `live.stop()` when done — it tears down the upstream
   * subscriptions (Vue's `onUnmounted` is the canonical place).
   *
   * Implementation note: every upstream change triggers a full
   * re-reduction. Incremental maintenance (O(1) per delta for
   * sum/count/avg via the reducer protocol's `remove()` method) is a
   * planned follow-up; the protocol supports it, but the executor does
   * not drive it yet, so this API can later switch to delta-based
   * maintenance without changing shape.
   */
  live() {
    return new LiveAggregationImpl(
      () => reduceRecords(this.executeRecords(), this.spec),
      this.upstreams
    );
  }
};
160
/**
 * Factory for a reactive live aggregation without going through the
 * Aggregation class — for callers that already hold a recompute closure
 * and its upstream sources.
 */
function buildLiveAggregation(recompute, upstreams) {
  const live = new LiveAggregationImpl(recompute, upstreams);
  return live;
}
163
+
164
+ // src/query/predicate.ts
165
/**
 * Read a possibly-dotted property path off a record.
 *
 * Returns `undefined` for a null/undefined record or when any intermediate
 * segment of a dotted path is null/undefined; otherwise returns whatever
 * value sits at the path (which may itself be null).
 */
function readPath(record, path) {
  if (record == null) return undefined;
  // Fast path: an undotted key is a single property access.
  if (!path.includes(".")) {
    return record[path];
  }
  // Dotted path: walk segment by segment. Optional chaining collapses a
  // nullish intermediate into undefined, matching an explicit null check
  // before each hop.
  let cursor = record;
  for (const segment of path.split(".")) {
    cursor = cursor?.[segment];
  }
  return cursor;
}
178
+
179
+ // src/errors.ts
180
// Base class for all noy-db errors. Carries a stable machine-readable
// `code` alongside the human-readable message so callers can branch on
// error kind without matching message strings.
var NoydbError = class extends Error {
  /** Machine-readable error code. Stable across library versions. */
  code;
  constructor(code, message) {
    super(message);
    this.code = code;
    this.name = "NoydbError";
  }
};
189
// Thrown when .groupBy() produces more distinct groups than the hard
// ceiling allows. Extends NoydbError (code "GROUP_CARDINALITY") with the
// grouping context so callers can report or recover programmatically.
var GroupCardinalityError = class extends NoydbError {
  /** The field being grouped on. */
  field;
  /** Observed number of distinct groups at the moment the cap tripped. */
  cardinality;
  /** The cap that was exceeded. */
  maxGroups;
  constructor(field, cardinality, maxGroups) {
    const message = `.groupBy("${field}") produced ${cardinality} distinct groups, exceeding the ${maxGroups}-group ceiling. This is almost always a query mistake \u2014 grouping on a high-uniqueness field like "id" or "createdAt" produces one bucket per record. Narrow the query with .where() before grouping, or group on a lower-cardinality field (status, category, clientId). If you genuinely need high-cardinality grouping, file an issue with your use case.`;
    super("GROUP_CARDINALITY", message);
    this.name = "GroupCardinalityError";
    this.field = field;
    this.cardinality = cardinality;
    this.maxGroups = maxGroups;
  }
};
207
+
208
// src/aggregate/groupby.ts
// Soft threshold (10,000 distinct groups): triggers a one-time console
// warning per field (see warnCardinalityApproaching below).
var GROUPBY_WARN_CARDINALITY = 1e4;
// Hard ceiling (100,000 distinct groups): exceeding it during grouping
// throws GroupCardinalityError.
var GROUPBY_MAX_CARDINALITY = 1e5;
// Field names already warned about — at most one warning per field per
// process; cleared by resetGroupByWarnings().
var warnedCardinalityFields = /* @__PURE__ */ new Set();
212
/**
 * Emit a one-time console warning that a groupBy field's observed
 * cardinality is approaching the hard ceiling. Deduplicated per field
 * name via the module-level `warnedCardinalityFields` set.
 */
function warnCardinalityApproaching(field, observed) {
  if (warnedCardinalityFields.has(field)) return;
  warnedCardinalityFields.add(field);
  const percent = Math.round(observed / GROUPBY_MAX_CARDINALITY * 100);
  console.warn(
    `[noy-db] .groupBy("${field}") produced ${observed} distinct groups, ${percent}% of the ${GROUPBY_MAX_CARDINALITY}-group ceiling. Narrow the query with .where() before grouping, or switch to a lower-cardinality field.`
  );
}
219
// Clears the warn-once dedupe set so subsequent groupBy cardinality
// warnings can fire again (useful between test cases).
function resetGroupByWarnings() {
  warnedCardinalityFields.clear();
}
222
// Intermediate builder returned by `.groupBy(field)` on a query. Holds
// the record executor, the grouping field, the reactive upstream sources,
// and an optional dictionary-label resolver, ready to be turned into a
// grouped aggregation terminal.
var GroupedQuery = class {
  executeRecords;
  field;
  upstreams;
  dictLabelResolver;
  constructor(executeRecords, field, upstreams, dictLabelResolver) {
    this.executeRecords = executeRecords;
    this.field = field;
    this.upstreams = upstreams;
    this.dictLabelResolver = dictLabelResolver;
  }
  /**
   * Build a grouped aggregation. The returned `GroupedAggregation`
   * exposes the same `.run()`, `.runAsync()`, and `.live()` terminals as
   * the non-grouped `.aggregate()` wrapper, but produces an array result
   * (one row per bucket) instead of a single reduced object.
   */
  aggregate(spec) {
    const { executeRecords, field, upstreams, dictLabelResolver } = this;
    return new GroupedAggregation(
      executeRecords,
      field,
      spec,
      upstreams,
      dictLabelResolver
    );
  }
};
249
/**
 * Partition `records` into buckets keyed by `field` (dotted paths are
 * supported via readPath), then reduce each bucket through the reducer
 * `spec`. Returns one row per bucket shaped
 * `{ [field]: groupKey, ...finalizedSpecFields }`.
 *
 * Refuses to create a bucket past GROUPBY_MAX_CARDINALITY (throws
 * GroupCardinalityError) and emits a one-time warning once the bucket
 * count reaches GROUPBY_WARN_CARDINALITY.
 */
function groupAndReduce(records, field, spec) {
  const buckets = new Map();
  // Phase 1: bucket every record by its group key.
  for (const record of records) {
    const groupKey = readPath(record, field);
    if (!buckets.has(groupKey)) {
      // A brand-new key past the hard ceiling is a hard error — almost
      // always a sign of grouping on a high-uniqueness field.
      if (buckets.size >= GROUPBY_MAX_CARDINALITY) {
        throw new GroupCardinalityError(
          field,
          buckets.size + 1,
          GROUPBY_MAX_CARDINALITY
        );
      }
      buckets.set(groupKey, []);
    }
    buckets.get(groupKey).push(record);
  }
  if (buckets.size >= GROUPBY_WARN_CARDINALITY) {
    warnCardinalityApproaching(field, buckets.size);
  }
  // Phase 2: run the init/step/finalize reducer pass per bucket.
  const specKeys = Object.keys(spec);
  const rows = [];
  for (const [groupKey, members] of buckets) {
    const state = {};
    for (const k of specKeys) {
      state[k] = spec[k].init();
    }
    for (const member of members) {
      for (const k of specKeys) {
        state[k] = spec[k].step(state[k], member);
      }
    }
    // The group key leads; reduced fields follow in spec order.
    const row = { [field]: groupKey };
    for (const k of specKeys) {
      row[k] = spec[k].finalize(state[k]);
    }
    rows.push(row);
  }
  return rows;
}
290
var GroupedAggregation = class {
  /**
   * @param executeRecords thunk that executes the query and yields records
   * @param field grouping field (path) the buckets are keyed by
   * @param spec map of reducer-name → { init, step, finalize } reducer
   * @param upstreams reactive sources observed by `.live()`
   * @param dictLabelResolver optional async (key, locale, fallback) → label
   */
  constructor(executeRecords, field, spec, upstreams, dictLabelResolver) {
    this.executeRecords = executeRecords;
    this.field = field;
    this.spec = spec;
    this.upstreams = upstreams;
    this.dictLabelResolver = dictLabelResolver;
  }
  executeRecords;
  field;
  spec;
  upstreams;
  dictLabelResolver;
  /** Execute the query, group, reduce, and return an array of rows. */
  run() {
    return groupAndReduce(this.executeRecords(), this.field, this.spec);
  }
  /**
   * Like `.run()`, but when a `locale` is given and a dictionary label
   * resolver is available, append `<field>Label` to every row whose
   * group key is a string. Rows with no dictionary entry receive
   * `<field>Label: undefined`; non-string group keys pass through
   * unchanged. Without a locale (or resolver) the plain rows are
   * returned, identical to `.run()`.
   */
  async runAsync(opts) {
    const rows = groupAndReduce(this.executeRecords(), this.field, this.spec);
    const resolver = this.dictLabelResolver;
    if (!opts?.locale || !resolver) return rows;
    const { locale, fallback } = opts;
    const labelField = `${this.field}Label`;
    const labeled = rows.map(async (row) => {
      const groupKey = row[this.field];
      if (typeof groupKey !== "string") return row;
      const label = await resolver(groupKey, locale, fallback);
      return { ...row, [labelField]: label };
    });
    return Promise.all(labeled);
  }
  /**
   * Build a reactive `LiveAggregation<R[]>` that re-runs the full
   * group-and-reduce pipeline whenever any upstream source notifies of
   * a change — same error-isolation and idempotent-stop contract as
   * `Aggregation.live()`, sharing its implementation via a fresh
   * recompute closure.
   *
   * NOTE(review): recomputation is a naive full re-run per change;
   * incremental per-bucket maintenance (route deltas by bucket key,
   * apply `step`/`remove`) is a future optimization the reducer
   * protocol already admits.
   *
   * Always call `live.stop()` when finished.
   */
  live() {
    return buildLiveAggregation(
      () => groupAndReduce(this.executeRecords(), this.field, this.spec),
      this.upstreams
    );
  }
};
353
+
354
+ // src/aggregate/active.ts
355
/**
 * Capability factory: returns the aggregate API surface —
 * `aggregate` (single reduced object), `groupBy` (bucketed query
 * builder), and `scanAggregate` (reduce an async record iterator).
 */
function withAggregate() {
  const aggregate = (executeRecords, spec, upstreams) =>
    new Aggregation(executeRecords, spec, upstreams);
  const groupBy = (executeRecords, field, upstreams, dictLabelResolver) =>
    new GroupedQuery(executeRecords, field, upstreams, dictLabelResolver);
  // Drain the async iterator fully, then reduce the buffered records.
  const scanAggregate = async (iter, spec) => {
    const buffered = [];
    for await (const record of iter) {
      buffered.push(record);
    }
    return reduceRecords(buffered, spec);
  };
  return { aggregate, groupBy, scanAggregate };
}
370
+
371
+ // src/aggregate/reducers.ts
372
/**
 * Reducer: number of records in the group. The `seed` option is
 * accepted for signature symmetry with the other reducers but is
 * intentionally unused.
 */
function count(opts) {
  void opts?.seed;
  return {
    init() {
      return 0;
    },
    step(tally) {
      return tally + 1;
    },
    remove(tally) {
      return tally - 1;
    },
    finalize(tally) {
      return tally;
    }
  };
}
382
/**
 * Reducer: running total of the numeric `field` across the group.
 * Non-finite / non-numeric values contribute 0 (see `readNumber`).
 * The `seed` option is accepted but intentionally unused.
 */
function sum(field, opts) {
  void opts?.seed;
  return {
    init() {
      return 0;
    },
    step(total, record) {
      return total + readNumber(record, field);
    },
    remove(total, record) {
      return total - readNumber(record, field);
    },
    finalize(total) {
      return total;
    }
  };
}
392
/**
 * Reducer: arithmetic mean of the numeric `field` across the group.
 * State is `{ sum, count }`; an empty group finalizes to `null`.
 * The `seed` option is accepted but intentionally unused.
 */
function avg(field, opts) {
  void opts?.seed;
  return {
    init: () => ({ sum: 0, count: 0 }),
    step: (state, record) => {
      const delta = readNumber(record, field);
      return { sum: state.sum + delta, count: state.count + 1 };
    },
    remove: (state, record) => {
      const delta = readNumber(record, field);
      return { sum: state.sum - delta, count: state.count - 1 };
    },
    finalize: (state) => {
      if (state.count === 0) return null;
      return state.sum / state.count;
    }
  };
}
408
/** Append `value` to the state's value list, returning a fresh state
 *  object (the input state is never mutated). */
function pushValue(state, value) {
  const extended = state.values.concat([value]);
  return { values: extended };
}
411
/** Remove the first occurrence of `value` from the state's value list,
 *  returning a fresh state object. If the value is absent, the ORIGINAL
 *  state object is returned unchanged (same reference). */
function removeValue(state, value) {
  const position = state.values.indexOf(value);
  if (position === -1) return state;
  const remaining = [
    ...state.values.slice(0, position),
    ...state.values.slice(position + 1)
  ];
  return { values: remaining };
}
418
/**
 * Reducer: minimum of the numeric `field` across the group. Keeps the
 * full value list in state so `remove` can retract any value; an empty
 * group finalizes to `null`. The `seed` option is intentionally unused.
 */
function min(field, opts) {
  void opts?.seed;
  return {
    init: () => ({ values: [] }),
    step: (state, record) => pushValue(state, readNumber(record, field)),
    remove: (state, record) => removeValue(state, readNumber(record, field)),
    finalize: (state) => {
      if (state.values.length === 0) return null;
      // Non-empty list: fold left keeping the smaller of each pair.
      return state.values.reduce((best, v) => (v < best ? v : best));
    }
  };
}
436
/**
 * Reducer: maximum of the numeric `field` across the group. Keeps the
 * full value list in state so `remove` can retract any value; an empty
 * group finalizes to `null`. The `seed` option is intentionally unused.
 */
function max(field, opts) {
  void opts?.seed;
  return {
    init: () => ({ values: [] }),
    step: (state, record) => pushValue(state, readNumber(record, field)),
    remove: (state, record) => removeValue(state, readNumber(record, field)),
    finalize: (state) => {
      if (state.values.length === 0) return null;
      // Non-empty list: fold left keeping the larger of each pair.
      return state.values.reduce((best, v) => (v > best ? v : best));
    }
  };
}
454
/** Read `field` from `record` via `readPath`, coercing anything that is
 *  not a finite number (strings, NaN, Infinity, undefined, …) to 0. */
function readNumber(record, field) {
  const raw = readPath(record, field);
  if (typeof raw !== "number") return 0;
  return Number.isFinite(raw) ? raw : 0;
}
458
// Annotate the CommonJS export names for ESM import in node:
// NOTE: `0 && (...)` short-circuits, so this assignment never executes at
// runtime — it exists only so the export names can be discovered statically.
0 && (module.exports = {
  Aggregation,
  GROUPBY_MAX_CARDINALITY,
  GROUPBY_WARN_CARDINALITY,
  GroupedAggregation,
  GroupedQuery,
  avg,
  buildLiveAggregation,
  count,
  groupAndReduce,
  max,
  min,
  reduceRecords,
  resetGroupByWarnings,
  sum,
  withAggregate
});
//# sourceMappingURL=index.cjs.map