@noy-db/hub 0.1.0-pre.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (195)
  1. package/LICENSE +21 -0
  2. package/README.md +197 -0
  3. package/dist/aggregate/index.cjs +476 -0
  4. package/dist/aggregate/index.cjs.map +1 -0
  5. package/dist/aggregate/index.d.cts +38 -0
  6. package/dist/aggregate/index.d.ts +38 -0
  7. package/dist/aggregate/index.js +53 -0
  8. package/dist/aggregate/index.js.map +1 -0
  9. package/dist/blobs/index.cjs +1480 -0
  10. package/dist/blobs/index.cjs.map +1 -0
  11. package/dist/blobs/index.d.cts +45 -0
  12. package/dist/blobs/index.d.ts +45 -0
  13. package/dist/blobs/index.js +48 -0
  14. package/dist/blobs/index.js.map +1 -0
  15. package/dist/bundle/index.cjs +436 -0
  16. package/dist/bundle/index.cjs.map +1 -0
  17. package/dist/bundle/index.d.cts +7 -0
  18. package/dist/bundle/index.d.ts +7 -0
  19. package/dist/bundle/index.js +40 -0
  20. package/dist/bundle/index.js.map +1 -0
  21. package/dist/chunk-2QR2PQTT.js +217 -0
  22. package/dist/chunk-2QR2PQTT.js.map +1 -0
  23. package/dist/chunk-4OWFYIDQ.js +79 -0
  24. package/dist/chunk-4OWFYIDQ.js.map +1 -0
  25. package/dist/chunk-5AATM2M2.js +90 -0
  26. package/dist/chunk-5AATM2M2.js.map +1 -0
  27. package/dist/chunk-ACLDOTNQ.js +543 -0
  28. package/dist/chunk-ACLDOTNQ.js.map +1 -0
  29. package/dist/chunk-BTDCBVJW.js +160 -0
  30. package/dist/chunk-BTDCBVJW.js.map +1 -0
  31. package/dist/chunk-CIMZBAZB.js +72 -0
  32. package/dist/chunk-CIMZBAZB.js.map +1 -0
  33. package/dist/chunk-E445ICYI.js +365 -0
  34. package/dist/chunk-E445ICYI.js.map +1 -0
  35. package/dist/chunk-EXQRC2L4.js +722 -0
  36. package/dist/chunk-EXQRC2L4.js.map +1 -0
  37. package/dist/chunk-FZU343FL.js +32 -0
  38. package/dist/chunk-FZU343FL.js.map +1 -0
  39. package/dist/chunk-GJILMRPO.js +354 -0
  40. package/dist/chunk-GJILMRPO.js.map +1 -0
  41. package/dist/chunk-GOUT6DND.js +1285 -0
  42. package/dist/chunk-GOUT6DND.js.map +1 -0
  43. package/dist/chunk-J66GRPNH.js +111 -0
  44. package/dist/chunk-J66GRPNH.js.map +1 -0
  45. package/dist/chunk-M2F2JAWB.js +464 -0
  46. package/dist/chunk-M2F2JAWB.js.map +1 -0
  47. package/dist/chunk-M5INGEFC.js +84 -0
  48. package/dist/chunk-M5INGEFC.js.map +1 -0
  49. package/dist/chunk-M62XNWRA.js +72 -0
  50. package/dist/chunk-M62XNWRA.js.map +1 -0
  51. package/dist/chunk-MR4424N3.js +275 -0
  52. package/dist/chunk-MR4424N3.js.map +1 -0
  53. package/dist/chunk-NPC4LFV5.js +132 -0
  54. package/dist/chunk-NPC4LFV5.js.map +1 -0
  55. package/dist/chunk-NXFEYLVG.js +311 -0
  56. package/dist/chunk-NXFEYLVG.js.map +1 -0
  57. package/dist/chunk-R36SIKES.js +79 -0
  58. package/dist/chunk-R36SIKES.js.map +1 -0
  59. package/dist/chunk-TDR6T5CJ.js +381 -0
  60. package/dist/chunk-TDR6T5CJ.js.map +1 -0
  61. package/dist/chunk-UF3BUNQZ.js +1 -0
  62. package/dist/chunk-UF3BUNQZ.js.map +1 -0
  63. package/dist/chunk-UQFSPSWG.js +1109 -0
  64. package/dist/chunk-UQFSPSWG.js.map +1 -0
  65. package/dist/chunk-USKYUS74.js +793 -0
  66. package/dist/chunk-USKYUS74.js.map +1 -0
  67. package/dist/chunk-XCL3WP6J.js +121 -0
  68. package/dist/chunk-XCL3WP6J.js.map +1 -0
  69. package/dist/chunk-XHFOENR2.js +680 -0
  70. package/dist/chunk-XHFOENR2.js.map +1 -0
  71. package/dist/chunk-ZFKD4QMV.js +430 -0
  72. package/dist/chunk-ZFKD4QMV.js.map +1 -0
  73. package/dist/chunk-ZLMV3TUA.js +490 -0
  74. package/dist/chunk-ZLMV3TUA.js.map +1 -0
  75. package/dist/chunk-ZRG4V3F5.js +17 -0
  76. package/dist/chunk-ZRG4V3F5.js.map +1 -0
  77. package/dist/consent/index.cjs +204 -0
  78. package/dist/consent/index.cjs.map +1 -0
  79. package/dist/consent/index.d.cts +24 -0
  80. package/dist/consent/index.d.ts +24 -0
  81. package/dist/consent/index.js +23 -0
  82. package/dist/consent/index.js.map +1 -0
  83. package/dist/crdt/index.cjs +152 -0
  84. package/dist/crdt/index.cjs.map +1 -0
  85. package/dist/crdt/index.d.cts +30 -0
  86. package/dist/crdt/index.d.ts +30 -0
  87. package/dist/crdt/index.js +24 -0
  88. package/dist/crdt/index.js.map +1 -0
  89. package/dist/crypto-IVKU7YTT.js +44 -0
  90. package/dist/crypto-IVKU7YTT.js.map +1 -0
  91. package/dist/delegation-XDJCBTI2.js +16 -0
  92. package/dist/delegation-XDJCBTI2.js.map +1 -0
  93. package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
  94. package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
  95. package/dist/hash-9KO1BGxh.d.cts +63 -0
  96. package/dist/hash-ChfJjRjQ.d.ts +63 -0
  97. package/dist/history/index.cjs +1215 -0
  98. package/dist/history/index.cjs.map +1 -0
  99. package/dist/history/index.d.cts +62 -0
  100. package/dist/history/index.d.ts +62 -0
  101. package/dist/history/index.js +79 -0
  102. package/dist/history/index.js.map +1 -0
  103. package/dist/i18n/index.cjs +746 -0
  104. package/dist/i18n/index.cjs.map +1 -0
  105. package/dist/i18n/index.d.cts +38 -0
  106. package/dist/i18n/index.d.ts +38 -0
  107. package/dist/i18n/index.js +55 -0
  108. package/dist/i18n/index.js.map +1 -0
  109. package/dist/index-BRHBCmLt.d.ts +1940 -0
  110. package/dist/index-C8kQtmOk.d.ts +380 -0
  111. package/dist/index-DN-J-5wT.d.cts +1940 -0
  112. package/dist/index-DhjMjz7L.d.cts +380 -0
  113. package/dist/index.cjs +14756 -0
  114. package/dist/index.cjs.map +1 -0
  115. package/dist/index.d.cts +269 -0
  116. package/dist/index.d.ts +269 -0
  117. package/dist/index.js +6085 -0
  118. package/dist/index.js.map +1 -0
  119. package/dist/indexing/index.cjs +736 -0
  120. package/dist/indexing/index.cjs.map +1 -0
  121. package/dist/indexing/index.d.cts +36 -0
  122. package/dist/indexing/index.d.ts +36 -0
  123. package/dist/indexing/index.js +77 -0
  124. package/dist/indexing/index.js.map +1 -0
  125. package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
  126. package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
  127. package/dist/ledger-2NX4L7PN.js +33 -0
  128. package/dist/ledger-2NX4L7PN.js.map +1 -0
  129. package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
  130. package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
  131. package/dist/periods/index.cjs +1035 -0
  132. package/dist/periods/index.cjs.map +1 -0
  133. package/dist/periods/index.d.cts +21 -0
  134. package/dist/periods/index.d.ts +21 -0
  135. package/dist/periods/index.js +25 -0
  136. package/dist/periods/index.js.map +1 -0
  137. package/dist/predicate-SBHmi6D0.d.cts +161 -0
  138. package/dist/predicate-SBHmi6D0.d.ts +161 -0
  139. package/dist/query/index.cjs +1957 -0
  140. package/dist/query/index.cjs.map +1 -0
  141. package/dist/query/index.d.cts +3 -0
  142. package/dist/query/index.d.ts +3 -0
  143. package/dist/query/index.js +62 -0
  144. package/dist/query/index.js.map +1 -0
  145. package/dist/session/index.cjs +487 -0
  146. package/dist/session/index.cjs.map +1 -0
  147. package/dist/session/index.d.cts +45 -0
  148. package/dist/session/index.d.ts +45 -0
  149. package/dist/session/index.js +44 -0
  150. package/dist/session/index.js.map +1 -0
  151. package/dist/shadow/index.cjs +133 -0
  152. package/dist/shadow/index.cjs.map +1 -0
  153. package/dist/shadow/index.d.cts +16 -0
  154. package/dist/shadow/index.d.ts +16 -0
  155. package/dist/shadow/index.js +20 -0
  156. package/dist/shadow/index.js.map +1 -0
  157. package/dist/store/index.cjs +1069 -0
  158. package/dist/store/index.cjs.map +1 -0
  159. package/dist/store/index.d.cts +491 -0
  160. package/dist/store/index.d.ts +491 -0
  161. package/dist/store/index.js +34 -0
  162. package/dist/store/index.js.map +1 -0
  163. package/dist/strategy-BSxFXGzb.d.cts +110 -0
  164. package/dist/strategy-BSxFXGzb.d.ts +110 -0
  165. package/dist/strategy-D-SrOLCl.d.cts +548 -0
  166. package/dist/strategy-D-SrOLCl.d.ts +548 -0
  167. package/dist/sync/index.cjs +1062 -0
  168. package/dist/sync/index.cjs.map +1 -0
  169. package/dist/sync/index.d.cts +42 -0
  170. package/dist/sync/index.d.ts +42 -0
  171. package/dist/sync/index.js +28 -0
  172. package/dist/sync/index.js.map +1 -0
  173. package/dist/team/index.cjs +1233 -0
  174. package/dist/team/index.cjs.map +1 -0
  175. package/dist/team/index.d.cts +117 -0
  176. package/dist/team/index.d.ts +117 -0
  177. package/dist/team/index.js +39 -0
  178. package/dist/team/index.js.map +1 -0
  179. package/dist/tx/index.cjs +212 -0
  180. package/dist/tx/index.cjs.map +1 -0
  181. package/dist/tx/index.d.cts +20 -0
  182. package/dist/tx/index.d.ts +20 -0
  183. package/dist/tx/index.js +20 -0
  184. package/dist/tx/index.js.map +1 -0
  185. package/dist/types-BZpCZB8N.d.ts +7526 -0
  186. package/dist/types-Bfs0qr5F.d.cts +7526 -0
  187. package/dist/ulid-COREQ2RQ.js +9 -0
  188. package/dist/ulid-COREQ2RQ.js.map +1 -0
  189. package/dist/util/index.cjs +230 -0
  190. package/dist/util/index.cjs.map +1 -0
  191. package/dist/util/index.d.cts +77 -0
  192. package/dist/util/index.d.ts +77 -0
  193. package/dist/util/index.js +190 -0
  194. package/dist/util/index.js.map +1 -0
  195. package/package.json +244 -0
@@ -0,0 +1,311 @@
1
+ import {
2
+ decrypt
3
+ } from "./chunk-MR4424N3.js";
4
+ import {
5
+ ReadOnlyAtInstantError
6
+ } from "./chunk-ACLDOTNQ.js";
7
+
8
+ // src/history/history.ts
9
+ var HISTORY_COLLECTION = "_history";
10
+ var VERSION_PAD = 10;
11
+ function historyId(collection, recordId, version) {
12
+ return `${collection}:${recordId}:${String(version).padStart(VERSION_PAD, "0")}`;
13
+ }
14
+ function matchesPrefix(id, collection, recordId) {
15
+ if (recordId) {
16
+ return id.startsWith(`${collection}:${recordId}:`);
17
+ }
18
+ return id.startsWith(`${collection}:`);
19
+ }
20
+ async function saveHistory(adapter, vault, collection, recordId, envelope) {
21
+ const id = historyId(collection, recordId, envelope._v);
22
+ await adapter.put(vault, HISTORY_COLLECTION, id, envelope);
23
+ }
24
+ async function getHistory(adapter, vault, collection, recordId, options) {
25
+ const allIds = await adapter.list(vault, HISTORY_COLLECTION);
26
+ const matchingIds = allIds.filter((id) => matchesPrefix(id, collection, recordId)).sort().reverse();
27
+ const entries = [];
28
+ for (const id of matchingIds) {
29
+ const envelope = await adapter.get(vault, HISTORY_COLLECTION, id);
30
+ if (!envelope) continue;
31
+ if (options?.from && envelope._ts < options.from) continue;
32
+ if (options?.to && envelope._ts > options.to) continue;
33
+ entries.push(envelope);
34
+ if (options?.limit && entries.length >= options.limit) break;
35
+ }
36
+ return entries;
37
+ }
38
+ async function getVersionEnvelope(adapter, vault, collection, recordId, version) {
39
+ const id = historyId(collection, recordId, version);
40
+ return adapter.get(vault, HISTORY_COLLECTION, id);
41
+ }
42
+ async function pruneHistory(adapter, vault, collection, recordId, options) {
43
+ const allIds = await adapter.list(vault, HISTORY_COLLECTION);
44
+ const matchingIds = allIds.filter((id) => recordId ? matchesPrefix(id, collection, recordId) : matchesPrefix(id, collection)).sort();
45
+ let toDelete = [];
46
+ if (options.keepVersions !== void 0) {
47
+ const keep = options.keepVersions;
48
+ if (matchingIds.length > keep) {
49
+ toDelete = matchingIds.slice(0, matchingIds.length - keep);
50
+ }
51
+ }
52
+ if (options.beforeDate) {
53
+ for (const id of matchingIds) {
54
+ if (toDelete.includes(id)) continue;
55
+ const envelope = await adapter.get(vault, HISTORY_COLLECTION, id);
56
+ if (envelope && envelope._ts < options.beforeDate) {
57
+ toDelete.push(id);
58
+ }
59
+ }
60
+ }
61
+ const uniqueDeletes = [...new Set(toDelete)];
62
+ for (const id of uniqueDeletes) {
63
+ await adapter.delete(vault, HISTORY_COLLECTION, id);
64
+ }
65
+ return uniqueDeletes.length;
66
+ }
67
+ async function clearHistory(adapter, vault, collection, recordId) {
68
+ const allIds = await adapter.list(vault, HISTORY_COLLECTION);
69
+ let toDelete;
70
+ if (collection && recordId) {
71
+ toDelete = allIds.filter((id) => matchesPrefix(id, collection, recordId));
72
+ } else if (collection) {
73
+ toDelete = allIds.filter((id) => matchesPrefix(id, collection));
74
+ } else {
75
+ toDelete = allIds;
76
+ }
77
+ for (const id of toDelete) {
78
+ await adapter.delete(vault, HISTORY_COLLECTION, id);
79
+ }
80
+ return toDelete.length;
81
+ }
82
+
83
+ // src/history/time-machine.ts
84
+ var VaultInstant = class {
85
+ constructor(engine, timestamp) {
86
+ this.engine = engine;
87
+ this.timestamp = timestamp;
88
+ }
89
+ engine;
90
+ timestamp;
91
+ /** Get a point-in-time view of a collection. */
92
+ collection(name) {
93
+ return new CollectionInstant(this.engine, this.timestamp, name);
94
+ }
95
+ };
96
+ var CollectionInstant = class {
97
+ constructor(engine, targetTs, name) {
98
+ this.engine = engine;
99
+ this.targetTs = targetTs;
100
+ this.name = name;
101
+ }
102
+ engine;
103
+ targetTs;
104
+ name;
105
+ /**
106
+ * Return the record as it existed at the target timestamp, or
107
+ * `null` if the record had not been created yet or had already been
108
+ * deleted by then.
109
+ */
110
+ async get(id) {
111
+ const envelope = await this.resolveEnvelope(id);
112
+ if (!envelope) return null;
113
+ const plaintext = this.engine.encrypted ? await decrypt(envelope._iv, envelope._data, await this.engine.getDEK(this.name)) : envelope._data;
114
+ return JSON.parse(plaintext);
115
+ }
116
+ /**
117
+ * IDs of records that existed (had at least one `put` and were not
118
+ * subsequently deleted) at the target timestamp.
119
+ *
120
+ * Implemented as a linear scan over history + ledger. Performance
121
+ * is bounded by total history size (not live-vault size), so the
122
+ * memory-first vault-scale cap (1K–50K records × average history
123
+ * depth) still applies.
124
+ */
125
+ async list() {
126
+ const historyIds = await collectHistoryIds(this.engine.adapter, this.engine.name, this.name);
127
+ const liveIds = await this.engine.adapter.list(this.engine.name, this.name);
128
+ const candidateIds = /* @__PURE__ */ new Set([...historyIds, ...liveIds]);
129
+ const alive = [];
130
+ for (const id of candidateIds) {
131
+ const env = await this.resolveEnvelope(id);
132
+ if (env) alive.push(id);
133
+ }
134
+ return alive.sort();
135
+ }
136
+ // ── write guards ───────────────────────────────────────────────────
137
+ async put(_id, _record) {
138
+ throw new ReadOnlyAtInstantError("put", this.targetTs);
139
+ }
140
+ async delete(_id) {
141
+ throw new ReadOnlyAtInstantError("delete", this.targetTs);
142
+ }
143
+ async update(_id, _patch) {
144
+ throw new ReadOnlyAtInstantError("update", this.targetTs);
145
+ }
146
+ // ── internals ─────────────────────────────────────────────────────
147
+ /**
148
+ * Return the envelope that represents the record's state at
149
+ * `targetTs`, accounting for deletes. `null` if the record didn't
150
+ * exist at that instant.
151
+ *
152
+ * ## Why we use the ledger as the authoritative timeline
153
+ *
154
+ * The per-version history snapshots saved by `saveHistory()` do
155
+ * carry a `_ts` field, but that timestamp is the moment the
156
+ * snapshot was *captured* (i.e. the instant right before the
157
+ * subsequent overwrite), not the original write time. The ledger,
158
+ * by contrast, records `ts` at the moment of each `put` / `delete`
159
+ * — it's the only source that tracks the real timeline. So:
160
+ *
161
+ * 1. Walk the ledger; find the latest entry for `(collection, id)`
162
+ * with `ts ≤ targetTs`.
163
+ * 2. If that entry is a `delete`, the record was gone at the
164
+ * target instant — return null.
165
+ * 3. Otherwise it's a `put` with a specific `version`. Load the
166
+ * envelope for that version from history, falling back to the
167
+ * live collection for the most recent version.
168
+ *
169
+ * ## Fallback when the ledger is disabled
170
+ *
171
+ * If the vault has history disabled, `getLedger()` returns null and
172
+ * we fall back to comparing envelope `_ts` fields. This is
173
+ * approximate and gets the *last write* right but may confuse the
174
+ * intermediate versions; adopters needing accurate time-machine
175
+ * reads should leave history enabled.
176
+ */
177
+ async resolveEnvelope(id) {
178
+ const ledger = this.engine.getLedger();
179
+ if (ledger) {
180
+ return this.resolveViaLedger(id, ledger);
181
+ }
182
+ return this.resolveViaEnvelopeTs(id);
183
+ }
184
+ async resolveViaLedger(id, ledger) {
185
+ const entries = await ledger.entries();
186
+ let latest = null;
187
+ for (const e of entries) {
188
+ if (e.collection !== this.name || e.id !== id) continue;
189
+ if (e.ts > this.targetTs) break;
190
+ latest = { op: e.op, version: e.version };
191
+ }
192
+ if (!latest) return null;
193
+ if (latest.op === "delete") return null;
194
+ return this.loadVersion(id, latest.version);
195
+ }
196
+ async resolveViaEnvelopeTs(id) {
197
+ const history = await getHistory(
198
+ this.engine.adapter,
199
+ this.engine.name,
200
+ this.name,
201
+ id
202
+ );
203
+ const live = await this.engine.adapter.get(this.engine.name, this.name, id);
204
+ const byVersion = /* @__PURE__ */ new Map();
205
+ for (const e of history) byVersion.set(e._v, e);
206
+ if (live) byVersion.set(live._v, live);
207
+ const sorted = [...byVersion.values()].sort(
208
+ (a, b) => a._ts < b._ts ? 1 : a._ts > b._ts ? -1 : 0
209
+ );
210
+ return sorted.find((e) => e._ts <= this.targetTs) ?? null;
211
+ }
212
+ /**
213
+ * Fetch the envelope for a specific version. The live record (most
214
+ * recent put) lives in the main collection; prior versions live in
215
+ * `_history`. We check live first because the common case after a
216
+ * delete is that we're trying to load the last-live version from
217
+ * history, and skipping live for the current-version case avoids a
218
+ * redundant lookup.
219
+ */
220
+ async loadVersion(id, version) {
221
+ const live = await this.engine.adapter.get(this.engine.name, this.name, id);
222
+ if (live && live._v === version) return live;
223
+ const historyId2 = `${this.name}:${id}:${String(version).padStart(10, "0")}`;
224
+ return await this.engine.adapter.get(this.engine.name, "_history", historyId2);
225
+ }
226
+ };
227
+ async function collectHistoryIds(adapter, vault, collection) {
228
+ const all = await adapter.list(vault, "_history");
229
+ const prefix = `${collection}:`;
230
+ const seen = /* @__PURE__ */ new Set();
231
+ for (const key of all) {
232
+ if (!key.startsWith(prefix)) continue;
233
+ const lastColon = key.lastIndexOf(":");
234
+ if (lastColon <= prefix.length) continue;
235
+ const middle = key.slice(prefix.length, lastColon);
236
+ seen.add(middle);
237
+ }
238
+ return [...seen];
239
+ }
240
+
241
+ // src/history/diff.ts
242
+ function diff(oldObj, newObj, basePath = "") {
243
+ const changes = [];
244
+ if (oldObj === newObj) return changes;
245
+ if (oldObj == null && newObj != null) {
246
+ return [{ path: basePath || "(root)", type: "added", to: newObj }];
247
+ }
248
+ if (oldObj != null && newObj == null) {
249
+ return [{ path: basePath || "(root)", type: "removed", from: oldObj }];
250
+ }
251
+ if (typeof oldObj !== typeof newObj) {
252
+ return [{ path: basePath || "(root)", type: "changed", from: oldObj, to: newObj }];
253
+ }
254
+ if (typeof oldObj !== "object") {
255
+ return [{ path: basePath || "(root)", type: "changed", from: oldObj, to: newObj }];
256
+ }
257
+ if (Array.isArray(oldObj) && Array.isArray(newObj)) {
258
+ const maxLen = Math.max(oldObj.length, newObj.length);
259
+ for (let i = 0; i < maxLen; i++) {
260
+ const p = basePath ? `${basePath}[${i}]` : `[${i}]`;
261
+ if (i >= oldObj.length) {
262
+ changes.push({ path: p, type: "added", to: newObj[i] });
263
+ } else if (i >= newObj.length) {
264
+ changes.push({ path: p, type: "removed", from: oldObj[i] });
265
+ } else {
266
+ changes.push(...diff(oldObj[i], newObj[i], p));
267
+ }
268
+ }
269
+ return changes;
270
+ }
271
+ const oldRecord = oldObj;
272
+ const newRecord = newObj;
273
+ const allKeys = /* @__PURE__ */ new Set([...Object.keys(oldRecord), ...Object.keys(newRecord)]);
274
+ for (const key of allKeys) {
275
+ const p = basePath ? `${basePath}.${key}` : key;
276
+ if (!(key in oldRecord)) {
277
+ changes.push({ path: p, type: "added", to: newRecord[key] });
278
+ } else if (!(key in newRecord)) {
279
+ changes.push({ path: p, type: "removed", from: oldRecord[key] });
280
+ } else {
281
+ changes.push(...diff(oldRecord[key], newRecord[key], p));
282
+ }
283
+ }
284
+ return changes;
285
+ }
286
+ function formatDiff(changes) {
287
+ if (changes.length === 0) return "(no changes)";
288
+ return changes.map((c) => {
289
+ switch (c.type) {
290
+ case "added":
291
+ return `+ ${c.path}: ${JSON.stringify(c.to)}`;
292
+ case "removed":
293
+ return `- ${c.path}: ${JSON.stringify(c.from)}`;
294
+ case "changed":
295
+ return `~ ${c.path}: ${JSON.stringify(c.from)} \u2192 ${JSON.stringify(c.to)}`;
296
+ }
297
+ }).join("\n");
298
+ }
299
+
300
+ export {
301
+ saveHistory,
302
+ getHistory,
303
+ getVersionEnvelope,
304
+ pruneHistory,
305
+ clearHistory,
306
+ VaultInstant,
307
+ CollectionInstant,
308
+ diff,
309
+ formatDiff
310
+ };
311
+ //# sourceMappingURL=chunk-NXFEYLVG.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/history/history.ts","../src/history/time-machine.ts","../src/history/diff.ts"],"sourcesContent":["import type { NoydbStore, EncryptedEnvelope, HistoryOptions, PruneOptions } from '../types.js'\n\n/**\n * History storage convention:\n * Collection: `_history`\n * ID format: `{collection}:{recordId}:{paddedVersion}`\n * Version is zero-padded to 10 digits for lexicographic sorting.\n */\n\nconst HISTORY_COLLECTION = '_history'\nconst VERSION_PAD = 10\n\nfunction historyId(collection: string, recordId: string, version: number): string {\n return `${collection}:${recordId}:${String(version).padStart(VERSION_PAD, '0')}`\n}\n\n// Unused today, kept for future history-id parsing utilities.\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nfunction parseHistoryId(id: string): { collection: string; recordId: string; version: number } | null {\n const lastColon = id.lastIndexOf(':')\n if (lastColon < 0) return null\n const versionStr = id.slice(lastColon + 1)\n const rest = id.slice(0, lastColon)\n const firstColon = rest.indexOf(':')\n if (firstColon < 0) return null\n return {\n collection: rest.slice(0, firstColon),\n recordId: rest.slice(firstColon + 1),\n version: parseInt(versionStr, 10),\n }\n}\n\nfunction matchesPrefix(id: string, collection: string, recordId?: string): boolean {\n if (recordId) {\n return id.startsWith(`${collection}:${recordId}:`)\n }\n return id.startsWith(`${collection}:`)\n}\n\n/** Save a history entry (a complete encrypted envelope snapshot). */\nexport async function saveHistory(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string,\n envelope: EncryptedEnvelope,\n): Promise<void> {\n const id = historyId(collection, recordId, envelope._v)\n await adapter.put(vault, HISTORY_COLLECTION, id, envelope)\n}\n\n/** Get history entries for a record, sorted newest-first. 
*/\nexport async function getHistory(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string,\n options?: HistoryOptions,\n): Promise<EncryptedEnvelope[]> {\n const allIds = await adapter.list(vault, HISTORY_COLLECTION)\n const matchingIds = allIds\n .filter(id => matchesPrefix(id, collection, recordId))\n .sort()\n .reverse() // newest first\n\n const entries: EncryptedEnvelope[] = []\n\n for (const id of matchingIds) {\n const envelope = await adapter.get(vault, HISTORY_COLLECTION, id)\n if (!envelope) continue\n\n // Apply time filters\n if (options?.from && envelope._ts < options.from) continue\n if (options?.to && envelope._ts > options.to) continue\n\n entries.push(envelope)\n\n if (options?.limit && entries.length >= options.limit) break\n }\n\n return entries\n}\n\n/** Get a specific version's envelope from history. */\nexport async function getVersionEnvelope(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string,\n version: number,\n): Promise<EncryptedEnvelope | null> {\n const id = historyId(collection, recordId, version)\n return adapter.get(vault, HISTORY_COLLECTION, id)\n}\n\n/** Prune history entries. Returns the number of entries deleted. */\nexport async function pruneHistory(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string | undefined,\n options: PruneOptions,\n): Promise<number> {\n const allIds = await adapter.list(vault, HISTORY_COLLECTION)\n const matchingIds = allIds\n .filter(id => recordId ? 
matchesPrefix(id, collection, recordId) : matchesPrefix(id, collection))\n .sort()\n\n let toDelete: string[] = []\n\n if (options.keepVersions !== undefined) {\n // Keep only the N most recent, delete the rest\n const keep = options.keepVersions\n if (matchingIds.length > keep) {\n toDelete = matchingIds.slice(0, matchingIds.length - keep)\n }\n }\n\n if (options.beforeDate) {\n // Delete entries older than the specified date\n for (const id of matchingIds) {\n if (toDelete.includes(id)) continue\n const envelope = await adapter.get(vault, HISTORY_COLLECTION, id)\n if (envelope && envelope._ts < options.beforeDate) {\n toDelete.push(id)\n }\n }\n }\n\n // Deduplicate\n const uniqueDeletes = [...new Set(toDelete)]\n\n for (const id of uniqueDeletes) {\n await adapter.delete(vault, HISTORY_COLLECTION, id)\n }\n\n return uniqueDeletes.length\n}\n\n/** Clear all history for a vault, optionally scoped to a collection or record. */\nexport async function clearHistory(\n adapter: NoydbStore,\n vault: string,\n collection?: string,\n recordId?: string,\n): Promise<number> {\n const allIds = await adapter.list(vault, HISTORY_COLLECTION)\n let toDelete: string[]\n\n if (collection && recordId) {\n toDelete = allIds.filter(id => matchesPrefix(id, collection, recordId))\n } else if (collection) {\n toDelete = allIds.filter(id => matchesPrefix(id, collection))\n } else {\n toDelete = allIds\n }\n\n for (const id of toDelete) {\n await adapter.delete(vault, HISTORY_COLLECTION, id)\n }\n\n return toDelete.length\n}\n","/**\n * Time-machine queries — point-in-time reads reconstructed from the\n * existing history + ledger infrastructure.\n *\n * ## Usage\n *\n * ```ts\n * const vault = await db.openVault('acme', { passphrase })\n * const q1End = vault.at('2026-03-31T23:59:59Z')\n * const invoice = await q1End.collection<Invoice>('invoices').get('inv-001')\n * // → the record as it stood at the close of Q1 2026\n * ```\n *\n * ## How it works\n *\n * Every write path already fans 
out into two persistence lanes:\n *\n * 1. `saveHistory(...)` persists a **full encrypted envelope snapshot**\n * per version under the `_history` collection (one envelope per\n * version, keyed by `{collection}:{id}:{paddedVersion}`). Each\n * envelope carries its own `_ts` (the write timestamp).\n * 2. `ledger.append(...)` appends a hash-chained audit entry that\n * records the `op` (put / delete), `version`, and `ts`.\n *\n * Reconstruction at a target timestamp T is therefore:\n *\n * - Find the newest history envelope for `(collection, id)` whose\n * `_ts ≤ T` — that's the state the record was in at T.\n * - Check the ledger for any `op: 'delete'` entry for the same\n * `(collection, id)` with `entry.ts` in `(latestEnvelope._ts, T]` —\n * if present, the record was deleted before T, so return `null`.\n * - Decrypt the surviving envelope with the current collection DEK\n * (DEKs are per-collection but stable across versions — the same\n * key encrypts v1 and v15 of a record).\n *\n * No delta replay. The existing `history.ts` module already stores\n * complete snapshots; we just pick the right one.\n *\n * ## Read-only contract\n *\n * Every write method on `CollectionInstant` throws\n * {@link ReadOnlyAtInstantError}. A historical view is a *read*\n * surface — mutating the past would require either a branch/shadow\n * mechanism (tracked under shadow vaults) or a rewrite of\n * history, which breaks the ledger's tamper-evidence guarantee.\n *\n * @module\n */\nimport type { EncryptedEnvelope, NoydbStore } from '../types.js'\nimport type { LedgerStore } from './ledger/store.js'\nimport { getHistory } from './history.js'\nimport { decrypt } from '../crypto.js'\nimport { ReadOnlyAtInstantError } from '../errors.js'\n\n/**\n * Narrow view of a {@link Vault}'s internals that\n * {@link VaultInstant} needs. 
Passed in by `Vault.at()` rather than\n * constructed here so all crypto + adapter access stays inside the\n * Vault class.\n *\n * Not exported from the public barrel — consumers should get a\n * `VaultInstant` via `vault.at(ts)`, never by constructing one\n * directly.\n */\nexport interface VaultEngine {\n readonly adapter: NoydbStore\n /** Vault name (the compartment). */\n readonly name: string\n /**\n * `true` when the vault was opened with a passphrase (the normal\n * case). `false` in plaintext-mode vaults (`encrypt: false`) — in\n * that case `envelope._data` is raw JSON and we skip the DEK lookup.\n */\n readonly encrypted: boolean\n /**\n * Resolves the DEK used to decrypt a given collection's envelopes.\n * Not called when `encrypted` is false.\n */\n getDEK(collection: string): Promise<CryptoKey>\n /**\n * Lazily-initialised ledger. We consult it to detect deletes that\n * happened between the latest history snapshot and the target\n * timestamp. `null` when history is disabled for this vault — in\n * that case time-machine reads fall back to history-only\n * reconstruction (which may miss deletes).\n */\n getLedger(): LedgerStore | null\n}\n\n/**\n * A vault at a fixed instant. Produced by `vault.at(timestamp)`.\n * Carries no session state of its own — every read is a fresh\n * lookup through the vault's adapter.\n *\n * Cheap to construct; safe to throw away. Create one per query.\n */\nexport class VaultInstant {\n constructor(\n private readonly engine: VaultEngine,\n /** Fully-resolved target timestamp (ISO-8601 UTC). */\n public readonly timestamp: string,\n ) {}\n\n /** Get a point-in-time view of a collection. */\n collection<T = unknown>(name: string): CollectionInstant<T> {\n return new CollectionInstant<T>(this.engine, this.timestamp, name)\n }\n}\n\n/**\n * A read-only collection view anchored to a past instant.\n *\n * Every write method throws {@link ReadOnlyAtInstantError} — see the\n * module docstring for why. 
The read surface is intentionally smaller\n * than the live {@link Collection}: `get` and `list` cover the\n * \"what did the books look like on date X\" use case without pulling\n * in the full query DSL / joins / aggregates at this stage. Follow-up\n * work tracked under.\n */\nexport class CollectionInstant<T = unknown> {\n constructor(\n private readonly engine: VaultEngine,\n private readonly targetTs: string,\n public readonly name: string,\n ) {}\n\n /**\n * Return the record as it existed at the target timestamp, or\n * `null` if the record had not been created yet or had already been\n * deleted by then.\n */\n async get(id: string): Promise<T | null> {\n const envelope = await this.resolveEnvelope(id)\n if (!envelope) return null\n const plaintext = this.engine.encrypted\n ? await decrypt(envelope._iv, envelope._data, await this.engine.getDEK(this.name))\n : envelope._data\n return JSON.parse(plaintext) as T\n }\n\n /**\n * IDs of records that existed (had at least one `put` and were not\n * subsequently deleted) at the target timestamp.\n *\n * Implemented as a linear scan over history + ledger. 
Performance\n * is bounded by total history size (not live-vault size), so the\n * memory-first vault-scale cap (1K–50K records × average history\n * depth) still applies.\n */\n async list(): Promise<string[]> {\n const historyIds = await collectHistoryIds(this.engine.adapter, this.engine.name, this.name)\n const liveIds = await this.engine.adapter.list(this.engine.name, this.name)\n const candidateIds = new Set<string>([...historyIds, ...liveIds])\n const alive: string[] = []\n for (const id of candidateIds) {\n const env = await this.resolveEnvelope(id)\n if (env) alive.push(id)\n }\n return alive.sort()\n }\n\n // ── write guards ───────────────────────────────────────────────────\n\n async put(_id: string, _record: T): Promise<never> {\n throw new ReadOnlyAtInstantError('put', this.targetTs)\n }\n async delete(_id: string): Promise<never> {\n throw new ReadOnlyAtInstantError('delete', this.targetTs)\n }\n async update(_id: string, _patch: Partial<T>): Promise<never> {\n throw new ReadOnlyAtInstantError('update', this.targetTs)\n }\n\n // ── internals ─────────────────────────────────────────────────────\n\n /**\n * Return the envelope that represents the record's state at\n * `targetTs`, accounting for deletes. `null` if the record didn't\n * exist at that instant.\n *\n * ## Why we use the ledger as the authoritative timeline\n *\n * The per-version history snapshots saved by `saveHistory()` do\n * carry a `_ts` field, but that timestamp is the moment the\n * snapshot was *captured* (i.e. the instant right before the\n * subsequent overwrite), not the original write time. The ledger,\n * by contrast, records `ts` at the moment of each `put` / `delete`\n * — it's the only source that tracks the real timeline. So:\n *\n * 1. Walk the ledger; find the latest entry for `(collection, id)`\n * with `ts ≤ targetTs`.\n * 2. If that entry is a `delete`, the record was gone at the\n * target instant — return null.\n * 3. 
Otherwise it's a `put` with a specific `version`. Load the\n * envelope for that version from history, falling back to the\n * live collection for the most recent version.\n *\n * ## Fallback when the ledger is disabled\n *\n * If the vault has history disabled, `getLedger()` returns null and\n * we fall back to comparing envelope `_ts` fields. This is\n * approximate and gets the *last write* right but may confuse the\n * intermediate versions; adopters needing accurate time-machine\n * reads should leave history enabled.\n */\n private async resolveEnvelope(id: string): Promise<EncryptedEnvelope | null> {\n const ledger = this.engine.getLedger()\n if (ledger) {\n return this.resolveViaLedger(id, ledger)\n }\n return this.resolveViaEnvelopeTs(id)\n }\n\n private async resolveViaLedger(id: string, ledger: LedgerStore): Promise<EncryptedEnvelope | null> {\n const entries = await ledger.entries()\n // Entries are already ordered by index which is the mutation order.\n let latest: { op: 'put' | 'delete'; version: number } | null = null\n for (const e of entries) {\n if (e.collection !== this.name || e.id !== id) continue\n if (e.ts > this.targetTs) break // entries are time-ordered by index\n latest = { op: e.op, version: e.version }\n }\n if (!latest) return null\n if (latest.op === 'delete') return null\n return this.loadVersion(id, latest.version)\n }\n\n private async resolveViaEnvelopeTs(id: string): Promise<EncryptedEnvelope | null> {\n const history = await getHistory(\n this.engine.adapter, this.engine.name, this.name, id,\n )\n const live = await this.engine.adapter.get(this.engine.name, this.name, id)\n const byVersion = new Map<number, EncryptedEnvelope>()\n for (const e of history) byVersion.set(e._v, e)\n if (live) byVersion.set(live._v, live)\n const sorted = [...byVersion.values()].sort((a, b) =>\n a._ts < b._ts ? 1 : a._ts > b._ts ? -1 : 0,\n )\n return sorted.find((e) => e._ts <= this.targetTs) ?? 
null\n }\n\n /**\n * Fetch the envelope for a specific version. The live record (most\n * recent put) lives in the main collection; prior versions live in\n * `_history`. We check live first because the common case after a\n * delete is that we're trying to load the last-live version from\n * history, and skipping live for the current-version case avoids a\n * redundant lookup.\n */\n private async loadVersion(id: string, version: number): Promise<EncryptedEnvelope | null> {\n const live = await this.engine.adapter.get(this.engine.name, this.name, id)\n if (live && live._v === version) return live\n\n // Direct lookup by (collection, id, version) — avoids scanning all history.\n const historyId = `${this.name}:${id}:${String(version).padStart(10, '0')}`\n return await this.engine.adapter.get(this.engine.name, '_history', historyId)\n }\n}\n\n/**\n * Scan the `_history` collection once and collect every distinct\n * `recordId` for the given collection. History keys follow the\n * shape `<collection>:<recordId>:<paddedVersion>`; we split on the\n * last two colons (delimiter-safe because `paddedVersion` is\n * exactly 10 digits).\n */\nasync function collectHistoryIds(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n): Promise<string[]> {\n const all = await adapter.list(vault, '_history')\n const prefix = `${collection}:`\n const seen = new Set<string>()\n for (const key of all) {\n if (!key.startsWith(prefix)) continue\n const lastColon = key.lastIndexOf(':')\n if (lastColon <= prefix.length) continue\n const middle = key.slice(prefix.length, lastColon)\n seen.add(middle)\n }\n return [...seen]\n}\n","/**\n * Zero-dependency JSON diff.\n * Produces a flat list of changes between two plain objects.\n */\n\nexport type ChangeType = 'added' | 'removed' | 'changed'\n\nexport interface DiffEntry {\n /** Dot-separated path to the changed field (e.g. \"address.city\"). */\n readonly path: string\n /** Type of change. 
*/\n readonly type: ChangeType\n /** Previous value (undefined for 'added'). */\n readonly from?: unknown\n /** New value (undefined for 'removed'). */\n readonly to?: unknown\n}\n\n/**\n * Compute differences between two objects.\n * Returns an array of DiffEntry describing each changed field.\n * Returns empty array if objects are identical.\n */\nexport function diff(oldObj: unknown, newObj: unknown, basePath = ''): DiffEntry[] {\n const changes: DiffEntry[] = []\n\n // Both primitives or nulls\n if (oldObj === newObj) return changes\n\n // One is null/undefined\n if (oldObj == null && newObj != null) {\n return [{ path: basePath || '(root)', type: 'added', to: newObj }]\n }\n if (oldObj != null && newObj == null) {\n return [{ path: basePath || '(root)', type: 'removed', from: oldObj }]\n }\n\n // Different types\n if (typeof oldObj !== typeof newObj) {\n return [{ path: basePath || '(root)', type: 'changed', from: oldObj, to: newObj }]\n }\n\n // Both primitives (and not equal — checked above)\n if (typeof oldObj !== 'object') {\n return [{ path: basePath || '(root)', type: 'changed', from: oldObj, to: newObj }]\n }\n\n // Both arrays\n if (Array.isArray(oldObj) && Array.isArray(newObj)) {\n const maxLen = Math.max(oldObj.length, newObj.length)\n for (let i = 0; i < maxLen; i++) {\n const p = basePath ? `${basePath}[${i}]` : `[${i}]`\n if (i >= oldObj.length) {\n changes.push({ path: p, type: 'added', to: newObj[i] })\n } else if (i >= newObj.length) {\n changes.push({ path: p, type: 'removed', from: oldObj[i] })\n } else {\n changes.push(...diff(oldObj[i], newObj[i], p))\n }\n }\n return changes\n }\n\n // Both objects\n const oldRecord = oldObj as Record<string, unknown>\n const newRecord = newObj as Record<string, unknown>\n const allKeys = new Set([...Object.keys(oldRecord), ...Object.keys(newRecord)])\n\n for (const key of allKeys) {\n const p = basePath ? 
`${basePath}.${key}` : key\n if (!(key in oldRecord)) {\n changes.push({ path: p, type: 'added', to: newRecord[key] })\n } else if (!(key in newRecord)) {\n changes.push({ path: p, type: 'removed', from: oldRecord[key] })\n } else {\n changes.push(...diff(oldRecord[key], newRecord[key], p))\n }\n }\n\n return changes\n}\n\n/** Format a diff as a human-readable string. */\nexport function formatDiff(changes: DiffEntry[]): string {\n if (changes.length === 0) return '(no changes)'\n return changes.map(c => {\n switch (c.type) {\n case 'added':\n return `+ ${c.path}: ${JSON.stringify(c.to)}`\n case 'removed':\n return `- ${c.path}: ${JSON.stringify(c.from)}`\n case 'changed':\n return `~ ${c.path}: ${JSON.stringify(c.from)} → ${JSON.stringify(c.to)}`\n }\n }).join('\\n')\n}\n"],"mappings":";;;;;;;;AASA,IAAM,qBAAqB;AAC3B,IAAM,cAAc;AAEpB,SAAS,UAAU,YAAoB,UAAkB,SAAyB;AAChF,SAAO,GAAG,UAAU,IAAI,QAAQ,IAAI,OAAO,OAAO,EAAE,SAAS,aAAa,GAAG,CAAC;AAChF;AAkBA,SAAS,cAAc,IAAY,YAAoB,UAA4B;AACjF,MAAI,UAAU;AACZ,WAAO,GAAG,WAAW,GAAG,UAAU,IAAI,QAAQ,GAAG;AAAA,EACnD;AACA,SAAO,GAAG,WAAW,GAAG,UAAU,GAAG;AACvC;AAGA,eAAsB,YACpB,SACA,OACA,YACA,UACA,UACe;AACf,QAAM,KAAK,UAAU,YAAY,UAAU,SAAS,EAAE;AACtD,QAAM,QAAQ,IAAI,OAAO,oBAAoB,IAAI,QAAQ;AAC3D;AAGA,eAAsB,WACpB,SACA,OACA,YACA,UACA,SAC8B;AAC9B,QAAM,SAAS,MAAM,QAAQ,KAAK,OAAO,kBAAkB;AAC3D,QAAM,cAAc,OACjB,OAAO,QAAM,cAAc,IAAI,YAAY,QAAQ,CAAC,EACpD,KAAK,EACL,QAAQ;AAEX,QAAM,UAA+B,CAAC;AAEtC,aAAW,MAAM,aAAa;AAC5B,UAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,oBAAoB,EAAE;AAChE,QAAI,CAAC,SAAU;AAGf,QAAI,SAAS,QAAQ,SAAS,MAAM,QAAQ,KAAM;AAClD,QAAI,SAAS,MAAM,SAAS,MAAM,QAAQ,GAAI;AAE9C,YAAQ,KAAK,QAAQ;AAErB,QAAI,SAAS,SAAS,QAAQ,UAAU,QAAQ,MAAO;AAAA,EACzD;AAEA,SAAO;AACT;AAGA,eAAsB,mBACpB,SACA,OACA,YACA,UACA,SACmC;AACnC,QAAM,KAAK,UAAU,YAAY,UAAU,OAAO;AAClD,SAAO,QAAQ,IAAI,OAAO,oBAAoB,EAAE;AAClD;AAGA,eAAsB,aACpB,SACA,OACA,YACA,UACA,SACiB;AACjB,QAAM,SAAS,MAAM,QAAQ,KAAK,OAAO,kBAAkB;AAC3D,QAAM,cAAc,OACjB,OAAO,QAAM,WAAW,cAAc,IAAI,YAAY,QAAQ,IAAI,cAAc,IAAI,UAAU,CAAC,EAC/F,KAAK;AAER,MAAI,WAAqB,CAAC;AAE1B
,MAAI,QAAQ,iBAAiB,QAAW;AAEtC,UAAM,OAAO,QAAQ;AACrB,QAAI,YAAY,SAAS,MAAM;AAC7B,iBAAW,YAAY,MAAM,GAAG,YAAY,SAAS,IAAI;AAAA,IAC3D;AAAA,EACF;AAEA,MAAI,QAAQ,YAAY;AAEtB,eAAW,MAAM,aAAa;AAC5B,UAAI,SAAS,SAAS,EAAE,EAAG;AAC3B,YAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,oBAAoB,EAAE;AAChE,UAAI,YAAY,SAAS,MAAM,QAAQ,YAAY;AACjD,iBAAS,KAAK,EAAE;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AAGA,QAAM,gBAAgB,CAAC,GAAG,IAAI,IAAI,QAAQ,CAAC;AAE3C,aAAW,MAAM,eAAe;AAC9B,UAAM,QAAQ,OAAO,OAAO,oBAAoB,EAAE;AAAA,EACpD;AAEA,SAAO,cAAc;AACvB;AAGA,eAAsB,aACpB,SACA,OACA,YACA,UACiB;AACjB,QAAM,SAAS,MAAM,QAAQ,KAAK,OAAO,kBAAkB;AAC3D,MAAI;AAEJ,MAAI,cAAc,UAAU;AAC1B,eAAW,OAAO,OAAO,QAAM,cAAc,IAAI,YAAY,QAAQ,CAAC;AAAA,EACxE,WAAW,YAAY;AACrB,eAAW,OAAO,OAAO,QAAM,cAAc,IAAI,UAAU,CAAC;AAAA,EAC9D,OAAO;AACL,eAAW;AAAA,EACb;AAEA,aAAW,MAAM,UAAU;AACzB,UAAM,QAAQ,OAAO,OAAO,oBAAoB,EAAE;AAAA,EACpD;AAEA,SAAO,SAAS;AAClB;;;AClEO,IAAM,eAAN,MAAmB;AAAA,EACxB,YACmB,QAED,WAChB;AAHiB;AAED;AAAA,EACf;AAAA,EAHgB;AAAA,EAED;AAAA;AAAA,EAIlB,WAAwB,MAAoC;AAC1D,WAAO,IAAI,kBAAqB,KAAK,QAAQ,KAAK,WAAW,IAAI;AAAA,EACnE;AACF;AAYO,IAAM,oBAAN,MAAqC;AAAA,EAC1C,YACmB,QACA,UACD,MAChB;AAHiB;AACA;AACD;AAAA,EACf;AAAA,EAHgB;AAAA,EACA;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQlB,MAAM,IAAI,IAA+B;AACvC,UAAM,WAAW,MAAM,KAAK,gBAAgB,EAAE;AAC9C,QAAI,CAAC,SAAU,QAAO;AACtB,UAAM,YAAY,KAAK,OAAO,YAC1B,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,MAAM,KAAK,OAAO,OAAO,KAAK,IAAI,CAAC,IAC/E,SAAS;AACb,WAAO,KAAK,MAAM,SAAS;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,OAA0B;AAC9B,UAAM,aAAa,MAAM,kBAAkB,KAAK,OAAO,SAAS,KAAK,OAAO,MAAM,KAAK,IAAI;AAC3F,UAAM,UAAU,MAAM,KAAK,OAAO,QAAQ,KAAK,KAAK,OAAO,MAAM,KAAK,IAAI;AAC1E,UAAM,eAAe,oBAAI,IAAY,CAAC,GAAG,YAAY,GAAG,OAAO,CAAC;AAChE,UAAM,QAAkB,CAAC;AACzB,eAAW,MAAM,cAAc;AAC7B,YAAM,MAAM,MAAM,KAAK,gBAAgB,EAAE;AACzC,UAAI,IAAK,OAAM,KAAK,EAAE;AAAA,IACxB;AACA,WAAO,MAAM,KAAK;AAAA,EACpB;AAAA;AAAA,EAIA,MAAM,IAAI,KAAa,SAA4B;AACjD,UAAM,IAAI,uBAAuB,OAAO,KAAK,QAAQ;AAAA,EACvD;AAAA,EACA,MAAM,OAAO,KAA6B;AACxC,UAAM,IAAI,uBAAuB,UAAU,KAAK,QAAQ;AAAA,EAC1D;AAAA,EACA,MAAM,OAAO,KAAa,QAAoC;AAC5
D,UAAM,IAAI,uBAAuB,UAAU,KAAK,QAAQ;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkCA,MAAc,gBAAgB,IAA+C;AAC3E,UAAM,SAAS,KAAK,OAAO,UAAU;AACrC,QAAI,QAAQ;AACV,aAAO,KAAK,iBAAiB,IAAI,MAAM;AAAA,IACzC;AACA,WAAO,KAAK,qBAAqB,EAAE;AAAA,EACrC;AAAA,EAEA,MAAc,iBAAiB,IAAY,QAAwD;AACjG,UAAM,UAAU,MAAM,OAAO,QAAQ;AAErC,QAAI,SAA2D;AAC/D,eAAW,KAAK,SAAS;AACvB,UAAI,EAAE,eAAe,KAAK,QAAQ,EAAE,OAAO,GAAI;AAC/C,UAAI,EAAE,KAAK,KAAK,SAAU;AAC1B,eAAS,EAAE,IAAI,EAAE,IAAI,SAAS,EAAE,QAAQ;AAAA,IAC1C;AACA,QAAI,CAAC,OAAQ,QAAO;AACpB,QAAI,OAAO,OAAO,SAAU,QAAO;AACnC,WAAO,KAAK,YAAY,IAAI,OAAO,OAAO;AAAA,EAC5C;AAAA,EAEA,MAAc,qBAAqB,IAA+C;AAChF,UAAM,UAAU,MAAM;AAAA,MACpB,KAAK,OAAO;AAAA,MAAS,KAAK,OAAO;AAAA,MAAM,KAAK;AAAA,MAAM;AAAA,IACpD;AACA,UAAM,OAAO,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO,MAAM,KAAK,MAAM,EAAE;AAC1E,UAAM,YAAY,oBAAI,IAA+B;AACrD,eAAW,KAAK,QAAS,WAAU,IAAI,EAAE,IAAI,CAAC;AAC9C,QAAI,KAAM,WAAU,IAAI,KAAK,IAAI,IAAI;AACrC,UAAM,SAAS,CAAC,GAAG,UAAU,OAAO,CAAC,EAAE;AAAA,MAAK,CAAC,GAAG,MAC9C,EAAE,MAAM,EAAE,MAAM,IAAI,EAAE,MAAM,EAAE,MAAM,KAAK;AAAA,IAC3C;AACA,WAAO,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,KAAK,QAAQ,KAAK;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,YAAY,IAAY,SAAoD;AACxF,UAAM,OAAO,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO,MAAM,KAAK,MAAM,EAAE;AAC1E,QAAI,QAAQ,KAAK,OAAO,QAAS,QAAO;AAGxC,UAAMA,aAAY,GAAG,KAAK,IAAI,IAAI,EAAE,IAAI,OAAO,OAAO,EAAE,SAAS,IAAI,GAAG,CAAC;AACzE,WAAO,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO,MAAM,YAAYA,UAAS;AAAA,EAC9E;AACF;AASA,eAAe,kBACb,SACA,OACA,YACmB;AACnB,QAAM,MAAM,MAAM,QAAQ,KAAK,OAAO,UAAU;AAChD,QAAM,SAAS,GAAG,UAAU;AAC5B,QAAM,OAAO,oBAAI,IAAY;AAC7B,aAAW,OAAO,KAAK;AACrB,QAAI,CAAC,IAAI,WAAW,MAAM,EAAG;AAC7B,UAAM,YAAY,IAAI,YAAY,GAAG;AACrC,QAAI,aAAa,OAAO,OAAQ;AAChC,UAAM,SAAS,IAAI,MAAM,OAAO,QAAQ,SAAS;AACjD,SAAK,IAAI,MAAM;AAAA,EACjB;AACA,SAAO,CAAC,GAAG,IAAI;AACjB;;;ACnQO,SAAS,KAAK,QAAiB,QAAiB,WAAW,IAAiB;AACjF,QAAM,UAAuB,CAAC;AAG9B,MAAI,WAAW,OAAQ,QAAO;AAG9B,MAAI,UAAU,QAAQ,UA
AU,MAAM;AACpC,WAAO,CAAC,EAAE,MAAM,YAAY,UAAU,MAAM,SAAS,IAAI,OAAO,CAAC;AAAA,EACnE;AACA,MAAI,UAAU,QAAQ,UAAU,MAAM;AACpC,WAAO,CAAC,EAAE,MAAM,YAAY,UAAU,MAAM,WAAW,MAAM,OAAO,CAAC;AAAA,EACvE;AAGA,MAAI,OAAO,WAAW,OAAO,QAAQ;AACnC,WAAO,CAAC,EAAE,MAAM,YAAY,UAAU,MAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,CAAC;AAAA,EACnF;AAGA,MAAI,OAAO,WAAW,UAAU;AAC9B,WAAO,CAAC,EAAE,MAAM,YAAY,UAAU,MAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,CAAC;AAAA,EACnF;AAGA,MAAI,MAAM,QAAQ,MAAM,KAAK,MAAM,QAAQ,MAAM,GAAG;AAClD,UAAM,SAAS,KAAK,IAAI,OAAO,QAAQ,OAAO,MAAM;AACpD,aAAS,IAAI,GAAG,IAAI,QAAQ,KAAK;AAC/B,YAAM,IAAI,WAAW,GAAG,QAAQ,IAAI,CAAC,MAAM,IAAI,CAAC;AAChD,UAAI,KAAK,OAAO,QAAQ;AACtB,gBAAQ,KAAK,EAAE,MAAM,GAAG,MAAM,SAAS,IAAI,OAAO,CAAC,EAAE,CAAC;AAAA,MACxD,WAAW,KAAK,OAAO,QAAQ;AAC7B,gBAAQ,KAAK,EAAE,MAAM,GAAG,MAAM,WAAW,MAAM,OAAO,CAAC,EAAE,CAAC;AAAA,MAC5D,OAAO;AACL,gBAAQ,KAAK,GAAG,KAAK,OAAO,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;AAAA,MAC/C;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAGA,QAAM,YAAY;AAClB,QAAM,YAAY;AAClB,QAAM,UAAU,oBAAI,IAAI,CAAC,GAAG,OAAO,KAAK,SAAS,GAAG,GAAG,OAAO,KAAK,SAAS,CAAC,CAAC;AAE9E,aAAW,OAAO,SAAS;AACzB,UAAM,IAAI,WAAW,GAAG,QAAQ,IAAI,GAAG,KAAK;AAC5C,QAAI,EAAE,OAAO,YAAY;AACvB,cAAQ,KAAK,EAAE,MAAM,GAAG,MAAM,SAAS,IAAI,UAAU,GAAG,EAAE,CAAC;AAAA,IAC7D,WAAW,EAAE,OAAO,YAAY;AAC9B,cAAQ,KAAK,EAAE,MAAM,GAAG,MAAM,WAAW,MAAM,UAAU,GAAG,EAAE,CAAC;AAAA,IACjE,OAAO;AACL,cAAQ,KAAK,GAAG,KAAK,UAAU,GAAG,GAAG,UAAU,GAAG,GAAG,CAAC,CAAC;AAAA,IACzD;AAAA,EACF;AAEA,SAAO;AACT;AAGO,SAAS,WAAW,SAA8B;AACvD,MAAI,QAAQ,WAAW,EAAG,QAAO;AACjC,SAAO,QAAQ,IAAI,OAAK;AACtB,YAAQ,EAAE,MAAM;AAAA,MACd,KAAK;AACH,eAAO,KAAK,EAAE,IAAI,KAAK,KAAK,UAAU,EAAE,EAAE,CAAC;AAAA,MAC7C,KAAK;AACH,eAAO,KAAK,EAAE,IAAI,KAAK,KAAK,UAAU,EAAE,IAAI,CAAC;AAAA,MAC/C,KAAK;AACH,eAAO,KAAK,EAAE,IAAI,KAAK,KAAK,UAAU,EAAE,IAAI,CAAC,WAAM,KAAK,UAAU,EAAE,EAAE,CAAC;AAAA,IAC3E;AAAA,EACF,CAAC,EAAE,KAAK,IAAI;AACd;","names":["historyId"]}
@@ -0,0 +1,79 @@
1
+ import {
2
+ ReadOnlyFrameError
3
+ } from "./chunk-ACLDOTNQ.js";
4
+
5
+ // src/shadow/vault-frame.ts
6
+ var VaultFrame = class {
7
+ constructor(vault) {
8
+ this.vault = vault;
9
+ }
10
+ vault;
11
+ /**
12
+ * Get a read-only view of one collection. The returned
13
+ * {@link CollectionFrame} delegates all reads to the underlying
14
+ * live collection — cache, locale handling, and validation all
15
+ * work identically to the live collection.
16
+ */
17
+ collection(name) {
18
+ return new CollectionFrame(this.vault.collection(name), name);
19
+ }
20
+ /** List all collection names visible in the underlying vault. */
21
+ async collections() {
22
+ return this.vault.collections();
23
+ }
24
+ };
25
+ var CollectionFrame = class {
26
+ constructor(inner, name) {
27
+ this.inner = inner;
28
+ this.name = name;
29
+ }
30
+ inner;
31
+ name;
32
+ // ── reads (delegated) ──────────────────────────────────────────────
33
+ get(id, locale) {
34
+ return this.inner.get(id, locale);
35
+ }
36
+ list(locale) {
37
+ return this.inner.list(locale);
38
+ }
39
+ /**
40
+ * Return the chainable query builder. Terminals like `.toArray()`,
41
+ * `.first()`, `.count()`, `.aggregate()` all work; the builder has
42
+ * no write surface of its own, so exposing it directly is safe.
43
+ */
44
+ query(...args) {
45
+ return this.inner.query(...args);
46
+ }
47
+ /** History reads — allowed (history is read-only by nature). */
48
+ history(...args) {
49
+ return this.inner.history(...args);
50
+ }
51
+ getVersion(id, version) {
52
+ return this.inner.getVersion(id, version);
53
+ }
54
+ // ── write guards ──────────────────────────────────────────────────
55
+ async put(_id, _record) {
56
+ throw new ReadOnlyFrameError("put");
57
+ }
58
+ async delete(_id) {
59
+ throw new ReadOnlyFrameError("delete");
60
+ }
61
+ async update(_id, _patch) {
62
+ throw new ReadOnlyFrameError("update");
63
+ }
64
+ async revert(_id, _version) {
65
+ throw new ReadOnlyFrameError("revert");
66
+ }
67
+ async putMany(_entries) {
68
+ throw new ReadOnlyFrameError("putMany");
69
+ }
70
+ async deleteMany(_ids) {
71
+ throw new ReadOnlyFrameError("deleteMany");
72
+ }
73
+ };
74
+
75
+ export {
76
+ VaultFrame,
77
+ CollectionFrame
78
+ };
79
+ //# sourceMappingURL=chunk-R36SIKES.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/shadow/vault-frame.ts"],"sourcesContent":["/**\n * Shadow vaults — `vault.frame()` returns a read-only view of the\n * CURRENT vault state.\n *\n * Companion to {@link VaultInstant} from `history/time-machine.ts`:\n *\n * | Type | Reads from | Use case |\n * |------|------------|----------|\n * | `VaultInstant` | past snapshots (ledger + history) | \"books on date X\" |\n * | `VaultFrame` | live vault state | screen-share / demo / audit |\n *\n * ```ts\n * const readonly = vault.frame()\n * const invoices = await readonly.collection<Invoice>('invoices').list()\n * await readonly.collection<Invoice>('invoices').put(...)\n * // → throws ReadOnlyFrameError\n * ```\n *\n * ## Contract\n *\n * Every write method on {@link CollectionFrame} throws\n * {@link ReadOnlyFrameError}. Reads delegate to the underlying\n * collection, so validation, locale handling, and caching all work\n * exactly as they do on the live collection.\n *\n * ## Security note: behaviour-enforced, not cryptographically-enforced\n *\n * A VaultFrame rejects writes by contract in the JavaScript layer.\n * It does NOT strip the DEKs from the underlying keyring — the same\n * in-memory keys that decrypt records could, in principle, encrypt\n * new writes via a hand-crafted adapter call. Cryptographic\n * enforcement (keyring variants with the write half of each DEK\n * removed) is hierarchical-access work. Use a VaultFrame to\n * prevent *accidental* writes in a read-scoped flow — do not rely on\n * it as a security boundary against a hostile caller sharing the\n * same process.\n *\n * @module\n */\nimport type { Collection } from '../collection.js'\nimport type { Vault } from '../vault.js'\nimport type { LocaleReadOptions } from '../types.js'\nimport { ReadOnlyFrameError } from '../errors.js'\n\n/**\n * A read-only view of a vault's current state. Produced by\n * `vault.frame()`. 
Cheap to construct; safe to throw away.\n */\nexport class VaultFrame {\n constructor(private readonly vault: Vault) {}\n\n /**\n * Get a read-only view of one collection. The returned\n * {@link CollectionFrame} delegates all reads to the underlying\n * live collection — cache, locale handling, and validation all\n * work identically to the live collection.\n */\n collection<T = unknown>(name: string): CollectionFrame<T> {\n return new CollectionFrame<T>(this.vault.collection<T>(name), name)\n }\n\n /** List all collection names visible in the underlying vault. */\n async collections(): Promise<string[]> {\n return this.vault.collections()\n }\n}\n\n/**\n * Read-only collection view. All write methods throw\n * {@link ReadOnlyFrameError}; all read methods delegate to the\n * underlying live {@link Collection}.\n */\nexport class CollectionFrame<T = unknown> {\n constructor(\n private readonly inner: Collection<T>,\n /** The underlying collection name. Captured at construction so\n * we don't need to peek into the private Collection state. */\n public readonly name: string,\n ) {}\n\n // ── reads (delegated) ──────────────────────────────────────────────\n\n get(id: string, locale?: LocaleReadOptions): Promise<T | null> {\n return this.inner.get(id, locale)\n }\n\n list(locale?: LocaleReadOptions): Promise<T[]> {\n return this.inner.list(locale)\n }\n\n /**\n * Return the chainable query builder. Terminals like `.toArray()`,\n * `.first()`, `.count()`, `.aggregate()` all work; the builder has\n * no write surface of its own, so exposing it directly is safe.\n */\n query(...args: Parameters<Collection<T>['query']>): ReturnType<Collection<T>['query']> {\n return this.inner.query(...args)\n }\n\n /** History reads — allowed (history is read-only by nature). 
*/\n history(...args: Parameters<Collection<T>['history']>): ReturnType<Collection<T>['history']> {\n return this.inner.history(...args)\n }\n\n getVersion(id: string, version: number): Promise<T | null> {\n return this.inner.getVersion(id, version)\n }\n\n // ── write guards ──────────────────────────────────────────────────\n\n async put(_id: string, _record: T): Promise<never> {\n throw new ReadOnlyFrameError('put')\n }\n async delete(_id: string): Promise<never> {\n throw new ReadOnlyFrameError('delete')\n }\n async update(_id: string, _patch: Partial<T>): Promise<never> {\n throw new ReadOnlyFrameError('update')\n }\n async revert(_id: string, _version: number): Promise<never> {\n throw new ReadOnlyFrameError('revert')\n }\n async putMany(_entries: ReadonlyArray<readonly [string, T]>): Promise<never> {\n throw new ReadOnlyFrameError('putMany')\n }\n async deleteMany(_ids: readonly string[]): Promise<never> {\n throw new ReadOnlyFrameError('deleteMany')\n }\n}\n"],"mappings":";;;;;AAgDO,IAAM,aAAN,MAAiB;AAAA,EACtB,YAA6B,OAAc;AAAd;AAAA,EAAe;AAAA,EAAf;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ7B,WAAwB,MAAkC;AACxD,WAAO,IAAI,gBAAmB,KAAK,MAAM,WAAc,IAAI,GAAG,IAAI;AAAA,EACpE;AAAA;AAAA,EAGA,MAAM,cAAiC;AACrC,WAAO,KAAK,MAAM,YAAY;AAAA,EAChC;AACF;AAOO,IAAM,kBAAN,MAAmC;AAAA,EACxC,YACmB,OAGD,MAChB;AAJiB;AAGD;AAAA,EACf;AAAA,EAJgB;AAAA,EAGD;AAAA;AAAA,EAKlB,IAAI,IAAY,QAA+C;AAC7D,WAAO,KAAK,MAAM,IAAI,IAAI,MAAM;AAAA,EAClC;AAAA,EAEA,KAAK,QAA0C;AAC7C,WAAO,KAAK,MAAM,KAAK,MAAM;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAS,MAA8E;AACrF,WAAO,KAAK,MAAM,MAAM,GAAG,IAAI;AAAA,EACjC;AAAA;AAAA,EAGA,WAAW,MAAkF;AAC3F,WAAO,KAAK,MAAM,QAAQ,GAAG,IAAI;AAAA,EACnC;AAAA,EAEA,WAAW,IAAY,SAAoC;AACzD,WAAO,KAAK,MAAM,WAAW,IAAI,OAAO;AAAA,EAC1C;AAAA;AAAA,EAIA,MAAM,IAAI,KAAa,SAA4B;AACjD,UAAM,IAAI,mBAAmB,KAAK;AAAA,EACpC;AAAA,EACA,MAAM,OAAO,KAA6B;AACxC,UAAM,IAAI,mBAAmB,QAAQ;AAAA,EACvC;AAAA,EACA,MAAM,OAAO,KAAa,QAAoC;AAC5D,UAAM,IAAI,mBAAmB,QAAQ;AAAA,EACvC;AAAA,EACA,MAAM,OAAO,KAAa,UAAkC;AAC1D,UAAM,IAAI,mBAAmB,QAAQ;AAAA
,EACvC;AAAA,EACA,MAAM,QAAQ,UAA+D;AAC3E,UAAM,IAAI,mBAAmB,SAAS;AAAA,EACxC;AAAA,EACA,MAAM,WAAW,MAAyC;AACxD,UAAM,IAAI,mBAAmB,YAAY;AAAA,EAC3C;AACF;","names":[]}