@noy-db/hub 0.1.0-pre.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (195)
  1. package/LICENSE +21 -0
  2. package/README.md +197 -0
  3. package/dist/aggregate/index.cjs +476 -0
  4. package/dist/aggregate/index.cjs.map +1 -0
  5. package/dist/aggregate/index.d.cts +38 -0
  6. package/dist/aggregate/index.d.ts +38 -0
  7. package/dist/aggregate/index.js +53 -0
  8. package/dist/aggregate/index.js.map +1 -0
  9. package/dist/blobs/index.cjs +1480 -0
  10. package/dist/blobs/index.cjs.map +1 -0
  11. package/dist/blobs/index.d.cts +45 -0
  12. package/dist/blobs/index.d.ts +45 -0
  13. package/dist/blobs/index.js +48 -0
  14. package/dist/blobs/index.js.map +1 -0
  15. package/dist/bundle/index.cjs +436 -0
  16. package/dist/bundle/index.cjs.map +1 -0
  17. package/dist/bundle/index.d.cts +7 -0
  18. package/dist/bundle/index.d.ts +7 -0
  19. package/dist/bundle/index.js +40 -0
  20. package/dist/bundle/index.js.map +1 -0
  21. package/dist/chunk-2QR2PQTT.js +217 -0
  22. package/dist/chunk-2QR2PQTT.js.map +1 -0
  23. package/dist/chunk-4OWFYIDQ.js +79 -0
  24. package/dist/chunk-4OWFYIDQ.js.map +1 -0
  25. package/dist/chunk-5AATM2M2.js +90 -0
  26. package/dist/chunk-5AATM2M2.js.map +1 -0
  27. package/dist/chunk-ACLDOTNQ.js +543 -0
  28. package/dist/chunk-ACLDOTNQ.js.map +1 -0
  29. package/dist/chunk-BTDCBVJW.js +160 -0
  30. package/dist/chunk-BTDCBVJW.js.map +1 -0
  31. package/dist/chunk-CIMZBAZB.js +72 -0
  32. package/dist/chunk-CIMZBAZB.js.map +1 -0
  33. package/dist/chunk-E445ICYI.js +365 -0
  34. package/dist/chunk-E445ICYI.js.map +1 -0
  35. package/dist/chunk-EXQRC2L4.js +722 -0
  36. package/dist/chunk-EXQRC2L4.js.map +1 -0
  37. package/dist/chunk-FZU343FL.js +32 -0
  38. package/dist/chunk-FZU343FL.js.map +1 -0
  39. package/dist/chunk-GJILMRPO.js +354 -0
  40. package/dist/chunk-GJILMRPO.js.map +1 -0
  41. package/dist/chunk-GOUT6DND.js +1285 -0
  42. package/dist/chunk-GOUT6DND.js.map +1 -0
  43. package/dist/chunk-J66GRPNH.js +111 -0
  44. package/dist/chunk-J66GRPNH.js.map +1 -0
  45. package/dist/chunk-M2F2JAWB.js +464 -0
  46. package/dist/chunk-M2F2JAWB.js.map +1 -0
  47. package/dist/chunk-M5INGEFC.js +84 -0
  48. package/dist/chunk-M5INGEFC.js.map +1 -0
  49. package/dist/chunk-M62XNWRA.js +72 -0
  50. package/dist/chunk-M62XNWRA.js.map +1 -0
  51. package/dist/chunk-MR4424N3.js +275 -0
  52. package/dist/chunk-MR4424N3.js.map +1 -0
  53. package/dist/chunk-NPC4LFV5.js +132 -0
  54. package/dist/chunk-NPC4LFV5.js.map +1 -0
  55. package/dist/chunk-NXFEYLVG.js +311 -0
  56. package/dist/chunk-NXFEYLVG.js.map +1 -0
  57. package/dist/chunk-R36SIKES.js +79 -0
  58. package/dist/chunk-R36SIKES.js.map +1 -0
  59. package/dist/chunk-TDR6T5CJ.js +381 -0
  60. package/dist/chunk-TDR6T5CJ.js.map +1 -0
  61. package/dist/chunk-UF3BUNQZ.js +1 -0
  62. package/dist/chunk-UF3BUNQZ.js.map +1 -0
  63. package/dist/chunk-UQFSPSWG.js +1109 -0
  64. package/dist/chunk-UQFSPSWG.js.map +1 -0
  65. package/dist/chunk-USKYUS74.js +793 -0
  66. package/dist/chunk-USKYUS74.js.map +1 -0
  67. package/dist/chunk-XCL3WP6J.js +121 -0
  68. package/dist/chunk-XCL3WP6J.js.map +1 -0
  69. package/dist/chunk-XHFOENR2.js +680 -0
  70. package/dist/chunk-XHFOENR2.js.map +1 -0
  71. package/dist/chunk-ZFKD4QMV.js +430 -0
  72. package/dist/chunk-ZFKD4QMV.js.map +1 -0
  73. package/dist/chunk-ZLMV3TUA.js +490 -0
  74. package/dist/chunk-ZLMV3TUA.js.map +1 -0
  75. package/dist/chunk-ZRG4V3F5.js +17 -0
  76. package/dist/chunk-ZRG4V3F5.js.map +1 -0
  77. package/dist/consent/index.cjs +204 -0
  78. package/dist/consent/index.cjs.map +1 -0
  79. package/dist/consent/index.d.cts +24 -0
  80. package/dist/consent/index.d.ts +24 -0
  81. package/dist/consent/index.js +23 -0
  82. package/dist/consent/index.js.map +1 -0
  83. package/dist/crdt/index.cjs +152 -0
  84. package/dist/crdt/index.cjs.map +1 -0
  85. package/dist/crdt/index.d.cts +30 -0
  86. package/dist/crdt/index.d.ts +30 -0
  87. package/dist/crdt/index.js +24 -0
  88. package/dist/crdt/index.js.map +1 -0
  89. package/dist/crypto-IVKU7YTT.js +44 -0
  90. package/dist/crypto-IVKU7YTT.js.map +1 -0
  91. package/dist/delegation-XDJCBTI2.js +16 -0
  92. package/dist/delegation-XDJCBTI2.js.map +1 -0
  93. package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
  94. package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
  95. package/dist/hash-9KO1BGxh.d.cts +63 -0
  96. package/dist/hash-ChfJjRjQ.d.ts +63 -0
  97. package/dist/history/index.cjs +1215 -0
  98. package/dist/history/index.cjs.map +1 -0
  99. package/dist/history/index.d.cts +62 -0
  100. package/dist/history/index.d.ts +62 -0
  101. package/dist/history/index.js +79 -0
  102. package/dist/history/index.js.map +1 -0
  103. package/dist/i18n/index.cjs +746 -0
  104. package/dist/i18n/index.cjs.map +1 -0
  105. package/dist/i18n/index.d.cts +38 -0
  106. package/dist/i18n/index.d.ts +38 -0
  107. package/dist/i18n/index.js +55 -0
  108. package/dist/i18n/index.js.map +1 -0
  109. package/dist/index-BRHBCmLt.d.ts +1940 -0
  110. package/dist/index-C8kQtmOk.d.ts +380 -0
  111. package/dist/index-DN-J-5wT.d.cts +1940 -0
  112. package/dist/index-DhjMjz7L.d.cts +380 -0
  113. package/dist/index.cjs +14756 -0
  114. package/dist/index.cjs.map +1 -0
  115. package/dist/index.d.cts +269 -0
  116. package/dist/index.d.ts +269 -0
  117. package/dist/index.js +6085 -0
  118. package/dist/index.js.map +1 -0
  119. package/dist/indexing/index.cjs +736 -0
  120. package/dist/indexing/index.cjs.map +1 -0
  121. package/dist/indexing/index.d.cts +36 -0
  122. package/dist/indexing/index.d.ts +36 -0
  123. package/dist/indexing/index.js +77 -0
  124. package/dist/indexing/index.js.map +1 -0
  125. package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
  126. package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
  127. package/dist/ledger-2NX4L7PN.js +33 -0
  128. package/dist/ledger-2NX4L7PN.js.map +1 -0
  129. package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
  130. package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
  131. package/dist/periods/index.cjs +1035 -0
  132. package/dist/periods/index.cjs.map +1 -0
  133. package/dist/periods/index.d.cts +21 -0
  134. package/dist/periods/index.d.ts +21 -0
  135. package/dist/periods/index.js +25 -0
  136. package/dist/periods/index.js.map +1 -0
  137. package/dist/predicate-SBHmi6D0.d.cts +161 -0
  138. package/dist/predicate-SBHmi6D0.d.ts +161 -0
  139. package/dist/query/index.cjs +1957 -0
  140. package/dist/query/index.cjs.map +1 -0
  141. package/dist/query/index.d.cts +3 -0
  142. package/dist/query/index.d.ts +3 -0
  143. package/dist/query/index.js +62 -0
  144. package/dist/query/index.js.map +1 -0
  145. package/dist/session/index.cjs +487 -0
  146. package/dist/session/index.cjs.map +1 -0
  147. package/dist/session/index.d.cts +45 -0
  148. package/dist/session/index.d.ts +45 -0
  149. package/dist/session/index.js +44 -0
  150. package/dist/session/index.js.map +1 -0
  151. package/dist/shadow/index.cjs +133 -0
  152. package/dist/shadow/index.cjs.map +1 -0
  153. package/dist/shadow/index.d.cts +16 -0
  154. package/dist/shadow/index.d.ts +16 -0
  155. package/dist/shadow/index.js +20 -0
  156. package/dist/shadow/index.js.map +1 -0
  157. package/dist/store/index.cjs +1069 -0
  158. package/dist/store/index.cjs.map +1 -0
  159. package/dist/store/index.d.cts +491 -0
  160. package/dist/store/index.d.ts +491 -0
  161. package/dist/store/index.js +34 -0
  162. package/dist/store/index.js.map +1 -0
  163. package/dist/strategy-BSxFXGzb.d.cts +110 -0
  164. package/dist/strategy-BSxFXGzb.d.ts +110 -0
  165. package/dist/strategy-D-SrOLCl.d.cts +548 -0
  166. package/dist/strategy-D-SrOLCl.d.ts +548 -0
  167. package/dist/sync/index.cjs +1062 -0
  168. package/dist/sync/index.cjs.map +1 -0
  169. package/dist/sync/index.d.cts +42 -0
  170. package/dist/sync/index.d.ts +42 -0
  171. package/dist/sync/index.js +28 -0
  172. package/dist/sync/index.js.map +1 -0
  173. package/dist/team/index.cjs +1233 -0
  174. package/dist/team/index.cjs.map +1 -0
  175. package/dist/team/index.d.cts +117 -0
  176. package/dist/team/index.d.ts +117 -0
  177. package/dist/team/index.js +39 -0
  178. package/dist/team/index.js.map +1 -0
  179. package/dist/tx/index.cjs +212 -0
  180. package/dist/tx/index.cjs.map +1 -0
  181. package/dist/tx/index.d.cts +20 -0
  182. package/dist/tx/index.d.ts +20 -0
  183. package/dist/tx/index.js +20 -0
  184. package/dist/tx/index.js.map +1 -0
  185. package/dist/types-BZpCZB8N.d.ts +7526 -0
  186. package/dist/types-Bfs0qr5F.d.cts +7526 -0
  187. package/dist/ulid-COREQ2RQ.js +9 -0
  188. package/dist/ulid-COREQ2RQ.js.map +1 -0
  189. package/dist/util/index.cjs +230 -0
  190. package/dist/util/index.cjs.map +1 -0
  191. package/dist/util/index.d.cts +77 -0
  192. package/dist/util/index.d.ts +77 -0
  193. package/dist/util/index.js +190 -0
  194. package/dist/util/index.js.map +1 -0
  195. package/package.json +244 -0
@@ -0,0 +1,680 @@
1
+ import {
2
+ canonicalJson,
3
+ hashEntry,
4
+ paddedIndex,
5
+ sha256Hex
6
+ } from "./chunk-CIMZBAZB.js";
7
+ import {
8
+ NOYDB_FORMAT_VERSION
9
+ } from "./chunk-ZRG4V3F5.js";
10
+ import {
11
+ decrypt,
12
+ encrypt
13
+ } from "./chunk-MR4424N3.js";
14
+ import {
15
+ ConflictError,
16
+ LedgerContentionError
17
+ } from "./chunk-ACLDOTNQ.js";
18
+
19
+ // src/history/ledger/patch.ts
20
/**
 * Compute a JSON-Patch-style operation list that transforms `prev`
 * into `next`. Delegates the recursive walk to `diff`, rooted at the
 * empty path.
 * @returns {Array<{op: string, path: string, value?: unknown}>}
 */
function computePatch(prev, next) {
  const patchOps = [];
  diff(prev, next, "", patchOps);
  return patchOps;
}
25
/**
 * Recursive structural diff between `prev` and `next` at `path`,
 * appending patch operations to `out`. Arrays are treated atomically:
 * any difference produces a whole-array "replace" rather than
 * element-level splices.
 */
function diff(prev, next, path, out) {
  if (prev === next) return;

  const replaceHere = () => {
    out.push({ op: "replace", path, value: next });
  };

  // null on either side: nothing finer-grained than a replace.
  if (prev === null || next === null) {
    replaceHere();
    return;
  }

  const prevArr = Array.isArray(prev);
  const nextArr = Array.isArray(next);
  const prevObj = typeof prev === "object" && !prevArr;
  const nextObj = typeof next === "object" && !nextArr;

  // Category change (array <-> object <-> primitive): replace wholesale.
  if (prevArr !== nextArr || prevObj !== nextObj) {
    replaceHere();
    return;
  }

  if (prevArr) {
    // Both arrays: atomic compare, atomic replace.
    if (!arrayDeepEqual(prev, next)) {
      replaceHere();
    }
    return;
  }

  if (prevObj) {
    // Both plain objects: removals/recursions in prev-key order,
    // then additions in next-key order.
    for (const key of Object.keys(prev)) {
      const childPath = path + "/" + escapePathSegment(key);
      if (key in next) {
        diff(prev[key], next[key], childPath, out);
      } else {
        out.push({ op: "remove", path: childPath });
      }
    }
    for (const key of Object.keys(next)) {
      if (!(key in prev)) {
        out.push({
          op: "add",
          path: path + "/" + escapePathSegment(key),
          value: next[key]
        });
      }
    }
    return;
  }

  // Differing primitives.
  replaceHere();
}
71
/**
 * Element-wise deep equality for two arrays. Short-circuits on the
 * first mismatch, exactly like an indexed loop would.
 */
function arrayDeepEqual(a, b) {
  if (a.length !== b.length) return false;
  return a.every((item, i) => deepEqual(item, b[i]));
}
78
/**
 * Deep structural equality for JSON-shaped values. Primitives compare
 * with `===`; arrays defer to `arrayDeepEqual`; objects compare own
 * enumerable keys recursively.
 */
function deepEqual(a, b) {
  if (a === b) return true;
  if (a === null || b === null) return false;
  if (typeof a !== typeof b) return false;
  // Same non-object type but not ===: unequal primitives.
  if (typeof a !== "object") return false;

  const aIsArray = Array.isArray(a);
  if (aIsArray !== Array.isArray(b)) return false;
  if (aIsArray) return arrayDeepEqual(a, b);

  const keysA = Object.keys(a);
  const keysB = Object.keys(b);
  if (keysA.length !== keysB.length) return false;
  return keysA.every((key) => key in b && deepEqual(a[key], b[key]));
}
98
/**
 * Apply a patch (list of ops) to `base`, returning a new document.
 * `base` is cloned up front so the caller's value is never mutated;
 * each op is folded over the working copy in order.
 */
function applyPatch(base, patch) {
  return patch.reduce((doc, op) => applyOp(doc, op), clone(base));
}
105
/**
 * Apply one patch op to `doc`. The empty path addresses the document
 * root: "remove" yields null, anything else replaces the whole doc
 * with a clone of the op's value. Non-root paths are parsed into
 * segments and walked.
 */
function applyOp(doc, op) {
  if (op.path === "") {
    return op.op === "remove" ? null : clone(op.value);
  }
  return walkAndApply(doc, parsePath(op.path), op);
}
113
/**
 * Recursively descend `doc` along `segments`, rebuilding each visited
 * container immutably (array copy / object spread) and applying `op`
 * at the terminal segment. Throws when a path segment cannot be
 * followed.
 */
function walkAndApply(doc, segments, op) {
  if (segments.length === 0) {
    throw new Error("walkAndApply: empty segments (internal error)");
  }
  const [head, ...rest] = segments;
  if (head === void 0) throw new Error("walkAndApply: undefined segment");

  // Last segment: hand off to the terminal-op handler.
  if (rest.length === 0) {
    return applyAtTerminal(doc, head, op);
  }

  if (Array.isArray(doc)) {
    const idx = parseArrayIndex(head, doc.length);
    const copy = doc.slice();
    copy[idx] = walkAndApply(doc[idx], rest, op);
    return copy;
  }

  if (doc !== null && typeof doc === "object") {
    if (!(head in doc)) {
      throw new Error(`applyPatch: path segment "${head}" not found in object`);
    }
    return { ...doc, [head]: walkAndApply(doc[head], rest, op) };
  }

  // Primitive or null mid-path: the path is invalid for this doc.
  throw new Error(
    `applyPatch: cannot step into ${typeof doc} at segment "${head}"`
  );
}
142
/**
 * Apply `op` at its final path segment against `doc`, returning a new
 * container (the input is never mutated). Arrays accept the JSON
 * Pointer "-" segment (append position) for "add"; "replace" on an
 * out-of-bounds index and "remove"/"replace" on a missing object key
 * throw.
 */
function applyAtTerminal(doc, segment, op) {
  if (Array.isArray(doc)) {
    const idx =
      segment === "-" ? doc.length : parseArrayIndex(segment, doc.length + 1);
    const copy = doc.slice();
    switch (op.op) {
      case "remove":
        copy.splice(idx, 1);
        return copy;
      case "add":
        copy.splice(idx, 0, clone(op.value));
        return copy;
      case "replace":
        if (idx >= doc.length) {
          throw new Error(
            `applyPatch: replace at out-of-bounds array index ${idx}`
          );
        }
        copy[idx] = clone(op.value);
        return copy;
    }
    // Unknown op: fall through, matching the object branch below.
  }
  if (doc !== null && typeof doc === "object") {
    switch (op.op) {
      case "remove": {
        if (!(segment in doc)) {
          throw new Error(
            `applyPatch: remove on missing key "${segment}"`
          );
        }
        const copy = { ...doc };
        delete copy[segment];
        return copy;
      }
      case "add":
        return { ...doc, [segment]: clone(op.value) };
      case "replace":
        if (!(segment in doc)) {
          throw new Error(
            `applyPatch: replace on missing key "${segment}"`
          );
        }
        return { ...doc, [segment]: clone(op.value) };
    }
  }
  throw new Error(
    `applyPatch: cannot apply ${op.op} at terminal segment "${segment}"`
  );
}
192
/**
 * Escape one key for use as a JSON Pointer path segment (RFC 6901):
 * "~" becomes "~0" first, then "/" becomes "~1".
 */
function escapePathSegment(segment) {
  return segment.replaceAll("~", "~0").replaceAll("/", "~1");
}
195
/**
 * Inverse of `escapePathSegment` (RFC 6901 order): "~1" back to "/"
 * first, then "~0" back to "~".
 */
function unescapePathSegment(segment) {
  return segment.replaceAll("~1", "/").replaceAll("~0", "~");
}
198
/**
 * Split a JSON Pointer path into unescaped segments. The leading "/"
 * is mandatory (the empty root path is handled earlier in `applyOp`).
 */
function parsePath(path) {
  if (path[0] !== "/") {
    throw new Error(`applyPatch: path must start with '/', got "${path}"`);
  }
  const rawSegments = path.slice(1).split("/");
  return rawSegments.map((seg) => unescapePathSegment(seg));
}
204
/**
 * Parse a path segment as an array index and bounds-check it against
 * `max` (inclusive — callers pass `length` or `length + 1` depending
 * on whether an append position is legal). Throws on non-digit input
 * or an out-of-range value.
 */
function parseArrayIndex(segment, max) {
  const digitsOnly = /^\d+$/;
  if (!digitsOnly.test(segment)) {
    throw new Error(
      `applyPatch: array index must be a non-negative integer, got "${segment}"`
    );
  }
  const idx = Number.parseInt(segment, 10);
  if (idx < 0 || idx > max) {
    throw new Error(
      `applyPatch: array index ${idx} out of range [0, ${max}]`
    );
  }
  return idx;
}
218
/**
 * Deep-clone a JSON-shaped value. Primitives (and null/undefined) are
 * returned as-is; containers go through a JSON round-trip, which is
 * safe here because patch documents are JSON data. NOTE: the JSON
 * round-trip drops `undefined`-valued keys — deliberate for this
 * module, so don't swap in structuredClone.
 */
function clone(value) {
  if (value === null || value === undefined) return value;
  if (typeof value !== "object") return value;
  return JSON.parse(JSON.stringify(value));
}
223
+
224
// src/history/ledger/constants.ts

// Reserved collection names used by the ledger. `_ledger` holds the
// hash-chained entry envelopes; `_ledger_deltas` holds the reverse
// JSON-patch payloads, keyed by the same padded entry index.
var LEDGER_COLLECTION = "_ledger";
var LEDGER_DELTAS_COLLECTION = "_ledger_deltas";
227
+
228
// src/history/ledger/store.ts

// Upper bound on the optimistic-CAS retry loop in LedgerStore.append();
// after this many ConflictError attempts a LedgerContentionError is thrown.
var MAX_APPEND_ATTEMPTS = 8;
230
/**
 * Append-only, hash-chained ledger over a storage adapter. Entries are
 * written to the `_ledger` collection and (optionally) reverse-patch
 * deltas to `_ledger_deltas`, both keyed by zero-padded index.
 *
 * Concurrency caveat: a single instance caches the chain head in
 * memory. Two LedgerStore instances appending to the same vault rely
 * on the adapter's CAS (`expectedVersion: 0`) for correctness; stores
 * without atomic CAS need external single-writer discipline.
 */
var LedgerStore = class {
  // Storage adapter providing get/put/list over (vault, collection, key).
  adapter;
  // Vault identifier passed through to every adapter call.
  vault;
  // When false, envelopes carry plaintext JSON in `_data` with an empty `_iv`.
  encrypted;
  // Async lookup of the data-encryption key for a collection.
  getDEK;
  // Default actor id stamped into entries when the input actor is "".
  actor;
  /**
   * In-memory cache of the chain head — the most recently appended
   * entry along with its precomputed hash. Without this, every
   * `append()` would re-load every prior entry to recompute the
   * prevHash, making N puts O(N²) — a 1K-record stress test goes from
   * < 100ms to a multi-second timeout.
   *
   * The cache is populated on first read (`append`, `head`, `verify`)
   * and updated in-place on every successful `append`. Single-writer
   * usage (the assumption) keeps it consistent. A second
   * LedgerStore instance writing to the same vault would not
   * see the first instance's appends in its cached state — that's the
   * concurrency caveat documented at the class level.
   *
   * Sentinel `undefined` means "not yet loaded"; an explicit `null`
   * value means "loaded and confirmed empty" — distinguishing these
   * matters because an empty ledger is a valid state (genesis prevHash
   * is the empty string), and we don't want to re-scan the adapter
   * just because the chain is freshly initialized.
   */
  headCache = void 0;
  /** Wire up the adapter, vault, encryption mode, key lookup, and default actor. */
  constructor(opts) {
    this.adapter = opts.adapter;
    this.vault = opts.vault;
    this.encrypted = opts.encrypted;
    this.getDEK = opts.getDEK;
    this.actor = opts.actor;
  }
  /**
   * Lazily load (or return cached) the current chain head. The cache
   * sentinel is `undefined` until first access; after the first call,
   * the cache holds either a `{ entry, hash }` for non-empty ledgers
   * or `null` for empty ones.
   */
  async getCachedHead() {
    if (this.headCache !== void 0) return this.headCache;
    const entries = await this.loadAllEntries();
    const last = entries[entries.length - 1];
    if (!last) {
      this.headCache = null;
      return null;
    }
    this.headCache = { entry: last, hash: await hashEntry(last) };
    return this.headCache;
  }
  /**
   * Append a new entry to the ledger. Returns the full entry that was
   * written (with its assigned index and computed prevHash) so the
   * caller can use the hash for downstream purposes (e.g., embedding
   * in a verifiable backup).
   *
   * This is the **only** way to add entries. Direct adapter writes to
   * `_ledger/` would bypass the chain math and would be caught by the
   * next `verify()` call as a divergence.
   *
   * ## Multi-writer correctness
   *
   * Append is implemented as an optimistic-CAS retry loop. On every
   * attempt:
   *
   * 1. Read fresh head (cache invalidated on retry).
   * 2. Compute `nextIndex = head.index + 1`, `prevHash = hash(head)`.
   * 3. Encrypt delta payload IN MEMORY (no adapter write yet) so we
   *    can compute `deltaHash` before claiming the chain slot.
   * 4. Build + encrypt the entry envelope.
   * 5. `adapter.put(_ledger, paddedIndex, envelope, expectedVersion: 0)`
   *    — the `expectedVersion: 0` asserts "this slot must not exist."
   *    Stores with `casAtomic: true` honor the CAS check; under
   *    contention the second writer's put throws `ConflictError`.
   * 6. On `ConflictError`: invalidate the head cache, sleep with
   *    bounded backoff + jitter, retry. After `MAX_APPEND_ATTEMPTS`
   *    retries throw {@link LedgerContentionError}.
   * 7. On success: write the delta envelope (if any) at the same
   *    index. Update the head cache.
   *
   * Entry-first ordering matters: writing the delta first under
   * contention would orphan delta records at indices the writer never
   * actually claimed. The deltaHash is computed off the encrypted
   * envelope's `_data` field, which doesn't require the envelope to
   * be persisted.
   *
   * Stores with `casAtomic: false` (file, s3, r2 by default) silently
   * accept the `expectedVersion: 0` argument and proceed without a
   * CAS check. Concurrent appends against those stores remain
   * best-effort — pair them with an advisory lock or with sync
   * single-writer discipline.
   */
  async append(input) {
    let lastConflict;
    for (let attempt = 0; attempt < MAX_APPEND_ATTEMPTS; attempt++) {
      if (attempt > 0) {
        // Retry: drop the cached head so appendOnce re-reads the chain.
        this.headCache = void 0;
      }
      try {
        return await this.appendOnce(input);
      } catch (err) {
        if (err instanceof ConflictError) {
          lastConflict = err;
          if (attempt < MAX_APPEND_ATTEMPTS - 1) {
            await sleepBackoff(attempt);
          }
          continue;
        }
        // Non-CAS failures are not retryable.
        throw err;
      }
    }
    void lastConflict;
    throw new LedgerContentionError(MAX_APPEND_ATTEMPTS);
  }
  /**
   * One attempt at the append cycle. Throws `ConflictError` when the
   * CAS check on the entry put fails — `append()` catches that and
   * retries. Any other error propagates to the caller.
   */
  async appendOnce(input) {
    const cached = await this.getCachedHead();
    const lastEntry = cached?.entry;
    const prevHash = cached?.hash ?? "";
    const nextIndex = lastEntry ? lastEntry.index + 1 : 0;
    let deltaEnvelope;
    let deltaHash;
    if (input.delta !== void 0) {
      // Encrypt in memory only, so the hash can go into the entry
      // before the chain slot is claimed.
      deltaEnvelope = await this.encryptDelta(input.delta);
      deltaHash = await sha256Hex(deltaEnvelope._data);
    }
    const entryBase = {
      index: nextIndex,
      prevHash,
      op: input.op,
      collection: input.collection,
      id: input.id,
      version: input.version,
      ts: (/* @__PURE__ */ new Date()).toISOString(),
      actor: input.actor === "" ? this.actor : input.actor,
      payloadHash: input.payloadHash
    };
    const entry = deltaHash !== void 0 ? { ...entryBase, deltaHash } : entryBase;
    const envelope = await this.encryptEntry(entry);
    // expectedVersion 0 = "slot must not exist"; CAS-capable stores
    // throw ConflictError here under contention.
    await this.adapter.put(
      this.vault,
      LEDGER_COLLECTION,
      paddedIndex(entry.index),
      envelope,
      0
    );
    if (deltaEnvelope) {
      await this.adapter.put(
        this.vault,
        LEDGER_DELTAS_COLLECTION,
        paddedIndex(entry.index),
        deltaEnvelope,
        0
      );
    }
    this.headCache = { entry, hash: await hashEntry(entry) };
    return entry;
  }
  /**
   * Load a delta payload by its entry index. Returns `null` if the
   * entry at that index doesn't reference a delta (genesis puts and
   * deletes leave the slot empty) or if the delta row is missing
   * (possible after a `pruneHistory` fold).
   *
   * The caller is responsible for deciding what to do with a missing
   * delta — `ledger.reconstruct()` uses it as a "stop walking
   * backward" signal and falls back to the on-disk current value.
   */
  async loadDelta(index) {
    const envelope = await this.adapter.get(
      this.vault,
      LEDGER_DELTAS_COLLECTION,
      paddedIndex(index)
    );
    if (!envelope) return null;
    if (!this.encrypted) {
      return JSON.parse(envelope._data);
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const json = await decrypt(envelope._iv, envelope._data, dek);
    return JSON.parse(json);
  }
  /** Encrypt a JSON Patch into an envelope for storage. Mirrors encryptEntry. */
  async encryptDelta(patch) {
    const json = JSON.stringify(patch);
    if (!this.encrypted) {
      return {
        _noydb: NOYDB_FORMAT_VERSION,
        _v: 1,
        _ts: (/* @__PURE__ */ new Date()).toISOString(),
        _iv: "",
        _data: json,
        _by: this.actor
      };
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const { iv, data } = await encrypt(json, dek);
    return {
      _noydb: NOYDB_FORMAT_VERSION,
      _v: 1,
      _ts: (/* @__PURE__ */ new Date()).toISOString(),
      _iv: iv,
      _data: data,
      _by: this.actor
    };
  }
  /**
   * Read all entries in ascending-index order. Used internally by
   * `append()`, `head()`, `verify()`, and `entries()`. Decryption is
   * serial because the entries are tiny and the overhead of a Promise
   * pool would dominate at realistic chain lengths (< 100K entries).
   */
  async loadAllEntries() {
    const keys = await this.adapter.list(this.vault, LEDGER_COLLECTION);
    // Keys are zero-padded indices, so lexicographic sort is numeric order.
    keys.sort();
    const entries = [];
    for (const key of keys) {
      const envelope = await this.adapter.get(
        this.vault,
        LEDGER_COLLECTION,
        key
      );
      if (!envelope) continue;
      entries.push(await this.decryptEntry(envelope));
    }
    return entries;
  }
  /**
   * Return the current head of the ledger: the last entry, its hash,
   * and the total chain length. `null` on an empty ledger so callers
   * can distinguish "no history yet" from "empty history".
   */
  async head() {
    const cached = await this.getCachedHead();
    if (!cached) return null;
    return {
      entry: cached.entry,
      hash: cached.hash,
      length: cached.entry.index + 1
    };
  }
  /**
   * Return entries in the requested half-open range `[from, to)`.
   * Defaults: `from = 0`, `to = length`. The indices are clipped to
   * the valid range; no error is thrown for out-of-range queries.
   */
  async entries(opts = {}) {
    const all = await this.loadAllEntries();
    const from = Math.max(0, opts.from ?? 0);
    const to = Math.min(all.length, opts.to ?? all.length);
    return all.slice(from, to);
  }
  /**
   * Reconstruct a record's state at a given historical version by
   * walking the ledger's delta chain backward from the current state.
   *
   * ## Algorithm
   *
   * Ledger deltas are stored in **reverse** form — each entry's
   * patch describes how to undo that put, transforming the new
   * record back into the previous one. `reconstruct` exploits this
   * by:
   *
   * 1. Finding every ledger entry for `(collection, id)` in the
   *    chain, sorted by index ascending.
   * 2. Starting from `current` (the present value of the record,
   *    as held by the caller — typically fetched via
   *    `Collection.get()`).
   * 3. Walking entries in **descending** index order and applying
   *    each entry's reverse patch, stopping when we reach the
   *    entry whose version equals `atVersion`.
   *
   * The result is the record as it existed immediately AFTER the
   * put at `atVersion`. To get the state at the genesis put
   * (version 1), the walk runs all the way back through every put
   * after the first.
   *
   * ## Caveats
   *
   * - **Delete entries** break the walk: once we see a delete, the
   *   record didn't exist before that point, so there's nothing to
   *   reconstruct. We return `null` in that case.
   * - **Missing deltas** (e.g., after `pruneHistory` folds old
   *   entries into a base snapshot) also stop the walk. The current
   *   release does not ship `pruneHistory`, so today this only
   *   happens if an entry was deleted out-of-band.
   * - The caller MUST pass the correct current value. Passing a
   *   mutated object would corrupt the reconstruction — the patch
   *   chain is only valid against the exact state that was in
   *   effect when the most recent put happened.
   *
   * For now, `reconstruct` is the only way to read a historical
   * version via deltas. The legacy `_history` collection still
   * holds full snapshots and `Collection.getVersion()` still reads
   * from there — the two paths coexist until pruneHistory lands in
   * a follow-up and delta becomes the default.
   */
  async reconstruct(collection, id, current, atVersion) {
    const all = await this.loadAllEntries();
    const matching = all.filter(
      (e) => e.collection === collection && e.id === id
    );
    if (matching.length === 0) {
      return null;
    }
    let state = current;
    for (let i = matching.length - 1; i >= 0; i--) {
      const entry = matching[i];
      if (!entry) continue;
      if (entry.version === atVersion && entry.op !== "delete") {
        return state;
      }
      if (entry.op === "delete") {
        return null;
      }
      if (entry.deltaHash === void 0) {
        // No delta to walk through; the target is only reachable if
        // this entry IS the target version.
        if (entry.version === atVersion) return state;
        return null;
      }
      const patch = await this.loadDelta(entry.index);
      if (!patch) {
        return null;
      }
      if (state === null) {
        return null;
      }
      state = applyPatch(state, patch);
    }
    return null;
  }
  /**
   * Walk the chain from genesis forward and verify every link.
   *
   * Returns `{ ok: true, head, length }` if every entry's `prevHash`
   * matches the recomputed hash of its predecessor (and the genesis
   * entry's `prevHash` is the empty string).
   *
   * Returns `{ ok: false, divergedAt, expected, actual }` on the first
   * mismatch. `divergedAt` is the 0-based index of the BROKEN entry
   * — entries before that index still verify cleanly; entries at and
   * after `divergedAt` are untrustworthy.
   *
   * This method detects:
   * - Mutated entry content (fields changed)
   * - Reordered entries (if any adjacent pair swaps, the prevHash
   *   of the second no longer matches)
   * - Inserted entries (the inserted entry's prevHash likely fails,
   *   and the following entry's prevHash definitely fails)
   * - Deleted entries (the entry after the deletion sees a wrong
   *   prevHash)
   *
   * It does NOT detect:
   * - Tampering with the DATA collections that bypassed the ledger
   *   entirely (e.g., an attacker who modifies records without
   *   appending matching ledger entries — this is why we also
   *   plan a `verifyIntegrity()` helper in a follow-up)
   * - Truncation of the chain at the tail (dropping the last N
   *   entries leaves a shorter but still consistent chain). External
   *   anchoring of `head.hash` to a trusted service is the defense
   *   against this.
   */
  async verify() {
    const entries = await this.loadAllEntries();
    let expectedPrevHash = "";
    for (let i = 0; i < entries.length; i++) {
      const entry = entries[i];
      if (!entry) continue;
      if (entry.prevHash !== expectedPrevHash) {
        return {
          ok: false,
          divergedAt: i,
          expected: expectedPrevHash,
          actual: entry.prevHash
        };
      }
      // Indices must be dense and match position.
      if (entry.index !== i) {
        return {
          ok: false,
          divergedAt: i,
          expected: `index=${i}`,
          actual: `index=${entry.index}`
        };
      }
      expectedPrevHash = await hashEntry(entry);
    }
    return {
      ok: true,
      head: expectedPrevHash,
      length: entries.length
    };
  }
  // ─── Encryption plumbing ─────────────────────────────────────────
  /**
   * Serialize + encrypt a ledger entry into an EncryptedEnvelope. The
   * envelope's `_v` field is set to `entry.index + 1` so the usual
   * optimistic-concurrency machinery has a reasonable version number
   * to compare against (the ledger is append-only, so concurrent
   * writes should always bump the index).
   */
  async encryptEntry(entry) {
    const json = canonicalJson(entry);
    if (!this.encrypted) {
      return {
        _noydb: NOYDB_FORMAT_VERSION,
        _v: entry.index + 1,
        _ts: entry.ts,
        _iv: "",
        _data: json,
        _by: entry.actor
      };
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const { iv, data } = await encrypt(json, dek);
    return {
      _noydb: NOYDB_FORMAT_VERSION,
      _v: entry.index + 1,
      _ts: entry.ts,
      _iv: iv,
      _data: data,
      _by: entry.actor
    };
  }
  /** Decrypt an envelope into a LedgerEntry. Throws on bad key / tamper. */
  async decryptEntry(envelope) {
    if (!this.encrypted) {
      return JSON.parse(envelope._data);
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const json = await decrypt(envelope._iv, envelope._data, dek);
    return JSON.parse(json);
  }
};
667
/**
 * Sleep for a bounded exponential backoff with jitter: base delay is
 * 5 * 2^attempt ms, plus a uniformly random jitter in [0, base).
 * @param {number} attempt - 0-based retry attempt number.
 * @returns {Promise<void>} resolves after the delay.
 */
function sleepBackoff(attempt) {
  const base = 5 * 2 ** attempt;
  const delayMs = base + Math.random() * base;
  return new Promise((done) => setTimeout(done, delayMs));
}
672
+
673
+ export {
674
+ computePatch,
675
+ applyPatch,
676
+ LEDGER_COLLECTION,
677
+ LEDGER_DELTAS_COLLECTION,
678
+ LedgerStore
679
+ };
680
+ //# sourceMappingURL=chunk-XHFOENR2.js.map