@noy-db/core 0.3.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -78,6 +78,74 @@ var ValidationError = class extends NoydbError {
78
78
  this.name = "ValidationError";
79
79
  }
80
80
  };
81
// Thrown when data fails Standard Schema validation, either on the way
// into the store ("input") or when stored data no longer matches the
// current schema ("output").
var SchemaValidationError = class extends NoydbError {
  issues;
  direction;
  constructor(message, issues, direction) {
    super("SCHEMA_VALIDATION_FAILED", message);
    this.issues = issues;
    this.direction = direction;
    this.name = "SchemaValidationError";
  }
};
// Thrown when the ledger hash chain embedded in a backup fails to verify.
var BackupLedgerError = class extends NoydbError {
  /** First-broken-entry index, if known. */
  divergedAt;
  constructor(message, divergedAt) {
    super("BACKUP_LEDGER", message);
    this.name = "BackupLedgerError";
    // Only materialize the property when a value was actually supplied.
    if (divergedAt !== void 0) {
      this.divergedAt = divergedAt;
    }
  }
};
// Thrown when a backed-up record envelope fails its integrity hash check.
var BackupCorruptedError = class extends NoydbError {
  /** The (collection, id) pair whose envelope failed the hash check. */
  collection;
  id;
  constructor(collection, id, message) {
    super("BACKUP_CORRUPTED", message);
    this.name = "BackupCorruptedError";
    this.collection = collection;
    this.id = id;
  }
};
111
+
112
// src/schema.ts
/**
 * Validate a value with a Standard Schema v1 validator before it is
 * written. Returns the validator's (possibly transformed) output.
 * Throws SchemaValidationError with direction "input" on failure.
 */
async function validateSchemaInput(schema, value, context) {
  const result = await schema["~standard"].validate(value);
  const issues = result.issues;
  if (issues !== void 0 && issues.length > 0) {
    throw new SchemaValidationError(
      `Schema validation failed on ${context}: ${summarizeIssues(issues)}`,
      issues,
      "input"
    );
  }
  return result.value;
}
/**
 * Validate data coming OUT of storage against the current schema.
 * A failure here usually means the stored shape has drifted from the
 * schema; throws SchemaValidationError with direction "output".
 */
async function validateSchemaOutput(schema, value, context) {
  const result = await schema["~standard"].validate(value);
  const issues = result.issues;
  if (issues !== void 0 && issues.length > 0) {
    throw new SchemaValidationError(
      `Stored data for ${context} does not match the current schema \u2014 schema drift? ${summarizeIssues(issues)}`,
      issues,
      "output"
    );
  }
  return result.value;
}
/** Render at most three issues as "path: message", plus a "+N more" suffix. */
function summarizeIssues(issues) {
  const rendered = [];
  for (const issue of issues.slice(0, 3)) {
    rendered.push(`${formatPath(issue.path)}: ${issue.message}`);
  }
  const hidden = issues.length - 3;
  const suffix = hidden > 0 ? ` (+${hidden} more)` : "";
  return rendered.join("; ") + suffix;
}
/** Join a Standard Schema issue path into a dotted string ("root" when empty). */
function formatPath(path) {
  if (!path || path.length === 0) return "root";
  const parts = [];
  for (const segment of path) {
    // Path segments may be raw keys or { key } wrapper objects.
    const isWrapped = typeof segment === "object" && segment !== null;
    parts.push(isWrapped ? String(segment.key) : String(segment));
  }
  return parts.join(".");
}
81
149
 
82
150
  // src/crypto.ts
83
151
  var PBKDF2_ITERATIONS = 6e5;
@@ -188,6 +256,749 @@ function base64ToBuffer(base64) {
188
256
  return bytes;
189
257
  }
190
258
 
259
+ // src/ledger/entry.ts
260
+ function canonicalJson(value) {
261
+ if (value === null) return "null";
262
+ if (typeof value === "boolean") return value ? "true" : "false";
263
+ if (typeof value === "number") {
264
+ if (!Number.isFinite(value)) {
265
+ throw new Error(
266
+ `canonicalJson: refusing to encode non-finite number ${String(value)}`
267
+ );
268
+ }
269
+ return JSON.stringify(value);
270
+ }
271
+ if (typeof value === "string") return JSON.stringify(value);
272
+ if (typeof value === "bigint") {
273
+ throw new Error("canonicalJson: BigInt is not JSON-serializable");
274
+ }
275
+ if (typeof value === "undefined" || typeof value === "function") {
276
+ throw new Error(
277
+ `canonicalJson: refusing to encode ${typeof value} \u2014 include all fields explicitly`
278
+ );
279
+ }
280
+ if (Array.isArray(value)) {
281
+ return "[" + value.map((v) => canonicalJson(v)).join(",") + "]";
282
+ }
283
+ if (typeof value === "object") {
284
+ const obj = value;
285
+ const keys = Object.keys(obj).sort();
286
+ const parts = [];
287
+ for (const key of keys) {
288
+ parts.push(JSON.stringify(key) + ":" + canonicalJson(obj[key]));
289
+ }
290
+ return "{" + parts.join(",") + "}";
291
+ }
292
+ throw new Error(`canonicalJson: unexpected value type: ${typeof value}`);
293
+ }
294
+ async function sha256Hex(input) {
295
+ const bytes = new TextEncoder().encode(input);
296
+ const digest = await globalThis.crypto.subtle.digest("SHA-256", bytes);
297
+ return bytesToHex(new Uint8Array(digest));
298
+ }
299
+ async function hashEntry(entry) {
300
+ return sha256Hex(canonicalJson(entry));
301
+ }
302
+ function bytesToHex(bytes) {
303
+ const hex = new Array(bytes.length);
304
+ for (let i = 0; i < bytes.length; i++) {
305
+ hex[i] = (bytes[i] ?? 0).toString(16).padStart(2, "0");
306
+ }
307
+ return hex.join("");
308
+ }
309
+ function paddedIndex(index) {
310
+ return String(index).padStart(10, "0");
311
+ }
312
+ function parseIndex(key) {
313
+ return Number.parseInt(key, 10);
314
+ }
315
+
316
// src/ledger/patch.ts
/** Compute a JSON-Patch-style op list that transforms `prev` into `next`. */
function computePatch(prev, next) {
  const ops = [];
  diff(prev, next, "", ops);
  return ops;
}
/** Recursive differ: appends add/remove/replace ops for the subtree at `path`. */
function diff(prev, next, path, out) {
  if (prev === next) return;
  if (prev === null || next === null) {
    out.push({ op: "replace", path, value: next });
    return;
  }
  const prevIsArray = Array.isArray(prev);
  const nextIsArray = Array.isArray(next);
  const prevIsObject = typeof prev === "object" && !prevIsArray;
  const nextIsObject = typeof next === "object" && !nextIsArray;
  // Kind mismatch (array vs object vs primitive): replace wholesale.
  if (prevIsArray !== nextIsArray || prevIsObject !== nextIsObject) {
    out.push({ op: "replace", path, value: next });
    return;
  }
  if (prevIsArray && nextIsArray) {
    // Arrays are treated as a unit — no element-level splice ops.
    if (!arrayDeepEqual(prev, next)) {
      out.push({ op: "replace", path, value: next });
    }
    return;
  }
  if (prevIsObject && nextIsObject) {
    for (const key of Object.keys(prev)) {
      const childPath = path + "/" + escapePathSegment(key);
      if (key in next) {
        diff(prev[key], next[key], childPath, out);
      } else {
        out.push({ op: "remove", path: childPath });
      }
    }
    for (const key of Object.keys(next)) {
      if (key in prev) continue;
      out.push({
        op: "add",
        path: path + "/" + escapePathSegment(key),
        value: next[key]
      });
    }
    return;
  }
  // Both are primitives of the same type but not ===.
  out.push({ op: "replace", path, value: next });
}
/** Element-wise deep equality of two arrays. */
function arrayDeepEqual(a, b) {
  if (a.length !== b.length) return false;
  return a.every((el, i) => deepEqual(el, b[i]));
}
/** Structural deep equality for JSON-compatible values. */
function deepEqual(a, b) {
  if (a === b) return true;
  if (a === null || b === null) return false;
  if (typeof a !== typeof b) return false;
  if (typeof a !== "object") return false;
  const aIsArray = Array.isArray(a);
  if (aIsArray !== Array.isArray(b)) return false;
  if (aIsArray) return arrayDeepEqual(a, b);
  const aKeys = Object.keys(a);
  if (aKeys.length !== Object.keys(b).length) return false;
  for (const key of aKeys) {
    if (!(key in b)) return false;
    if (!deepEqual(a[key], b[key])) return false;
  }
  return true;
}
/** Apply a patch immutably: `base` is cloned, each op applied in order. */
function applyPatch(base, patch) {
  let result = clone(base);
  for (const op of patch) {
    result = applyOp(result, op);
  }
  return result;
}
/** Apply a single op. The empty path addresses the document root. */
function applyOp(doc, op) {
  if (op.path === "") {
    return op.op === "remove" ? null : clone(op.value);
  }
  return walkAndApply(doc, parsePath(op.path), op);
}
/** Recurse down `segments`, rebuilding each container on the way back up. */
function walkAndApply(doc, segments, op) {
  if (segments.length === 0) {
    throw new Error("walkAndApply: empty segments (internal error)");
  }
  const [head, ...rest] = segments;
  if (head === void 0) throw new Error("walkAndApply: undefined segment");
  if (rest.length === 0) {
    return applyAtTerminal(doc, head, op);
  }
  if (Array.isArray(doc)) {
    const idx = parseArrayIndex(head, doc.length);
    const next = doc.slice();
    next[idx] = walkAndApply(doc[idx], rest, op);
    return next;
  }
  if (doc !== null && typeof doc === "object") {
    if (!(head in doc)) {
      throw new Error(`applyPatch: path segment "${head}" not found in object`);
    }
    return { ...doc, [head]: walkAndApply(doc[head], rest, op) };
  }
  throw new Error(
    `applyPatch: cannot step into ${typeof doc} at segment "${head}"`
  );
}
/** Apply the op at the final path segment of its target container. */
function applyAtTerminal(doc, segment, op) {
  if (Array.isArray(doc)) {
    // "-" means "append" for add ops, per JSON Pointer convention.
    const idx = segment === "-" ? doc.length : parseArrayIndex(segment, doc.length + 1);
    const next = doc.slice();
    switch (op.op) {
      case "remove":
        next.splice(idx, 1);
        return next;
      case "add":
        next.splice(idx, 0, clone(op.value));
        return next;
      case "replace":
        if (idx >= doc.length) {
          throw new Error(
            `applyPatch: replace at out-of-bounds array index ${idx}`
          );
        }
        next[idx] = clone(op.value);
        return next;
    }
  }
  if (doc !== null && typeof doc === "object") {
    switch (op.op) {
      case "remove": {
        if (!(segment in doc)) {
          throw new Error(
            `applyPatch: remove on missing key "${segment}"`
          );
        }
        const next = { ...doc };
        delete next[segment];
        return next;
      }
      case "add":
        return { ...doc, [segment]: clone(op.value) };
      case "replace":
        if (!(segment in doc)) {
          throw new Error(
            `applyPatch: replace on missing key "${segment}"`
          );
        }
        return { ...doc, [segment]: clone(op.value) };
    }
  }
  throw new Error(
    `applyPatch: cannot apply ${op.op} at terminal segment "${segment}"`
  );
}
/** Escape a key for use in a pointer path (~ -> ~0, / -> ~1). */
function escapePathSegment(segment) {
  return segment.replace(/~/g, "~0").replace(/\//g, "~1");
}
/** Undo escapePathSegment. Order matters: ~1 first, then ~0. */
function unescapePathSegment(segment) {
  return segment.replace(/~1/g, "/").replace(/~0/g, "~");
}
/** Split an absolute op path into unescaped segments. */
function parsePath(path) {
  if (!path.startsWith("/")) {
    throw new Error(`applyPatch: path must start with '/', got "${path}"`);
  }
  return path.slice(1).split("/").map(unescapePathSegment);
}
/** Validate and parse an array index segment within [0, max]. */
function parseArrayIndex(segment, max) {
  if (!/^\d+$/.test(segment)) {
    throw new Error(
      `applyPatch: array index must be a non-negative integer, got "${segment}"`
    );
  }
  const idx = Number.parseInt(segment, 10);
  if (idx < 0 || idx > max) {
    throw new Error(
      `applyPatch: array index ${idx} out of range [0, ${max}]`
    );
  }
  return idx;
}
/** Cheap structural clone of JSON-compatible data; primitives pass through. */
function clone(value) {
  if (value === null || value === void 0) return value;
  if (typeof value !== "object") return value;
  return JSON.parse(JSON.stringify(value));
}
520
+
521
// src/ledger/store.ts
var LEDGER_COLLECTION = "_ledger";
var LEDGER_DELTAS_COLLECTION = "_ledger_deltas";
/**
 * Append-only, hash-chained audit log for one compartment. Entries are
 * stored under `_ledger/` with zero-padded keys; optional reverse
 * deltas live under `_ledger_deltas/` keyed by the same index.
 * Single-writer: two LedgerStore instances writing to the same
 * compartment will not see each other's cached head.
 */
var LedgerStore = class {
  adapter;
  compartment;
  encrypted;
  getDEK;
  actor;
  /**
   * Cached chain head — most recently appended entry plus its
   * precomputed hash. Without it, every append() would reload every
   * prior entry to recompute prevHash, making N puts O(N^2).
   * `undefined` means "not loaded yet"; `null` means "loaded and the
   * ledger is empty" (genesis prevHash is the empty string) — the two
   * must stay distinct so an empty chain isn't re-scanned forever.
   */
  headCache = void 0;
  constructor(opts) {
    this.adapter = opts.adapter;
    this.compartment = opts.compartment;
    this.encrypted = opts.encrypted;
    this.getDEK = opts.getDEK;
    this.actor = opts.actor;
  }
  /** Load the chain head once, then serve it from the in-memory cache. */
  async getCachedHead() {
    if (this.headCache !== void 0) return this.headCache;
    const entries = await this.loadAllEntries();
    const last = entries[entries.length - 1];
    this.headCache = last ? { entry: last, hash: await hashEntry(last) } : null;
    return this.headCache;
  }
  /**
   * Append a new entry and return it as written (index, prevHash and
   * timestamp filled in). This is the ONLY supported write path;
   * direct adapter writes to `_ledger/` break the chain math and are
   * caught by the next verify() as a divergence.
   */
  async append(input) {
    const cached = await this.getCachedHead();
    const prevHash = cached?.hash ?? "";
    const nextIndex = cached ? cached.entry.index + 1 : 0;
    let deltaHash;
    if (input.delta !== void 0) {
      // Write the delta first so its hash can be embedded in the entry.
      const deltaEnvelope = await this.encryptDelta(input.delta);
      await this.adapter.put(
        this.compartment,
        LEDGER_DELTAS_COLLECTION,
        paddedIndex(nextIndex),
        deltaEnvelope
      );
      deltaHash = await sha256Hex(deltaEnvelope._data);
    }
    const entryBase = {
      index: nextIndex,
      prevHash,
      op: input.op,
      collection: input.collection,
      id: input.id,
      version: input.version,
      ts: new Date().toISOString(),
      actor: input.actor === "" ? this.actor : input.actor,
      payloadHash: input.payloadHash
    };
    const entry = deltaHash === void 0 ? entryBase : { ...entryBase, deltaHash };
    const envelope = await this.encryptEntry(entry);
    await this.adapter.put(
      this.compartment,
      LEDGER_COLLECTION,
      paddedIndex(entry.index),
      envelope
    );
    this.headCache = { entry, hash: await hashEntry(entry) };
    return entry;
  }
  /**
   * Load a delta payload by entry index. Returns null when the entry
   * has no delta or the delta row is missing (e.g. removed
   * out-of-band); reconstruct() treats that as "stop walking".
   */
  async loadDelta(index) {
    const envelope = await this.adapter.get(
      this.compartment,
      LEDGER_DELTAS_COLLECTION,
      paddedIndex(index)
    );
    if (!envelope) return null;
    if (!this.encrypted) {
      return JSON.parse(envelope._data);
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    return JSON.parse(await decrypt(envelope._iv, envelope._data, dek));
  }
  /** Wrap a JSON Patch in a storage envelope. Mirrors encryptEntry. */
  async encryptDelta(patch) {
    const json = JSON.stringify(patch);
    if (!this.encrypted) {
      return {
        _noydb: NOYDB_FORMAT_VERSION,
        _v: 1,
        _ts: new Date().toISOString(),
        _iv: "",
        _data: json,
        _by: this.actor
      };
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const { iv, data } = await encrypt(json, dek);
    return {
      _noydb: NOYDB_FORMAT_VERSION,
      _v: 1,
      _ts: new Date().toISOString(),
      _iv: iv,
      _data: data,
      _by: this.actor
    };
  }
  /**
   * Read every entry in ascending index order (zero-padded keys sort
   * lexicographically). Decryption is intentionally serial: entries
   * are tiny and a promise pool would cost more than it saves.
   */
  async loadAllEntries() {
    const keys = await this.adapter.list(this.compartment, LEDGER_COLLECTION);
    keys.sort();
    const entries = [];
    for (const key of keys) {
      const envelope = await this.adapter.get(
        this.compartment,
        LEDGER_COLLECTION,
        key
      );
      if (envelope) {
        entries.push(await this.decryptEntry(envelope));
      }
    }
    return entries;
  }
  /** Current head: last entry, its hash, and chain length; null when empty. */
  async head() {
    const cached = await this.getCachedHead();
    if (!cached) return null;
    const { entry, hash } = cached;
    return { entry, hash, length: entry.index + 1 };
  }
  /** Entries in the half-open range [from, to), clipped to valid bounds. */
  async entries(opts = {}) {
    const all = await this.loadAllEntries();
    const from = Math.max(0, opts.from ?? 0);
    const to = Math.min(all.length, opts.to ?? all.length);
    return all.slice(from, to);
  }
  /**
   * Reconstruct the record's state as of `atVersion` by walking the
   * stored REVERSE deltas backward from `current` (the caller-supplied
   * present value — it MUST be the exact current record or the patch
   * chain is invalid). The walk stops at the entry whose version
   * equals `atVersion`, yielding the state immediately AFTER that put.
   * Returns null when there are no matching entries, when a delete is
   * crossed (the record didn't exist before it), or when a required
   * delta is missing.
   */
  async reconstruct(collection, id, current, atVersion) {
    const all = await this.loadAllEntries();
    const matching = all.filter(
      (e) => e.collection === collection && e.id === id
    );
    if (matching.length === 0) {
      return null;
    }
    let state = current;
    for (let i = matching.length - 1; i >= 0; i--) {
      const entry = matching[i];
      if (!entry) continue;
      if (entry.version === atVersion && entry.op !== "delete") {
        return state;
      }
      if (entry.op === "delete") {
        return null;
      }
      if (entry.deltaHash === void 0) {
        // No delta to walk through — only a direct version hit survives.
        return entry.version === atVersion ? state : null;
      }
      const patch = await this.loadDelta(entry.index);
      if (!patch) {
        return null;
      }
      if (state === null) {
        return null;
      }
      state = applyPatch(state, patch);
    }
    return null;
  }
  /**
   * Walk the chain from genesis forward, checking every prevHash link
   * and every index. Returns `{ ok: true, head, length }` on success,
   * or `{ ok: false, divergedAt, expected, actual }` at the FIRST
   * broken entry (entries before divergedAt still verify cleanly).
   * Detects mutated, reordered, inserted, and interior-deleted
   * entries. Cannot detect tail truncation (anchor head.hash
   * externally) or data-collection writes that bypassed the ledger.
   */
  async verify() {
    const entries = await this.loadAllEntries();
    let expectedPrevHash = "";
    for (let i = 0; i < entries.length; i++) {
      const entry = entries[i];
      if (!entry) continue;
      if (entry.prevHash !== expectedPrevHash) {
        return {
          ok: false,
          divergedAt: i,
          expected: expectedPrevHash,
          actual: entry.prevHash
        };
      }
      if (entry.index !== i) {
        return {
          ok: false,
          divergedAt: i,
          expected: `index=${i}`,
          actual: `index=${entry.index}`
        };
      }
      expectedPrevHash = await hashEntry(entry);
    }
    return {
      ok: true,
      head: expectedPrevHash,
      length: entries.length
    };
  }
  // ─── Encryption plumbing ─────────────────────────────────────────
  /**
   * Serialize (canonical JSON) and optionally encrypt an entry. The
   * envelope `_v` is `entry.index + 1` so the usual
   * optimistic-concurrency version checks have a sensible number.
   */
  async encryptEntry(entry) {
    const json = canonicalJson(entry);
    if (!this.encrypted) {
      return {
        _noydb: NOYDB_FORMAT_VERSION,
        _v: entry.index + 1,
        _ts: entry.ts,
        _iv: "",
        _data: json,
        _by: entry.actor
      };
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const { iv, data } = await encrypt(json, dek);
    return {
      _noydb: NOYDB_FORMAT_VERSION,
      _v: entry.index + 1,
      _ts: entry.ts,
      _iv: iv,
      _data: data,
      _by: entry.actor
    };
  }
  /** Decrypt an envelope into a LedgerEntry. Throws on bad key / tamper. */
  async decryptEntry(envelope) {
    if (!this.encrypted) {
      return JSON.parse(envelope._data);
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const json = await decrypt(envelope._iv, envelope._data, dek);
    return JSON.parse(json);
  }
};
897
/** Hash an envelope's `_data` payload; empty string for a missing envelope. */
async function envelopePayloadHash(envelope) {
  return envelope ? sha256Hex(envelope._data) : "";
}
901
+
902
// src/refs.ts
/** A declared reference points at a record that is missing or would dangle. */
var RefIntegrityError = class extends NoydbError {
  collection;
  id;
  field;
  refTo;
  refId;
  constructor(opts) {
    const { message, collection, id, field, refTo, refId } = opts;
    super("REF_INTEGRITY", message);
    this.name = "RefIntegrityError";
    this.collection = collection;
    this.id = id;
    this.field = field;
    this.refTo = refTo;
    this.refId = refId;
  }
};
/** Raised when a ref target looks like a cross-compartment path. */
var RefScopeError = class extends NoydbError {
  constructor(target) {
    super(
      "REF_SCOPE",
      `Cross-compartment references are not supported in v0.4 \u2014 got target "${target}". Use a simple collection name (e.g. "clients"), not a path. Cross-compartment refs are tracked for a future release.`
    );
    this.name = "RefScopeError";
  }
};
928
/**
 * Declare a reference-field descriptor pointing at `target`.
 * @param target Plain collection name — no "/" (cross-compartment paths
 *               are rejected) and no leading "_" (reserved).
 * @param mode   Integrity mode; defaults to "strict".
 * @returns `{ target, mode }` descriptor.
 */
function ref(target, mode = "strict") {
  if (target.includes("/")) {
    throw new RefScopeError(target);
  }
  const reserved = !target || target.startsWith("_");
  if (reserved) {
    throw new Error(
      `ref(): target collection name must be non-empty and cannot start with '_' (reserved for internal collections). Got "${target}".`
    );
  }
  return { target, mode };
}
939
/**
 * In-memory registry of declared references: `outbound` maps a
 * collection to its field descriptors; `inbound` is the derived index
 * mapping a target collection to the (collection, field, mode) pairs
 * that point at it.
 */
var RefRegistry = class {
  outbound = new Map();
  inbound = new Map();
  /**
   * Register a collection's refs. Registering the same data twice is a
   * no-op; registering DIFFERENT data for the same collection throws,
   * so declarations can never silently override each other.
   */
  register(collection, refs) {
    const existing = this.outbound.get(collection);
    if (existing) {
      this.assertSameRefs(collection, existing, refs);
      return;
    }
    this.outbound.set(collection, { ...refs });
    for (const [field, desc] of Object.entries(refs)) {
      const list = this.inbound.get(desc.target) ?? [];
      list.push({ collection, field, mode: desc.mode });
      this.inbound.set(desc.target, list);
    }
  }
  /** Throw unless `next` declares exactly the same refs as `existing`. */
  assertSameRefs(collection, existing, next) {
    const existingKeys = Object.keys(existing).sort();
    const newKeys = Object.keys(next).sort();
    if (existingKeys.join(",") !== newKeys.join(",")) {
      throw new Error(
        `RefRegistry: conflicting ref declarations for collection "${collection}"`
      );
    }
    for (const k of existingKeys) {
      const a = existing[k];
      const b = next[k];
      if (!a || !b || a.target !== b.target || a.mode !== b.mode) {
        throw new Error(
          `RefRegistry: conflicting ref declarations for collection "${collection}" field "${k}"`
        );
      }
    }
  }
  /** Outbound refs declared by `collection` (`{}` if none). */
  getOutbound(collection) {
    return this.outbound.get(collection) ?? {};
  }
  /** Inbound refs targeting `target` (`[]` if none). */
  getInbound(target) {
    return this.inbound.get(target) ?? [];
  }
  /** Every (collection, refs) pair that declared at least one ref. */
  entries() {
    return [...this.outbound.entries()];
  }
  /** Reset all state. Test-only escape hatch. */
  clear() {
    this.outbound.clear();
    this.inbound.clear();
  }
};
1001
+
191
1002
  // src/keyring.ts
192
1003
  var GRANTABLE_BY_ADMIN = ["operator", "viewer", "client"];
193
1004
  function canGrant(callerRole, targetRole) {
@@ -272,6 +1083,11 @@ async function grant(adapter, compartment, callerKeyring, options) {
272
1083
  }
273
1084
  }
274
1085
  }
1086
+ for (const [collName, dek] of callerKeyring.deks) {
1087
+ if (collName.startsWith("_") && !(collName in wrappedDeks)) {
1088
+ wrappedDeks[collName] = await wrapKey(dek, newKek);
1089
+ }
1090
+ }
275
1091
  const keyringFile = {
276
1092
  _noydb_keyring: NOYDB_KEYRING_VERSION,
277
1093
  user_id: options.userId,
@@ -525,7 +1341,7 @@ async function clearHistory(adapter, compartment, collection, recordId) {
525
1341
  }
526
1342
 
527
1343
  // src/diff.ts
528
- function diff(oldObj, newObj, basePath = "") {
1344
+ function diff2(oldObj, newObj, basePath = "") {
529
1345
  const changes = [];
530
1346
  if (oldObj === newObj) return changes;
531
1347
  if (oldObj == null && newObj != null) {
@@ -549,7 +1365,7 @@ function diff(oldObj, newObj, basePath = "") {
549
1365
  } else if (i >= newObj.length) {
550
1366
  changes.push({ path: p, type: "removed", from: oldObj[i] });
551
1367
  } else {
552
- changes.push(...diff(oldObj[i], newObj[i], p));
1368
+ changes.push(...diff2(oldObj[i], newObj[i], p));
553
1369
  }
554
1370
  }
555
1371
  return changes;
@@ -564,7 +1380,7 @@ function diff(oldObj, newObj, basePath = "") {
564
1380
  } else if (!(key in newRecord)) {
565
1381
  changes.push({ path: p, type: "removed", from: oldRecord[key] });
566
1382
  } else {
567
- changes.push(...diff(oldRecord[key], newRecord[key], p));
1383
+ changes.push(...diff2(oldRecord[key], newRecord[key], p));
568
1384
  }
569
1385
  }
570
1386
  return changes;
@@ -1249,6 +2065,56 @@ var Collection = class {
1249
2065
  * disappear from the index without notification.
1250
2066
  */
1251
2067
  indexes = new CollectionIndexes();
2068
+ /**
2069
+ * Optional Standard Schema v1 validator. When set, every `put()` runs
2070
+ * the input through `validateSchemaInput` before encryption, and every
2071
+ * record coming OUT of `decryptRecord` runs through
2072
+ * `validateSchemaOutput`. A rejected input throws
2073
+ * `SchemaValidationError` with `direction: 'input'`; drifted stored
2074
+ * data throws with `direction: 'output'`. Both carry the rich issue
2075
+ * list from the validator so UI code can render field-level messages.
2076
+ *
2077
+ * The schema is stored as `StandardSchemaV1<unknown, T>` because the
2078
+ * collection type parameter `T` is the OUTPUT type — whatever the
2079
+ * validator produces after transforms and coercion. Users who pass a
2080
+ * schema to `defineNoydbStore` (or `Collection.constructor`) get their
2081
+ * `T` inferred automatically via `InferOutput<Schema>`.
2082
+ */
2083
+ schema;
2084
+ /**
2085
+ * Optional reference to the compartment-level hash-chained audit
2086
+ * log. When present, every successful `put()` and `delete()` appends
2087
+ * an entry to the ledger AFTER the adapter write succeeds (so a
2088
+ * failed adapter write never produces an orphan ledger entry).
2089
+ *
2090
+ * The ledger is always a compartment-wide singleton — all
2091
+ * collections in the same compartment share the same LedgerStore.
2092
+ * Compartment.ledger() does the lazy init; this field just holds
2093
+ * the reference so Collection doesn't need to reach back up to the
2094
+ * compartment on every mutation.
2095
+ *
2096
+ * `undefined` means "no ledger attached" — supported for tests that
2097
+ * construct a Collection directly without a compartment, and for
2098
+ * future backwards-compat scenarios. Production usage always has a
2099
+ * ledger because Compartment.collection() passes one through.
2100
+ */
2101
+ ledger;
2102
+ /**
2103
+ * Optional back-reference to the owning compartment's ref
2104
+ * enforcer. When present, `Collection.put` calls
2105
+ * `refEnforcer.enforceRefsOnPut(name, record)` before the adapter
2106
+ * write, and `Collection.delete` calls
2107
+ * `refEnforcer.enforceRefsOnDelete(name, id)` before its own
2108
+ * adapter delete. The Compartment handles the actual registry
2109
+ * lookup and cross-collection enforcement — Collection just
2110
+ * notifies it at the right points in the lifecycle.
2111
+ *
2112
+ * Typed as a structural interface rather than `Compartment`
2113
+ * directly to avoid a circular import. Compartment implements
2114
+ * these two methods; any other object with the same shape would
2115
+ * work too (used only in unit tests).
2116
+ */
2117
+ refEnforcer;
1252
2118
  constructor(opts) {
1253
2119
  this.adapter = opts.adapter;
1254
2120
  this.compartment = opts.compartment;
@@ -1259,6 +2125,9 @@ var Collection = class {
1259
2125
  this.getDEK = opts.getDEK;
1260
2126
  this.onDirty = opts.onDirty;
1261
2127
  this.historyConfig = opts.historyConfig ?? { enabled: true };
2128
+ this.schema = opts.schema;
2129
+ this.ledger = opts.ledger;
2130
+ this.refEnforcer = opts.refEnforcer;
1262
2131
  this.lazy = opts.prefetch === false;
1263
2132
  if (this.lazy) {
1264
2133
  if (opts.indexes && opts.indexes.length > 0) {
@@ -1305,6 +2174,12 @@ var Collection = class {
1305
2174
  if (!hasWritePermission(this.keyring, this.name)) {
1306
2175
  throw new ReadOnlyError();
1307
2176
  }
2177
+ if (this.schema !== void 0) {
2178
+ record = await validateSchemaInput(this.schema, record, `put(${id})`);
2179
+ }
2180
+ if (this.refEnforcer !== void 0) {
2181
+ await this.refEnforcer.enforceRefsOnPut(this.name, record);
2182
+ }
1308
2183
  let existing;
1309
2184
  if (this.lazy && this.lru) {
1310
2185
  existing = this.lru.get(id);
@@ -1337,6 +2212,20 @@ var Collection = class {
1337
2212
  }
1338
2213
  const envelope = await this.encryptRecord(record, version);
1339
2214
  await this.adapter.put(this.compartment, this.name, id, envelope);
2215
+ if (this.ledger) {
2216
+ const appendInput = {
2217
+ op: "put",
2218
+ collection: this.name,
2219
+ id,
2220
+ version,
2221
+ actor: this.keyring.userId,
2222
+ payloadHash: await envelopePayloadHash(envelope)
2223
+ };
2224
+ if (existing) {
2225
+ appendInput.delta = computePatch(record, existing.record);
2226
+ }
2227
+ await this.ledger.append(appendInput);
2228
+ }
1340
2229
  if (this.lazy && this.lru) {
1341
2230
  this.lru.set(id, { record, version }, estimateRecordBytes(record));
1342
2231
  } else {
@@ -1356,14 +2245,17 @@ var Collection = class {
1356
2245
  if (!hasWritePermission(this.keyring, this.name)) {
1357
2246
  throw new ReadOnlyError();
1358
2247
  }
2248
+ if (this.refEnforcer !== void 0) {
2249
+ await this.refEnforcer.enforceRefsOnDelete(this.name, id);
2250
+ }
1359
2251
  let existing;
1360
2252
  if (this.lazy && this.lru) {
1361
2253
  existing = this.lru.get(id);
1362
2254
  if (!existing && this.historyConfig.enabled !== false) {
1363
- const previousEnvelope = await this.adapter.get(this.compartment, this.name, id);
1364
- if (previousEnvelope) {
1365
- const previousRecord = await this.decryptRecord(previousEnvelope);
1366
- existing = { record: previousRecord, version: previousEnvelope._v };
2255
+ const previousEnvelope2 = await this.adapter.get(this.compartment, this.name, id);
2256
+ if (previousEnvelope2) {
2257
+ const previousRecord = await this.decryptRecord(previousEnvelope2);
2258
+ existing = { record: previousRecord, version: previousEnvelope2._v };
1367
2259
  }
1368
2260
  }
1369
2261
  } else {
@@ -1373,7 +2265,19 @@ var Collection = class {
1373
2265
  const historyEnvelope = await this.encryptRecord(existing.record, existing.version);
1374
2266
  await saveHistory(this.adapter, this.compartment, this.name, id, historyEnvelope);
1375
2267
  }
2268
+ const previousEnvelope = await this.adapter.get(this.compartment, this.name, id);
2269
+ const previousPayloadHash = await envelopePayloadHash(previousEnvelope);
1376
2270
  await this.adapter.delete(this.compartment, this.name, id);
2271
+ if (this.ledger) {
2272
+ await this.ledger.append({
2273
+ op: "delete",
2274
+ collection: this.name,
2275
+ id,
2276
+ version: existing?.version ?? 0,
2277
+ actor: this.keyring.userId,
2278
+ payloadHash: previousPayloadHash
2279
+ });
2280
+ }
1377
2281
  if (this.lazy && this.lru) {
1378
2282
  this.lru.remove(id);
1379
2283
  } else {
@@ -1467,7 +2371,7 @@ var Collection = class {
1467
2371
  );
1468
2372
  const entries = [];
1469
2373
  for (const env of envelopes) {
1470
- const record = await this.decryptRecord(env);
2374
+ const record = await this.decryptRecord(env, { skipValidation: true });
1471
2375
  entries.push({
1472
2376
  version: env._v,
1473
2377
  timestamp: env._ts,
@@ -1477,7 +2381,15 @@ var Collection = class {
1477
2381
  }
1478
2382
  return entries;
1479
2383
  }
1480
- /** Get a specific past version of a record. */
2384
+ /**
2385
+ * Get a specific past version of a record.
2386
+ *
2387
+ * History reads intentionally **skip schema validation** — historical
2388
+ * records predate the current schema by definition, so validating them
2389
+ * against today's shape would be a false positive on any schema
2390
+ * evolution. If a caller needs validated history, they should filter
2391
+ * and re-put the records through the normal `put()` path.
2392
+ */
1481
2393
  async getVersion(id, version) {
1482
2394
  const envelope = await getVersionEnvelope(
1483
2395
  this.adapter,
@@ -1487,7 +2399,7 @@ var Collection = class {
1487
2399
  version
1488
2400
  );
1489
2401
  if (!envelope) return null;
1490
- return this.decryptRecord(envelope);
2402
+ return this.decryptRecord(envelope, { skipValidation: true });
1491
2403
  }
1492
2404
  /** Revert a record to a past version. Creates a new version with the old content. */
1493
2405
  async revert(id, version) {
@@ -1505,7 +2417,7 @@ var Collection = class {
1505
2417
  async diff(id, versionA, versionB) {
1506
2418
  const recordA = versionA === 0 ? null : await this.resolveVersion(id, versionA);
1507
2419
  const recordB = versionB === void 0 || versionB === 0 ? versionB === 0 ? null : await this.resolveCurrentOrVersion(id) : await this.resolveVersion(id, versionB);
1508
- return diff(recordA, recordB);
2420
+ return diff2(recordA, recordB);
1509
2421
  }
1510
2422
  /** Resolve a version: try history first, then check if it's the current version. */
1511
2423
  async resolveVersion(id, version) {
@@ -1727,13 +2639,38 @@ var Collection = class {
1727
2639
  _by: by
1728
2640
  };
1729
2641
  }
1730
- async decryptRecord(envelope) {
2642
+ /**
2643
+ * Decrypt an envelope into a record of type `T`.
2644
+ *
2645
+ * When a schema is attached, the decrypted value is validated before
2646
+ * being returned. A divergence between the stored bytes and the
2647
+ * current schema throws `SchemaValidationError` with
2648
+ * `direction: 'output'` — silently returning drifted data would
2649
+ * propagate garbage into the UI and break the whole point of having
2650
+ * a schema.
2651
+ *
2652
+ * `skipValidation` exists for history reads: when calling
2653
+ * `getVersion()` the caller is explicitly asking for an old snapshot
2654
+ * that may predate a schema change, so validating it would be a
2655
+ * false positive. Every non-history read leaves this flag `false`.
2656
+ */
2657
+ async decryptRecord(envelope, opts = {}) {
2658
+ let record;
1731
2659
  if (!this.encrypted) {
1732
- return JSON.parse(envelope._data);
2660
+ record = JSON.parse(envelope._data);
2661
+ } else {
2662
+ const dek = await this.getDEK(this.name);
2663
+ const json = await decrypt(envelope._iv, envelope._data, dek);
2664
+ record = JSON.parse(json);
2665
+ }
2666
+ if (this.schema !== void 0 && !opts.skipValidation) {
2667
+ record = await validateSchemaOutput(
2668
+ this.schema,
2669
+ record,
2670
+ `${this.name}@v${envelope._v}`
2671
+ );
1733
2672
  }
1734
- const dek = await this.getDEK(this.name);
1735
- const json = await decrypt(envelope._iv, envelope._data, dek);
1736
- return JSON.parse(json);
2673
+ return record;
1737
2674
  }
1738
2675
  };
1739
2676
 
@@ -1741,13 +2678,62 @@ var Collection = class {
1741
2678
  var Compartment = class {
1742
2679
  adapter;
1743
2680
  name;
2681
+ /**
2682
+ * The active in-memory keyring. NOT readonly because `load()`
2683
+ * needs to refresh it after restoring a different keyring file —
2684
+ * otherwise the in-memory DEKs (from the pre-load session) and
2685
+ * the on-disk wrapped DEKs (from the loaded backup) drift apart
2686
+ * and every subsequent decrypt fails with TamperedError.
2687
+ */
1744
2688
  keyring;
1745
2689
  encrypted;
1746
2690
  emitter;
1747
2691
  onDirty;
1748
2692
  historyConfig;
1749
2693
  getDEK;
2694
+ /**
2695
+ * Optional callback that re-derives an UnlockedKeyring from the
2696
+ * adapter using the active user's passphrase. Called by `load()`
2697
+ * after the on-disk keyring file has been replaced — refreshes
2698
+ * `this.keyring` so the next DEK access uses the loaded wrapped
2699
+ * DEKs instead of the stale pre-load ones.
2700
+ *
2701
+ * Provided by Noydb at openCompartment() time. Tests that
2702
+ * construct Compartment directly can pass `undefined`; load()
2703
+ * skips the refresh in that case (which is fine for plaintext
2704
+ * compartments — there's nothing to re-unwrap).
2705
+ */
2706
+ reloadKeyring;
1750
2707
  collectionCache = /* @__PURE__ */ new Map();
2708
+ /**
2709
+ * Per-compartment ledger store. Lazy-initialized on first
2710
+ * `collection()` call (which passes it through to the Collection)
2711
+ * or on first `ledger()` call from user code.
2712
+ *
2713
+ * One LedgerStore is shared across all collections in a compartment
2714
+ * because the hash chain is compartment-scoped: the chain head is a
2715
+ * single "what did this compartment do last" identifier, not a
2716
+ * per-collection one. Two collections appending concurrently is the
2717
+ * single-writer concurrency concern documented in the LedgerStore
2718
+ * docstring.
2719
+ */
2720
+ ledgerStore = null;
2721
+ /**
2722
+ * Per-compartment foreign-key reference registry. Collections
2723
+ * register their `refs` option here on construction; the
2724
+ * compartment uses the registry on every put/delete/checkIntegrity
2725
+ * call. One instance lives for the compartment's lifetime.
2726
+ */
2727
+ refRegistry = new RefRegistry();
2728
+ /**
2729
+ * Set of collection record-ids currently being deleted as part of
2730
+ * a cascade. Populated on entry to `enforceRefsOnDelete` and
2731
+ * drained on exit. Used to break mutual-cascade cycles: deleting
2732
+ * A → cascade to B → cascade back to A would otherwise recurse
2733
+ * forever, so we short-circuit when we see an already-in-progress
2734
+ * delete on the same (collection, id) pair.
2735
+ */
2736
+ cascadeInProgress = /* @__PURE__ */ new Set();
1751
2737
  constructor(opts) {
1752
2738
  this.adapter = opts.adapter;
1753
2739
  this.name = opts.name;
@@ -1756,8 +2742,21 @@ var Compartment = class {
1756
2742
  this.emitter = opts.emitter;
1757
2743
  this.onDirty = opts.onDirty;
1758
2744
  this.historyConfig = opts.historyConfig ?? { enabled: true };
2745
+ this.reloadKeyring = opts.reloadKeyring;
2746
+ this.getDEK = this.makeGetDEK();
2747
+ }
2748
+ /**
2749
+ * Construct (or reconstruct) the lazy DEK resolver. Captures the
2750
+ * CURRENT value of `this.keyring` and `this.adapter` in a closure,
2751
+ * memoizing the inner getDEKFn after first use so subsequent
2752
+ * lookups are O(1).
2753
+ *
2754
+ * `load()` calls this after refreshing `this.keyring` to discard
2755
+ * the prior session's cached DEKs.
2756
+ */
2757
+ makeGetDEK() {
1759
2758
  let getDEKFn = null;
1760
- this.getDEK = async (collectionName) => {
2759
+ return async (collectionName) => {
1761
2760
  if (!getDEKFn) {
1762
2761
  getDEKFn = await ensureCollectionDEK(this.adapter, this.name, this.keyring);
1763
2762
  }
@@ -1775,6 +2774,10 @@ var Compartment = class {
1775
2774
  * loads records on demand and bounds memory via the LRU cache.
1776
2775
  * - `options.cache` configures the LRU bounds. Required in lazy mode.
1777
2776
  * Accepts `{ maxRecords, maxBytes: '50MB' | 1024 }`.
2777
+ * - `options.schema` attaches a Standard Schema v1 validator (Zod,
2778
+ * Valibot, ArkType, Effect Schema, etc.). Every `put()` is validated
2779
+ * before encryption; every read is validated after decryption.
2780
+ * Failing records throw `SchemaValidationError`.
1778
2781
  *
1779
2782
  * Lazy mode + indexes is rejected at construction time — see the
1780
2783
  * Collection constructor for the rationale.
@@ -1782,6 +2785,9 @@ var Compartment = class {
1782
2785
  collection(collectionName, options) {
1783
2786
  let coll = this.collectionCache.get(collectionName);
1784
2787
  if (!coll) {
2788
+ if (options?.refs) {
2789
+ this.refRegistry.register(collectionName, options.refs);
2790
+ }
1785
2791
  const collOpts = {
1786
2792
  adapter: this.adapter,
1787
2793
  compartment: this.name,
@@ -1791,22 +2797,205 @@ var Compartment = class {
1791
2797
  emitter: this.emitter,
1792
2798
  getDEK: this.getDEK,
1793
2799
  onDirty: this.onDirty,
1794
- historyConfig: this.historyConfig
2800
+ historyConfig: this.historyConfig,
2801
+ ledger: this.ledger(),
2802
+ refEnforcer: this
1795
2803
  };
1796
2804
  if (options?.indexes !== void 0) collOpts.indexes = options.indexes;
1797
2805
  if (options?.prefetch !== void 0) collOpts.prefetch = options.prefetch;
1798
2806
  if (options?.cache !== void 0) collOpts.cache = options.cache;
2807
+ if (options?.schema !== void 0) collOpts.schema = options.schema;
1799
2808
  coll = new Collection(collOpts);
1800
2809
  this.collectionCache.set(collectionName, coll);
1801
2810
  }
1802
2811
  return coll;
1803
2812
  }
2813
+ /**
2814
+ * Enforce strict outbound refs on a `put()`. Called by Collection
2815
+ * just before it writes to the adapter. For every strict ref
2816
+ * declared on the collection, check that the target id exists in
2817
+ * the target collection; throw `RefIntegrityError` if not.
2818
+ *
2819
+ * `warn` and `cascade` modes don't affect put semantics — they're
2820
+ * enforced at delete time or via `checkIntegrity()`.
2821
+ */
2822
+ async enforceRefsOnPut(collectionName, record) {
2823
+ const outbound = this.refRegistry.getOutbound(collectionName);
2824
+ if (Object.keys(outbound).length === 0) return;
2825
+ if (!record || typeof record !== "object") return;
2826
+ const obj = record;
2827
+ for (const [field, descriptor] of Object.entries(outbound)) {
2828
+ if (descriptor.mode !== "strict") continue;
2829
+ const rawId = obj[field];
2830
+ if (rawId === null || rawId === void 0) continue;
2831
+ if (typeof rawId !== "string" && typeof rawId !== "number") {
2832
+ throw new RefIntegrityError({
2833
+ collection: collectionName,
2834
+ id: obj["id"] ?? "<unknown>",
2835
+ field,
2836
+ refTo: descriptor.target,
2837
+ refId: null,
2838
+ message: `Ref field "${collectionName}.${field}" must be a string or number, got ${typeof rawId}.`
2839
+ });
2840
+ }
2841
+ const refId = String(rawId);
2842
+ const target = this.collection(descriptor.target);
2843
+ const exists = await target.get(refId);
2844
+ if (!exists) {
2845
+ throw new RefIntegrityError({
2846
+ collection: collectionName,
2847
+ id: obj["id"] ?? "<unknown>",
2848
+ field,
2849
+ refTo: descriptor.target,
2850
+ refId,
2851
+ message: `Strict ref "${collectionName}.${field}" \u2192 "${descriptor.target}" cannot be satisfied: target id "${refId}" not found in "${descriptor.target}".`
2852
+ });
2853
+ }
2854
+ }
2855
+ }
2856
+ /**
2857
+ * Enforce inbound ref modes on a `delete()`. Called by Collection
2858
+ * just before it deletes from the adapter. Walks every inbound
2859
+ * ref that targets this (collection, id) and:
2860
+ *
2861
+ * - `strict`: throws if any referencing records exist
2862
+ * - `cascade`: deletes every referencing record
2863
+ * - `warn`: no-op (checkIntegrity picks it up)
2864
+ *
2865
+ * Cascade cycles are broken via `cascadeInProgress` — re-entering
2866
+ * for the same (collection, id) returns immediately so two
2867
+ * mutually-cascading collections don't recurse forever.
2868
+ */
2869
+ async enforceRefsOnDelete(collectionName, id) {
2870
+ const key = `${collectionName}/${id}`;
2871
+ if (this.cascadeInProgress.has(key)) return;
2872
+ this.cascadeInProgress.add(key);
2873
+ try {
2874
+ const inbound = this.refRegistry.getInbound(collectionName);
2875
+ for (const rule of inbound) {
2876
+ const fromCollection = this.collection(rule.collection);
2877
+ const allRecords = await fromCollection.list();
2878
+ const matches = allRecords.filter((rec) => {
2879
+ const raw = rec[rule.field];
2880
+ if (typeof raw !== "string" && typeof raw !== "number") return false;
2881
+ return String(raw) === id;
2882
+ });
2883
+ if (matches.length === 0) continue;
2884
+ if (rule.mode === "strict") {
2885
+ const first = matches[0];
2886
+ throw new RefIntegrityError({
2887
+ collection: rule.collection,
2888
+ id: first?.["id"] ?? "<unknown>",
2889
+ field: rule.field,
2890
+ refTo: collectionName,
2891
+ refId: id,
2892
+ message: `Cannot delete "${collectionName}"/"${id}": ${matches.length} record(s) in "${rule.collection}" still reference it via strict ref "${rule.field}".`
2893
+ });
2894
+ }
2895
+ if (rule.mode === "cascade") {
2896
+ for (const match of matches) {
2897
+ const matchId = match["id"] ?? null;
2898
+ if (matchId === null) continue;
2899
+ await fromCollection.delete(matchId);
2900
+ }
2901
+ }
2902
+ }
2903
+ } finally {
2904
+ this.cascadeInProgress.delete(key);
2905
+ }
2906
+ }
2907
+ /**
2908
+ * Walk every collection that has declared refs, load its records,
2909
+ * and report any reference whose target id is missing. Modes are
2910
+ * reported alongside each violation so the caller can distinguish
2911
+ * "this is a warning the user asked for" from "this should never
2912
+ * have happened" (strict violations produced by out-of-band
2913
+ * writes).
2914
+ *
2915
+ * Returns `{ violations: [...] }` instead of throwing — the whole
2916
+ * point of `checkIntegrity()` is to surface a list for display
2917
+ * or repair, not to fail noisily.
2918
+ */
2919
+ async checkIntegrity() {
2920
+ const violations = [];
2921
+ for (const [collectionName, refs] of this.refRegistry.entries()) {
2922
+ const coll = this.collection(collectionName);
2923
+ const records = await coll.list();
2924
+ for (const record of records) {
2925
+ const recId = record["id"] ?? "<unknown>";
2926
+ for (const [field, descriptor] of Object.entries(refs)) {
2927
+ const rawId = record[field];
2928
+ if (rawId === null || rawId === void 0) continue;
2929
+ if (typeof rawId !== "string" && typeof rawId !== "number") {
2930
+ violations.push({
2931
+ collection: collectionName,
2932
+ id: recId,
2933
+ field,
2934
+ refTo: descriptor.target,
2935
+ refId: rawId,
2936
+ mode: descriptor.mode
2937
+ });
2938
+ continue;
2939
+ }
2940
+ const refId = String(rawId);
2941
+ const target = this.collection(descriptor.target);
2942
+ const exists = await target.get(refId);
2943
+ if (!exists) {
2944
+ violations.push({
2945
+ collection: collectionName,
2946
+ id: recId,
2947
+ field,
2948
+ refTo: descriptor.target,
2949
+ refId: rawId,
2950
+ mode: descriptor.mode
2951
+ });
2952
+ }
2953
+ }
2954
+ }
2955
+ }
2956
+ return { violations };
2957
+ }
2958
+ /**
2959
+ * Return this compartment's hash-chained audit log.
2960
+ *
2961
+ * The ledger is lazy-initialized on first access and cached for the
2962
+ * lifetime of the Compartment instance. Every LedgerStore instance
2963
+ * shares the same adapter and DEK resolver, so `compartment.ledger()`
2964
+ * can be called repeatedly without performance cost.
2965
+ *
2966
+ * The LedgerStore itself is the public API: consumers call
2967
+ * `.append()` (via Collection internals), `.head()`, `.verify()`,
2968
+ * and `.entries({ from, to })`. See the LedgerStore docstring for
2969
+ * the full surface and the concurrency caveats.
2970
+ */
2971
+ ledger() {
2972
+ if (!this.ledgerStore) {
2973
+ this.ledgerStore = new LedgerStore({
2974
+ adapter: this.adapter,
2975
+ compartment: this.name,
2976
+ encrypted: this.encrypted,
2977
+ getDEK: this.getDEK,
2978
+ actor: this.keyring.userId
2979
+ });
2980
+ }
2981
+ return this.ledgerStore;
2982
+ }
1804
2983
  /** List all collection names in this compartment. */
1805
2984
  async collections() {
1806
2985
  const snapshot = await this.adapter.loadAll(this.name);
1807
2986
  return Object.keys(snapshot);
1808
2987
  }
1809
- /** Dump compartment as encrypted JSON backup string. */
2988
+ /**
2989
+ * Dump compartment as a verifiable encrypted JSON backup string.
2990
+ *
2991
+ * v0.4 backups embed the current ledger head and the full
2992
+ * `_ledger` + `_ledger_deltas` internal collections so the
2993
+ * receiver can run `verifyBackupIntegrity()` after `load()` and
2994
+ * detect any tampering between dump and restore. Pre-v0.4 callers
2995
+ * who didn't have a ledger get a backup without these fields, and
2996
+ * the corresponding `load()` skips the integrity check with a
2997
+ * warning — both modes round-trip cleanly.
2998
+ */
1810
2999
  async dump() {
1811
3000
  const snapshot = await this.adapter.loadAll(this.name);
1812
3001
  const keyringIds = await this.adapter.list(this.name, "_keyring");
@@ -1817,17 +3006,58 @@ var Compartment = class {
1817
3006
  keyrings[keyringId] = JSON.parse(envelope._data);
1818
3007
  }
1819
3008
  }
3009
+ const internalSnapshot = {};
3010
+ for (const internalName of [LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION]) {
3011
+ const ids = await this.adapter.list(this.name, internalName);
3012
+ if (ids.length === 0) continue;
3013
+ const records = {};
3014
+ for (const id of ids) {
3015
+ const envelope = await this.adapter.get(this.name, internalName, id);
3016
+ if (envelope) records[id] = envelope;
3017
+ }
3018
+ internalSnapshot[internalName] = records;
3019
+ }
3020
+ const head = await this.ledger().head();
1820
3021
  const backup = {
1821
3022
  _noydb_backup: NOYDB_BACKUP_VERSION,
1822
3023
  _compartment: this.name,
1823
3024
  _exported_at: (/* @__PURE__ */ new Date()).toISOString(),
1824
3025
  _exported_by: this.keyring.userId,
1825
3026
  keyrings,
1826
- collections: snapshot
3027
+ collections: snapshot,
3028
+ ...Object.keys(internalSnapshot).length > 0 ? { _internal: internalSnapshot } : {},
3029
+ ...head ? {
3030
+ ledgerHead: {
3031
+ hash: head.hash,
3032
+ index: head.entry.index,
3033
+ ts: head.entry.ts
3034
+ }
3035
+ } : {}
1827
3036
  };
1828
3037
  return JSON.stringify(backup);
1829
3038
  }
1830
- /** Restore compartment from an encrypted JSON backup string. */
3039
+ /**
3040
+ * Restore a compartment from a verifiable backup.
3041
+ *
3042
+ * After loading, runs `verifyBackupIntegrity()` to confirm:
3043
+ * 1. The hash chain is intact (no `prevHash` mismatches)
3044
+ * 2. The chain head matches the embedded `ledgerHead.hash`
3045
+ * from the backup
3046
+ * 3. Every data envelope's `payloadHash` matches the
3047
+ * corresponding ledger entry — i.e. nobody swapped
3048
+ * ciphertext between dump and restore
3049
+ *
3050
+ * On any failure, throws `BackupLedgerError` (chain or head
3051
+ * mismatch) or `BackupCorruptedError` (data envelope mismatch).
3052
+ * The compartment state on the adapter has already been written
3053
+ * by the time we throw, so the caller is responsible for either
3054
+ * accepting the suspect state or wiping it and trying a different
3055
+ * backup.
3056
+ *
3057
+ * Pre-v0.4 backups (no `ledgerHead` field, no `_internal`) load
3058
+ * with a console warning and skip the integrity check entirely
3059
+ * — there's no chain to verify against.
3060
+ */
1831
3061
  async load(backupJson) {
1832
3062
  const backup = JSON.parse(backupJson);
1833
3063
  await this.adapter.saveAll(this.name, backup.collections);
@@ -1841,7 +3071,124 @@ var Compartment = class {
1841
3071
  };
1842
3072
  await this.adapter.put(this.name, "_keyring", userId, envelope);
1843
3073
  }
3074
+ if (backup._internal) {
3075
+ for (const [internalName, records] of Object.entries(backup._internal)) {
3076
+ for (const [id, envelope] of Object.entries(records)) {
3077
+ await this.adapter.put(this.name, internalName, id, envelope);
3078
+ }
3079
+ }
3080
+ }
3081
+ if (this.reloadKeyring) {
3082
+ this.keyring = await this.reloadKeyring();
3083
+ this.getDEK = this.makeGetDEK();
3084
+ }
1844
3085
  this.collectionCache.clear();
3086
+ this.ledgerStore = null;
3087
+ if (!backup.ledgerHead) {
3088
+ console.warn(
3089
+ `[noy-db] Loaded a legacy backup with no ledgerHead \u2014 verifiable-backup integrity check skipped. Re-export with v0.4+ to get tamper detection.`
3090
+ );
3091
+ return;
3092
+ }
3093
+ const result = await this.verifyBackupIntegrity();
3094
+ if (!result.ok) {
3095
+ if (result.kind === "data") {
3096
+ throw new BackupCorruptedError(
3097
+ result.collection,
3098
+ result.id,
3099
+ result.message
3100
+ );
3101
+ }
3102
+ throw new BackupLedgerError(result.message, result.divergedAt);
3103
+ }
3104
+ if (result.head !== backup.ledgerHead.hash) {
3105
+ throw new BackupLedgerError(
3106
+ `Backup ledger head mismatch: embedded "${backup.ledgerHead.hash}" but reconstructed "${result.head}".`
3107
+ );
3108
+ }
3109
+ }
3110
+ /**
3111
+ * End-to-end backup integrity check. Runs both:
3112
+ *
3113
+ * 1. `ledger.verify()` — walks the hash chain and confirms
3114
+ * every `prevHash` matches the recomputed hash of its
3115
+ * predecessor.
3116
+ *
3117
+ * 2. **Data envelope cross-check** — for every (collection, id)
3118
+ * that has a current value, find the most recent ledger
3119
+ * entry recording a `put` for that pair, recompute the
3120
+ * sha256 of the stored envelope's `_data`, and compare to
3121
+ * the entry's `payloadHash`. Any mismatch means an
3122
+ * out-of-band write modified the data without updating the
3123
+ * ledger.
3124
+ *
3125
+ * Returns a discriminated union so callers can handle the two
3126
+ * failure modes differently:
3127
+ * - `{ ok: true, head, length }` — chain verified and all
3128
+ * data matches; safe to use.
3129
+ * - `{ ok: false, kind: 'chain', divergedAt, message }` — the
3130
+ * chain itself is broken at the given index.
3131
+ * - `{ ok: false, kind: 'data', collection, id, message }` —
3132
+ * a specific data envelope doesn't match its ledger entry.
3133
+ *
3134
+ * This method is exposed so users can call it any time, not just
3135
+ * during `load()`. A scheduled background check is the simplest
3136
+ * way to detect tampering of an in-place compartment.
3137
+ */
3138
+ async verifyBackupIntegrity() {
3139
+ const chainResult = await this.ledger().verify();
3140
+ if (!chainResult.ok) {
3141
+ return {
3142
+ ok: false,
3143
+ kind: "chain",
3144
+ divergedAt: chainResult.divergedAt,
3145
+ message: `Ledger chain diverged at index ${chainResult.divergedAt}: expected prevHash "${chainResult.expected}" but found "${chainResult.actual}".`
3146
+ };
3147
+ }
3148
+ const ledger = this.ledger();
3149
+ const allEntries = await ledger.loadAllEntries();
3150
+ const seen = /* @__PURE__ */ new Set();
3151
+ const latest = /* @__PURE__ */ new Map();
3152
+ for (let i = allEntries.length - 1; i >= 0; i--) {
3153
+ const entry = allEntries[i];
3154
+ if (!entry) continue;
3155
+ const key = `${entry.collection}/${entry.id}`;
3156
+ if (seen.has(key)) continue;
3157
+ seen.add(key);
3158
+ if (entry.op === "delete") continue;
3159
+ latest.set(key, {
3160
+ collection: entry.collection,
3161
+ id: entry.id,
3162
+ expectedHash: entry.payloadHash
3163
+ });
3164
+ }
3165
+ for (const { collection, id, expectedHash } of latest.values()) {
3166
+ const envelope = await this.adapter.get(this.name, collection, id);
3167
+ if (!envelope) {
3168
+ return {
3169
+ ok: false,
3170
+ kind: "data",
3171
+ collection,
3172
+ id,
3173
+ message: `Ledger expects data record "${collection}/${id}" to exist, but the adapter has no envelope for it.`
3174
+ };
3175
+ }
3176
+ const actualHash = await sha256Hex(envelope._data);
3177
+ if (actualHash !== expectedHash) {
3178
+ return {
3179
+ ok: false,
3180
+ kind: "data",
3181
+ collection,
3182
+ id,
3183
+ message: `Data envelope "${collection}/${id}" has been tampered with: expected payloadHash "${expectedHash}", got "${actualHash}".`
3184
+ };
3185
+ }
3186
+ }
3187
+ return {
3188
+ ok: true,
3189
+ head: chainResult.head,
3190
+ length: chainResult.length
3191
+ };
1845
3192
  }
1846
3193
  /** Export compartment as decrypted JSON (owner only). */
1847
3194
  async export() {
@@ -2207,7 +3554,23 @@ var Noydb = class {
2207
3554
  encrypted: this.options.encrypt !== false,
2208
3555
  emitter: this.emitter,
2209
3556
  onDirty: syncEngine ? (coll, id, action, version) => syncEngine.trackChange(coll, id, action, version) : void 0,
2210
- historyConfig: this.options.history
3557
+ historyConfig: this.options.history,
3558
+ // Refresh callback used by Compartment.load() to re-derive
3559
+ // the in-memory keyring from a freshly-loaded keyring file.
3560
+ // Encrypted compartments need this so post-load decrypts work
3561
+ // against the loaded session's wrapped DEKs; plaintext
3562
+ // compartments leave it null and load() skips the refresh.
3563
+ reloadKeyring: this.options.encrypt !== false && this.options.secret ? async () => {
3564
+ this.keyringCache.delete(name);
3565
+ const refreshed = await loadKeyring(
3566
+ this.options.adapter,
3567
+ name,
3568
+ this.options.user,
3569
+ this.options.secret
3570
+ );
3571
+ this.keyringCache.set(name, refreshed);
3572
+ return refreshed;
3573
+ } : void 0
2211
3574
  });
2212
3575
  this.compartmentCache.set(name, comp);
2213
3576
  return comp;
@@ -2495,12 +3858,17 @@ function estimateEntropy(passphrase) {
2495
3858
  return Math.floor(passphrase.length * Math.log2(charsetSize));
2496
3859
  }
2497
3860
  export {
3861
+ BackupCorruptedError,
3862
+ BackupLedgerError,
2498
3863
  Collection,
2499
3864
  CollectionIndexes,
2500
3865
  Compartment,
2501
3866
  ConflictError,
2502
3867
  DecryptionError,
2503
3868
  InvalidKeyError,
3869
+ LEDGER_COLLECTION,
3870
+ LEDGER_DELTAS_COLLECTION,
3871
+ LedgerStore,
2504
3872
  Lru,
2505
3873
  NOYDB_BACKUP_VERSION,
2506
3874
  NOYDB_FORMAT_VERSION,
@@ -2514,26 +3882,41 @@ export {
2514
3882
  PermissionDeniedError,
2515
3883
  Query,
2516
3884
  ReadOnlyError,
3885
+ RefIntegrityError,
3886
+ RefRegistry,
3887
+ RefScopeError,
3888
+ SchemaValidationError,
2517
3889
  SyncEngine,
2518
3890
  TamperedError,
2519
3891
  ValidationError,
3892
+ applyPatch,
3893
+ canonicalJson,
3894
+ computePatch,
2520
3895
  createNoydb,
2521
3896
  defineAdapter,
2522
- diff,
3897
+ diff2 as diff,
2523
3898
  enrollBiometric,
3899
+ envelopePayloadHash,
2524
3900
  estimateEntropy,
2525
3901
  estimateRecordBytes,
2526
3902
  evaluateClause,
2527
3903
  evaluateFieldClause,
2528
3904
  executePlan,
2529
3905
  formatDiff,
3906
+ hashEntry,
2530
3907
  isBiometricAvailable,
2531
3908
  loadBiometric,
3909
+ paddedIndex,
2532
3910
  parseBytes,
3911
+ parseIndex,
2533
3912
  readPath,
3913
+ ref,
2534
3914
  removeBiometric,
2535
3915
  saveBiometric,
3916
+ sha256Hex,
2536
3917
  unlockBiometric,
2537
- validatePassphrase
3918
+ validatePassphrase,
3919
+ validateSchemaInput,
3920
+ validateSchemaOutput
2538
3921
  };
2539
3922
  //# sourceMappingURL=index.js.map