@noy-db/core 0.3.0 → 0.4.1

package/dist/index.cjs CHANGED
@@ -20,12 +20,17 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // src/index.ts
  var index_exports = {};
  __export(index_exports, {
+ BackupCorruptedError: () => BackupCorruptedError,
+ BackupLedgerError: () => BackupLedgerError,
  Collection: () => Collection,
  CollectionIndexes: () => CollectionIndexes,
  Compartment: () => Compartment,
  ConflictError: () => ConflictError,
  DecryptionError: () => DecryptionError,
  InvalidKeyError: () => InvalidKeyError,
+ LEDGER_COLLECTION: () => LEDGER_COLLECTION,
+ LEDGER_DELTAS_COLLECTION: () => LEDGER_DELTAS_COLLECTION,
+ LedgerStore: () => LedgerStore,
  Lru: () => Lru,
  NOYDB_BACKUP_VERSION: () => NOYDB_BACKUP_VERSION,
  NOYDB_FORMAT_VERSION: () => NOYDB_FORMAT_VERSION,
@@ -39,27 +44,42 @@ __export(index_exports, {
  PermissionDeniedError: () => PermissionDeniedError,
  Query: () => Query,
  ReadOnlyError: () => ReadOnlyError,
+ RefIntegrityError: () => RefIntegrityError,
+ RefRegistry: () => RefRegistry,
+ RefScopeError: () => RefScopeError,
+ SchemaValidationError: () => SchemaValidationError,
  SyncEngine: () => SyncEngine,
  TamperedError: () => TamperedError,
  ValidationError: () => ValidationError,
+ applyPatch: () => applyPatch,
+ canonicalJson: () => canonicalJson,
+ computePatch: () => computePatch,
  createNoydb: () => createNoydb,
  defineAdapter: () => defineAdapter,
- diff: () => diff,
+ diff: () => diff2,
  enrollBiometric: () => enrollBiometric,
+ envelopePayloadHash: () => envelopePayloadHash,
  estimateEntropy: () => estimateEntropy,
  estimateRecordBytes: () => estimateRecordBytes,
  evaluateClause: () => evaluateClause,
  evaluateFieldClause: () => evaluateFieldClause,
  executePlan: () => executePlan,
  formatDiff: () => formatDiff,
+ hashEntry: () => hashEntry,
  isBiometricAvailable: () => isBiometricAvailable,
  loadBiometric: () => loadBiometric,
+ paddedIndex: () => paddedIndex,
  parseBytes: () => parseBytes,
+ parseIndex: () => parseIndex,
  readPath: () => readPath,
+ ref: () => ref,
  removeBiometric: () => removeBiometric,
  saveBiometric: () => saveBiometric,
+ sha256Hex: () => sha256Hex,
  unlockBiometric: () => unlockBiometric,
- validatePassphrase: () => validatePassphrase
+ validatePassphrase: () => validatePassphrase,
+ validateSchemaInput: () => validateSchemaInput,
+ validateSchemaOutput: () => validateSchemaOutput
  });
  module.exports = __toCommonJS(index_exports);

@@ -143,6 +163,74 @@ var ValidationError = class extends NoydbError {
  this.name = "ValidationError";
  }
  };
+ var SchemaValidationError = class extends NoydbError {
+ issues;
+ direction;
+ constructor(message, issues, direction) {
+ super("SCHEMA_VALIDATION_FAILED", message);
+ this.name = "SchemaValidationError";
+ this.issues = issues;
+ this.direction = direction;
+ }
+ };
+ var BackupLedgerError = class extends NoydbError {
+ /** First-broken-entry index, if known. */
+ divergedAt;
+ constructor(message, divergedAt) {
+ super("BACKUP_LEDGER", message);
+ this.name = "BackupLedgerError";
+ if (divergedAt !== void 0) this.divergedAt = divergedAt;
+ }
+ };
+ var BackupCorruptedError = class extends NoydbError {
+ /** The (collection, id) pair whose envelope failed the hash check. */
+ collection;
+ id;
+ constructor(collection, id, message) {
+ super("BACKUP_CORRUPTED", message);
+ this.name = "BackupCorruptedError";
+ this.collection = collection;
+ this.id = id;
+ }
+ };
+
+ // src/schema.ts
+ async function validateSchemaInput(schema, value, context) {
+ const result = await schema["~standard"].validate(value);
+ if (result.issues !== void 0 && result.issues.length > 0) {
+ throw new SchemaValidationError(
+ `Schema validation failed on ${context}: ${summarizeIssues(result.issues)}`,
+ result.issues,
+ "input"
+ );
+ }
+ return result.value;
+ }
+ async function validateSchemaOutput(schema, value, context) {
+ const result = await schema["~standard"].validate(value);
+ if (result.issues !== void 0 && result.issues.length > 0) {
+ throw new SchemaValidationError(
+ `Stored data for ${context} does not match the current schema \u2014 schema drift? ${summarizeIssues(result.issues)}`,
+ result.issues,
+ "output"
+ );
+ }
+ return result.value;
+ }
+ function summarizeIssues(issues) {
+ const shown = issues.slice(0, 3).map((issue) => {
+ const pathStr = formatPath(issue.path);
+ return `${pathStr}: ${issue.message}`;
+ });
+ const suffix = issues.length > 3 ? ` (+${issues.length - 3} more)` : "";
+ return shown.join("; ") + suffix;
+ }
+ function formatPath(path) {
+ if (!path || path.length === 0) return "root";
+ return path.map(
+ (segment) => typeof segment === "object" && segment !== null ? String(segment.key) : String(segment)
+ ).join(".");
+ }

  // src/crypto.ts
  var PBKDF2_ITERATIONS = 6e5;
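
// Example (sketch): the `~standard` contract these validators call into. Any
// Standard Schema v1 object works; the hand-rolled validator below is
// hypothetical and only illustrates the shape. Real callers would pass a
// Zod/Valibot/ArkType schema instead.
const ageSchema = {
  "~standard": {
    version: 1,
    vendor: "example",
    validate(value) {
      return typeof value === "number" && value >= 0
        ? { value }
        : { issues: [{ message: "must be a non-negative number", path: [] }] };
    }
  }
};
// await validateSchemaInput(ageSchema, 42, "put(user-1)");  // → 42
// await validateSchemaInput(ageSchema, -5, "put(user-1)");  // throws
// //   SchemaValidationError { direction: "input", issues: [...] }
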
@@ -253,6 +341,749 @@ function base64ToBuffer(base64) {
  return bytes;
  }

+ // src/ledger/entry.ts
+ function canonicalJson(value) {
+ if (value === null) return "null";
+ if (typeof value === "boolean") return value ? "true" : "false";
+ if (typeof value === "number") {
+ if (!Number.isFinite(value)) {
+ throw new Error(
+ `canonicalJson: refusing to encode non-finite number ${String(value)}`
+ );
+ }
+ return JSON.stringify(value);
+ }
+ if (typeof value === "string") return JSON.stringify(value);
+ if (typeof value === "bigint") {
+ throw new Error("canonicalJson: BigInt is not JSON-serializable");
+ }
+ if (typeof value === "undefined" || typeof value === "function") {
+ throw new Error(
+ `canonicalJson: refusing to encode ${typeof value} \u2014 include all fields explicitly`
+ );
+ }
+ if (Array.isArray(value)) {
+ return "[" + value.map((v) => canonicalJson(v)).join(",") + "]";
+ }
+ if (typeof value === "object") {
+ const obj = value;
+ const keys = Object.keys(obj).sort();
+ const parts = [];
+ for (const key of keys) {
+ parts.push(JSON.stringify(key) + ":" + canonicalJson(obj[key]));
+ }
+ return "{" + parts.join(",") + "}";
+ }
+ throw new Error(`canonicalJson: unexpected value type: ${typeof value}`);
+ }
+ async function sha256Hex(input) {
+ const bytes = new TextEncoder().encode(input);
+ const digest = await globalThis.crypto.subtle.digest("SHA-256", bytes);
+ return bytesToHex(new Uint8Array(digest));
+ }
+ async function hashEntry(entry) {
+ return sha256Hex(canonicalJson(entry));
+ }
+ function bytesToHex(bytes) {
+ const hex = new Array(bytes.length);
+ for (let i = 0; i < bytes.length; i++) {
+ hex[i] = (bytes[i] ?? 0).toString(16).padStart(2, "0");
+ }
+ return hex.join("");
+ }
+ function paddedIndex(index) {
+ return String(index).padStart(10, "0");
+ }
+ function parseIndex(key) {
+ return Number.parseInt(key, 10);
+ }
+
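
// Example (sketch): canonical encoding is what makes the hash chain
// deterministic. Key order never changes the digest, and non-JSON values
// (undefined, functions, BigInt, NaN) throw instead of encoding ambiguously:
(async () => {
  const a = canonicalJson({ b: 1, a: 2 }); // '{"a":2,"b":1}'
  const b = canonicalJson({ a: 2, b: 1 }); // same string: keys are sorted
  console.assert(a === b && await sha256Hex(a) === await sha256Hex(b));
})();
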
+ // src/ledger/patch.ts
+ function computePatch(prev, next) {
+ const ops = [];
+ diff(prev, next, "", ops);
+ return ops;
+ }
+ function diff(prev, next, path, out) {
+ if (prev === next) return;
+ if (prev === null || next === null) {
+ out.push({ op: "replace", path, value: next });
+ return;
+ }
+ const prevIsArray = Array.isArray(prev);
+ const nextIsArray = Array.isArray(next);
+ const prevIsObject = typeof prev === "object" && !prevIsArray;
+ const nextIsObject = typeof next === "object" && !nextIsArray;
+ if (prevIsArray !== nextIsArray || prevIsObject !== nextIsObject) {
+ out.push({ op: "replace", path, value: next });
+ return;
+ }
+ if (prevIsArray && nextIsArray) {
+ if (!arrayDeepEqual(prev, next)) {
+ out.push({ op: "replace", path, value: next });
+ }
+ return;
+ }
+ if (prevIsObject && nextIsObject) {
+ const prevObj = prev;
+ const nextObj = next;
+ const prevKeys = Object.keys(prevObj);
+ const nextKeys = Object.keys(nextObj);
+ for (const key of prevKeys) {
+ const childPath = path + "/" + escapePathSegment(key);
+ if (!(key in nextObj)) {
+ out.push({ op: "remove", path: childPath });
+ } else {
+ diff(prevObj[key], nextObj[key], childPath, out);
+ }
+ }
+ for (const key of nextKeys) {
+ if (!(key in prevObj)) {
+ out.push({
+ op: "add",
+ path: path + "/" + escapePathSegment(key),
+ value: nextObj[key]
+ });
+ }
+ }
+ return;
+ }
+ out.push({ op: "replace", path, value: next });
+ }
+ function arrayDeepEqual(a, b) {
+ if (a.length !== b.length) return false;
+ for (let i = 0; i < a.length; i++) {
+ if (!deepEqual(a[i], b[i])) return false;
+ }
+ return true;
+ }
+ function deepEqual(a, b) {
+ if (a === b) return true;
+ if (a === null || b === null) return false;
+ if (typeof a !== typeof b) return false;
+ if (typeof a !== "object") return false;
+ const aArray = Array.isArray(a);
+ const bArray = Array.isArray(b);
+ if (aArray !== bArray) return false;
+ if (aArray && bArray) return arrayDeepEqual(a, b);
+ const aObj = a;
+ const bObj = b;
+ const aKeys = Object.keys(aObj);
+ const bKeys = Object.keys(bObj);
+ if (aKeys.length !== bKeys.length) return false;
+ for (const key of aKeys) {
+ if (!(key in bObj)) return false;
+ if (!deepEqual(aObj[key], bObj[key])) return false;
+ }
+ return true;
+ }
+ function applyPatch(base, patch) {
+ let result = clone(base);
+ for (const op of patch) {
+ result = applyOp(result, op);
+ }
+ return result;
+ }
+ function applyOp(doc, op) {
+ if (op.path === "") {
+ if (op.op === "remove") return null;
+ return clone(op.value);
+ }
+ const segments = parsePath(op.path);
+ return walkAndApply(doc, segments, op);
+ }
+ function walkAndApply(doc, segments, op) {
+ if (segments.length === 0) {
+ throw new Error("walkAndApply: empty segments (internal error)");
+ }
+ const [head, ...rest] = segments;
+ if (head === void 0) throw new Error("walkAndApply: undefined segment");
+ if (rest.length === 0) {
+ return applyAtTerminal(doc, head, op);
+ }
+ if (Array.isArray(doc)) {
+ const idx = parseArrayIndex(head, doc.length);
+ const child = doc[idx];
+ const newChild = walkAndApply(child, rest, op);
+ const next = doc.slice();
+ next[idx] = newChild;
+ return next;
+ }
+ if (doc !== null && typeof doc === "object") {
+ const obj = doc;
+ if (!(head in obj)) {
+ throw new Error(`applyPatch: path segment "${head}" not found in object`);
+ }
+ const newChild = walkAndApply(obj[head], rest, op);
+ return { ...obj, [head]: newChild };
+ }
+ throw new Error(
+ `applyPatch: cannot step into ${typeof doc} at segment "${head}"`
+ );
+ }
+ function applyAtTerminal(doc, segment, op) {
+ if (Array.isArray(doc)) {
+ const idx = segment === "-" ? doc.length : parseArrayIndex(segment, doc.length + 1);
+ const next = doc.slice();
+ if (op.op === "remove") {
+ next.splice(idx, 1);
+ return next;
+ }
+ if (op.op === "add") {
+ next.splice(idx, 0, clone(op.value));
+ return next;
+ }
+ if (op.op === "replace") {
+ if (idx >= doc.length) {
+ throw new Error(
+ `applyPatch: replace at out-of-bounds array index ${idx}`
+ );
+ }
+ next[idx] = clone(op.value);
+ return next;
+ }
+ }
+ if (doc !== null && typeof doc === "object") {
+ const obj = doc;
+ if (op.op === "remove") {
+ if (!(segment in obj)) {
+ throw new Error(
+ `applyPatch: remove on missing key "${segment}"`
+ );
+ }
+ const next = { ...obj };
+ delete next[segment];
+ return next;
+ }
+ if (op.op === "add") {
+ return { ...obj, [segment]: clone(op.value) };
+ }
+ if (op.op === "replace") {
+ if (!(segment in obj)) {
+ throw new Error(
+ `applyPatch: replace on missing key "${segment}"`
+ );
+ }
+ return { ...obj, [segment]: clone(op.value) };
+ }
+ }
+ throw new Error(
+ `applyPatch: cannot apply ${op.op} at terminal segment "${segment}"`
+ );
+ }
+ function escapePathSegment(segment) {
+ return segment.replace(/~/g, "~0").replace(/\//g, "~1");
+ }
+ function unescapePathSegment(segment) {
+ return segment.replace(/~1/g, "/").replace(/~0/g, "~");
+ }
+ function parsePath(path) {
+ if (!path.startsWith("/")) {
+ throw new Error(`applyPatch: path must start with '/', got "${path}"`);
+ }
+ return path.slice(1).split("/").map(unescapePathSegment);
+ }
+ function parseArrayIndex(segment, max) {
+ if (!/^\d+$/.test(segment)) {
+ throw new Error(
+ `applyPatch: array index must be a non-negative integer, got "${segment}"`
+ );
+ }
+ const idx = Number.parseInt(segment, 10);
+ if (idx < 0 || idx > max) {
+ throw new Error(
+ `applyPatch: array index ${idx} out of range [0, ${max}]`
+ );
+ }
+ return idx;
+ }
+ function clone(value) {
+ if (value === null || value === void 0) return value;
+ if (typeof value !== "object") return value;
+ return JSON.parse(JSON.stringify(value));
+ }
+
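
// Example (sketch): a computePatch()/applyPatch() round trip. Objects diff
// recursively; arrays are compared deeply but replaced wholesale:
const before = { name: "Ada", tags: ["a"], address: { city: "Oslo" } };
const after = { name: "Ada", tags: ["a", "b"], address: { city: "Bergen" } };
const patch = computePatch(before, after);
// [ { op: "replace", path: "/tags", value: ["a", "b"] },
//   { op: "replace", path: "/address/city", value: "Bergen" } ]
const rebuilt = applyPatch(before, patch); // `before` itself is left untouched
console.assert(deepEqual(rebuilt, after));
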
+ // src/ledger/store.ts
+ var LEDGER_COLLECTION = "_ledger";
+ var LEDGER_DELTAS_COLLECTION = "_ledger_deltas";
+ var LedgerStore = class {
+ adapter;
+ compartment;
+ encrypted;
+ getDEK;
+ actor;
+ /**
+ * In-memory cache of the chain head — the most recently appended
+ * entry along with its precomputed hash. Without this, every
+ * `append()` would re-load every prior entry to recompute the
+ * prevHash, making N puts O(N²) — a 1K-record stress test goes from
+ * < 100ms to a multi-second timeout.
+ *
+ * The cache is populated on first read (`append`, `head`, `verify`)
+ * and updated in-place on every successful `append`. Single-writer
+ * usage (the v0.4 assumption) keeps it consistent. A second
+ * LedgerStore instance writing to the same compartment would not
+ * see the first instance's appends in its cached state — that's the
+ * concurrency caveat documented at the class level.
+ *
+ * Sentinel `undefined` means "not yet loaded"; an explicit `null`
+ * value means "loaded and confirmed empty" — distinguishing these
+ * matters because an empty ledger is a valid state (genesis prevHash
+ * is the empty string), and we don't want to re-scan the adapter
+ * just because the chain is freshly initialized.
+ */
+ headCache = void 0;
+ constructor(opts) {
+ this.adapter = opts.adapter;
+ this.compartment = opts.compartment;
+ this.encrypted = opts.encrypted;
+ this.getDEK = opts.getDEK;
+ this.actor = opts.actor;
+ }
+ /**
+ * Lazily load (or return cached) the current chain head. The cache
+ * sentinel is `undefined` until first access; after the first call,
+ * the cache holds either a `{ entry, hash }` for non-empty ledgers
+ * or `null` for empty ones.
+ */
+ async getCachedHead() {
+ if (this.headCache !== void 0) return this.headCache;
+ const entries = await this.loadAllEntries();
+ const last = entries[entries.length - 1];
+ if (!last) {
+ this.headCache = null;
+ return null;
+ }
+ this.headCache = { entry: last, hash: await hashEntry(last) };
+ return this.headCache;
+ }
+ /**
+ * Append a new entry to the ledger. Returns the full entry that was
+ * written (with its assigned index and computed prevHash) so the
+ * caller can use the hash for downstream purposes (e.g., embedding
+ * in a verifiable backup).
+ *
+ * This is the **only** way to add entries. Direct adapter writes to
+ * `_ledger/` would bypass the chain math and would be caught by the
+ * next `verify()` call as a divergence.
+ */
+ async append(input) {
+ const cached = await this.getCachedHead();
+ const lastEntry = cached?.entry;
+ const prevHash = cached?.hash ?? "";
+ const nextIndex = lastEntry ? lastEntry.index + 1 : 0;
+ let deltaHash;
+ if (input.delta !== void 0) {
+ const deltaEnvelope = await this.encryptDelta(input.delta);
+ await this.adapter.put(
+ this.compartment,
+ LEDGER_DELTAS_COLLECTION,
+ paddedIndex(nextIndex),
+ deltaEnvelope
+ );
+ deltaHash = await sha256Hex(deltaEnvelope._data);
+ }
+ const entryBase = {
+ index: nextIndex,
+ prevHash,
+ op: input.op,
+ collection: input.collection,
+ id: input.id,
+ version: input.version,
+ ts: (/* @__PURE__ */ new Date()).toISOString(),
+ actor: input.actor === "" ? this.actor : input.actor,
+ payloadHash: input.payloadHash
+ };
+ const entry = deltaHash !== void 0 ? { ...entryBase, deltaHash } : entryBase;
+ const envelope = await this.encryptEntry(entry);
+ await this.adapter.put(
+ this.compartment,
+ LEDGER_COLLECTION,
+ paddedIndex(entry.index),
+ envelope
+ );
+ this.headCache = { entry, hash: await hashEntry(entry) };
+ return entry;
+ }
+ /**
+ * Load a delta payload by its entry index. Returns `null` if the
+ * entry at that index doesn't reference a delta (genesis puts and
+ * deletes leave the slot empty) or if the delta row is missing
+ * (possible after a `pruneHistory` fold).
+ *
+ * The caller is responsible for deciding what to do with a missing
+ * delta — `ledger.reconstruct()` uses it as a "stop walking
+ * backward" signal and falls back to the on-disk current value.
+ */
+ async loadDelta(index) {
+ const envelope = await this.adapter.get(
+ this.compartment,
+ LEDGER_DELTAS_COLLECTION,
+ paddedIndex(index)
+ );
+ if (!envelope) return null;
+ if (!this.encrypted) {
+ return JSON.parse(envelope._data);
+ }
+ const dek = await this.getDEK(LEDGER_COLLECTION);
+ const json = await decrypt(envelope._iv, envelope._data, dek);
+ return JSON.parse(json);
+ }
+ /** Encrypt a JSON Patch into an envelope for storage. Mirrors encryptEntry. */
+ async encryptDelta(patch) {
+ const json = JSON.stringify(patch);
+ if (!this.encrypted) {
+ return {
+ _noydb: NOYDB_FORMAT_VERSION,
+ _v: 1,
+ _ts: (/* @__PURE__ */ new Date()).toISOString(),
+ _iv: "",
+ _data: json,
+ _by: this.actor
+ };
+ }
+ const dek = await this.getDEK(LEDGER_COLLECTION);
+ const { iv, data } = await encrypt(json, dek);
+ return {
+ _noydb: NOYDB_FORMAT_VERSION,
+ _v: 1,
+ _ts: (/* @__PURE__ */ new Date()).toISOString(),
+ _iv: iv,
+ _data: data,
+ _by: this.actor
+ };
+ }
+ /**
+ * Read all entries in ascending-index order. Used internally by
+ * `append()`, `head()`, `verify()`, and `entries()`. Decryption is
+ * serial because the entries are tiny and the overhead of a Promise
+ * pool would dominate at realistic chain lengths (< 100K entries).
+ */
+ async loadAllEntries() {
+ const keys = await this.adapter.list(this.compartment, LEDGER_COLLECTION);
+ keys.sort();
+ const entries = [];
+ for (const key of keys) {
+ const envelope = await this.adapter.get(
+ this.compartment,
+ LEDGER_COLLECTION,
+ key
+ );
+ if (!envelope) continue;
+ entries.push(await this.decryptEntry(envelope));
+ }
+ return entries;
+ }
+ /**
+ * Return the current head of the ledger: the last entry, its hash,
+ * and the total chain length. `null` on an empty ledger so callers
+ * can distinguish "no history yet" from "empty history".
+ */
+ async head() {
+ const cached = await this.getCachedHead();
+ if (!cached) return null;
+ return {
+ entry: cached.entry,
+ hash: cached.hash,
+ length: cached.entry.index + 1
+ };
+ }
+ /**
+ * Return entries in the requested half-open range `[from, to)`.
+ * Defaults: `from = 0`, `to = length`. The indices are clipped to
+ * the valid range; no error is thrown for out-of-range queries.
+ */
+ async entries(opts = {}) {
+ const all = await this.loadAllEntries();
+ const from = Math.max(0, opts.from ?? 0);
+ const to = Math.min(all.length, opts.to ?? all.length);
+ return all.slice(from, to);
+ }
+ /**
+ * Reconstruct a record's state at a given historical version by
+ * walking the ledger's delta chain backward from the current state.
+ *
+ * ## Algorithm
+ *
+ * Ledger deltas are stored in **reverse** form — each entry's
+ * patch describes how to undo that put, transforming the new
+ * record back into the previous one. `reconstruct` exploits this
+ * by:
+ *
+ * 1. Finding every ledger entry for `(collection, id)` in the
+ * chain, sorted by index ascending.
+ * 2. Starting from `current` (the present value of the record,
+ * as held by the caller — typically fetched via
+ * `Collection.get()`).
+ * 3. Walking entries in **descending** index order and applying
+ * each entry's reverse patch, stopping when we reach the
+ * entry whose version equals `atVersion`.
+ *
+ * The result is the record as it existed immediately AFTER the
+ * put at `atVersion`. To get the state at the genesis put
+ * (version 1), the walk runs all the way back through every put
+ * after the first.
+ *
+ * ## Caveats
+ *
+ * - **Delete entries** break the walk: once we see a delete, the
+ * record didn't exist before that point, so there's nothing to
+ * reconstruct. We return `null` in that case.
+ * - **Missing deltas** (e.g., after `pruneHistory` folds old
+ * entries into a base snapshot) also stop the walk. v0.4 does
+ * not ship pruneHistory, so today this only happens if an entry
+ * was deleted out-of-band.
+ * - The caller MUST pass the correct current value. Passing a
+ * mutated object would corrupt the reconstruction — the patch
+ * chain is only valid against the exact state that was in
+ * effect when the most recent put happened.
+ *
+ * For v0.4, `reconstruct` is the only way to read a historical
+ * version via deltas. The legacy `_history` collection still
+ * holds full snapshots and `Collection.getVersion()` still reads
+ * from there — the two paths coexist until pruneHistory lands in
+ * a follow-up and delta becomes the default.
+ */
+ async reconstruct(collection, id, current, atVersion) {
+ const all = await this.loadAllEntries();
+ const matching = all.filter(
+ (e) => e.collection === collection && e.id === id
+ );
+ if (matching.length === 0) {
+ return null;
+ }
+ let state = current;
+ for (let i = matching.length - 1; i >= 0; i--) {
+ const entry = matching[i];
+ if (!entry) continue;
+ if (entry.version === atVersion && entry.op !== "delete") {
+ return state;
+ }
+ if (entry.op === "delete") {
+ return null;
+ }
+ if (entry.deltaHash === void 0) {
+ if (entry.version === atVersion) return state;
+ return null;
+ }
+ const patch = await this.loadDelta(entry.index);
+ if (!patch) {
+ return null;
+ }
+ if (state === null) {
+ return null;
+ }
+ state = applyPatch(state, patch);
+ }
+ return null;
+ }
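
// Example (sketch of intended usage; `tasks` and `compartment` are
// illustrative names, and put(id, record) is assumed from the code above):
//
//   await tasks.put("t1", { title: "draft" });              // version 1
//   await tasks.put("t1", { title: "draft", done: false }); // version 2
//   await tasks.put("t1", { title: "final", done: true });  // version 3
//   const current = await tasks.get("t1");
//   const v1 = await compartment.ledger().reconstruct("tasks", "t1", current, 1);
//   // → { title: "draft" } (current, with the v3 and v2 reverse patches applied)
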
+ /**
+ * Walk the chain from genesis forward and verify every link.
+ *
+ * Returns `{ ok: true, head, length }` if every entry's `prevHash`
+ * matches the recomputed hash of its predecessor (and the genesis
+ * entry's `prevHash` is the empty string).
+ *
+ * Returns `{ ok: false, divergedAt, expected, actual }` on the first
+ * mismatch. `divergedAt` is the 0-based index of the BROKEN entry
+ * — entries before that index still verify cleanly; entries at and
+ * after `divergedAt` are untrustworthy.
+ *
+ * This method detects:
+ * - Mutated entry content (fields changed)
+ * - Reordered entries (if any adjacent pair swaps, the prevHash
+ * of the second no longer matches)
+ * - Inserted entries (the inserted entry's prevHash likely fails,
+ * and the following entry's prevHash definitely fails)
+ * - Deleted entries (the entry after the deletion sees a wrong
+ * prevHash)
+ *
+ * It does NOT detect:
+ * - Tampering with the DATA collections that bypassed the ledger
+ * entirely (e.g., an attacker who modifies records without
+ * appending matching ledger entries — this is why we also
+ * plan a `verifyIntegrity()` helper in a follow-up)
+ * - Truncation of the chain at the tail (dropping the last N
+ * entries leaves a shorter but still consistent chain). External
+ * anchoring of `head.hash` to a trusted service is the defense
+ * against this.
+ */
+ async verify() {
+ const entries = await this.loadAllEntries();
+ let expectedPrevHash = "";
+ for (let i = 0; i < entries.length; i++) {
+ const entry = entries[i];
+ if (!entry) continue;
+ if (entry.prevHash !== expectedPrevHash) {
+ return {
+ ok: false,
+ divergedAt: i,
+ expected: expectedPrevHash,
+ actual: entry.prevHash
+ };
+ }
+ if (entry.index !== i) {
+ return {
+ ok: false,
+ divergedAt: i,
+ expected: `index=${i}`,
+ actual: `index=${entry.index}`
+ };
+ }
+ expectedPrevHash = await hashEntry(entry);
+ }
+ return {
+ ok: true,
+ head: expectedPrevHash,
+ length: entries.length
+ };
+ }
+ // ─── Encryption plumbing ─────────────────────────────────────────
+ /**
+ * Serialize + encrypt a ledger entry into an EncryptedEnvelope. The
+ * envelope's `_v` field is set to `entry.index + 1` so the usual
+ * optimistic-concurrency machinery has a reasonable version number
+ * to compare against (the ledger is append-only, so concurrent
+ * writes should always bump the index).
+ */
+ async encryptEntry(entry) {
+ const json = canonicalJson(entry);
+ if (!this.encrypted) {
+ return {
+ _noydb: NOYDB_FORMAT_VERSION,
+ _v: entry.index + 1,
+ _ts: entry.ts,
+ _iv: "",
+ _data: json,
+ _by: entry.actor
+ };
+ }
+ const dek = await this.getDEK(LEDGER_COLLECTION);
+ const { iv, data } = await encrypt(json, dek);
+ return {
+ _noydb: NOYDB_FORMAT_VERSION,
+ _v: entry.index + 1,
+ _ts: entry.ts,
+ _iv: iv,
+ _data: data,
+ _by: entry.actor
+ };
+ }
+ /** Decrypt an envelope into a LedgerEntry. Throws on bad key / tamper. */
+ async decryptEntry(envelope) {
+ if (!this.encrypted) {
+ return JSON.parse(envelope._data);
+ }
+ const dek = await this.getDEK(LEDGER_COLLECTION);
+ const json = await decrypt(envelope._iv, envelope._data, dek);
+ return JSON.parse(json);
+ }
+ };
+ async function envelopePayloadHash(envelope) {
+ if (!envelope) return "";
+ return sha256Hex(envelope._data);
+ }
+
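
// Example (sketch): reading and verifying the chain.
//
//   const ledger = compartment.ledger();
//   const head = await ledger.head();   // { entry, hash, length } or null
//   // anchor head.hash externally; verify() cannot catch tail truncation
//   const result = await ledger.verify();
//   if (!result.ok) {
//     // entries [0, result.divergedAt) verify cleanly; the rest is suspect
//     console.error(`diverged at #${result.divergedAt}:`,
//       result.expected, "!==", result.actual);
//   }
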
+ // src/refs.ts
+ var RefIntegrityError = class extends NoydbError {
+ collection;
+ id;
+ field;
+ refTo;
+ refId;
+ constructor(opts) {
+ super("REF_INTEGRITY", opts.message);
+ this.name = "RefIntegrityError";
+ this.collection = opts.collection;
+ this.id = opts.id;
+ this.field = opts.field;
+ this.refTo = opts.refTo;
+ this.refId = opts.refId;
+ }
+ };
+ var RefScopeError = class extends NoydbError {
+ constructor(target) {
+ super(
+ "REF_SCOPE",
+ `Cross-compartment references are not supported in v0.4 \u2014 got target "${target}". Use a simple collection name (e.g. "clients"), not a path. Cross-compartment refs are tracked for a future release.`
+ );
+ this.name = "RefScopeError";
+ }
+ };
+ function ref(target, mode = "strict") {
+ if (target.includes("/")) {
+ throw new RefScopeError(target);
+ }
+ if (!target || target.startsWith("_")) {
+ throw new Error(
+ `ref(): target collection name must be non-empty and cannot start with '_' (reserved for internal collections). Got "${target}".`
+ );
+ }
+ return { target, mode };
+ }
+ var RefRegistry = class {
+ outbound = /* @__PURE__ */ new Map();
+ inbound = /* @__PURE__ */ new Map();
+ /**
+ * Register the refs declared by a single collection. Idempotent in
+ * the happy path — calling twice with the same data is a no-op.
+ * Calling twice with DIFFERENT data throws, because silent
+ * overrides would be confusing ("I changed the ref and it doesn't
+ * update" vs "I declared the same collection twice with different
+ * refs and the second call won").
+ */
+ register(collection, refs) {
+ const existing = this.outbound.get(collection);
+ if (existing) {
+ const existingKeys = Object.keys(existing).sort();
+ const newKeys = Object.keys(refs).sort();
+ if (existingKeys.join(",") !== newKeys.join(",")) {
+ throw new Error(
+ `RefRegistry: conflicting ref declarations for collection "${collection}"`
+ );
+ }
+ for (const k of existingKeys) {
+ const a = existing[k];
+ const b = refs[k];
+ if (!a || !b || a.target !== b.target || a.mode !== b.mode) {
+ throw new Error(
+ `RefRegistry: conflicting ref declarations for collection "${collection}" field "${k}"`
+ );
+ }
+ }
+ return;
+ }
+ this.outbound.set(collection, { ...refs });
+ for (const [field, desc] of Object.entries(refs)) {
+ const list = this.inbound.get(desc.target) ?? [];
+ list.push({ collection, field, mode: desc.mode });
+ this.inbound.set(desc.target, list);
+ }
+ }
+ /** Get the outbound refs declared by a collection (or `{}` if none). */
+ getOutbound(collection) {
+ return this.outbound.get(collection) ?? {};
+ }
+ /** Get the inbound refs that target a given collection (or `[]`). */
+ getInbound(target) {
+ return this.inbound.get(target) ?? [];
+ }
+ /**
+ * Iterate every (collection → refs) pair that has at least one
+ * declared reference. Used by `checkIntegrity` to walk the full
+ * universe of outbound refs without needing to track collection
+ * names elsewhere.
+ */
+ entries() {
+ return [...this.outbound.entries()];
+ }
+ /** Clear the registry. Test-only escape hatch; never called from production code. */
+ clear() {
+ this.outbound.clear();
+ this.inbound.clear();
+ }
+ };
+
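
// Example (sketch): declaring refs when opening collections. Modes decide
// put/delete behavior; ref() rejects paths and reserved names eagerly.
//
//   const invoices = compartment.collection("invoices", {
//     refs: {
//       clientId: ref("clients"),           // "strict" is the default mode
//       projectId: ref("projects", "warn"), // reported by checkIntegrity() only
//     },
//   });
//   ref("crm/clients"); // throws RefScopeError (cross-compartment, not in v0.4)
//   ref("_ledger");     // throws ("_" prefix is reserved for internals)
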
  // src/keyring.ts
  var GRANTABLE_BY_ADMIN = ["operator", "viewer", "client"];
  function canGrant(callerRole, targetRole) {
@@ -337,6 +1168,11 @@ async function grant(adapter, compartment, callerKeyring, options) {
  }
  }
  }
+ for (const [collName, dek] of callerKeyring.deks) {
+ if (collName.startsWith("_") && !(collName in wrappedDeks)) {
+ wrappedDeks[collName] = await wrapKey(dek, newKek);
+ }
+ }
  const keyringFile = {
  _noydb_keyring: NOYDB_KEYRING_VERSION,
  user_id: options.userId,
@@ -590,7 +1426,7 @@ async function clearHistory(adapter, compartment, collection, recordId) {
  }

  // src/diff.ts
- function diff(oldObj, newObj, basePath = "") {
+ function diff2(oldObj, newObj, basePath = "") {
  const changes = [];
  if (oldObj === newObj) return changes;
  if (oldObj == null && newObj != null) {
@@ -614,7 +1450,7 @@ function diff(oldObj, newObj, basePath = "") {
  } else if (i >= newObj.length) {
  changes.push({ path: p, type: "removed", from: oldObj[i] });
  } else {
- changes.push(...diff(oldObj[i], newObj[i], p));
+ changes.push(...diff2(oldObj[i], newObj[i], p));
  }
  }
  return changes;
@@ -629,7 +1465,7 @@ function diff(oldObj, newObj, basePath = "") {
  } else if (!(key in newRecord)) {
  changes.push({ path: p, type: "removed", from: oldRecord[key] });
  } else {
- changes.push(...diff(oldRecord[key], newRecord[key], p));
+ changes.push(...diff2(oldRecord[key], newRecord[key], p));
  }
  }
  return changes;
@@ -1314,6 +2150,56 @@ var Collection = class {
  * disappear from the index without notification.
  */
  indexes = new CollectionIndexes();
+ /**
+ * Optional Standard Schema v1 validator. When set, every `put()` runs
+ * the input through `validateSchemaInput` before encryption, and every
+ * record coming OUT of `decryptRecord` runs through
+ * `validateSchemaOutput`. A rejected input throws
+ * `SchemaValidationError` with `direction: 'input'`; drifted stored
+ * data throws with `direction: 'output'`. Both carry the rich issue
+ * list from the validator so UI code can render field-level messages.
+ *
+ * The schema is stored as `StandardSchemaV1<unknown, T>` because the
+ * collection type parameter `T` is the OUTPUT type — whatever the
+ * validator produces after transforms and coercion. Users who pass a
+ * schema to `defineNoydbStore` (or `Collection.constructor`) get their
+ * `T` inferred automatically via `InferOutput<Schema>`.
+ */
+ schema;
+ /**
+ * Optional reference to the compartment-level hash-chained audit
+ * log. When present, every successful `put()` and `delete()` appends
+ * an entry to the ledger AFTER the adapter write succeeds (so a
+ * failed adapter write never produces an orphan ledger entry).
+ *
+ * The ledger is always a compartment-wide singleton — all
+ * collections in the same compartment share the same LedgerStore.
+ * Compartment.ledger() does the lazy init; this field just holds
+ * the reference so Collection doesn't need to reach back up to the
+ * compartment on every mutation.
+ *
+ * `undefined` means "no ledger attached" — supported for tests that
+ * construct a Collection directly without a compartment, and for
+ * future backwards-compat scenarios. Production usage always has a
+ * ledger because Compartment.collection() passes one through.
+ */
+ ledger;
+ /**
+ * Optional back-reference to the owning compartment's ref
+ * enforcer. When present, `Collection.put` calls
+ * `refEnforcer.enforceRefsOnPut(name, record)` before the adapter
+ * write, and `Collection.delete` calls
+ * `refEnforcer.enforceRefsOnDelete(name, id)` before its own
+ * adapter delete. The Compartment handles the actual registry
+ * lookup and cross-collection enforcement — Collection just
+ * notifies it at the right points in the lifecycle.
+ *
+ * Typed as a structural interface rather than `Compartment`
+ * directly to avoid a circular import. Compartment implements
+ * these two methods; any other object with the same shape would
+ * work too (used only in unit tests).
+ */
+ refEnforcer;
  constructor(opts) {
  this.adapter = opts.adapter;
  this.compartment = opts.compartment;
@@ -1324,6 +2210,9 @@ var Collection = class {
  this.getDEK = opts.getDEK;
  this.onDirty = opts.onDirty;
  this.historyConfig = opts.historyConfig ?? { enabled: true };
+ this.schema = opts.schema;
+ this.ledger = opts.ledger;
+ this.refEnforcer = opts.refEnforcer;
  this.lazy = opts.prefetch === false;
  if (this.lazy) {
  if (opts.indexes && opts.indexes.length > 0) {
@@ -1370,6 +2259,12 @@ var Collection = class {
  if (!hasWritePermission(this.keyring, this.name)) {
  throw new ReadOnlyError();
  }
+ if (this.schema !== void 0) {
+ record = await validateSchemaInput(this.schema, record, `put(${id})`);
+ }
+ if (this.refEnforcer !== void 0) {
+ await this.refEnforcer.enforceRefsOnPut(this.name, record);
+ }
  let existing;
  if (this.lazy && this.lru) {
  existing = this.lru.get(id);
@@ -1402,6 +2297,20 @@ var Collection = class {
  }
  const envelope = await this.encryptRecord(record, version);
  await this.adapter.put(this.compartment, this.name, id, envelope);
+ if (this.ledger) {
+ const appendInput = {
+ op: "put",
+ collection: this.name,
+ id,
+ version,
+ actor: this.keyring.userId,
+ payloadHash: await envelopePayloadHash(envelope)
+ };
+ if (existing) {
+ appendInput.delta = computePatch(record, existing.record);
+ }
+ await this.ledger.append(appendInput);
+ }
  if (this.lazy && this.lru) {
  this.lru.set(id, { record, version }, estimateRecordBytes(record));
  } else {
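
// Example (sketch, illustrative values only): what one overwrite contributes
// to the ledger. With an existing record at version 2, this put writes the
// new envelope first, then appends roughly:
//
//   _ledger/0000000007        → { index: 7, prevHash: "9f3a…", op: "put",
//                                 collection: "tasks", id: "t1", version: 3,
//                                 ts: "…", actor: "user-1",
//                                 payloadHash: "c41d…", deltaHash: "77b0…" }
//   _ledger_deltas/0000000007 → the REVERSE patch (new record back to old)
//
// Genesis puts (no prior record) omit delta/deltaHash entirely.
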
@@ -1421,14 +2330,17 @@ var Collection = class {
  if (!hasWritePermission(this.keyring, this.name)) {
  throw new ReadOnlyError();
  }
+ if (this.refEnforcer !== void 0) {
+ await this.refEnforcer.enforceRefsOnDelete(this.name, id);
+ }
  let existing;
  if (this.lazy && this.lru) {
  existing = this.lru.get(id);
  if (!existing && this.historyConfig.enabled !== false) {
- const previousEnvelope = await this.adapter.get(this.compartment, this.name, id);
- if (previousEnvelope) {
- const previousRecord = await this.decryptRecord(previousEnvelope);
- existing = { record: previousRecord, version: previousEnvelope._v };
+ const previousEnvelope2 = await this.adapter.get(this.compartment, this.name, id);
+ if (previousEnvelope2) {
+ const previousRecord = await this.decryptRecord(previousEnvelope2);
+ existing = { record: previousRecord, version: previousEnvelope2._v };
  }
  }
  } else {
@@ -1438,7 +2350,19 @@ var Collection = class {
  const historyEnvelope = await this.encryptRecord(existing.record, existing.version);
  await saveHistory(this.adapter, this.compartment, this.name, id, historyEnvelope);
  }
+ const previousEnvelope = await this.adapter.get(this.compartment, this.name, id);
+ const previousPayloadHash = await envelopePayloadHash(previousEnvelope);
  await this.adapter.delete(this.compartment, this.name, id);
+ if (this.ledger) {
+ await this.ledger.append({
+ op: "delete",
+ collection: this.name,
+ id,
+ version: existing?.version ?? 0,
+ actor: this.keyring.userId,
+ payloadHash: previousPayloadHash
+ });
+ }
  if (this.lazy && this.lru) {
  this.lru.remove(id);
  } else {
@@ -1532,7 +2456,7 @@ var Collection = class {
  );
  const entries = [];
  for (const env of envelopes) {
- const record = await this.decryptRecord(env);
+ const record = await this.decryptRecord(env, { skipValidation: true });
  entries.push({
  version: env._v,
  timestamp: env._ts,
@@ -1542,7 +2466,15 @@ var Collection = class {
  }
  return entries;
  }
- /** Get a specific past version of a record. */
+ /**
+ * Get a specific past version of a record.
+ *
+ * History reads intentionally **skip schema validation** — historical
+ * records predate the current schema by definition, so validating them
+ * against today's shape would be a false positive on any schema
+ * evolution. If a caller needs validated history, they should filter
+ * and re-put the records through the normal `put()` path.
+ */
  async getVersion(id, version) {
  const envelope = await getVersionEnvelope(
  this.adapter,
@@ -1552,7 +2484,7 @@ var Collection = class {
  version
  );
  if (!envelope) return null;
- return this.decryptRecord(envelope);
+ return this.decryptRecord(envelope, { skipValidation: true });
  }
  /** Revert a record to a past version. Creates a new version with the old content. */
  async revert(id, version) {
@@ -1570,7 +2502,7 @@ var Collection = class {
  async diff(id, versionA, versionB) {
  const recordA = versionA === 0 ? null : await this.resolveVersion(id, versionA);
  const recordB = versionB === void 0 || versionB === 0 ? versionB === 0 ? null : await this.resolveCurrentOrVersion(id) : await this.resolveVersion(id, versionB);
- return diff(recordA, recordB);
+ return diff2(recordA, recordB);
  }
  /** Resolve a version: try history first, then check if it's the current version. */
  async resolveVersion(id, version) {
@@ -1792,13 +2724,38 @@ var Collection = class {
  _by: by
  };
  }
- async decryptRecord(envelope) {
+ /**
+ * Decrypt an envelope into a record of type `T`.
+ *
+ * When a schema is attached, the decrypted value is validated before
+ * being returned. A divergence between the stored bytes and the
+ * current schema throws `SchemaValidationError` with
+ * `direction: 'output'` — silently returning drifted data would
+ * propagate garbage into the UI and break the whole point of having
+ * a schema.
+ *
+ * `skipValidation` exists for history reads: when calling
+ * `getVersion()` the caller is explicitly asking for an old snapshot
+ * that may predate a schema change, so validating it would be a
+ * false positive. Every non-history read leaves this flag `false`.
+ */
+ async decryptRecord(envelope, opts = {}) {
+ let record;
  if (!this.encrypted) {
- return JSON.parse(envelope._data);
+ record = JSON.parse(envelope._data);
+ } else {
+ const dek = await this.getDEK(this.name);
+ const json = await decrypt(envelope._iv, envelope._data, dek);
+ record = JSON.parse(json);
+ }
+ if (this.schema !== void 0 && !opts.skipValidation) {
+ record = await validateSchemaOutput(
+ this.schema,
+ record,
+ `${this.name}@v${envelope._v}`
+ );
  }
- const dek = await this.getDEK(this.name);
- const json = await decrypt(envelope._iv, envelope._data, dek);
- return JSON.parse(json);
+ return record;
  }
  };
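
// Example (sketch): attaching a schema at collection() time. Shown with Zod
// (any Standard Schema v1 library behaves the same way):
//
//   const Task = z.object({ title: z.string(), done: z.boolean() });
//   const tasks = compartment.collection("tasks", { schema: Task });
//   await tasks.put("t1", { title: "ok", done: false }); // validated, stored
//   await tasks.put("t2", { title: 42 });                // throws
//   //   SchemaValidationError { direction: "input", issues: [...] }
//   // Reads validate on the way out; getVersion() history reads skip it.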
@@ -1806,13 +2763,62 @@ var Collection = class {
  var Compartment = class {
  adapter;
  name;
+ /**
+ * The active in-memory keyring. NOT readonly because `load()`
+ * needs to refresh it after restoring a different keyring file —
+ * otherwise the in-memory DEKs (from the pre-load session) and
+ * the on-disk wrapped DEKs (from the loaded backup) drift apart
+ * and every subsequent decrypt fails with TamperedError.
+ */
  keyring;
  encrypted;
  emitter;
  onDirty;
  historyConfig;
  getDEK;
+ /**
+ * Optional callback that re-derives an UnlockedKeyring from the
+ * adapter using the active user's passphrase. Called by `load()`
+ * after the on-disk keyring file has been replaced — refreshes
+ * `this.keyring` so the next DEK access uses the loaded wrapped
+ * DEKs instead of the stale pre-load ones.
+ *
+ * Provided by Noydb at openCompartment() time. Tests that
+ * construct Compartment directly can pass `undefined`; load()
+ * skips the refresh in that case (which is fine for plaintext
+ * compartments — there's nothing to re-unwrap).
+ */
+ reloadKeyring;
  collectionCache = /* @__PURE__ */ new Map();
+ /**
+ * Per-compartment ledger store. Lazy-initialized on first
+ * `collection()` call (which passes it through to the Collection)
+ * or on first `ledger()` call from user code.
+ *
+ * One LedgerStore is shared across all collections in a compartment
+ * because the hash chain is compartment-scoped: the chain head is a
+ * single "what did this compartment do last" identifier, not a
+ * per-collection one. Two collections appending concurrently is the
+ * single-writer concurrency concern documented in the LedgerStore
+ * docstring.
+ */
+ ledgerStore = null;
+ /**
+ * Per-compartment foreign-key reference registry. Collections
+ * register their `refs` option here on construction; the
+ * compartment uses the registry on every put/delete/checkIntegrity
+ * call. One instance lives for the compartment's lifetime.
+ */
+ refRegistry = new RefRegistry();
+ /**
+ * Set of collection record-ids currently being deleted as part of
+ * a cascade. Populated on entry to `enforceRefsOnDelete` and
+ * drained on exit. Used to break mutual-cascade cycles: deleting
+ * A → cascade to B → cascade back to A would otherwise recurse
+ * forever, so we short-circuit when we see an already-in-progress
+ * delete on the same (collection, id) pair.
+ */
+ cascadeInProgress = /* @__PURE__ */ new Set();
  constructor(opts) {
  this.adapter = opts.adapter;
  this.name = opts.name;
@@ -1821,8 +2827,21 @@ var Compartment = class {
  this.emitter = opts.emitter;
  this.onDirty = opts.onDirty;
  this.historyConfig = opts.historyConfig ?? { enabled: true };
+ this.reloadKeyring = opts.reloadKeyring;
+ this.getDEK = this.makeGetDEK();
+ }
+ /**
+ * Construct (or reconstruct) the lazy DEK resolver. Captures the
+ * CURRENT value of `this.keyring` and `this.adapter` in a closure,
+ * memoizing the inner getDEKFn after first use so subsequent
+ * lookups are O(1).
+ *
+ * `load()` calls this after refreshing `this.keyring` to discard
+ * the prior session's cached DEKs.
+ */
+ makeGetDEK() {
  let getDEKFn = null;
- this.getDEK = async (collectionName) => {
+ return async (collectionName) => {
  if (!getDEKFn) {
  getDEKFn = await ensureCollectionDEK(this.adapter, this.name, this.keyring);
  }
@@ -1840,6 +2859,10 @@ var Compartment = class {
  * loads records on demand and bounds memory via the LRU cache.
  * - `options.cache` configures the LRU bounds. Required in lazy mode.
  * Accepts `{ maxRecords, maxBytes: '50MB' | 1024 }`.
+ * - `options.schema` attaches a Standard Schema v1 validator (Zod,
+ * Valibot, ArkType, Effect Schema, etc.). Every `put()` is validated
+ * before encryption; every read is validated after decryption.
+ * Failing records throw `SchemaValidationError`.
  *
  * Lazy mode + indexes is rejected at construction time — see the
  * Collection constructor for the rationale.
@@ -1847,6 +2870,9 @@
  collection(collectionName, options) {
  let coll = this.collectionCache.get(collectionName);
  if (!coll) {
+ if (options?.refs) {
+ this.refRegistry.register(collectionName, options.refs);
+ }
  const collOpts = {
  adapter: this.adapter,
  compartment: this.name,
@@ -1856,22 +2882,205 @@ var Compartment = class {
  emitter: this.emitter,
  getDEK: this.getDEK,
  onDirty: this.onDirty,
- historyConfig: this.historyConfig
+ historyConfig: this.historyConfig,
+ ledger: this.ledger(),
+ refEnforcer: this
  };
  if (options?.indexes !== void 0) collOpts.indexes = options.indexes;
  if (options?.prefetch !== void 0) collOpts.prefetch = options.prefetch;
  if (options?.cache !== void 0) collOpts.cache = options.cache;
+ if (options?.schema !== void 0) collOpts.schema = options.schema;
  coll = new Collection(collOpts);
  this.collectionCache.set(collectionName, coll);
  }
  return coll;
  }
+ /**
+ * Enforce strict outbound refs on a `put()`. Called by Collection
+ * just before it writes to the adapter. For every strict ref
+ * declared on the collection, check that the target id exists in
+ * the target collection; throw `RefIntegrityError` if not.
+ *
+ * `warn` and `cascade` modes don't affect put semantics — they're
+ * enforced at delete time or via `checkIntegrity()`.
+ */
+ async enforceRefsOnPut(collectionName, record) {
+ const outbound = this.refRegistry.getOutbound(collectionName);
+ if (Object.keys(outbound).length === 0) return;
+ if (!record || typeof record !== "object") return;
+ const obj = record;
+ for (const [field, descriptor] of Object.entries(outbound)) {
+ if (descriptor.mode !== "strict") continue;
+ const rawId = obj[field];
+ if (rawId === null || rawId === void 0) continue;
+ if (typeof rawId !== "string" && typeof rawId !== "number") {
+ throw new RefIntegrityError({
+ collection: collectionName,
+ id: obj["id"] ?? "<unknown>",
+ field,
+ refTo: descriptor.target,
+ refId: null,
+ message: `Ref field "${collectionName}.${field}" must be a string or number, got ${typeof rawId}.`
+ });
+ }
+ const refId = String(rawId);
+ const target = this.collection(descriptor.target);
+ const exists = await target.get(refId);
+ if (!exists) {
+ throw new RefIntegrityError({
+ collection: collectionName,
+ id: obj["id"] ?? "<unknown>",
+ field,
+ refTo: descriptor.target,
+ refId,
+ message: `Strict ref "${collectionName}.${field}" \u2192 "${descriptor.target}" cannot be satisfied: target id "${refId}" not found in "${descriptor.target}".`
+ });
+ }
+ }
+ }
+ /**
+ * Enforce inbound ref modes on a `delete()`. Called by Collection
+ * just before it deletes from the adapter. Walks every inbound
+ * ref that targets this (collection, id) and:
+ *
+ * - `strict`: throws if any referencing records exist
+ * - `cascade`: deletes every referencing record
+ * - `warn`: no-op (checkIntegrity picks it up)
+ *
+ * Cascade cycles are broken via `cascadeInProgress` — re-entering
+ * for the same (collection, id) returns immediately so two
+ * mutually-cascading collections don't recurse forever.
+ */
+ async enforceRefsOnDelete(collectionName, id) {
+ const key = `${collectionName}/${id}`;
+ if (this.cascadeInProgress.has(key)) return;
+ this.cascadeInProgress.add(key);
+ try {
+ const inbound = this.refRegistry.getInbound(collectionName);
+ for (const rule of inbound) {
+ const fromCollection = this.collection(rule.collection);
+ const allRecords = await fromCollection.list();
+ const matches = allRecords.filter((rec) => {
+ const raw = rec[rule.field];
+ if (typeof raw !== "string" && typeof raw !== "number") return false;
+ return String(raw) === id;
+ });
+ if (matches.length === 0) continue;
+ if (rule.mode === "strict") {
+ const first = matches[0];
+ throw new RefIntegrityError({
+ collection: rule.collection,
+ id: first?.["id"] ?? "<unknown>",
+ field: rule.field,
+ refTo: collectionName,
+ refId: id,
+ message: `Cannot delete "${collectionName}"/"${id}": ${matches.length} record(s) in "${rule.collection}" still reference it via strict ref "${rule.field}".`
+ });
+ }
+ if (rule.mode === "cascade") {
+ for (const match of matches) {
+ const matchId = match["id"] ?? null;
+ if (matchId === null) continue;
+ await fromCollection.delete(matchId);
+ }
+ }
+ }
+ } finally {
+ this.cascadeInProgress.delete(key);
+ }
+ }
+ /**
+ * Walk every collection that has declared refs, load its records,
+ * and report any reference whose target id is missing. Modes are
+ * reported alongside each violation so the caller can distinguish
+ * "this is a warning the user asked for" from "this should never
+ * have happened" (strict violations produced by out-of-band
+ * writes).
+ *
+ * Returns `{ violations: [...] }` instead of throwing — the whole
+ * point of `checkIntegrity()` is to surface a list for display
+ * or repair, not to fail noisily.
+ */
+ async checkIntegrity() {
+ const violations = [];
+ for (const [collectionName, refs] of this.refRegistry.entries()) {
+ const coll = this.collection(collectionName);
+ const records = await coll.list();
+ for (const record of records) {
+ const recId = record["id"] ?? "<unknown>";
+ for (const [field, descriptor] of Object.entries(refs)) {
+ const rawId = record[field];
+ if (rawId === null || rawId === void 0) continue;
+ if (typeof rawId !== "string" && typeof rawId !== "number") {
+ violations.push({
+ collection: collectionName,
+ id: recId,
+ field,
+ refTo: descriptor.target,
+ refId: rawId,
+ mode: descriptor.mode
+ });
+ continue;
+ }
+ const refId = String(rawId);
+ const target = this.collection(descriptor.target);
+ const exists = await target.get(refId);
+ if (!exists) {
+ violations.push({
+ collection: collectionName,
+ id: recId,
+ field,
+ refTo: descriptor.target,
+ refId: rawId,
+ mode: descriptor.mode
+ });
+ }
+ }
+ }
+ }
+ return { violations };
+ }
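
// Example (sketch): checkIntegrity() reports instead of throwing. `repair`
// below is a hypothetical application callback, not part of the API:
//
//   const { violations } = await compartment.checkIntegrity();
//   for (const v of violations) {
//     // v ≈ { collection: "invoices", id: "inv-7", field: "clientId",
//     //       refTo: "clients", refId: "c-404", mode: "warn" }
//     if (v.mode === "strict") repair(v); // out-of-band write slipped through
//   }
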
+ /**
+ * Return this compartment's hash-chained audit log.
+ *
+ * The ledger is lazy-initialized on first access and cached for the
+ * lifetime of the Compartment instance. Every LedgerStore instance
+ * shares the same adapter and DEK resolver, so `compartment.ledger()`
+ * can be called repeatedly without performance cost.
+ *
+ * The LedgerStore itself is the public API: consumers call
+ * `.append()` (via Collection internals), `.head()`, `.verify()`,
+ * and `.entries({ from, to })`. See the LedgerStore docstring for
+ * the full surface and the concurrency caveats.
+ */
+ ledger() {
+ if (!this.ledgerStore) {
+ this.ledgerStore = new LedgerStore({
+ adapter: this.adapter,
+ compartment: this.name,
+ encrypted: this.encrypted,
+ getDEK: this.getDEK,
+ actor: this.keyring.userId
+ });
+ }
+ return this.ledgerStore;
+ }
  /** List all collection names in this compartment. */
  async collections() {
  const snapshot = await this.adapter.loadAll(this.name);
  return Object.keys(snapshot);
  }
- /** Dump compartment as encrypted JSON backup string. */
+ /**
+ * Dump compartment as a verifiable encrypted JSON backup string.
+ *
+ * v0.4 backups embed the current ledger head and the full
+ * `_ledger` + `_ledger_deltas` internal collections so the
+ * receiver can run `verifyBackupIntegrity()` after `load()` and
+ * detect any tampering between dump and restore. Pre-v0.4 callers
+ * who didn't have a ledger get a backup without these fields, and
+ * the corresponding `load()` skips the integrity check with a
+ * warning — both modes round-trip cleanly.
+ */
  async dump() {
  const snapshot = await this.adapter.loadAll(this.name);
  const keyringIds = await this.adapter.list(this.name, "_keyring");
@@ -1882,17 +3091,58 @@ var Compartment = class {
1882
3091
  keyrings[keyringId] = JSON.parse(envelope._data);
1883
3092
  }
1884
3093
  }
3094
+ const internalSnapshot = {};
3095
+ for (const internalName of [LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION]) {
3096
+ const ids = await this.adapter.list(this.name, internalName);
3097
+ if (ids.length === 0) continue;
3098
+ const records = {};
3099
+ for (const id of ids) {
3100
+ const envelope = await this.adapter.get(this.name, internalName, id);
3101
+ if (envelope) records[id] = envelope;
3102
+ }
3103
+ internalSnapshot[internalName] = records;
3104
+ }
3105
+ const head = await this.ledger().head();
1885
3106
  const backup = {
1886
3107
  _noydb_backup: NOYDB_BACKUP_VERSION,
1887
3108
  _compartment: this.name,
1888
3109
  _exported_at: (/* @__PURE__ */ new Date()).toISOString(),
1889
3110
  _exported_by: this.keyring.userId,
1890
3111
  keyrings,
1891
- collections: snapshot
3112
+ collections: snapshot,
3113
+ ...Object.keys(internalSnapshot).length > 0 ? { _internal: internalSnapshot } : {},
3114
+ ...head ? {
3115
+ ledgerHead: {
3116
+ hash: head.hash,
3117
+ index: head.entry.index,
3118
+ ts: head.entry.ts
3119
+ }
3120
+ } : {}
1892
3121
  };
1893
3122
  return JSON.stringify(backup);
1894
3123
  }
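> For reference, the shape `dump()` serializes, reconstructed from the code above; field values are placeholders, and `_internal`/`ledgerHead` only appear when ledger records and a ledger head exist:

```js
const backup = JSON.parse(await comp.dump());
// backup looks like:
// {
//   _noydb_backup: NOYDB_BACKUP_VERSION,
//   _compartment: "main",
//   _exported_at: "<ISO timestamp>",
//   _exported_by: "<userId>",
//   keyrings: { ... },              // per-user keyring envelopes
//   collections: { ... },           // encrypted data envelopes
//   _internal: {                    // only when ledger records exist
//     _ledger: { ... },
//     _ledger_deltas: { ... }
//   },
//   ledgerHead: { hash, index, ts } // only when the ledger has a head
// }
```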
- /** Restore compartment from an encrypted JSON backup string. */
+ /**
+ * Restore a compartment from a verifiable backup.
+ *
+ * After loading, runs `verifyBackupIntegrity()` to confirm:
+ * 1. The hash chain is intact (no `prevHash` mismatches)
+ * 2. The chain head matches the embedded `ledgerHead.hash`
+ * from the backup
+ * 3. Every data envelope's `payloadHash` matches the
+ * corresponding ledger entry — i.e. nobody swapped
+ * ciphertext between dump and restore
+ *
+ * On any failure, throws `BackupLedgerError` (chain or head
+ * mismatch) or `BackupCorruptedError` (data envelope mismatch).
+ * The compartment state on the adapter has already been written
+ * by the time we throw, so the caller is responsible for either
+ * accepting the suspect state or wiping it and trying a different
+ * backup.
+ *
+ * Pre-v0.4 backups (no `ledgerHead` field, no `_internal`) load
+ * with a console warning and skip the integrity check entirely
+ * — there's no chain to verify against.
+ */
  async load(backupJson) {
  const backup = JSON.parse(backupJson);
  await this.adapter.saveAll(this.name, backup.collections);
@@ -1906,7 +3156,124 @@ var Compartment = class {
  };
  await this.adapter.put(this.name, "_keyring", userId, envelope);
  }
+ if (backup._internal) {
+ for (const [internalName, records] of Object.entries(backup._internal)) {
+ for (const [id, envelope] of Object.entries(records)) {
+ await this.adapter.put(this.name, internalName, id, envelope);
+ }
+ }
+ }
+ if (this.reloadKeyring) {
+ this.keyring = await this.reloadKeyring();
+ this.getDEK = this.makeGetDEK();
+ }
  this.collectionCache.clear();
+ this.ledgerStore = null;
+ if (!backup.ledgerHead) {
+ console.warn(
+ `[noy-db] Loaded a legacy backup with no ledgerHead \u2014 verifiable-backup integrity check skipped. Re-export with v0.4+ to get tamper detection.`
+ );
+ return;
+ }
+ const result = await this.verifyBackupIntegrity();
+ if (!result.ok) {
+ if (result.kind === "data") {
+ throw new BackupCorruptedError(
+ result.collection,
+ result.id,
+ result.message
+ );
+ }
+ throw new BackupLedgerError(result.message, result.divergedAt);
+ }
+ if (result.head !== backup.ledgerHead.hash) {
+ throw new BackupLedgerError(
+ `Backup ledger head mismatch: embedded "${backup.ledgerHead.hash}" but reconstructed "${result.head}".`
+ );
+ }
+ }
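> Because `load()` persists the backup before verifying it, catching these errors means the adapter now holds suspect state. A sketch of the error handling the docstring asks for, using only the exported error classes:

```js
import { BackupCorruptedError, BackupLedgerError } from "@noy-db/core";

try {
  await comp.load(backupJson);
} catch (err) {
  if (err instanceof BackupCorruptedError) {
    // a data envelope failed its payloadHash cross-check
  } else if (err instanceof BackupLedgerError) {
    // broken hash chain, or head mismatch against the embedded ledgerHead
  }
  // per the docstring: either wipe the compartment and try another
  // backup, or knowingly accept the suspect state
  throw err;
}
```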
+ /**
+ * End-to-end backup integrity check. Runs both:
+ *
+ * 1. `ledger.verify()` — walks the hash chain and confirms
+ * every `prevHash` matches the recomputed hash of its
+ * predecessor.
+ *
+ * 2. **Data envelope cross-check** — for every (collection, id)
+ * that has a current value, find the most recent ledger
+ * entry recording a `put` for that pair, recompute the
+ * sha256 of the stored envelope's `_data`, and compare to
+ * the entry's `payloadHash`. Any mismatch means an
+ * out-of-band write modified the data without updating the
+ * ledger.
+ *
+ * Returns a discriminated union so callers can handle the two
+ * failure modes differently:
+ * - `{ ok: true, head, length }` — chain verified and all
+ * data matches; safe to use.
+ * - `{ ok: false, kind: 'chain', divergedAt, message }` — the
+ * chain itself is broken at the given index.
+ * - `{ ok: false, kind: 'data', collection, id, message }` —
+ * a specific data envelope doesn't match its ledger entry.
+ *
+ * This method is exposed so users can call it any time, not just
+ * during `load()`. A scheduled background check is the simplest
+ * way to detect tampering of an in-place compartment.
+ */
+ async verifyBackupIntegrity() {
+ const chainResult = await this.ledger().verify();
+ if (!chainResult.ok) {
+ return {
+ ok: false,
+ kind: "chain",
+ divergedAt: chainResult.divergedAt,
+ message: `Ledger chain diverged at index ${chainResult.divergedAt}: expected prevHash "${chainResult.expected}" but found "${chainResult.actual}".`
+ };
+ }
+ const ledger = this.ledger();
+ const allEntries = await ledger.loadAllEntries();
+ const seen = /* @__PURE__ */ new Set();
+ const latest = /* @__PURE__ */ new Map();
+ for (let i = allEntries.length - 1; i >= 0; i--) {
+ const entry = allEntries[i];
+ if (!entry) continue;
+ const key = `${entry.collection}/${entry.id}`;
+ if (seen.has(key)) continue;
+ seen.add(key);
+ if (entry.op === "delete") continue;
+ latest.set(key, {
+ collection: entry.collection,
+ id: entry.id,
+ expectedHash: entry.payloadHash
+ });
+ }
+ for (const { collection, id, expectedHash } of latest.values()) {
+ const envelope = await this.adapter.get(this.name, collection, id);
+ if (!envelope) {
+ return {
+ ok: false,
+ kind: "data",
+ collection,
+ id,
+ message: `Ledger expects data record "${collection}/${id}" to exist, but the adapter has no envelope for it.`
+ };
+ }
+ const actualHash = await sha256Hex(envelope._data);
+ if (actualHash !== expectedHash) {
+ return {
+ ok: false,
+ kind: "data",
+ collection,
+ id,
+ message: `Data envelope "${collection}/${id}" has been tampered with: expected payloadHash "${expectedHash}", got "${actualHash}".`
+ };
+ }
+ }
+ return {
+ ok: true,
+ head: chainResult.head,
+ length: chainResult.length
+ };
  }
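> The docstring suggests a scheduled background check; a minimal sketch (the interval and logging policy are illustrative choices, not library API):

```js
setInterval(async () => {
  const result = await comp.verifyBackupIntegrity();
  if (result.ok) return; // chain intact, all envelopes match
  if (result.kind === "chain") {
    console.error(`ledger chain broken at index ${result.divergedAt}: ${result.message}`);
  } else {
    console.error(`tampered envelope ${result.collection}/${result.id}: ${result.message}`);
  }
}, 60 * 60 * 1000); // hourly
```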
  /** Export compartment as decrypted JSON (owner only). */
  async export() {
@@ -2272,7 +3639,23 @@ var Noydb = class {
  encrypted: this.options.encrypt !== false,
  emitter: this.emitter,
  onDirty: syncEngine ? (coll, id, action, version) => syncEngine.trackChange(coll, id, action, version) : void 0,
- historyConfig: this.options.history
+ historyConfig: this.options.history,
+ // Refresh callback used by Compartment.load() to re-derive
+ // the in-memory keyring from a freshly-loaded keyring file.
+ // Encrypted compartments need this so post-load decrypts work
+ // against the loaded session's wrapped DEKs; plaintext
+ // compartments leave it undefined and load() skips the refresh.
+ reloadKeyring: this.options.encrypt !== false && this.options.secret ? async () => {
+ this.keyringCache.delete(name);
+ const refreshed = await loadKeyring(
+ this.options.adapter,
+ name,
+ this.options.user,
+ this.options.secret
+ );
+ this.keyringCache.set(name, refreshed);
+ return refreshed;
+ } : void 0
  });
  this.compartmentCache.set(name, comp);
  return comp;
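> The `reloadKeyring` wiring above only activates when encryption is on and a secret was supplied. A sketch of the factory options it implies; the option names mirror the `this.options.*` reads in the code, while the adapter value and the synchronous return shape are assumptions:

```js
import { createNoydb } from "@noy-db/core";

const db = createNoydb({
  adapter: someAdapter,                   // hypothetical adapter instance
  user: "alice",
  secret: "correct horse battery staple", // enables reloadKeyring
  encrypt: true                           // default; false skips it
});
// With encrypt: false or no secret, reloadKeyring stays undefined and
// Compartment.load() skips the post-restore keyring refresh.
```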
@@ -2561,12 +3944,17 @@ function estimateEntropy(passphrase) {
  }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ BackupCorruptedError,
+ BackupLedgerError,
  Collection,
  CollectionIndexes,
  Compartment,
  ConflictError,
  DecryptionError,
  InvalidKeyError,
+ LEDGER_COLLECTION,
+ LEDGER_DELTAS_COLLECTION,
+ LedgerStore,
  Lru,
  NOYDB_BACKUP_VERSION,
  NOYDB_FORMAT_VERSION,
@@ -2580,26 +3968,41 @@ function estimateEntropy(passphrase) {
  PermissionDeniedError,
  Query,
  ReadOnlyError,
+ RefIntegrityError,
+ RefRegistry,
+ RefScopeError,
+ SchemaValidationError,
  SyncEngine,
  TamperedError,
  ValidationError,
+ applyPatch,
+ canonicalJson,
+ computePatch,
  createNoydb,
  defineAdapter,
  diff,
  enrollBiometric,
+ envelopePayloadHash,
  estimateEntropy,
  estimateRecordBytes,
  evaluateClause,
  evaluateFieldClause,
  executePlan,
  formatDiff,
+ hashEntry,
  isBiometricAvailable,
  loadBiometric,
+ paddedIndex,
  parseBytes,
+ parseIndex,
  readPath,
+ ref,
  removeBiometric,
  saveBiometric,
+ sha256Hex,
  unlockBiometric,
- validatePassphrase
+ validatePassphrase,
+ validateSchemaInput,
+ validateSchemaOutput
  });
  //# sourceMappingURL=index.cjs.map