@noy-db/core 0.2.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -20,11 +20,18 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
20
20
  // src/index.ts
21
21
  var index_exports = {};
22
22
  __export(index_exports, {
23
+ BackupCorruptedError: () => BackupCorruptedError,
24
+ BackupLedgerError: () => BackupLedgerError,
23
25
  Collection: () => Collection,
26
+ CollectionIndexes: () => CollectionIndexes,
24
27
  Compartment: () => Compartment,
25
28
  ConflictError: () => ConflictError,
26
29
  DecryptionError: () => DecryptionError,
27
30
  InvalidKeyError: () => InvalidKeyError,
31
+ LEDGER_COLLECTION: () => LEDGER_COLLECTION,
32
+ LEDGER_DELTAS_COLLECTION: () => LEDGER_DELTAS_COLLECTION,
33
+ LedgerStore: () => LedgerStore,
34
+ Lru: () => Lru,
28
35
  NOYDB_BACKUP_VERSION: () => NOYDB_BACKUP_VERSION,
29
36
  NOYDB_FORMAT_VERSION: () => NOYDB_FORMAT_VERSION,
30
37
  NOYDB_KEYRING_VERSION: () => NOYDB_KEYRING_VERSION,
@@ -35,22 +42,44 @@ __export(index_exports, {
35
42
  Noydb: () => Noydb,
36
43
  NoydbError: () => NoydbError,
37
44
  PermissionDeniedError: () => PermissionDeniedError,
45
+ Query: () => Query,
38
46
  ReadOnlyError: () => ReadOnlyError,
47
+ RefIntegrityError: () => RefIntegrityError,
48
+ RefRegistry: () => RefRegistry,
49
+ RefScopeError: () => RefScopeError,
50
+ SchemaValidationError: () => SchemaValidationError,
39
51
  SyncEngine: () => SyncEngine,
40
52
  TamperedError: () => TamperedError,
41
53
  ValidationError: () => ValidationError,
54
+ applyPatch: () => applyPatch,
55
+ canonicalJson: () => canonicalJson,
56
+ computePatch: () => computePatch,
42
57
  createNoydb: () => createNoydb,
43
58
  defineAdapter: () => defineAdapter,
44
- diff: () => diff,
59
+ diff: () => diff2,
45
60
  enrollBiometric: () => enrollBiometric,
61
+ envelopePayloadHash: () => envelopePayloadHash,
46
62
  estimateEntropy: () => estimateEntropy,
63
+ estimateRecordBytes: () => estimateRecordBytes,
64
+ evaluateClause: () => evaluateClause,
65
+ evaluateFieldClause: () => evaluateFieldClause,
66
+ executePlan: () => executePlan,
47
67
  formatDiff: () => formatDiff,
68
+ hashEntry: () => hashEntry,
48
69
  isBiometricAvailable: () => isBiometricAvailable,
49
70
  loadBiometric: () => loadBiometric,
71
+ paddedIndex: () => paddedIndex,
72
+ parseBytes: () => parseBytes,
73
+ parseIndex: () => parseIndex,
74
+ readPath: () => readPath,
75
+ ref: () => ref,
50
76
  removeBiometric: () => removeBiometric,
51
77
  saveBiometric: () => saveBiometric,
78
+ sha256Hex: () => sha256Hex,
52
79
  unlockBiometric: () => unlockBiometric,
53
- validatePassphrase: () => validatePassphrase
80
+ validatePassphrase: () => validatePassphrase,
81
+ validateSchemaInput: () => validateSchemaInput,
82
+ validateSchemaOutput: () => validateSchemaOutput
54
83
  });
55
84
  module.exports = __toCommonJS(index_exports);
56
85
 
@@ -134,6 +163,74 @@ var ValidationError = class extends NoydbError {
134
163
  this.name = "ValidationError";
135
164
  }
136
165
  };
166
var SchemaValidationError = class extends NoydbError {
  /** Issue list produced by the schema's "~standard" validator. */
  issues;
  /** Which side failed: "input" (write path) or "output" (read path). */
  direction;
  constructor(message, issues, direction) {
    super("SCHEMA_VALIDATION_FAILED", message);
    this.name = "SchemaValidationError";
    this.issues = issues;
    this.direction = direction;
  }
};
var BackupLedgerError = class extends NoydbError {
  /** First-broken-entry index, if known. */
  divergedAt;
  constructor(message, divergedAt) {
    super("BACKUP_LEDGER", message);
    this.name = "BackupLedgerError";
    // An unknown divergence point is left as undefined.
    if (divergedAt !== undefined) this.divergedAt = divergedAt;
  }
};
var BackupCorruptedError = class extends NoydbError {
  /** The (collection, id) pair whose envelope failed the hash check. */
  collection;
  id;
  constructor(collection, id, message) {
    super("BACKUP_CORRUPTED", message);
    this.name = "BackupCorruptedError";
    this.collection = collection;
    this.id = id;
  }
};
196
+
197
// src/schema.ts

/**
 * Validate a value against a "~standard" schema before it is written.
 * Returns the validated (possibly transformed) value; throws
 * SchemaValidationError with direction "input" when issues are reported.
 */
async function validateSchemaInput(schema, value, context) {
  const result = await schema["~standard"].validate(value);
  const issues = result.issues;
  if (issues !== undefined && issues.length > 0) {
    throw new SchemaValidationError(
      `Schema validation failed on ${context}: ${summarizeIssues(issues)}`,
      issues,
      "input"
    );
  }
  return result.value;
}

/**
 * Validate data read back from storage against the current schema.
 * Throws SchemaValidationError with direction "output" on mismatch
 * (typically schema drift between writes and reads).
 */
async function validateSchemaOutput(schema, value, context) {
  const result = await schema["~standard"].validate(value);
  const issues = result.issues;
  if (issues !== undefined && issues.length > 0) {
    throw new SchemaValidationError(
      `Stored data for ${context} does not match the current schema \u2014 schema drift? ${summarizeIssues(issues)}`,
      issues,
      "output"
    );
  }
  return result.value;
}

/** Render at most three issues as "path: message" pairs, with a "+N more" tail. */
function summarizeIssues(issues) {
  const rendered = issues
    .slice(0, 3)
    .map((issue) => `${formatPath(issue.path)}: ${issue.message}`);
  const hidden = issues.length - 3;
  const suffix = hidden > 0 ? ` (+${hidden} more)` : "";
  return rendered.join("; ") + suffix;
}

/** Dotted-path rendering of an issue path; empty/missing paths map to "root". */
function formatPath(path) {
  if (!path || path.length === 0) return "root";
  const parts = path.map((segment) => {
    if (typeof segment === "object" && segment !== null) {
      return String(segment.key);
    }
    return String(segment);
  });
  return parts.join(".");
}
137
234
 
138
235
  // src/crypto.ts
139
236
  var PBKDF2_ITERATIONS = 6e5;
@@ -244,6 +341,749 @@ function base64ToBuffer(base64) {
244
341
  return bytes;
245
342
  }
246
343
 
344
+ // src/ledger/entry.ts
345
+ function canonicalJson(value) {
346
+ if (value === null) return "null";
347
+ if (typeof value === "boolean") return value ? "true" : "false";
348
+ if (typeof value === "number") {
349
+ if (!Number.isFinite(value)) {
350
+ throw new Error(
351
+ `canonicalJson: refusing to encode non-finite number ${String(value)}`
352
+ );
353
+ }
354
+ return JSON.stringify(value);
355
+ }
356
+ if (typeof value === "string") return JSON.stringify(value);
357
+ if (typeof value === "bigint") {
358
+ throw new Error("canonicalJson: BigInt is not JSON-serializable");
359
+ }
360
+ if (typeof value === "undefined" || typeof value === "function") {
361
+ throw new Error(
362
+ `canonicalJson: refusing to encode ${typeof value} \u2014 include all fields explicitly`
363
+ );
364
+ }
365
+ if (Array.isArray(value)) {
366
+ return "[" + value.map((v) => canonicalJson(v)).join(",") + "]";
367
+ }
368
+ if (typeof value === "object") {
369
+ const obj = value;
370
+ const keys = Object.keys(obj).sort();
371
+ const parts = [];
372
+ for (const key of keys) {
373
+ parts.push(JSON.stringify(key) + ":" + canonicalJson(obj[key]));
374
+ }
375
+ return "{" + parts.join(",") + "}";
376
+ }
377
+ throw new Error(`canonicalJson: unexpected value type: ${typeof value}`);
378
+ }
379
+ async function sha256Hex(input) {
380
+ const bytes = new TextEncoder().encode(input);
381
+ const digest = await globalThis.crypto.subtle.digest("SHA-256", bytes);
382
+ return bytesToHex(new Uint8Array(digest));
383
+ }
384
+ async function hashEntry(entry) {
385
+ return sha256Hex(canonicalJson(entry));
386
+ }
387
+ function bytesToHex(bytes) {
388
+ const hex = new Array(bytes.length);
389
+ for (let i = 0; i < bytes.length; i++) {
390
+ hex[i] = (bytes[i] ?? 0).toString(16).padStart(2, "0");
391
+ }
392
+ return hex.join("");
393
+ }
394
+ function paddedIndex(index) {
395
+ return String(index).padStart(10, "0");
396
+ }
397
+ function parseIndex(key) {
398
+ return Number.parseInt(key, 10);
399
+ }
400
+
401
// src/ledger/patch.ts

/**
 * Compute a JSON-Patch-style op list that transforms `prev` into `next`.
 * Arrays are compared wholesale (one `replace` if they differ at all);
 * objects are diffed key by key.
 */
function computePatch(prev, next) {
  const ops = [];
  diff(prev, next, "", ops);
  return ops;
}

/** Recursive worker for computePatch; appends ops onto `out`. */
function diff(prev, next, path, out) {
  if (prev === next) return;
  if (prev === null || next === null) {
    out.push({ op: "replace", path, value: next });
    return;
  }
  const prevIsArray = Array.isArray(prev);
  const nextIsArray = Array.isArray(next);
  const prevIsObject = typeof prev === "object" && !prevIsArray;
  const nextIsObject = typeof next === "object" && !nextIsArray;
  // Shape change (array <-> object <-> primitive) is a wholesale replace.
  if (prevIsArray !== nextIsArray || prevIsObject !== nextIsObject) {
    out.push({ op: "replace", path, value: next });
    return;
  }
  if (prevIsArray && nextIsArray) {
    // Arrays are not diffed element-wise: any change replaces the array.
    if (!arrayDeepEqual(prev, next)) {
      out.push({ op: "replace", path, value: next });
    }
    return;
  }
  if (prevIsObject && nextIsObject) {
    // Removed or changed keys first (in prev's key order)...
    for (const key of Object.keys(prev)) {
      const childPath = path + "/" + escapePathSegment(key);
      if (key in next) {
        diff(prev[key], next[key], childPath, out);
      } else {
        out.push({ op: "remove", path: childPath });
      }
    }
    // ...then keys that only exist in next.
    for (const key of Object.keys(next)) {
      if (!(key in prev)) {
        out.push({
          op: "add",
          path: path + "/" + escapePathSegment(key),
          value: next[key]
        });
      }
    }
    return;
  }
  // Two differing primitives of the same type.
  out.push({ op: "replace", path, value: next });
}

/** Element-wise deep equality for arrays of equal length. */
function arrayDeepEqual(a, b) {
  if (a.length !== b.length) return false;
  for (let idx = 0; idx < a.length; idx++) {
    if (!deepEqual(a[idx], b[idx])) return false;
  }
  return true;
}

/** Structural deep equality over JSON-shaped values. */
function deepEqual(a, b) {
  if (a === b) return true;
  if (a === null || b === null) return false;
  if (typeof a !== typeof b) return false;
  if (typeof a !== "object") return false;
  const aIsArray = Array.isArray(a);
  if (aIsArray !== Array.isArray(b)) return false;
  if (aIsArray) return arrayDeepEqual(a, b);
  const aKeys = Object.keys(a);
  if (aKeys.length !== Object.keys(b).length) return false;
  for (const key of aKeys) {
    if (!(key in b)) return false;
    if (!deepEqual(a[key], b[key])) return false;
  }
  return true;
}

/**
 * Apply a patch (list of ops) to `base`, returning a new document.
 * `base` is cloned up front, so the caller's value is never mutated.
 */
function applyPatch(base, patch) {
  let doc = clone(base);
  for (const op of patch) {
    doc = applyOp(doc, op);
  }
  return doc;
}

/** Apply a single op; an empty path targets the document root. */
function applyOp(doc, op) {
  if (op.path === "") {
    if (op.op === "remove") return null;
    return clone(op.value);
  }
  return walkAndApply(doc, parsePath(op.path), op);
}

/** Descend to the op's parent container, rebuilding each level immutably. */
function walkAndApply(doc, segments, op) {
  if (segments.length === 0) {
    throw new Error("walkAndApply: empty segments (internal error)");
  }
  const [head, ...rest] = segments;
  if (head === void 0) throw new Error("walkAndApply: undefined segment");
  if (rest.length === 0) {
    return applyAtTerminal(doc, head, op);
  }
  if (Array.isArray(doc)) {
    const idx = parseArrayIndex(head, doc.length);
    const updated = doc.slice();
    updated[idx] = walkAndApply(doc[idx], rest, op);
    return updated;
  }
  if (doc !== null && typeof doc === "object") {
    if (!(head in doc)) {
      throw new Error(`applyPatch: path segment "${head}" not found in object`);
    }
    return { ...doc, [head]: walkAndApply(doc[head], rest, op) };
  }
  throw new Error(
    `applyPatch: cannot step into ${typeof doc} at segment "${head}"`
  );
}

/** Perform the op at its final path segment on an array or object. */
function applyAtTerminal(doc, segment, op) {
  if (Array.isArray(doc)) {
    // "-" is JSON Pointer shorthand for "append past the last element".
    const idx = segment === "-" ? doc.length : parseArrayIndex(segment, doc.length + 1);
    const updated = doc.slice();
    switch (op.op) {
      case "remove":
        updated.splice(idx, 1);
        return updated;
      case "add":
        updated.splice(idx, 0, clone(op.value));
        return updated;
      case "replace":
        if (idx >= doc.length) {
          throw new Error(
            `applyPatch: replace at out-of-bounds array index ${idx}`
          );
        }
        updated[idx] = clone(op.value);
        return updated;
    }
  }
  if (doc !== null && typeof doc === "object") {
    switch (op.op) {
      case "remove": {
        if (!(segment in doc)) {
          throw new Error(
            `applyPatch: remove on missing key "${segment}"`
          );
        }
        const updated = { ...doc };
        delete updated[segment];
        return updated;
      }
      case "add":
        return { ...doc, [segment]: clone(op.value) };
      case "replace":
        if (!(segment in doc)) {
          throw new Error(
            `applyPatch: replace on missing key "${segment}"`
          );
        }
        return { ...doc, [segment]: clone(op.value) };
    }
  }
  throw new Error(
    `applyPatch: cannot apply ${op.op} at terminal segment "${segment}"`
  );
}

/** RFC 6901 escaping: "~" -> "~0", "/" -> "~1" (in that order). */
function escapePathSegment(segment) {
  return segment.replace(/~/g, "~0").replace(/\//g, "~1");
}

/** RFC 6901 unescaping: "~1" -> "/" first, then "~0" -> "~". */
function unescapePathSegment(segment) {
  return segment.replace(/~1/g, "/").replace(/~0/g, "~");
}

/** Split an RFC 6901 pointer into unescaped segments; must start with "/". */
function parsePath(path) {
  if (!path.startsWith("/")) {
    throw new Error(`applyPatch: path must start with '/', got "${path}"`);
  }
  return path.slice(1).split("/").map(unescapePathSegment);
}

/** Parse a non-negative integer array index, bounded by `max` inclusive. */
function parseArrayIndex(segment, max) {
  if (!/^\d+$/.test(segment)) {
    throw new Error(
      `applyPatch: array index must be a non-negative integer, got "${segment}"`
    );
  }
  const idx = Number.parseInt(segment, 10);
  if (idx < 0 || idx > max) {
    throw new Error(
      `applyPatch: array index ${idx} out of range [0, ${max}]`
    );
  }
  return idx;
}

/** Structural clone of a JSON-shaped value; primitives pass through. */
function clone(value) {
  if (value === null || value === void 0) return value;
  if (typeof value !== "object") return value;
  return JSON.parse(JSON.stringify(value));
}
605
+
606
// src/ledger/store.ts

// Reserved (underscore-prefixed) collection names used for the hash
// chain itself and for the per-entry delta payloads.
var LEDGER_COLLECTION = "_ledger";
var LEDGER_DELTAS_COLLECTION = "_ledger_deltas";

/**
 * Append-only, hash-chained ledger persisted through a storage adapter.
 * Each entry carries the SHA-256 of its predecessor (`prevHash`), so
 * `verify()` can detect mutation, reordering, insertion, or deletion of
 * entries. Entries (and optional per-entry deltas) are stored as
 * envelopes, encrypted when `encrypted` is true.
 */
var LedgerStore = class {
  adapter;      // storage adapter: get/put/list over (compartment, collection, key)
  compartment;  // compartment name all ledger rows live under
  encrypted;    // when false, envelopes carry plaintext JSON in _data
  getDEK;       // async (collectionName) => data-encryption key
  actor;        // default actor recorded on envelopes and entries
  /**
   * In-memory cache of the chain head — the most recently appended
   * entry along with its precomputed hash. Without this, every
   * `append()` would re-load every prior entry to recompute the
   * prevHash, making N puts O(N²) — a 1K-record stress test goes from
   * < 100ms to a multi-second timeout.
   *
   * The cache is populated on first read (`append`, `head`, `verify`)
   * and updated in-place on every successful `append`. Single-writer
   * usage (the v0.4 assumption) keeps it consistent. A second
   * LedgerStore instance writing to the same compartment would not
   * see the first instance's appends in its cached state — that's the
   * concurrency caveat documented at the class level.
   *
   * Sentinel `undefined` means "not yet loaded"; an explicit `null`
   * value means "loaded and confirmed empty" — distinguishing these
   * matters because an empty ledger is a valid state (genesis prevHash
   * is the empty string), and we don't want to re-scan the adapter
   * just because the chain is freshly initialized.
   */
  headCache = void 0;
  /** Copy the adapter/compartment/crypto wiring from an options bag. */
  constructor(opts) {
    this.adapter = opts.adapter;
    this.compartment = opts.compartment;
    this.encrypted = opts.encrypted;
    this.getDEK = opts.getDEK;
    this.actor = opts.actor;
  }
  /**
   * Lazily load (or return cached) the current chain head. The cache
   * sentinel is `undefined` until first access; after the first call,
   * the cache holds either a `{ entry, hash }` for non-empty ledgers
   * or `null` for empty ones.
   */
  async getCachedHead() {
    if (this.headCache !== void 0) return this.headCache;
    const entries = await this.loadAllEntries();
    const last = entries[entries.length - 1];
    if (!last) {
      this.headCache = null;
      return null;
    }
    this.headCache = { entry: last, hash: await hashEntry(last) };
    return this.headCache;
  }
  /**
   * Append a new entry to the ledger. Returns the full entry that was
   * written (with its assigned index and computed prevHash) so the
   * caller can use the hash for downstream purposes (e.g., embedding
   * in a verifiable backup).
   *
   * This is the **only** way to add entries. Direct adapter writes to
   * `_ledger/` would bypass the chain math and would be caught by the
   * next `verify()` call as a divergence.
   */
  async append(input) {
    const cached = await this.getCachedHead();
    const lastEntry = cached?.entry;
    // Genesis entries chain from the empty string.
    const prevHash = cached?.hash ?? "";
    const nextIndex = lastEntry ? lastEntry.index + 1 : 0;
    let deltaHash;
    if (input.delta !== void 0) {
      // The delta row shares the entry's padded index as its key, and
      // the entry records a hash of the delta envelope's payload so
      // delta tampering is detectable.
      const deltaEnvelope = await this.encryptDelta(input.delta);
      await this.adapter.put(
        this.compartment,
        LEDGER_DELTAS_COLLECTION,
        paddedIndex(nextIndex),
        deltaEnvelope
      );
      deltaHash = await sha256Hex(deltaEnvelope._data);
    }
    const entryBase = {
      index: nextIndex,
      prevHash,
      op: input.op,
      collection: input.collection,
      id: input.id,
      version: input.version,
      ts: (/* @__PURE__ */ new Date()).toISOString(),
      // Empty string means "use the store's default actor". NOTE(review):
      // a nullish input.actor is passed through as-is — confirm callers
      // always supply a string here.
      actor: input.actor === "" ? this.actor : input.actor,
      payloadHash: input.payloadHash
    };
    // Only include deltaHash when a delta was actually written, so the
    // canonical JSON (and therefore the entry hash) has no optional holes.
    const entry = deltaHash !== void 0 ? { ...entryBase, deltaHash } : entryBase;
    const envelope = await this.encryptEntry(entry);
    await this.adapter.put(
      this.compartment,
      LEDGER_COLLECTION,
      paddedIndex(entry.index),
      envelope
    );
    this.headCache = { entry, hash: await hashEntry(entry) };
    return entry;
  }
  /**
   * Load a delta payload by its entry index. Returns `null` if the
   * entry at that index doesn't reference a delta (genesis puts and
   * deletes leave the slot empty) or if the delta row is missing
   * (possible after a `pruneHistory` fold).
   *
   * The caller is responsible for deciding what to do with a missing
   * delta — `ledger.reconstruct()` uses it as a "stop walking
   * backward" signal and falls back to the on-disk current value.
   */
  async loadDelta(index) {
    const envelope = await this.adapter.get(
      this.compartment,
      LEDGER_DELTAS_COLLECTION,
      paddedIndex(index)
    );
    if (!envelope) return null;
    if (!this.encrypted) {
      return JSON.parse(envelope._data);
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const json = await decrypt(envelope._iv, envelope._data, dek);
    return JSON.parse(json);
  }
  /** Encrypt a JSON Patch into an envelope for storage. Mirrors encryptEntry. */
  async encryptDelta(patch) {
    const json = JSON.stringify(patch);
    if (!this.encrypted) {
      // Plaintext mode: empty IV marks the payload as unencrypted.
      return {
        _noydb: NOYDB_FORMAT_VERSION,
        _v: 1,
        _ts: (/* @__PURE__ */ new Date()).toISOString(),
        _iv: "",
        _data: json,
        _by: this.actor
      };
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const { iv, data } = await encrypt(json, dek);
    return {
      _noydb: NOYDB_FORMAT_VERSION,
      _v: 1,
      _ts: (/* @__PURE__ */ new Date()).toISOString(),
      _iv: iv,
      _data: data,
      _by: this.actor
    };
  }
  /**
   * Read all entries in ascending-index order. Used internally by
   * `append()`, `head()`, `verify()`, and `entries()`. Decryption is
   * serial because the entries are tiny and the overhead of a Promise
   * pool would dominate at realistic chain lengths (< 100K entries).
   */
  async loadAllEntries() {
    const keys = await this.adapter.list(this.compartment, LEDGER_COLLECTION);
    // Keys are zero-padded (paddedIndex), so lexicographic sort is
    // numeric sort.
    keys.sort();
    const entries = [];
    for (const key of keys) {
      const envelope = await this.adapter.get(
        this.compartment,
        LEDGER_COLLECTION,
        key
      );
      if (!envelope) continue;
      entries.push(await this.decryptEntry(envelope));
    }
    return entries;
  }
  /**
   * Return the current head of the ledger: the last entry, its hash,
   * and the total chain length. `null` on an empty ledger so callers
   * can distinguish "no history yet" from "empty history".
   */
  async head() {
    const cached = await this.getCachedHead();
    if (!cached) return null;
    return {
      entry: cached.entry,
      hash: cached.hash,
      length: cached.entry.index + 1
    };
  }
  /**
   * Return entries in the requested half-open range `[from, to)`.
   * Defaults: `from = 0`, `to = length`. The indices are clipped to
   * the valid range; no error is thrown for out-of-range queries.
   */
  async entries(opts = {}) {
    const all = await this.loadAllEntries();
    const from = Math.max(0, opts.from ?? 0);
    const to = Math.min(all.length, opts.to ?? all.length);
    return all.slice(from, to);
  }
  /**
   * Reconstruct a record's state at a given historical version by
   * walking the ledger's delta chain backward from the current state.
   *
   * ## Algorithm
   *
   * Ledger deltas are stored in **reverse** form — each entry's
   * patch describes how to undo that put, transforming the new
   * record back into the previous one. `reconstruct` exploits this
   * by:
   *
   * 1. Finding every ledger entry for `(collection, id)` in the
   *    chain, sorted by index ascending.
   * 2. Starting from `current` (the present value of the record,
   *    as held by the caller — typically fetched via
   *    `Collection.get()`).
   * 3. Walking entries in **descending** index order and applying
   *    each entry's reverse patch, stopping when we reach the
   *    entry whose version equals `atVersion`.
   *
   * The result is the record as it existed immediately AFTER the
   * put at `atVersion`. To get the state at the genesis put
   * (version 1), the walk runs all the way back through every put
   * after the first.
   *
   * ## Caveats
   *
   * - **Delete entries** break the walk: once we see a delete, the
   *   record didn't exist before that point, so there's nothing to
   *   reconstruct. We return `null` in that case.
   * - **Missing deltas** (e.g., after `pruneHistory` folds old
   *   entries into a base snapshot) also stop the walk. v0.4 does
   *   not ship pruneHistory, so today this only happens if an entry
   *   was deleted out-of-band.
   * - The caller MUST pass the correct current value. Passing a
   *   mutated object would corrupt the reconstruction — the patch
   *   chain is only valid against the exact state that was in
   *   effect when the most recent put happened.
   *
   * For v0.4, `reconstruct` is the only way to read a historical
   * version via deltas. The legacy `_history` collection still
   * holds full snapshots and `Collection.getVersion()` still reads
   * from there — the two paths coexist until pruneHistory lands in
   * a follow-up and delta becomes the default.
   */
  async reconstruct(collection, id, current, atVersion) {
    const all = await this.loadAllEntries();
    const matching = all.filter(
      (e) => e.collection === collection && e.id === id
    );
    if (matching.length === 0) {
      return null;
    }
    let state = current;
    for (let i = matching.length - 1; i >= 0; i--) {
      const entry = matching[i];
      if (!entry) continue;
      // Reached the target version before needing this entry's patch:
      // `state` is already the post-put value at `atVersion`.
      if (entry.version === atVersion && entry.op !== "delete") {
        return state;
      }
      if (entry.op === "delete") {
        return null;
      }
      if (entry.deltaHash === void 0) {
        // No delta (e.g. genesis put): nothing further back to undo.
        if (entry.version === atVersion) return state;
        return null;
      }
      const patch = await this.loadDelta(entry.index);
      if (!patch) {
        return null;
      }
      if (state === null) {
        return null;
      }
      state = applyPatch(state, patch);
    }
    return null;
  }
  /**
   * Walk the chain from genesis forward and verify every link.
   *
   * Returns `{ ok: true, head, length }` if every entry's `prevHash`
   * matches the recomputed hash of its predecessor (and the genesis
   * entry's `prevHash` is the empty string).
   *
   * Returns `{ ok: false, divergedAt, expected, actual }` on the first
   * mismatch. `divergedAt` is the 0-based index of the BROKEN entry
   * — entries before that index still verify cleanly; entries at and
   * after `divergedAt` are untrustworthy.
   *
   * This method detects:
   * - Mutated entry content (fields changed)
   * - Reordered entries (if any adjacent pair swaps, the prevHash
   *   of the second no longer matches)
   * - Inserted entries (the inserted entry's prevHash likely fails,
   *   and the following entry's prevHash definitely fails)
   * - Deleted entries (the entry after the deletion sees a wrong
   *   prevHash)
   *
   * It does NOT detect:
   * - Tampering with the DATA collections that bypassed the ledger
   *   entirely (e.g., an attacker who modifies records without
   *   appending matching ledger entries — this is why we also
   *   plan a `verifyIntegrity()` helper in a follow-up)
   * - Truncation of the chain at the tail (dropping the last N
   *   entries leaves a shorter but still consistent chain). External
   *   anchoring of `head.hash` to a trusted service is the defense
   *   against this.
   */
  async verify() {
    const entries = await this.loadAllEntries();
    let expectedPrevHash = "";
    for (let i = 0; i < entries.length; i++) {
      const entry = entries[i];
      if (!entry) continue;
      if (entry.prevHash !== expectedPrevHash) {
        return {
          ok: false,
          divergedAt: i,
          expected: expectedPrevHash,
          actual: entry.prevHash
        };
      }
      // Indices must also be dense and in order, not just hash-linked.
      if (entry.index !== i) {
        return {
          ok: false,
          divergedAt: i,
          expected: `index=${i}`,
          actual: `index=${entry.index}`
        };
      }
      expectedPrevHash = await hashEntry(entry);
    }
    return {
      ok: true,
      head: expectedPrevHash,
      length: entries.length
    };
  }
  // ─── Encryption plumbing ─────────────────────────────────────────
  /**
   * Serialize + encrypt a ledger entry into an EncryptedEnvelope. The
   * envelope's `_v` field is set to `entry.index + 1` so the usual
   * optimistic-concurrency machinery has a reasonable version number
   * to compare against (the ledger is append-only, so concurrent
   * writes should always bump the index).
   */
  async encryptEntry(entry) {
    // Canonical JSON here so the stored bytes hash to hashEntry(entry).
    const json = canonicalJson(entry);
    if (!this.encrypted) {
      return {
        _noydb: NOYDB_FORMAT_VERSION,
        _v: entry.index + 1,
        _ts: entry.ts,
        _iv: "",
        _data: json,
        _by: entry.actor
      };
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const { iv, data } = await encrypt(json, dek);
    return {
      _noydb: NOYDB_FORMAT_VERSION,
      _v: entry.index + 1,
      _ts: entry.ts,
      _iv: iv,
      _data: data,
      _by: entry.actor
    };
  }
  /** Decrypt an envelope into a LedgerEntry. Throws on bad key / tamper. */
  async decryptEntry(envelope) {
    if (!this.encrypted) {
      return JSON.parse(envelope._data);
    }
    const dek = await this.getDEK(LEDGER_COLLECTION);
    const json = await decrypt(envelope._iv, envelope._data, dek);
    return JSON.parse(json);
  }
};

/**
 * Hash an envelope's payload (`_data`) for tamper checks; an absent
 * envelope hashes to the empty string.
 */
async function envelopePayloadHash(envelope) {
  if (!envelope) return "";
  return sha256Hex(envelope._data);
}
986
+
987
+ // src/refs.ts
988
// src/refs.ts
var RefIntegrityError = class extends NoydbError {
  /** Collection and record that hold the dangling/blocked reference. */
  collection;
  id;
  /** Field carrying the ref, plus the ref's target collection and id. */
  field;
  refTo;
  refId;
  constructor(opts) {
    super("REF_INTEGRITY", opts.message);
    this.name = "RefIntegrityError";
    this.collection = opts.collection;
    this.id = opts.id;
    this.field = opts.field;
    this.refTo = opts.refTo;
    this.refId = opts.refId;
  }
};
var RefScopeError = class extends NoydbError {
  constructor(target) {
    super(
      "REF_SCOPE",
      `Cross-compartment references are not supported in v0.4 \u2014 got target "${target}". Use a simple collection name (e.g. "clients"), not a path. Cross-compartment refs are tracked for a future release.`
    );
    this.name = "RefScopeError";
  }
};
1013
/**
 * Declare a reference to another collection in the same compartment.
 * `mode` defaults to "strict". Throws RefScopeError for path-like
 * targets and a plain Error for empty or reserved (underscore) names.
 */
function ref(target, mode = "strict") {
  // Paths are rejected first: a "/" means the caller tried to point
  // across compartments, which v0.4 does not support.
  if (target.includes("/")) throw new RefScopeError(target);
  const invalidName = !target || target.startsWith("_");
  if (invalidName) {
    throw new Error(
      `ref(): target collection name must be non-empty and cannot start with '_' (reserved for internal collections). Got "${target}".`
    );
  }
  return { target, mode };
}
1024
/**
 * In-memory registry of declared references between collections.
 * `outbound` maps collection -> { field: refDescriptor }; `inbound`
 * inverts it, mapping a target collection to the (collection, field,
 * mode) triples that point at it.
 */
var RefRegistry = class {
  outbound = /* @__PURE__ */ new Map();
  inbound = /* @__PURE__ */ new Map();
  /**
   * Register the refs declared by a single collection. Idempotent in
   * the happy path — calling twice with the same data is a no-op.
   * Calling twice with DIFFERENT data throws, because silent
   * overrides would be confusing ("I changed the ref and it doesn't
   * update" vs "I declared the same collection twice with different
   * refs and the second call won").
   */
  register(collection, refs) {
    const current = this.outbound.get(collection);
    if (current) {
      // Re-registration: verify the declaration is identical.
      const currentKeys = Object.keys(current).sort();
      const incomingKeys = Object.keys(refs).sort();
      if (currentKeys.join(",") !== incomingKeys.join(",")) {
        throw new Error(
          `RefRegistry: conflicting ref declarations for collection "${collection}"`
        );
      }
      for (const fieldName of currentKeys) {
        const prevDesc = current[fieldName];
        const nextDesc = refs[fieldName];
        const mismatch =
          !prevDesc ||
          !nextDesc ||
          prevDesc.target !== nextDesc.target ||
          prevDesc.mode !== nextDesc.mode;
        if (mismatch) {
          throw new Error(
            `RefRegistry: conflicting ref declarations for collection "${collection}" field "${fieldName}"`
          );
        }
      }
      return;
    }
    // First registration: record outbound and maintain the inverse map.
    this.outbound.set(collection, { ...refs });
    for (const [fieldName, desc] of Object.entries(refs)) {
      const pointers = this.inbound.get(desc.target) ?? [];
      pointers.push({ collection, field: fieldName, mode: desc.mode });
      this.inbound.set(desc.target, pointers);
    }
  }
  /** Get the outbound refs declared by a collection (or `{}` if none). */
  getOutbound(collection) {
    return this.outbound.get(collection) ?? {};
  }
  /** Get the inbound refs that target a given collection (or `[]`). */
  getInbound(target) {
    return this.inbound.get(target) ?? [];
  }
  /**
   * Iterate every (collection → refs) pair that has at least one
   * declared reference. Used by `checkIntegrity` to walk the full
   * universe of outbound refs without needing to track collection
   * names elsewhere.
   */
  entries() {
    return [...this.outbound.entries()];
  }
  /** Clear the registry. Test-only escape hatch; never called from production code. */
  clear() {
    this.outbound.clear();
    this.inbound.clear();
  }
};
1086
+
247
1087
  // src/keyring.ts
248
1088
  var GRANTABLE_BY_ADMIN = ["operator", "viewer", "client"];
249
1089
  function canGrant(callerRole, targetRole) {
@@ -328,6 +1168,11 @@ async function grant(adapter, compartment, callerKeyring, options) {
328
1168
  }
329
1169
  }
330
1170
  }
1171
+ for (const [collName, dek] of callerKeyring.deks) {
1172
+ if (collName.startsWith("_") && !(collName in wrappedDeks)) {
1173
+ wrappedDeks[collName] = await wrapKey(dek, newKek);
1174
+ }
1175
+ }
331
1176
  const keyringFile = {
332
1177
  _noydb_keyring: NOYDB_KEYRING_VERSION,
333
1178
  user_id: options.userId,
@@ -581,65 +1426,685 @@ async function clearHistory(adapter, compartment, collection, recordId) {
581
1426
  }
582
1427
 
583
1428
  // src/diff.ts
584
- function diff(oldObj, newObj, basePath = "") {
1429
+ function diff2(oldObj, newObj, basePath = "") {
585
1430
  const changes = [];
586
1431
  if (oldObj === newObj) return changes;
587
1432
  if (oldObj == null && newObj != null) {
588
1433
  return [{ path: basePath || "(root)", type: "added", to: newObj }];
589
1434
  }
590
- if (oldObj != null && newObj == null) {
591
- return [{ path: basePath || "(root)", type: "removed", from: oldObj }];
1435
+ if (oldObj != null && newObj == null) {
1436
+ return [{ path: basePath || "(root)", type: "removed", from: oldObj }];
1437
+ }
1438
+ if (typeof oldObj !== typeof newObj) {
1439
+ return [{ path: basePath || "(root)", type: "changed", from: oldObj, to: newObj }];
1440
+ }
1441
+ if (typeof oldObj !== "object") {
1442
+ return [{ path: basePath || "(root)", type: "changed", from: oldObj, to: newObj }];
1443
+ }
1444
+ if (Array.isArray(oldObj) && Array.isArray(newObj)) {
1445
+ const maxLen = Math.max(oldObj.length, newObj.length);
1446
+ for (let i = 0; i < maxLen; i++) {
1447
+ const p = basePath ? `${basePath}[${i}]` : `[${i}]`;
1448
+ if (i >= oldObj.length) {
1449
+ changes.push({ path: p, type: "added", to: newObj[i] });
1450
+ } else if (i >= newObj.length) {
1451
+ changes.push({ path: p, type: "removed", from: oldObj[i] });
1452
+ } else {
1453
+ changes.push(...diff2(oldObj[i], newObj[i], p));
1454
+ }
1455
+ }
1456
+ return changes;
1457
+ }
1458
+ const oldRecord = oldObj;
1459
+ const newRecord = newObj;
1460
+ const allKeys = /* @__PURE__ */ new Set([...Object.keys(oldRecord), ...Object.keys(newRecord)]);
1461
+ for (const key of allKeys) {
1462
+ const p = basePath ? `${basePath}.${key}` : key;
1463
+ if (!(key in oldRecord)) {
1464
+ changes.push({ path: p, type: "added", to: newRecord[key] });
1465
+ } else if (!(key in newRecord)) {
1466
+ changes.push({ path: p, type: "removed", from: oldRecord[key] });
1467
+ } else {
1468
+ changes.push(...diff2(oldRecord[key], newRecord[key], p));
1469
+ }
1470
+ }
1471
+ return changes;
1472
+ }
1473
+ function formatDiff(changes) {
1474
+ if (changes.length === 0) return "(no changes)";
1475
+ return changes.map((c) => {
1476
+ switch (c.type) {
1477
+ case "added":
1478
+ return `+ ${c.path}: ${JSON.stringify(c.to)}`;
1479
+ case "removed":
1480
+ return `- ${c.path}: ${JSON.stringify(c.from)}`;
1481
+ case "changed":
1482
+ return `~ ${c.path}: ${JSON.stringify(c.from)} \u2192 ${JSON.stringify(c.to)}`;
1483
+ }
1484
+ }).join("\n");
1485
+ }
1486
+
1487
+ // src/query/predicate.ts
1488
+ function readPath(record, path) {
1489
+ if (record === null || record === void 0) return void 0;
1490
+ if (!path.includes(".")) {
1491
+ return record[path];
1492
+ }
1493
+ const segments = path.split(".");
1494
+ let cursor = record;
1495
+ for (const segment of segments) {
1496
+ if (cursor === null || cursor === void 0) return void 0;
1497
+ cursor = cursor[segment];
1498
+ }
1499
+ return cursor;
1500
+ }
1501
+ function evaluateFieldClause(record, clause) {
1502
+ const actual = readPath(record, clause.field);
1503
+ const { op, value } = clause;
1504
+ switch (op) {
1505
+ case "==":
1506
+ return actual === value;
1507
+ case "!=":
1508
+ return actual !== value;
1509
+ case "<":
1510
+ return isComparable(actual, value) && actual < value;
1511
+ case "<=":
1512
+ return isComparable(actual, value) && actual <= value;
1513
+ case ">":
1514
+ return isComparable(actual, value) && actual > value;
1515
+ case ">=":
1516
+ return isComparable(actual, value) && actual >= value;
1517
+ case "in":
1518
+ return Array.isArray(value) && value.includes(actual);
1519
+ case "contains":
1520
+ if (typeof actual === "string") return typeof value === "string" && actual.includes(value);
1521
+ if (Array.isArray(actual)) return actual.includes(value);
1522
+ return false;
1523
+ case "startsWith":
1524
+ return typeof actual === "string" && typeof value === "string" && actual.startsWith(value);
1525
+ case "between": {
1526
+ if (!Array.isArray(value) || value.length !== 2) return false;
1527
+ const [lo, hi] = value;
1528
+ if (!isComparable(actual, lo) || !isComparable(actual, hi)) return false;
1529
+ return actual >= lo && actual <= hi;
1530
+ }
1531
+ default: {
1532
+ const _exhaustive = op;
1533
+ void _exhaustive;
1534
+ return false;
1535
+ }
1536
+ }
1537
+ }
1538
+ function isComparable(a, b) {
1539
+ if (typeof a === "number" && typeof b === "number") return true;
1540
+ if (typeof a === "string" && typeof b === "string") return true;
1541
+ if (a instanceof Date && b instanceof Date) return true;
1542
+ return false;
1543
+ }
1544
+ function evaluateClause(record, clause) {
1545
+ switch (clause.type) {
1546
+ case "field":
1547
+ return evaluateFieldClause(record, clause);
1548
+ case "filter":
1549
+ return clause.fn(record);
1550
+ case "group":
1551
+ if (clause.op === "and") {
1552
+ for (const child of clause.clauses) {
1553
+ if (!evaluateClause(record, child)) return false;
1554
+ }
1555
+ return true;
1556
+ } else {
1557
+ for (const child of clause.clauses) {
1558
+ if (evaluateClause(record, child)) return true;
1559
+ }
1560
+ return false;
1561
+ }
1562
+ }
1563
+ }
1564
+
1565
+ // src/query/builder.ts
1566
+ var EMPTY_PLAN = {
1567
+ clauses: [],
1568
+ orderBy: [],
1569
+ limit: void 0,
1570
+ offset: 0
1571
+ };
1572
+ var Query = class _Query {
1573
+ source;
1574
+ plan;
1575
+ constructor(source, plan = EMPTY_PLAN) {
1576
+ this.source = source;
1577
+ this.plan = plan;
1578
+ }
1579
+ /** Add a field comparison. Multiple where() calls are AND-combined. */
1580
+ where(field, op, value) {
1581
+ const clause = { type: "field", field, op, value };
1582
+ return new _Query(this.source, {
1583
+ ...this.plan,
1584
+ clauses: [...this.plan.clauses, clause]
1585
+ });
1586
+ }
1587
+ /**
1588
+ * Logical OR group. Pass a callback that builds a sub-query.
1589
+ * Each clause inside the callback is OR-combined; the group itself
1590
+ * joins the parent plan with AND.
1591
+ */
1592
+ or(builder) {
1593
+ const sub = builder(new _Query(this.source));
1594
+ const group = {
1595
+ type: "group",
1596
+ op: "or",
1597
+ clauses: sub.plan.clauses
1598
+ };
1599
+ return new _Query(this.source, {
1600
+ ...this.plan,
1601
+ clauses: [...this.plan.clauses, group]
1602
+ });
1603
+ }
1604
+ /**
1605
+ * Logical AND group. Same shape as `or()` but every clause inside the group
1606
+ * must match. Useful for explicit grouping inside a larger OR.
1607
+ */
1608
+ and(builder) {
1609
+ const sub = builder(new _Query(this.source));
1610
+ const group = {
1611
+ type: "group",
1612
+ op: "and",
1613
+ clauses: sub.plan.clauses
1614
+ };
1615
+ return new _Query(this.source, {
1616
+ ...this.plan,
1617
+ clauses: [...this.plan.clauses, group]
1618
+ });
1619
+ }
1620
+ /** Escape hatch: add an arbitrary predicate function. Not serializable. */
1621
+ filter(fn) {
1622
+ const clause = {
1623
+ type: "filter",
1624
+ fn
1625
+ };
1626
+ return new _Query(this.source, {
1627
+ ...this.plan,
1628
+ clauses: [...this.plan.clauses, clause]
1629
+ });
1630
+ }
1631
+ /** Sort by a field. Subsequent calls are tie-breakers. */
1632
+ orderBy(field, direction = "asc") {
1633
+ return new _Query(this.source, {
1634
+ ...this.plan,
1635
+ orderBy: [...this.plan.orderBy, { field, direction }]
1636
+ });
1637
+ }
1638
+ /** Cap the result size. */
1639
+ limit(n) {
1640
+ return new _Query(this.source, { ...this.plan, limit: n });
1641
+ }
1642
+ /** Skip the first N matching records (after ordering). */
1643
+ offset(n) {
1644
+ return new _Query(this.source, { ...this.plan, offset: n });
1645
+ }
1646
+ /** Execute the plan and return the matching records. */
1647
+ toArray() {
1648
+ return executePlanWithSource(this.source, this.plan);
1649
+ }
1650
+ /** Return the first matching record, or null. */
1651
+ first() {
1652
+ const result = executePlanWithSource(this.source, { ...this.plan, limit: 1 });
1653
+ return result[0] ?? null;
1654
+ }
1655
+ /** Return the number of matching records (after where/filter, before limit). */
1656
+ count() {
1657
+ const { candidates, remainingClauses } = candidateRecords(this.source, this.plan.clauses);
1658
+ if (remainingClauses.length === 0) return candidates.length;
1659
+ return filterRecords(candidates, remainingClauses).length;
1660
+ }
1661
+ /**
1662
+ * Re-run the query whenever the source notifies of changes.
1663
+ * Returns an unsubscribe function. The callback receives the latest result.
1664
+ * Throws if the source does not support subscriptions.
1665
+ */
1666
+ subscribe(cb) {
1667
+ if (!this.source.subscribe) {
1668
+ throw new Error("Query source does not support subscriptions. Pass a source with a subscribe() method.");
1669
+ }
1670
+ cb(this.toArray());
1671
+ return this.source.subscribe(() => cb(this.toArray()));
1672
+ }
1673
+ /**
1674
+ * Return the plan as a JSON-friendly object. FilterClause entries are
1675
+ * stripped (their `fn` cannot be serialized) and replaced with
1676
+ * { type: 'filter', fn: '[function]' } so devtools can still see them.
1677
+ */
1678
+ toPlan() {
1679
+ return serializePlan(this.plan);
1680
+ }
1681
+ };
1682
+ function executePlanWithSource(source, plan) {
1683
+ const { candidates, remainingClauses } = candidateRecords(source, plan.clauses);
1684
+ let result = remainingClauses.length === 0 ? [...candidates] : filterRecords(candidates, remainingClauses);
1685
+ if (plan.orderBy.length > 0) {
1686
+ result = sortRecords(result, plan.orderBy);
1687
+ }
1688
+ if (plan.offset > 0) {
1689
+ result = result.slice(plan.offset);
1690
+ }
1691
+ if (plan.limit !== void 0) {
1692
+ result = result.slice(0, plan.limit);
1693
+ }
1694
+ return result;
1695
+ }
1696
+ function candidateRecords(source, clauses) {
1697
+ const indexes = source.getIndexes?.();
1698
+ if (!indexes || !source.lookupById || clauses.length === 0) {
1699
+ return { candidates: source.snapshot(), remainingClauses: clauses };
1700
+ }
1701
+ const lookupById = (id) => source.lookupById?.(id);
1702
+ for (let i = 0; i < clauses.length; i++) {
1703
+ const clause = clauses[i];
1704
+ if (clause.type !== "field") continue;
1705
+ if (!indexes.has(clause.field)) continue;
1706
+ let ids = null;
1707
+ if (clause.op === "==") {
1708
+ ids = indexes.lookupEqual(clause.field, clause.value);
1709
+ } else if (clause.op === "in" && Array.isArray(clause.value)) {
1710
+ ids = indexes.lookupIn(clause.field, clause.value);
1711
+ }
1712
+ if (ids !== null) {
1713
+ const remaining = [];
1714
+ for (let j = 0; j < clauses.length; j++) {
1715
+ if (j !== i) remaining.push(clauses[j]);
1716
+ }
1717
+ return {
1718
+ candidates: materializeIds(ids, lookupById),
1719
+ remainingClauses: remaining
1720
+ };
1721
+ }
1722
+ }
1723
+ return { candidates: source.snapshot(), remainingClauses: clauses };
1724
+ }
1725
+ function materializeIds(ids, lookupById) {
1726
+ const out = [];
1727
+ for (const id of ids) {
1728
+ const record = lookupById(id);
1729
+ if (record !== void 0) out.push(record);
1730
+ }
1731
+ return out;
1732
+ }
1733
+ function executePlan(records, plan) {
1734
+ let result = filterRecords(records, plan.clauses);
1735
+ if (plan.orderBy.length > 0) {
1736
+ result = sortRecords(result, plan.orderBy);
1737
+ }
1738
+ if (plan.offset > 0) {
1739
+ result = result.slice(plan.offset);
1740
+ }
1741
+ if (plan.limit !== void 0) {
1742
+ result = result.slice(0, plan.limit);
1743
+ }
1744
+ return result;
1745
+ }
1746
+ function filterRecords(records, clauses) {
1747
+ if (clauses.length === 0) return [...records];
1748
+ const out = [];
1749
+ for (const r of records) {
1750
+ let matches = true;
1751
+ for (const clause of clauses) {
1752
+ if (!evaluateClause(r, clause)) {
1753
+ matches = false;
1754
+ break;
1755
+ }
1756
+ }
1757
+ if (matches) out.push(r);
1758
+ }
1759
+ return out;
1760
+ }
1761
+ function sortRecords(records, orderBy) {
1762
+ return [...records].sort((a, b) => {
1763
+ for (const { field, direction } of orderBy) {
1764
+ const av = readField(a, field);
1765
+ const bv = readField(b, field);
1766
+ const cmp = compareValues(av, bv);
1767
+ if (cmp !== 0) return direction === "asc" ? cmp : -cmp;
1768
+ }
1769
+ return 0;
1770
+ });
1771
+ }
1772
+ function readField(record, field) {
1773
+ if (record === null || record === void 0) return void 0;
1774
+ if (!field.includes(".")) {
1775
+ return record[field];
1776
+ }
1777
+ const segments = field.split(".");
1778
+ let cursor = record;
1779
+ for (const segment of segments) {
1780
+ if (cursor === null || cursor === void 0) return void 0;
1781
+ cursor = cursor[segment];
1782
+ }
1783
+ return cursor;
1784
+ }
1785
+ function compareValues(a, b) {
1786
+ if (a === void 0 || a === null) return b === void 0 || b === null ? 0 : 1;
1787
+ if (b === void 0 || b === null) return -1;
1788
+ if (typeof a === "number" && typeof b === "number") return a - b;
1789
+ if (typeof a === "string" && typeof b === "string") return a < b ? -1 : a > b ? 1 : 0;
1790
+ if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime();
1791
+ return 0;
1792
+ }
1793
+ function serializePlan(plan) {
1794
+ return {
1795
+ clauses: plan.clauses.map(serializeClause),
1796
+ orderBy: plan.orderBy,
1797
+ limit: plan.limit,
1798
+ offset: plan.offset
1799
+ };
1800
+ }
1801
+ function serializeClause(clause) {
1802
+ if (clause.type === "filter") {
1803
+ return { type: "filter", fn: "[function]" };
1804
+ }
1805
+ if (clause.type === "group") {
1806
+ return {
1807
+ type: "group",
1808
+ op: clause.op,
1809
+ clauses: clause.clauses.map(serializeClause)
1810
+ };
1811
+ }
1812
+ return clause;
1813
+ }
1814
+
1815
+ // src/query/indexes.ts
1816
+ var CollectionIndexes = class {
1817
+ indexes = /* @__PURE__ */ new Map();
1818
+ /**
1819
+ * Declare an index. Subsequent record additions are tracked under it.
1820
+ * Calling this twice for the same field is a no-op (idempotent).
1821
+ */
1822
+ declare(field) {
1823
+ if (this.indexes.has(field)) return;
1824
+ this.indexes.set(field, { field, buckets: /* @__PURE__ */ new Map() });
1825
+ }
1826
+ /** True if the given field has a declared index. */
1827
+ has(field) {
1828
+ return this.indexes.has(field);
1829
+ }
1830
+ /** All declared field names, in declaration order. */
1831
+ fields() {
1832
+ return [...this.indexes.keys()];
1833
+ }
1834
+ /**
1835
+ * Build all declared indexes from a snapshot of records.
1836
+ * Called once per hydration. O(N × indexes.size).
1837
+ */
1838
+ build(records) {
1839
+ for (const idx of this.indexes.values()) {
1840
+ idx.buckets.clear();
1841
+ for (const { id, record } of records) {
1842
+ addToIndex(idx, id, record);
1843
+ }
1844
+ }
1845
+ }
1846
+ /**
1847
+ * Insert or update a single record across all indexes.
1848
+ * Called by `Collection.put()` after the encrypted write succeeds.
1849
+ *
1850
+ * If `previousRecord` is provided, the record is removed from any old
1851
+ * buckets first — this is the update path. Pass `null` for fresh adds.
1852
+ */
1853
+ upsert(id, newRecord, previousRecord) {
1854
+ if (this.indexes.size === 0) return;
1855
+ if (previousRecord !== null) {
1856
+ this.remove(id, previousRecord);
1857
+ }
1858
+ for (const idx of this.indexes.values()) {
1859
+ addToIndex(idx, id, newRecord);
1860
+ }
1861
+ }
1862
+ /**
1863
+ * Remove a record from all indexes. Called by `Collection.delete()`
1864
+ * (and as the first half of `upsert` for the update path).
1865
+ */
1866
+ remove(id, record) {
1867
+ if (this.indexes.size === 0) return;
1868
+ for (const idx of this.indexes.values()) {
1869
+ removeFromIndex(idx, id, record);
1870
+ }
1871
+ }
1872
+ /** Drop all index data. Called when the collection is invalidated. */
1873
+ clear() {
1874
+ for (const idx of this.indexes.values()) {
1875
+ idx.buckets.clear();
1876
+ }
1877
+ }
1878
+ /**
1879
+ * Equality lookup: return the set of record ids whose `field` matches
1880
+ * the given value. Returns `null` if no index covers the field — the
1881
+ * caller should fall back to a linear scan.
1882
+ *
1883
+ * The returned Set is a reference to the index's internal storage —
1884
+ * callers must NOT mutate it.
1885
+ */
1886
+ lookupEqual(field, value) {
1887
+ const idx = this.indexes.get(field);
1888
+ if (!idx) return null;
1889
+ const key = stringifyKey(value);
1890
+ return idx.buckets.get(key) ?? EMPTY_SET;
1891
+ }
1892
+ /**
1893
+ * Set lookup: return the union of record ids whose `field` matches any
1894
+ * of the given values. Returns `null` if no index covers the field.
1895
+ */
1896
+ lookupIn(field, values) {
1897
+ const idx = this.indexes.get(field);
1898
+ if (!idx) return null;
1899
+ const out = /* @__PURE__ */ new Set();
1900
+ for (const value of values) {
1901
+ const key = stringifyKey(value);
1902
+ const bucket = idx.buckets.get(key);
1903
+ if (bucket) {
1904
+ for (const id of bucket) out.add(id);
1905
+ }
1906
+ }
1907
+ return out;
1908
+ }
1909
+ };
1910
+ var EMPTY_SET = /* @__PURE__ */ new Set();
1911
+ function stringifyKey(value) {
1912
+ if (value === null || value === void 0) return "\0NULL\0";
1913
+ if (typeof value === "string") return value;
1914
+ if (typeof value === "number" || typeof value === "boolean") return String(value);
1915
+ if (value instanceof Date) return value.toISOString();
1916
+ return "\0OBJECT\0";
1917
+ }
1918
+ function addToIndex(idx, id, record) {
1919
+ const value = readPath(record, idx.field);
1920
+ if (value === null || value === void 0) return;
1921
+ const key = stringifyKey(value);
1922
+ let bucket = idx.buckets.get(key);
1923
+ if (!bucket) {
1924
+ bucket = /* @__PURE__ */ new Set();
1925
+ idx.buckets.set(key, bucket);
1926
+ }
1927
+ bucket.add(id);
1928
+ }
1929
+ function removeFromIndex(idx, id, record) {
1930
+ const value = readPath(record, idx.field);
1931
+ if (value === null || value === void 0) return;
1932
+ const key = stringifyKey(value);
1933
+ const bucket = idx.buckets.get(key);
1934
+ if (!bucket) return;
1935
+ bucket.delete(id);
1936
+ if (bucket.size === 0) idx.buckets.delete(key);
1937
+ }
1938
+
1939
+ // src/cache/lru.ts
1940
+ var Lru = class {
1941
+ entries = /* @__PURE__ */ new Map();
1942
+ maxRecords;
1943
+ maxBytes;
1944
+ currentBytes = 0;
1945
+ hits = 0;
1946
+ misses = 0;
1947
+ evictions = 0;
1948
+ constructor(options) {
1949
+ if (options.maxRecords === void 0 && options.maxBytes === void 0) {
1950
+ throw new Error("Lru: must specify maxRecords, maxBytes, or both");
1951
+ }
1952
+ this.maxRecords = options.maxRecords;
1953
+ this.maxBytes = options.maxBytes;
1954
+ }
1955
+ /**
1956
+ * Look up a key. Hits promote the entry to most-recently-used; misses
1957
+ * return undefined. Both update the running stats counters.
1958
+ */
1959
+ get(key) {
1960
+ const entry = this.entries.get(key);
1961
+ if (!entry) {
1962
+ this.misses++;
1963
+ return void 0;
1964
+ }
1965
+ this.entries.delete(key);
1966
+ this.entries.set(key, entry);
1967
+ this.hits++;
1968
+ return entry.value;
1969
+ }
1970
+ /**
1971
+ * Insert or update a key. If the key already exists, its size is
1972
+ * accounted for and the entry is promoted to MRU. After insertion,
1973
+ * eviction runs to maintain both budgets.
1974
+ */
1975
+ set(key, value, size) {
1976
+ const existing = this.entries.get(key);
1977
+ if (existing) {
1978
+ this.currentBytes -= existing.size;
1979
+ this.entries.delete(key);
1980
+ }
1981
+ this.entries.set(key, { value, size });
1982
+ this.currentBytes += size;
1983
+ this.evictUntilUnderBudget();
1984
+ }
1985
+ /**
1986
+ * Remove a key without affecting hit/miss stats. Used by `Collection.delete()`.
1987
+ * Returns true if the key was present.
1988
+ */
1989
+ remove(key) {
1990
+ const existing = this.entries.get(key);
1991
+ if (!existing) return false;
1992
+ this.currentBytes -= existing.size;
1993
+ this.entries.delete(key);
1994
+ return true;
1995
+ }
1996
+ /** True if the cache currently holds an entry for the given key. */
1997
+ has(key) {
1998
+ return this.entries.has(key);
592
1999
  }
593
- if (typeof oldObj !== typeof newObj) {
594
- return [{ path: basePath || "(root)", type: "changed", from: oldObj, to: newObj }];
2000
+ /**
2001
+ * Drop every entry. Stats counters survive call `resetStats()` if you
2002
+ * want a clean slate. Used by `Collection.invalidate()` on key rotation.
2003
+ */
2004
+ clear() {
2005
+ this.entries.clear();
2006
+ this.currentBytes = 0;
2007
+ }
2008
+ /** Reset hit/miss/eviction counters to zero. Does NOT touch entries. */
2009
+ resetStats() {
2010
+ this.hits = 0;
2011
+ this.misses = 0;
2012
+ this.evictions = 0;
2013
+ }
2014
+ /** Snapshot of current cache statistics. Cheap — no copying. */
2015
+ stats() {
2016
+ return {
2017
+ hits: this.hits,
2018
+ misses: this.misses,
2019
+ evictions: this.evictions,
2020
+ size: this.entries.size,
2021
+ bytes: this.currentBytes
2022
+ };
595
2023
  }
596
- if (typeof oldObj !== "object") {
597
- return [{ path: basePath || "(root)", type: "changed", from: oldObj, to: newObj }];
2024
+ /**
2025
+ * Iterate over all currently-cached values. Order is least-recently-used
2026
+ * first. Used by tests and devtools — production callers should use
2027
+ * `Collection.scan()` instead.
2028
+ */
2029
+ *values() {
2030
+ for (const entry of this.entries.values()) yield entry.value;
598
2031
  }
599
- if (Array.isArray(oldObj) && Array.isArray(newObj)) {
600
- const maxLen = Math.max(oldObj.length, newObj.length);
601
- for (let i = 0; i < maxLen; i++) {
602
- const p = basePath ? `${basePath}[${i}]` : `[${i}]`;
603
- if (i >= oldObj.length) {
604
- changes.push({ path: p, type: "added", to: newObj[i] });
605
- } else if (i >= newObj.length) {
606
- changes.push({ path: p, type: "removed", from: oldObj[i] });
607
- } else {
608
- changes.push(...diff(oldObj[i], newObj[i], p));
609
- }
2032
+ /**
2033
+ * Walk the cache from the LRU end and drop entries until both budgets
2034
+ * are satisfied. Called after every `set()`. Single pass — entries are
2035
+ * never re-promoted during eviction.
2036
+ */
2037
+ evictUntilUnderBudget() {
2038
+ while (this.overBudget()) {
2039
+ const oldest = this.entries.keys().next();
2040
+ if (oldest.done) return;
2041
+ const key = oldest.value;
2042
+ const entry = this.entries.get(key);
2043
+ if (entry) this.currentBytes -= entry.size;
2044
+ this.entries.delete(key);
2045
+ this.evictions++;
610
2046
  }
611
- return changes;
612
2047
  }
613
- const oldRecord = oldObj;
614
- const newRecord = newObj;
615
- const allKeys = /* @__PURE__ */ new Set([...Object.keys(oldRecord), ...Object.keys(newRecord)]);
616
- for (const key of allKeys) {
617
- const p = basePath ? `${basePath}.${key}` : key;
618
- if (!(key in oldRecord)) {
619
- changes.push({ path: p, type: "added", to: newRecord[key] });
620
- } else if (!(key in newRecord)) {
621
- changes.push({ path: p, type: "removed", from: oldRecord[key] });
622
- } else {
623
- changes.push(...diff(oldRecord[key], newRecord[key], p));
2048
+ overBudget() {
2049
+ if (this.maxRecords !== void 0 && this.entries.size > this.maxRecords) return true;
2050
+ if (this.maxBytes !== void 0 && this.currentBytes > this.maxBytes) return true;
2051
+ return false;
2052
+ }
2053
+ };
2054
+
2055
+ // src/cache/policy.ts
2056
+ var UNITS = {
2057
+ "": 1,
2058
+ "B": 1,
2059
+ "KB": 1024,
2060
+ "MB": 1024 * 1024,
2061
+ "GB": 1024 * 1024 * 1024
2062
+ // 'TB' deliberately not supported — if you need it, you're not using NOYDB.
2063
+ };
2064
+ function parseBytes(input) {
2065
+ if (typeof input === "number") {
2066
+ if (!Number.isFinite(input) || input <= 0) {
2067
+ throw new Error(`parseBytes: numeric input must be a positive finite number, got ${String(input)}`);
624
2068
  }
2069
+ return Math.floor(input);
625
2070
  }
626
- return changes;
2071
+ const trimmed = input.trim();
2072
+ if (trimmed === "") {
2073
+ throw new Error("parseBytes: empty string is not a valid byte budget");
2074
+ }
2075
+ const match = /^([0-9]+(?:\.[0-9]+)?)\s*([A-Za-z]*)$/.exec(trimmed);
2076
+ if (!match) {
2077
+ throw new Error(`parseBytes: invalid byte budget "${input}". Expected format: "1024", "50KB", "50MB", "1GB"`);
2078
+ }
2079
+ const value = parseFloat(match[1]);
2080
+ const unit = (match[2] ?? "").toUpperCase();
2081
+ if (!(unit in UNITS)) {
2082
+ throw new Error(`parseBytes: unknown unit "${match[2]}" in "${input}". Supported: B, KB, MB, GB`);
2083
+ }
2084
+ const bytes = Math.floor(value * UNITS[unit]);
2085
+ if (bytes <= 0) {
2086
+ throw new Error(`parseBytes: byte budget must be > 0, got ${bytes} from "${input}"`);
2087
+ }
2088
+ return bytes;
627
2089
  }
628
- function formatDiff(changes) {
629
- if (changes.length === 0) return "(no changes)";
630
- return changes.map((c) => {
631
- switch (c.type) {
632
- case "added":
633
- return `+ ${c.path}: ${JSON.stringify(c.to)}`;
634
- case "removed":
635
- return `- ${c.path}: ${JSON.stringify(c.from)}`;
636
- case "changed":
637
- return `~ ${c.path}: ${JSON.stringify(c.from)} \u2192 ${JSON.stringify(c.to)}`;
638
- }
639
- }).join("\n");
2090
+ function estimateRecordBytes(record) {
2091
+ try {
2092
+ return JSON.stringify(record).length;
2093
+ } catch {
2094
+ return 0;
2095
+ }
640
2096
  }
641
2097
 
642
2098
  // src/collection.ts
2099
+ var fallbackWarned = /* @__PURE__ */ new Set();
2100
+ function warnOnceFallback(adapterName) {
2101
+ if (fallbackWarned.has(adapterName)) return;
2102
+ fallbackWarned.add(adapterName);
2103
+ if (typeof process !== "undefined" && process.env["NODE_ENV"] === "test") return;
2104
+ console.warn(
2105
+ `[noy-db] Adapter "${adapterName}" does not implement listPage(); Collection.scan()/listPage() are using a synthetic fallback (slower). Add a listPage method to opt into the streaming fast path.`
2106
+ );
2107
+ }
643
2108
  var Collection = class {
644
2109
  adapter;
645
2110
  compartment;
@@ -650,9 +2115,91 @@ var Collection = class {
650
2115
  getDEK;
651
2116
  onDirty;
652
2117
  historyConfig;
653
- // In-memory cache of decrypted records
2118
+ // In-memory cache of decrypted records (eager mode only). Lazy mode
2119
+ // uses `lru` instead. Both fields exist so a single Collection instance
2120
+ // doesn't need a runtime branch on every cache access.
654
2121
  cache = /* @__PURE__ */ new Map();
655
2122
  hydrated = false;
2123
+ /**
2124
+ * Lazy mode flag. `true` when constructed with `prefetch: false`.
2125
+ * In lazy mode the cache is bounded by an LRU and `list()`/`query()`
2126
+ * throw — callers must use `scan()` or per-id `get()` instead.
2127
+ */
2128
+ lazy;
2129
+ /**
2130
+ * LRU cache for lazy mode. Only allocated when `prefetch: false` is set.
2131
+ * Stores `{ record, version }` entries the same shape as `this.cache`.
2132
+ * Tree-shaking note: importing Collection without setting `prefetch:false`
2133
+ * still pulls in the Lru class today; future bundle-size work could
2134
+ * lazy-import the cache module.
2135
+ */
2136
+ lru;
2137
+ /**
2138
+ * In-memory secondary indexes for the query DSL.
2139
+ *
2140
+ * Built during `ensureHydrated()` and maintained on every put/delete.
2141
+ * The query executor consults these for `==` and `in` operators on
2142
+ * indexed fields, falling back to a linear scan for unindexed fields
2143
+ * or unsupported operators.
2144
+ *
2145
+ * v0.3 ships in-memory only — persistence as encrypted blobs is a
2146
+ * follow-up. See `query/indexes.ts` for the design rationale.
2147
+ *
2148
+ * Indexes are INCOMPATIBLE with lazy mode in v0.3 — the constructor
2149
+ * rejects the combination because evicted records would silently
2150
+ * disappear from the index without notification.
2151
+ */
2152
+ indexes = new CollectionIndexes();
2153
+ /**
2154
+ * Optional Standard Schema v1 validator. When set, every `put()` runs
2155
+ * the input through `validateSchemaInput` before encryption, and every
2156
+ * record coming OUT of `decryptRecord` runs through
2157
+ * `validateSchemaOutput`. A rejected input throws
2158
+ * `SchemaValidationError` with `direction: 'input'`; drifted stored
2159
+ * data throws with `direction: 'output'`. Both carry the rich issue
2160
+ * list from the validator so UI code can render field-level messages.
2161
+ *
2162
+ * The schema is stored as `StandardSchemaV1<unknown, T>` because the
2163
+ * collection type parameter `T` is the OUTPUT type — whatever the
2164
+ * validator produces after transforms and coercion. Users who pass a
2165
+ * schema to `defineNoydbStore` (or `Collection.constructor`) get their
2166
+ * `T` inferred automatically via `InferOutput<Schema>`.
2167
+ */
2168
+ schema;
2169
+ /**
2170
+ * Optional reference to the compartment-level hash-chained audit
2171
+ * log. When present, every successful `put()` and `delete()` appends
2172
+ * an entry to the ledger AFTER the adapter write succeeds (so a
2173
+ * failed adapter write never produces an orphan ledger entry).
2174
+ *
2175
+ * The ledger is always a compartment-wide singleton — all
2176
+ * collections in the same compartment share the same LedgerStore.
2177
+ * Compartment.ledger() does the lazy init; this field just holds
2178
+ * the reference so Collection doesn't need to reach back up to the
2179
+ * compartment on every mutation.
2180
+ *
2181
+ * `undefined` means "no ledger attached" — supported for tests that
2182
+ * construct a Collection directly without a compartment, and for
2183
+ * future backwards-compat scenarios. Production usage always has a
2184
+ * ledger because Compartment.collection() passes one through.
2185
+ */
2186
+ ledger;
2187
+ /**
2188
+ * Optional back-reference to the owning compartment's ref
2189
+ * enforcer. When present, `Collection.put` calls
2190
+ * `refEnforcer.enforceRefsOnPut(name, record)` before the adapter
2191
+ * write, and `Collection.delete` calls
2192
+ * `refEnforcer.enforceRefsOnDelete(name, id)` before its own
2193
+ * adapter delete. The Compartment handles the actual registry
2194
+ * lookup and cross-collection enforcement — Collection just
2195
+ * notifies it at the right points in the lifecycle.
2196
+ *
2197
+ * Typed as a structural interface rather than `Compartment`
2198
+ * directly to avoid a circular import. Compartment implements
2199
+ * these two methods; any other object with the same shape would
2200
+ * work too (used only in unit tests).
2201
+ */
2202
+ refEnforcer;
656
2203
  constructor(opts) {
657
2204
  this.adapter = opts.adapter;
658
2205
  this.compartment = opts.compartment;
@@ -663,9 +2210,46 @@ var Collection = class {
663
2210
  this.getDEK = opts.getDEK;
664
2211
  this.onDirty = opts.onDirty;
665
2212
  this.historyConfig = opts.historyConfig ?? { enabled: true };
2213
+ this.schema = opts.schema;
2214
+ this.ledger = opts.ledger;
2215
+ this.refEnforcer = opts.refEnforcer;
2216
+ this.lazy = opts.prefetch === false;
2217
+ if (this.lazy) {
2218
+ if (opts.indexes && opts.indexes.length > 0) {
2219
+ throw new Error(
2220
+ `Collection "${this.name}": secondary indexes are not supported in lazy mode (prefetch: false). Either remove the indexes option or use prefetch: true. Index + lazy support is tracked as a v0.4 follow-up.`
2221
+ );
2222
+ }
2223
+ if (!opts.cache || opts.cache.maxRecords === void 0 && opts.cache.maxBytes === void 0) {
2224
+ throw new Error(
2225
+ `Collection "${this.name}": lazy mode (prefetch: false) requires a cache option with maxRecords and/or maxBytes. An unbounded lazy cache defeats the purpose.`
2226
+ );
2227
+ }
2228
+ const lruOptions = {};
2229
+ if (opts.cache.maxRecords !== void 0) lruOptions.maxRecords = opts.cache.maxRecords;
2230
+ if (opts.cache.maxBytes !== void 0) lruOptions.maxBytes = parseBytes(opts.cache.maxBytes);
2231
+ this.lru = new Lru(lruOptions);
2232
+ this.hydrated = true;
2233
+ } else {
2234
+ this.lru = null;
2235
+ if (opts.indexes) {
2236
+ for (const def of opts.indexes) {
2237
+ this.indexes.declare(def);
2238
+ }
2239
+ }
2240
+ }
666
2241
  }
667
2242
  /** Get a single record by ID. Returns null if not found. */
668
2243
  async get(id) {
2244
+ if (this.lazy && this.lru) {
2245
+ const cached = this.lru.get(id);
2246
+ if (cached) return cached.record;
2247
+ const envelope = await this.adapter.get(this.compartment, this.name, id);
2248
+ if (!envelope) return null;
2249
+ const record = await this.decryptRecord(envelope);
2250
+ this.lru.set(id, { record, version: envelope._v }, estimateRecordBytes(record));
2251
+ return record;
2252
+ }
669
2253
  await this.ensureHydrated();
670
2254
  const entry = this.cache.get(id);
671
2255
  return entry ? entry.record : null;
@@ -675,8 +2259,26 @@ var Collection = class {
675
2259
  if (!hasWritePermission(this.keyring, this.name)) {
676
2260
  throw new ReadOnlyError();
677
2261
  }
678
- await this.ensureHydrated();
679
- const existing = this.cache.get(id);
2262
+ if (this.schema !== void 0) {
2263
+ record = await validateSchemaInput(this.schema, record, `put(${id})`);
2264
+ }
2265
+ if (this.refEnforcer !== void 0) {
2266
+ await this.refEnforcer.enforceRefsOnPut(this.name, record);
2267
+ }
2268
+ let existing;
2269
+ if (this.lazy && this.lru) {
2270
+ existing = this.lru.get(id);
2271
+ if (!existing) {
2272
+ const previousEnvelope = await this.adapter.get(this.compartment, this.name, id);
2273
+ if (previousEnvelope) {
2274
+ const previousRecord = await this.decryptRecord(previousEnvelope);
2275
+ existing = { record: previousRecord, version: previousEnvelope._v };
2276
+ }
2277
+ }
2278
+ } else {
2279
+ await this.ensureHydrated();
2280
+ existing = this.cache.get(id);
2281
+ }
680
2282
  const version = existing ? existing.version + 1 : 1;
681
2283
  if (existing && this.historyConfig.enabled !== false) {
682
2284
  const historyEnvelope = await this.encryptRecord(existing.record, existing.version);
@@ -695,7 +2297,26 @@ var Collection = class {
695
2297
  }
696
2298
  const envelope = await this.encryptRecord(record, version);
697
2299
  await this.adapter.put(this.compartment, this.name, id, envelope);
698
- this.cache.set(id, { record, version });
2300
+ if (this.ledger) {
2301
+ const appendInput = {
2302
+ op: "put",
2303
+ collection: this.name,
2304
+ id,
2305
+ version,
2306
+ actor: this.keyring.userId,
2307
+ payloadHash: await envelopePayloadHash(envelope)
2308
+ };
2309
+ if (existing) {
2310
+ appendInput.delta = computePatch(record, existing.record);
2311
+ }
2312
+ await this.ledger.append(appendInput);
2313
+ }
2314
+ if (this.lazy && this.lru) {
2315
+ this.lru.set(id, { record, version }, estimateRecordBytes(record));
2316
+ } else {
2317
+ this.cache.set(id, { record, version });
2318
+ this.indexes.upsert(id, record, existing ? existing.record : null);
2319
+ }
699
2320
  await this.onDirty?.(this.name, id, "put", version);
700
2321
  this.emitter.emit("change", {
701
2322
  compartment: this.compartment,
@@ -709,13 +2330,47 @@ var Collection = class {
709
2330
  if (!hasWritePermission(this.keyring, this.name)) {
710
2331
  throw new ReadOnlyError();
711
2332
  }
712
- const existing = this.cache.get(id);
2333
+ if (this.refEnforcer !== void 0) {
2334
+ await this.refEnforcer.enforceRefsOnDelete(this.name, id);
2335
+ }
2336
+ let existing;
2337
+ if (this.lazy && this.lru) {
2338
+ existing = this.lru.get(id);
2339
+ if (!existing && this.historyConfig.enabled !== false) {
2340
+ const previousEnvelope2 = await this.adapter.get(this.compartment, this.name, id);
2341
+ if (previousEnvelope2) {
2342
+ const previousRecord = await this.decryptRecord(previousEnvelope2);
2343
+ existing = { record: previousRecord, version: previousEnvelope2._v };
2344
+ }
2345
+ }
2346
+ } else {
2347
+ existing = this.cache.get(id);
2348
+ }
713
2349
  if (existing && this.historyConfig.enabled !== false) {
714
2350
  const historyEnvelope = await this.encryptRecord(existing.record, existing.version);
715
2351
  await saveHistory(this.adapter, this.compartment, this.name, id, historyEnvelope);
716
2352
  }
2353
+ const previousEnvelope = await this.adapter.get(this.compartment, this.name, id);
2354
+ const previousPayloadHash = await envelopePayloadHash(previousEnvelope);
717
2355
  await this.adapter.delete(this.compartment, this.name, id);
718
- this.cache.delete(id);
2356
+ if (this.ledger) {
2357
+ await this.ledger.append({
2358
+ op: "delete",
2359
+ collection: this.name,
2360
+ id,
2361
+ version: existing?.version ?? 0,
2362
+ actor: this.keyring.userId,
2363
+ payloadHash: previousPayloadHash
2364
+ });
2365
+ }
2366
+ if (this.lazy && this.lru) {
2367
+ this.lru.remove(id);
2368
+ } else {
2369
+ this.cache.delete(id);
2370
+ if (existing) {
2371
+ this.indexes.remove(id, existing.record);
2372
+ }
2373
+ }
719
2374
  await this.onDirty?.(this.name, id, "delete", existing?.version ?? 0);
720
2375
  this.emitter.emit("change", {
721
2376
  compartment: this.compartment,
@@ -724,14 +2379,70 @@ var Collection = class {
724
2379
  action: "delete"
725
2380
  });
726
2381
  }
727
- /** List all records in the collection. */
2382
+ /**
2383
+ * List all records in the collection.
2384
+ *
2385
+ * Throws in lazy mode — bulk listing defeats the purpose of lazy
2386
+ * hydration. Use `scan()` to iterate over the full collection
2387
+ * page-by-page without holding more than `pageSize` records in memory.
2388
+ */
728
2389
  async list() {
2390
+ if (this.lazy) {
2391
+ throw new Error(
2392
+ `Collection "${this.name}": list() is not available in lazy mode (prefetch: false). Use collection.scan({ pageSize }) to iterate over the full collection.`
2393
+ );
2394
+ }
729
2395
  await this.ensureHydrated();
730
2396
  return [...this.cache.values()].map((e) => e.record);
731
2397
  }
732
- /** Filter records by a predicate. */
733
2398
  query(predicate) {
734
- return [...this.cache.values()].map((e) => e.record).filter(predicate);
2399
+ if (this.lazy) {
2400
+ throw new Error(
2401
+ `Collection "${this.name}": query() is not available in lazy mode (prefetch: false). Use collection.scan({ pageSize }) and filter the streamed records with a regular for-await loop. Streaming queries land in v0.4.`
2402
+ );
2403
+ }
2404
+ if (predicate !== void 0) {
2405
+ return [...this.cache.values()].map((e) => e.record).filter(predicate);
2406
+ }
2407
+ const source = {
2408
+ snapshot: () => [...this.cache.values()].map((e) => e.record),
2409
+ subscribe: (cb) => {
2410
+ const handler = (event) => {
2411
+ if (event.compartment === this.compartment && event.collection === this.name) {
2412
+ cb();
2413
+ }
2414
+ };
2415
+ this.emitter.on("change", handler);
2416
+ return () => this.emitter.off("change", handler);
2417
+ },
2418
+ // Index-aware fast path for `==` and `in` operators on indexed
2419
+ // fields. The Query builder consults these when present and falls
2420
+ // back to a linear scan otherwise.
2421
+ getIndexes: () => this.getIndexes(),
2422
+ lookupById: (id) => this.cache.get(id)?.record
2423
+ };
2424
+ return new Query(source);
2425
+ }
2426
+ /**
2427
+ * Cache statistics — useful for devtools, monitoring, and verifying
2428
+ * that LRU eviction is happening as expected in lazy mode.
2429
+ *
2430
+ * In eager mode, returns size only (no hits/misses are tracked because
2431
+ * every read is a cache hit by construction). In lazy mode, returns
2432
+ * the full LRU stats: `{ hits, misses, evictions, size, bytes }`.
2433
+ */
2434
+ cacheStats() {
2435
+ if (this.lazy && this.lru) {
2436
+ return { ...this.lru.stats(), lazy: true };
2437
+ }
2438
+ return {
2439
+ hits: 0,
2440
+ misses: 0,
2441
+ evictions: 0,
2442
+ size: this.cache.size,
2443
+ bytes: 0,
2444
+ lazy: false
2445
+ };
735
2446
  }
736
2447
  // ─── History Methods ────────────────────────────────────────────
737
2448
  /** Get version history for a record, newest first. */
@@ -745,7 +2456,7 @@ var Collection = class {
745
2456
  );
746
2457
  const entries = [];
747
2458
  for (const env of envelopes) {
748
- const record = await this.decryptRecord(env);
2459
+ const record = await this.decryptRecord(env, { skipValidation: true });
749
2460
  entries.push({
750
2461
  version: env._v,
751
2462
  timestamp: env._ts,
@@ -755,7 +2466,15 @@ var Collection = class {
755
2466
  }
756
2467
  return entries;
757
2468
  }
758
- /** Get a specific past version of a record. */
2469
+ /**
2470
+ * Get a specific past version of a record.
2471
+ *
2472
+ * History reads intentionally **skip schema validation** — historical
2473
+ * records predate the current schema by definition, so validating them
2474
+ * against today's shape would be a false positive on any schema
2475
+ * evolution. If a caller needs validated history, they should filter
2476
+ * and re-put the records through the normal `put()` path.
2477
+ */
759
2478
  async getVersion(id, version) {
760
2479
  const envelope = await getVersionEnvelope(
761
2480
  this.adapter,
@@ -765,7 +2484,7 @@ var Collection = class {
765
2484
  version
766
2485
  );
767
2486
  if (!envelope) return null;
768
- return this.decryptRecord(envelope);
2487
+ return this.decryptRecord(envelope, { skipValidation: true });
769
2488
  }
770
2489
  /** Revert a record to a past version. Creates a new version with the old content. */
771
2490
  async revert(id, version) {
@@ -783,7 +2502,7 @@ var Collection = class {
783
2502
  async diff(id, versionA, versionB) {
784
2503
  const recordA = versionA === 0 ? null : await this.resolveVersion(id, versionA);
785
2504
  const recordB = versionB === void 0 || versionB === 0 ? versionB === 0 ? null : await this.resolveCurrentOrVersion(id) : await this.resolveVersion(id, versionB);
786
- return diff(recordA, recordB);
2505
+ return diff2(recordA, recordB);
787
2506
  }
788
2507
  /** Resolve a version: try history first, then check if it's the current version. */
789
2508
  async resolveVersion(id, version) {
@@ -822,11 +2541,105 @@ var Collection = class {
822
2541
  return clearHistory(this.adapter, this.compartment, this.name, id);
823
2542
  }
824
2543
  // ─── Core Methods ─────────────────────────────────────────────
825
- /** Count records in the collection. */
2544
+ /**
2545
+ * Count records in the collection.
2546
+ *
2547
+ * In eager mode this returns the in-memory cache size (instant). In
2548
+ * lazy mode it asks the adapter via `list()` to enumerate ids — slower
2549
+ * but still correct, and avoids loading any record bodies into memory.
2550
+ */
826
2551
  async count() {
2552
+ if (this.lazy) {
2553
+ const ids = await this.adapter.list(this.compartment, this.name);
2554
+ return ids.length;
2555
+ }
827
2556
  await this.ensureHydrated();
828
2557
  return this.cache.size;
829
2558
  }
2559
+ // ─── Pagination & Streaming ───────────────────────────────────
2560
+ /**
2561
+ * Fetch a single page of records via the adapter's optional `listPage`
2562
+ * extension. Returns the decrypted records for this page plus an opaque
2563
+ * cursor for the next page.
2564
+ *
2565
+ * Pass `cursor: undefined` (or omit it) to start from the beginning.
2566
+ * The final page returns `nextCursor: null`.
2567
+ *
2568
+ * If the adapter does NOT implement `listPage`, this falls back to a
2569
+ * synthetic implementation: it loads all ids via `list()`, sorts them,
2570
+ * and slices a window. The first call emits a one-time console.warn so
2571
+ * developers can spot adapters that should opt into the fast path.
2572
+ */
2573
+ async listPage(opts = {}) {
2574
+ const limit = opts.limit ?? 100;
2575
+ if (this.adapter.listPage) {
2576
+ const result = await this.adapter.listPage(this.compartment, this.name, opts.cursor, limit);
2577
+ const decrypted = [];
2578
+ for (const { record, version, id } of await this.decryptPage(result.items)) {
2579
+ if (!this.lazy && !this.cache.has(id)) {
2580
+ this.cache.set(id, { record, version });
2581
+ }
2582
+ decrypted.push(record);
2583
+ }
2584
+ return { items: decrypted, nextCursor: result.nextCursor };
2585
+ }
2586
+ warnOnceFallback(this.adapter.name ?? "unknown");
2587
+ const ids = (await this.adapter.list(this.compartment, this.name)).slice().sort();
2588
+ const start = opts.cursor ? parseInt(opts.cursor, 10) : 0;
2589
+ const end = Math.min(start + limit, ids.length);
2590
+ const items = [];
2591
+ for (let i = start; i < end; i++) {
2592
+ const id = ids[i];
2593
+ const envelope = await this.adapter.get(this.compartment, this.name, id);
2594
+ if (envelope) {
2595
+ const record = await this.decryptRecord(envelope);
2596
+ items.push(record);
2597
+ if (!this.lazy && !this.cache.has(id)) {
2598
+ this.cache.set(id, { record, version: envelope._v });
2599
+ }
2600
+ }
2601
+ }
2602
+ return {
2603
+ items,
2604
+ nextCursor: end < ids.length ? String(end) : null
2605
+ };
2606
+ }
2607
+ /**
2608
+ * Stream every record in the collection page-by-page, yielding decrypted
2609
+ * records as an `AsyncIterable<T>`. The whole point: process collections
2610
+ * larger than RAM without ever holding more than `pageSize` records
2611
+ * decrypted at once.
2612
+ *
2613
+ * @example
2614
+ * ```ts
2615
+ * for await (const record of invoices.scan({ pageSize: 500 })) {
2616
+ * await processOne(record)
2617
+ * }
2618
+ * ```
2619
+ *
2620
+ * Uses `adapter.listPage` when available; otherwise falls back to the
2621
+ * synthetic pagination path with the same one-time warning.
2622
+ */
2623
+ async *scan(opts = {}) {
2624
+ const pageSize = opts.pageSize ?? 100;
2625
+ let page = await this.listPage({ limit: pageSize });
2626
+ while (true) {
2627
+ for (const item of page.items) {
2628
+ yield item;
2629
+ }
2630
+ if (page.nextCursor === null) return;
2631
+ page = await this.listPage({ cursor: page.nextCursor, limit: pageSize });
2632
+ }
2633
+ }
2634
+ /** Decrypt a page of envelopes returned by `adapter.listPage`. */
2635
+ async decryptPage(items) {
2636
+ const out = [];
2637
+ for (const { id, envelope } of items) {
2638
+ const record = await this.decryptRecord(envelope);
2639
+ out.push({ id, record, version: envelope._v });
2640
+ }
2641
+ return out;
2642
+ }
830
2643
  // ─── Internal ──────────────────────────────────────────────────
831
2644
  /** Load all records from adapter into memory cache. */
832
2645
  async ensureHydrated() {
@@ -840,6 +2653,7 @@ var Collection = class {
840
2653
  }
841
2654
  }
842
2655
  this.hydrated = true;
2656
+ this.rebuildIndexes();
843
2657
  }
844
2658
  /** Hydrate from a pre-loaded snapshot (used by Compartment). */
845
2659
  async hydrateFromSnapshot(records) {
@@ -848,6 +2662,34 @@ var Collection = class {
848
2662
  this.cache.set(id, { record, version: envelope._v });
849
2663
  }
850
2664
  this.hydrated = true;
2665
+ this.rebuildIndexes();
2666
+ }
2667
+ /**
2668
+ * Rebuild secondary indexes from the current in-memory cache.
2669
+ *
2670
+ * Called after any bulk hydration. Incremental put/delete updates
2671
+ * are handled by `indexes.upsert()` / `indexes.remove()` directly,
2672
+ * so this only fires for full reloads.
2673
+ *
2674
+ * Synchronous and O(N × indexes.size); for the v0.3 target scale of
2675
+ * 1K–50K records this completes in single-digit milliseconds.
2676
+ */
2677
+ rebuildIndexes() {
2678
+ if (this.indexes.fields().length === 0) return;
2679
+ const snapshot = [];
2680
+ for (const [id, entry] of this.cache) {
2681
+ snapshot.push({ id, record: entry.record });
2682
+ }
2683
+ this.indexes.build(snapshot);
2684
+ }
2685
+ /**
2686
+ * Get the in-memory index store. Used by `Query` to short-circuit
2687
+ * `==` and `in` lookups when an index covers the where clause.
2688
+ *
2689
+ * Returns `null` if no indexes are declared on this collection.
2690
+ */
2691
+ getIndexes() {
2692
+ return this.indexes.fields().length > 0 ? this.indexes : null;
851
2693
  }
852
2694
  /** Get all records as encrypted envelopes (for dump). */
853
2695
  async dumpEnvelopes() {
@@ -882,13 +2724,38 @@ var Collection = class {
882
2724
  _by: by
883
2725
  };
884
2726
  }
885
- async decryptRecord(envelope) {
2727
+ /**
2728
+ * Decrypt an envelope into a record of type `T`.
2729
+ *
2730
+ * When a schema is attached, the decrypted value is validated before
2731
+ * being returned. A divergence between the stored bytes and the
2732
+ * current schema throws `SchemaValidationError` with
2733
+ * `direction: 'output'` — silently returning drifted data would
2734
+ * propagate garbage into the UI and break the whole point of having
2735
+ * a schema.
2736
+ *
2737
+ * `skipValidation` exists for history reads: when calling
2738
+ * `getVersion()` the caller is explicitly asking for an old snapshot
2739
+ * that may predate a schema change, so validating it would be a
2740
+ * false positive. Every non-history read leaves this flag `false`.
2741
+ */
2742
+ async decryptRecord(envelope, opts = {}) {
2743
+ let record;
886
2744
  if (!this.encrypted) {
887
- return JSON.parse(envelope._data);
2745
+ record = JSON.parse(envelope._data);
2746
+ } else {
2747
+ const dek = await this.getDEK(this.name);
2748
+ const json = await decrypt(envelope._iv, envelope._data, dek);
2749
+ record = JSON.parse(json);
888
2750
  }
889
- const dek = await this.getDEK(this.name);
890
- const json = await decrypt(envelope._iv, envelope._data, dek);
891
- return JSON.parse(json);
2751
+ if (this.schema !== void 0 && !opts.skipValidation) {
2752
+ record = await validateSchemaOutput(
2753
+ this.schema,
2754
+ record,
2755
+ `${this.name}@v${envelope._v}`
2756
+ );
2757
+ }
2758
+ return record;
892
2759
  }
893
2760
  };
894
2761
 
@@ -896,13 +2763,62 @@ var Collection = class {
896
2763
  var Compartment = class {
897
2764
  adapter;
898
2765
  name;
2766
+ /**
2767
+ * The active in-memory keyring. NOT readonly because `load()`
2768
+ * needs to refresh it after restoring a different keyring file —
2769
+ * otherwise the in-memory DEKs (from the pre-load session) and
2770
+ * the on-disk wrapped DEKs (from the loaded backup) drift apart
2771
+ * and every subsequent decrypt fails with TamperedError.
2772
+ */
899
2773
  keyring;
900
2774
  encrypted;
901
2775
  emitter;
902
2776
  onDirty;
903
2777
  historyConfig;
904
2778
  getDEK;
2779
+ /**
2780
+ * Optional callback that re-derives an UnlockedKeyring from the
2781
+ * adapter using the active user's passphrase. Called by `load()`
2782
+ * after the on-disk keyring file has been replaced — refreshes
2783
+ * `this.keyring` so the next DEK access uses the loaded wrapped
2784
+ * DEKs instead of the stale pre-load ones.
2785
+ *
2786
+ * Provided by Noydb at openCompartment() time. Tests that
2787
+ * construct Compartment directly can pass `undefined`; load()
2788
+ * skips the refresh in that case (which is fine for plaintext
2789
+ * compartments — there's nothing to re-unwrap).
2790
+ */
2791
+ reloadKeyring;
905
2792
  collectionCache = /* @__PURE__ */ new Map();
2793
+ /**
2794
+ * Per-compartment ledger store. Lazy-initialized on first
2795
+ * `collection()` call (which passes it through to the Collection)
2796
+ * or on first `ledger()` call from user code.
2797
+ *
2798
+ * One LedgerStore is shared across all collections in a compartment
2799
+ * because the hash chain is compartment-scoped: the chain head is a
2800
+ * single "what did this compartment do last" identifier, not a
2801
+ * per-collection one. Two collections appending concurrently is the
2802
+ * single-writer concurrency concern documented in the LedgerStore
2803
+ * docstring.
2804
+ */
2805
+ ledgerStore = null;
2806
+ /**
2807
+ * Per-compartment foreign-key reference registry. Collections
2808
+ * register their `refs` option here on construction; the
2809
+ * compartment uses the registry on every put/delete/checkIntegrity
2810
+ * call. One instance lives for the compartment's lifetime.
2811
+ */
2812
+ refRegistry = new RefRegistry();
2813
+ /**
2814
+ * Set of collection record-ids currently being deleted as part of
2815
+ * a cascade. Populated on entry to `enforceRefsOnDelete` and
2816
+ * drained on exit. Used to break mutual-cascade cycles: deleting
2817
+ * A → cascade to B → cascade back to A would otherwise recurse
2818
+ * forever, so we short-circuit when we see an already-in-progress
2819
+ * delete on the same (collection, id) pair.
2820
+ */
2821
+ cascadeInProgress = /* @__PURE__ */ new Set();
906
2822
  constructor(opts) {
907
2823
  this.adapter = opts.adapter;
908
2824
  this.name = opts.name;
@@ -911,19 +2827,53 @@ var Compartment = class {
911
2827
  this.emitter = opts.emitter;
912
2828
  this.onDirty = opts.onDirty;
913
2829
  this.historyConfig = opts.historyConfig ?? { enabled: true };
2830
+ this.reloadKeyring = opts.reloadKeyring;
2831
+ this.getDEK = this.makeGetDEK();
2832
+ }
2833
+ /**
2834
+ * Construct (or reconstruct) the lazy DEK resolver. Captures the
2835
+ * CURRENT value of `this.keyring` and `this.adapter` in a closure,
2836
+ * memoizing the inner getDEKFn after first use so subsequent
2837
+ * lookups are O(1).
2838
+ *
2839
+ * `load()` calls this after refreshing `this.keyring` to discard
2840
+ * the prior session's cached DEKs.
2841
+ */
2842
+ makeGetDEK() {
914
2843
  let getDEKFn = null;
915
- this.getDEK = async (collectionName) => {
2844
+ return async (collectionName) => {
916
2845
  if (!getDEKFn) {
917
2846
  getDEKFn = await ensureCollectionDEK(this.adapter, this.name, this.keyring);
918
2847
  }
919
2848
  return getDEKFn(collectionName);
920
2849
  };
921
2850
  }
922
- /** Open a typed collection within this compartment. */
923
- collection(collectionName) {
2851
+ /**
2852
+ * Open a typed collection within this compartment.
2853
+ *
2854
+ * - `options.indexes` declares secondary indexes for the query DSL.
2855
+ * Indexes are computed in memory after decryption; adapters never
2856
+ * see plaintext index data.
2857
+ * - `options.prefetch` (default `true`) controls hydration. Eager mode
2858
+ * loads everything on first access; lazy mode (`prefetch: false`)
2859
+ * loads records on demand and bounds memory via the LRU cache.
2860
+ * - `options.cache` configures the LRU bounds. Required in lazy mode.
2861
+ * Accepts `{ maxRecords, maxBytes: '50MB' | 1024 }`.
2862
+ * - `options.schema` attaches a Standard Schema v1 validator (Zod,
2863
+ * Valibot, ArkType, Effect Schema, etc.). Every `put()` is validated
2864
+ * before encryption; every read is validated after decryption.
2865
+ * Failing records throw `SchemaValidationError`.
2866
+ *
2867
+ * Lazy mode + indexes is rejected at construction time — see the
2868
+ * Collection constructor for the rationale.
2869
+ */
2870
+ collection(collectionName, options) {
924
2871
  let coll = this.collectionCache.get(collectionName);
925
2872
  if (!coll) {
926
- coll = new Collection({
2873
+ if (options?.refs) {
2874
+ this.refRegistry.register(collectionName, options.refs);
2875
+ }
2876
+ const collOpts = {
927
2877
  adapter: this.adapter,
928
2878
  compartment: this.name,
929
2879
  name: collectionName,
@@ -932,18 +2882,205 @@ var Compartment = class {
932
2882
  emitter: this.emitter,
933
2883
  getDEK: this.getDEK,
934
2884
  onDirty: this.onDirty,
935
- historyConfig: this.historyConfig
936
- });
2885
+ historyConfig: this.historyConfig,
2886
+ ledger: this.ledger(),
2887
+ refEnforcer: this
2888
+ };
2889
+ if (options?.indexes !== void 0) collOpts.indexes = options.indexes;
2890
+ if (options?.prefetch !== void 0) collOpts.prefetch = options.prefetch;
2891
+ if (options?.cache !== void 0) collOpts.cache = options.cache;
2892
+ if (options?.schema !== void 0) collOpts.schema = options.schema;
2893
+ coll = new Collection(collOpts);
937
2894
  this.collectionCache.set(collectionName, coll);
938
2895
  }
939
2896
  return coll;
940
2897
  }
2898
+ /**
2899
+ * Enforce strict outbound refs on a `put()`. Called by Collection
2900
+ * just before it writes to the adapter. For every strict ref
2901
+ * declared on the collection, check that the target id exists in
2902
+ * the target collection; throw `RefIntegrityError` if not.
2903
+ *
2904
+ * `warn` and `cascade` modes don't affect put semantics — they're
2905
+ * enforced at delete time or via `checkIntegrity()`.
2906
+ */
2907
+ async enforceRefsOnPut(collectionName, record) {
2908
+ const outbound = this.refRegistry.getOutbound(collectionName);
2909
+ if (Object.keys(outbound).length === 0) return;
2910
+ if (!record || typeof record !== "object") return;
2911
+ const obj = record;
2912
+ for (const [field, descriptor] of Object.entries(outbound)) {
2913
+ if (descriptor.mode !== "strict") continue;
2914
+ const rawId = obj[field];
2915
+ if (rawId === null || rawId === void 0) continue;
2916
+ if (typeof rawId !== "string" && typeof rawId !== "number") {
2917
+ throw new RefIntegrityError({
2918
+ collection: collectionName,
2919
+ id: obj["id"] ?? "<unknown>",
2920
+ field,
2921
+ refTo: descriptor.target,
2922
+ refId: null,
2923
+ message: `Ref field "${collectionName}.${field}" must be a string or number, got ${typeof rawId}.`
2924
+ });
2925
+ }
2926
+ const refId = String(rawId);
2927
+ const target = this.collection(descriptor.target);
2928
+ const exists = await target.get(refId);
2929
+ if (!exists) {
2930
+ throw new RefIntegrityError({
2931
+ collection: collectionName,
2932
+ id: obj["id"] ?? "<unknown>",
2933
+ field,
2934
+ refTo: descriptor.target,
2935
+ refId,
2936
+ message: `Strict ref "${collectionName}.${field}" \u2192 "${descriptor.target}" cannot be satisfied: target id "${refId}" not found in "${descriptor.target}".`
2937
+ });
2938
+ }
2939
+ }
2940
+ }
2941
+ /**
2942
+ * Enforce inbound ref modes on a `delete()`. Called by Collection
2943
+ * just before it deletes from the adapter. Walks every inbound
2944
+ * ref that targets this (collection, id) and:
2945
+ *
2946
+ * - `strict`: throws if any referencing records exist
2947
+ * - `cascade`: deletes every referencing record
2948
+ * - `warn`: no-op (checkIntegrity picks it up)
2949
+ *
2950
+ * Cascade cycles are broken via `cascadeInProgress` — re-entering
2951
+ * for the same (collection, id) returns immediately so two
2952
+ * mutually-cascading collections don't recurse forever.
2953
+ */
2954
+ async enforceRefsOnDelete(collectionName, id) {
2955
+ const key = `${collectionName}/${id}`;
2956
+ if (this.cascadeInProgress.has(key)) return;
2957
+ this.cascadeInProgress.add(key);
2958
+ try {
2959
+ const inbound = this.refRegistry.getInbound(collectionName);
2960
+ for (const rule of inbound) {
2961
+ const fromCollection = this.collection(rule.collection);
2962
+ const allRecords = await fromCollection.list();
2963
+ const matches = allRecords.filter((rec) => {
2964
+ const raw = rec[rule.field];
2965
+ if (typeof raw !== "string" && typeof raw !== "number") return false;
2966
+ return String(raw) === id;
2967
+ });
2968
+ if (matches.length === 0) continue;
2969
+ if (rule.mode === "strict") {
2970
+ const first = matches[0];
2971
+ throw new RefIntegrityError({
2972
+ collection: rule.collection,
2973
+ id: first?.["id"] ?? "<unknown>",
2974
+ field: rule.field,
2975
+ refTo: collectionName,
2976
+ refId: id,
2977
+ message: `Cannot delete "${collectionName}"/"${id}": ${matches.length} record(s) in "${rule.collection}" still reference it via strict ref "${rule.field}".`
2978
+ });
2979
+ }
2980
+ if (rule.mode === "cascade") {
2981
+ for (const match of matches) {
2982
+ const matchId = match["id"] ?? null;
2983
+ if (matchId === null) continue;
2984
+ await fromCollection.delete(matchId);
2985
+ }
2986
+ }
2987
+ }
2988
+ } finally {
2989
+ this.cascadeInProgress.delete(key);
2990
+ }
2991
+ }
2992
+ /**
2993
+ * Walk every collection that has declared refs, load its records,
2994
+ * and report any reference whose target id is missing. Modes are
2995
+ * reported alongside each violation so the caller can distinguish
2996
+ * "this is a warning the user asked for" from "this should never
2997
+ * have happened" (strict violations produced by out-of-band
2998
+ * writes).
2999
+ *
3000
+ * Returns `{ violations: [...] }` instead of throwing — the whole
3001
+ * point of `checkIntegrity()` is to surface a list for display
3002
+ * or repair, not to fail noisily.
3003
+ */
3004
+ async checkIntegrity() {
3005
+ const violations = [];
3006
+ for (const [collectionName, refs] of this.refRegistry.entries()) {
3007
+ const coll = this.collection(collectionName);
3008
+ const records = await coll.list();
3009
+ for (const record of records) {
3010
+ const recId = record["id"] ?? "<unknown>";
3011
+ for (const [field, descriptor] of Object.entries(refs)) {
3012
+ const rawId = record[field];
3013
+ if (rawId === null || rawId === void 0) continue;
3014
+ if (typeof rawId !== "string" && typeof rawId !== "number") {
3015
+ violations.push({
3016
+ collection: collectionName,
3017
+ id: recId,
3018
+ field,
3019
+ refTo: descriptor.target,
3020
+ refId: rawId,
3021
+ mode: descriptor.mode
3022
+ });
3023
+ continue;
3024
+ }
3025
+ const refId = String(rawId);
3026
+ const target = this.collection(descriptor.target);
3027
+ const exists = await target.get(refId);
3028
+ if (!exists) {
3029
+ violations.push({
3030
+ collection: collectionName,
3031
+ id: recId,
3032
+ field,
3033
+ refTo: descriptor.target,
3034
+ refId: rawId,
3035
+ mode: descriptor.mode
3036
+ });
3037
+ }
3038
+ }
3039
+ }
3040
+ }
3041
+ return { violations };
3042
+ }
3043
+ /**
3044
+ * Return this compartment's hash-chained audit log.
3045
+ *
3046
+ * The ledger is lazy-initialized on first access and cached for the
3047
+ * lifetime of the Compartment instance. Every LedgerStore instance
3048
+ * shares the same adapter and DEK resolver, so `compartment.ledger()`
3049
+ * can be called repeatedly without performance cost.
3050
+ *
3051
+ * The LedgerStore itself is the public API: consumers call
3052
+ * `.append()` (via Collection internals), `.head()`, `.verify()`,
3053
+ * and `.entries({ from, to })`. See the LedgerStore docstring for
3054
+ * the full surface and the concurrency caveats.
3055
+ */
3056
+ ledger() {
3057
+ if (!this.ledgerStore) {
3058
+ this.ledgerStore = new LedgerStore({
3059
+ adapter: this.adapter,
3060
+ compartment: this.name,
3061
+ encrypted: this.encrypted,
3062
+ getDEK: this.getDEK,
3063
+ actor: this.keyring.userId
3064
+ });
3065
+ }
3066
+ return this.ledgerStore;
3067
+ }
941
3068
  /** List all collection names in this compartment. */
942
3069
  async collections() {
943
3070
  const snapshot = await this.adapter.loadAll(this.name);
944
3071
  return Object.keys(snapshot);
945
3072
  }
946
- /** Dump compartment as encrypted JSON backup string. */
3073
+ /**
3074
+ * Dump compartment as a verifiable encrypted JSON backup string.
3075
+ *
3076
+ * v0.4 backups embed the current ledger head and the full
3077
+ * `_ledger` + `_ledger_deltas` internal collections so the
3078
+ * receiver can run `verifyBackupIntegrity()` after `load()` and
3079
+ * detect any tampering between dump and restore. Pre-v0.4 callers
3080
+ * who didn't have a ledger get a backup without these fields, and
3081
+ * the corresponding `load()` skips the integrity check with a
3082
+ * warning — both modes round-trip cleanly.
3083
+ */
947
3084
  async dump() {
948
3085
  const snapshot = await this.adapter.loadAll(this.name);
949
3086
  const keyringIds = await this.adapter.list(this.name, "_keyring");
@@ -954,17 +3091,58 @@ var Compartment = class {
954
3091
  keyrings[keyringId] = JSON.parse(envelope._data);
955
3092
  }
956
3093
  }
3094
+ const internalSnapshot = {};
3095
+ for (const internalName of [LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION]) {
3096
+ const ids = await this.adapter.list(this.name, internalName);
3097
+ if (ids.length === 0) continue;
3098
+ const records = {};
3099
+ for (const id of ids) {
3100
+ const envelope = await this.adapter.get(this.name, internalName, id);
3101
+ if (envelope) records[id] = envelope;
3102
+ }
3103
+ internalSnapshot[internalName] = records;
3104
+ }
3105
+ const head = await this.ledger().head();
957
3106
  const backup = {
958
3107
  _noydb_backup: NOYDB_BACKUP_VERSION,
959
3108
  _compartment: this.name,
960
3109
  _exported_at: (/* @__PURE__ */ new Date()).toISOString(),
961
3110
  _exported_by: this.keyring.userId,
962
3111
  keyrings,
963
- collections: snapshot
3112
+ collections: snapshot,
3113
+ ...Object.keys(internalSnapshot).length > 0 ? { _internal: internalSnapshot } : {},
3114
+ ...head ? {
3115
+ ledgerHead: {
3116
+ hash: head.hash,
3117
+ index: head.entry.index,
3118
+ ts: head.entry.ts
3119
+ }
3120
+ } : {}
964
3121
  };
965
3122
  return JSON.stringify(backup);
966
3123
  }
967
- /** Restore compartment from an encrypted JSON backup string. */
3124
+ /**
3125
+ * Restore a compartment from a verifiable backup.
3126
+ *
3127
+ * After loading, runs `verifyBackupIntegrity()` to confirm:
3128
+ * 1. The hash chain is intact (no `prevHash` mismatches)
3129
+ * 2. The chain head matches the embedded `ledgerHead.hash`
3130
+ * from the backup
3131
+ * 3. Every data envelope's `payloadHash` matches the
3132
+ * corresponding ledger entry — i.e. nobody swapped
3133
+ * ciphertext between dump and restore
3134
+ *
3135
+ * On any failure, throws `BackupLedgerError` (chain or head
3136
+ * mismatch) or `BackupCorruptedError` (data envelope mismatch).
3137
+ * The compartment state on the adapter has already been written
3138
+ * by the time we throw, so the caller is responsible for either
3139
+ * accepting the suspect state or wiping it and trying a different
3140
+ * backup.
3141
+ *
3142
+ * Pre-v0.4 backups (no `ledgerHead` field, no `_internal`) load
3143
+ * with a console warning and skip the integrity check entirely
3144
+ * — there's no chain to verify against.
3145
+ */
968
3146
  async load(backupJson) {
969
3147
  const backup = JSON.parse(backupJson);
970
3148
  await this.adapter.saveAll(this.name, backup.collections);
@@ -978,7 +3156,124 @@ var Compartment = class {
978
3156
  };
979
3157
  await this.adapter.put(this.name, "_keyring", userId, envelope);
980
3158
  }
3159
+ if (backup._internal) {
3160
+ for (const [internalName, records] of Object.entries(backup._internal)) {
3161
+ for (const [id, envelope] of Object.entries(records)) {
3162
+ await this.adapter.put(this.name, internalName, id, envelope);
3163
+ }
3164
+ }
3165
+ }
3166
+ if (this.reloadKeyring) {
3167
+ this.keyring = await this.reloadKeyring();
3168
+ this.getDEK = this.makeGetDEK();
3169
+ }
981
3170
  this.collectionCache.clear();
3171
+ this.ledgerStore = null;
3172
+ if (!backup.ledgerHead) {
3173
+ console.warn(
3174
+ `[noy-db] Loaded a legacy backup with no ledgerHead \u2014 verifiable-backup integrity check skipped. Re-export with v0.4+ to get tamper detection.`
3175
+ );
3176
+ return;
3177
+ }
3178
+ const result = await this.verifyBackupIntegrity();
3179
+ if (!result.ok) {
3180
+ if (result.kind === "data") {
3181
+ throw new BackupCorruptedError(
3182
+ result.collection,
3183
+ result.id,
3184
+ result.message
3185
+ );
3186
+ }
3187
+ throw new BackupLedgerError(result.message, result.divergedAt);
3188
+ }
3189
+ if (result.head !== backup.ledgerHead.hash) {
3190
+ throw new BackupLedgerError(
3191
+ `Backup ledger head mismatch: embedded "${backup.ledgerHead.hash}" but reconstructed "${result.head}".`
3192
+ );
3193
+ }
3194
+ }
3195
+ /**
3196
+ * End-to-end backup integrity check. Runs both:
3197
+ *
3198
+ * 1. `ledger.verify()` — walks the hash chain and confirms
3199
+ * every `prevHash` matches the recomputed hash of its
3200
+ * predecessor.
3201
+ *
3202
+ * 2. **Data envelope cross-check** — for every (collection, id)
3203
+ * that has a current value, find the most recent ledger
3204
+ * entry recording a `put` for that pair, recompute the
3205
+ * sha256 of the stored envelope's `_data`, and compare to
3206
+ * the entry's `payloadHash`. Any mismatch means an
3207
+ * out-of-band write modified the data without updating the
3208
+ * ledger.
3209
+ *
3210
+ * Returns a discriminated union so callers can handle the two
3211
+ * failure modes differently:
3212
+ * - `{ ok: true, head, length }` — chain verified and all
3213
+ * data matches; safe to use.
3214
+ * - `{ ok: false, kind: 'chain', divergedAt, message }` — the
3215
+ * chain itself is broken at the given index.
3216
+ * - `{ ok: false, kind: 'data', collection, id, message }` —
3217
+ * a specific data envelope doesn't match its ledger entry.
3218
+ *
3219
+ * This method is exposed so users can call it any time, not just
3220
+ * during `load()`. A scheduled background check is the simplest
3221
+ * way to detect tampering of an in-place compartment.
3222
+ */
3223
+ async verifyBackupIntegrity() {
3224
+ const chainResult = await this.ledger().verify();
3225
+ if (!chainResult.ok) {
3226
+ return {
3227
+ ok: false,
3228
+ kind: "chain",
3229
+ divergedAt: chainResult.divergedAt,
3230
+ message: `Ledger chain diverged at index ${chainResult.divergedAt}: expected prevHash "${chainResult.expected}" but found "${chainResult.actual}".`
3231
+ };
3232
+ }
3233
+ const ledger = this.ledger();
3234
+ const allEntries = await ledger.loadAllEntries();
3235
+ const seen = /* @__PURE__ */ new Set();
3236
+ const latest = /* @__PURE__ */ new Map();
3237
+ for (let i = allEntries.length - 1; i >= 0; i--) {
3238
+ const entry = allEntries[i];
3239
+ if (!entry) continue;
3240
+ const key = `${entry.collection}/${entry.id}`;
3241
+ if (seen.has(key)) continue;
3242
+ seen.add(key);
3243
+ if (entry.op === "delete") continue;
3244
+ latest.set(key, {
3245
+ collection: entry.collection,
3246
+ id: entry.id,
3247
+ expectedHash: entry.payloadHash
3248
+ });
3249
+ }
3250
+ for (const { collection, id, expectedHash } of latest.values()) {
3251
+ const envelope = await this.adapter.get(this.name, collection, id);
3252
+ if (!envelope) {
3253
+ return {
3254
+ ok: false,
3255
+ kind: "data",
3256
+ collection,
3257
+ id,
3258
+ message: `Ledger expects data record "${collection}/${id}" to exist, but the adapter has no envelope for it.`
3259
+ };
3260
+ }
3261
+ const actualHash = await sha256Hex(envelope._data);
3262
+ if (actualHash !== expectedHash) {
3263
+ return {
3264
+ ok: false,
3265
+ kind: "data",
3266
+ collection,
3267
+ id,
3268
+ message: `Data envelope "${collection}/${id}" has been tampered with: expected payloadHash "${expectedHash}", got "${actualHash}".`
3269
+ };
3270
+ }
3271
+ }
3272
+ return {
3273
+ ok: true,
3274
+ head: chainResult.head,
3275
+ length: chainResult.length
3276
+ };
982
3277
  }
983
3278
  /** Export compartment as decrypted JSON (owner only). */
984
3279
  async export() {
@@ -1344,7 +3639,23 @@ var Noydb = class {
1344
3639
  encrypted: this.options.encrypt !== false,
1345
3640
  emitter: this.emitter,
1346
3641
  onDirty: syncEngine ? (coll, id, action, version) => syncEngine.trackChange(coll, id, action, version) : void 0,
1347
- historyConfig: this.options.history
3642
+ historyConfig: this.options.history,
3643
+ // Refresh callback used by Compartment.load() to re-derive
3644
+ // the in-memory keyring from a freshly-loaded keyring file.
3645
+ // Encrypted compartments need this so post-load decrypts work
3646
+ // against the loaded session's wrapped DEKs; plaintext
3647
+ // compartments leave it null and load() skips the refresh.
3648
+ reloadKeyring: this.options.encrypt !== false && this.options.secret ? async () => {
3649
+ this.keyringCache.delete(name);
3650
+ const refreshed = await loadKeyring(
3651
+ this.options.adapter,
3652
+ name,
3653
+ this.options.user,
3654
+ this.options.secret
3655
+ );
3656
+ this.keyringCache.set(name, refreshed);
3657
+ return refreshed;
3658
+ } : void 0
1348
3659
  });
1349
3660
  this.compartmentCache.set(name, comp);
1350
3661
  return comp;
@@ -1633,11 +3944,18 @@ function estimateEntropy(passphrase) {
1633
3944
  }
1634
3945
  // Annotate the CommonJS export names for ESM import in node:
1635
3946
  0 && (module.exports = {
3947
+ BackupCorruptedError,
3948
+ BackupLedgerError,
1636
3949
  Collection,
3950
+ CollectionIndexes,
1637
3951
  Compartment,
1638
3952
  ConflictError,
1639
3953
  DecryptionError,
1640
3954
  InvalidKeyError,
3955
+ LEDGER_COLLECTION,
3956
+ LEDGER_DELTAS_COLLECTION,
3957
+ LedgerStore,
3958
+ Lru,
1641
3959
  NOYDB_BACKUP_VERSION,
1642
3960
  NOYDB_FORMAT_VERSION,
1643
3961
  NOYDB_KEYRING_VERSION,
@@ -1648,21 +3966,43 @@ function estimateEntropy(passphrase) {
1648
3966
  Noydb,
1649
3967
  NoydbError,
1650
3968
  PermissionDeniedError,
3969
+ Query,
1651
3970
  ReadOnlyError,
3971
+ RefIntegrityError,
3972
+ RefRegistry,
3973
+ RefScopeError,
3974
+ SchemaValidationError,
1652
3975
  SyncEngine,
1653
3976
  TamperedError,
1654
3977
  ValidationError,
3978
+ applyPatch,
3979
+ canonicalJson,
3980
+ computePatch,
1655
3981
  createNoydb,
1656
3982
  defineAdapter,
1657
3983
  diff,
1658
3984
  enrollBiometric,
3985
+ envelopePayloadHash,
1659
3986
  estimateEntropy,
3987
+ estimateRecordBytes,
3988
+ evaluateClause,
3989
+ evaluateFieldClause,
3990
+ executePlan,
1660
3991
  formatDiff,
3992
+ hashEntry,
1661
3993
  isBiometricAvailable,
1662
3994
  loadBiometric,
3995
+ paddedIndex,
3996
+ parseBytes,
3997
+ parseIndex,
3998
+ readPath,
3999
+ ref,
1663
4000
  removeBiometric,
1664
4001
  saveBiometric,
4002
+ sha256Hex,
1665
4003
  unlockBiometric,
1666
- validatePassphrase
4004
+ validatePassphrase,
4005
+ validateSchemaInput,
4006
+ validateSchemaOutput
1667
4007
  });
1668
4008
  //# sourceMappingURL=index.cjs.map