@noy-db/core 0.2.0 → 0.4.0

This diff shows the published contents of the two package versions as they appear in their public registry, and is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -78,6 +78,74 @@ var ValidationError = class extends NoydbError {
  this.name = "ValidationError";
  }
  };
+ var SchemaValidationError = class extends NoydbError {
+ issues;
+ direction;
+ constructor(message, issues, direction) {
+ super("SCHEMA_VALIDATION_FAILED", message);
+ this.name = "SchemaValidationError";
+ this.issues = issues;
+ this.direction = direction;
+ }
+ };
+ var BackupLedgerError = class extends NoydbError {
+ /** First-broken-entry index, if known. */
+ divergedAt;
+ constructor(message, divergedAt) {
+ super("BACKUP_LEDGER", message);
+ this.name = "BackupLedgerError";
+ if (divergedAt !== void 0) this.divergedAt = divergedAt;
+ }
+ };
+ var BackupCorruptedError = class extends NoydbError {
+ /** The (collection, id) pair whose envelope failed the hash check. */
+ collection;
+ id;
+ constructor(collection, id, message) {
+ super("BACKUP_CORRUPTED", message);
+ this.name = "BackupCorruptedError";
+ this.collection = collection;
+ this.id = id;
+ }
+ };
+
+ // src/schema.ts
+ async function validateSchemaInput(schema, value, context) {
+ const result = await schema["~standard"].validate(value);
+ if (result.issues !== void 0 && result.issues.length > 0) {
+ throw new SchemaValidationError(
+ `Schema validation failed on ${context}: ${summarizeIssues(result.issues)}`,
+ result.issues,
+ "input"
+ );
+ }
+ return result.value;
+ }
+ async function validateSchemaOutput(schema, value, context) {
+ const result = await schema["~standard"].validate(value);
+ if (result.issues !== void 0 && result.issues.length > 0) {
+ throw new SchemaValidationError(
+ `Stored data for ${context} does not match the current schema \u2014 schema drift? ${summarizeIssues(result.issues)}`,
+ result.issues,
+ "output"
+ );
+ }
+ return result.value;
+ }
+ function summarizeIssues(issues) {
+ const shown = issues.slice(0, 3).map((issue) => {
+ const pathStr = formatPath(issue.path);
+ return `${pathStr}: ${issue.message}`;
+ });
+ const suffix = issues.length > 3 ? ` (+${issues.length - 3} more)` : "";
+ return shown.join("; ") + suffix;
+ }
+ function formatPath(path) {
+ if (!path || path.length === 0) return "root";
+ return path.map(
+ (segment) => typeof segment === "object" && segment !== null ? String(segment.key) : String(segment)
+ ).join(".");
+ }
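
// Illustrative sketch (not part of the published bundle): validateSchemaInput
// accepts any Standard Schema v1 validator, since it only touches the
// "~standard" interface. `ageSchema` is a hand-rolled stand-in for a
// zod/valibot schema.
const ageSchema = {
  "~standard": {
    version: 1,
    vendor: "example",
    validate: (value) => typeof value === "number" && value >= 0
      ? { value }
      : { issues: [{ message: "expected a non-negative number", path: ["age"] }] }
  }
};
await validateSchemaInput(ageSchema, 42, "users.put");  // resolves to 42
await validateSchemaInput(ageSchema, -1, "users.put");  // throws SchemaValidationError:
// "Schema validation failed on users.put: age: expected a non-negative number"
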

  // src/crypto.ts
  var PBKDF2_ITERATIONS = 6e5;
@@ -188,6 +256,749 @@ function base64ToBuffer(base64) {
  return bytes;
  }

+ // src/ledger/entry.ts
+ function canonicalJson(value) {
+ if (value === null) return "null";
+ if (typeof value === "boolean") return value ? "true" : "false";
+ if (typeof value === "number") {
+ if (!Number.isFinite(value)) {
+ throw new Error(
+ `canonicalJson: refusing to encode non-finite number ${String(value)}`
+ );
+ }
+ return JSON.stringify(value);
+ }
+ if (typeof value === "string") return JSON.stringify(value);
+ if (typeof value === "bigint") {
+ throw new Error("canonicalJson: BigInt is not JSON-serializable");
+ }
+ if (typeof value === "undefined" || typeof value === "function") {
+ throw new Error(
+ `canonicalJson: refusing to encode ${typeof value} \u2014 include all fields explicitly`
+ );
+ }
+ if (Array.isArray(value)) {
+ return "[" + value.map((v) => canonicalJson(v)).join(",") + "]";
+ }
+ if (typeof value === "object") {
+ const obj = value;
+ const keys = Object.keys(obj).sort();
+ const parts = [];
+ for (const key of keys) {
+ parts.push(JSON.stringify(key) + ":" + canonicalJson(obj[key]));
+ }
+ return "{" + parts.join(",") + "}";
+ }
+ throw new Error(`canonicalJson: unexpected value type: ${typeof value}`);
+ }
+ async function sha256Hex(input) {
+ const bytes = new TextEncoder().encode(input);
+ const digest = await globalThis.crypto.subtle.digest("SHA-256", bytes);
+ return bytesToHex(new Uint8Array(digest));
+ }
+ async function hashEntry(entry) {
+ return sha256Hex(canonicalJson(entry));
+ }
+ function bytesToHex(bytes) {
+ const hex = new Array(bytes.length);
+ for (let i = 0; i < bytes.length; i++) {
+ hex[i] = (bytes[i] ?? 0).toString(16).padStart(2, "0");
+ }
+ return hex.join("");
+ }
+ function paddedIndex(index) {
+ return String(index).padStart(10, "0");
+ }
+ function parseIndex(key) {
+ return Number.parseInt(key, 10);
+ }
+
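
// Illustrative sketch (not part of the published bundle): canonicalJson
// sorts object keys, so logically-equal entries serialize (and hash)
// identically regardless of insertion order.
canonicalJson({ b: 1, a: [true, "x"] }); // '{"a":[true,"x"],"b":1}'
canonicalJson({ a: [true, "x"], b: 1 }); // same string, so same sha256Hex
// hashEntry(entry) is just sha256Hex(canonicalJson(entry)): a stable,
// field-order-independent digest used for the chain links below.
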
+ // src/ledger/patch.ts
+ function computePatch(prev, next) {
+ const ops = [];
+ diff(prev, next, "", ops);
+ return ops;
+ }
+ function diff(prev, next, path, out) {
+ if (prev === next) return;
+ if (prev === null || next === null) {
+ out.push({ op: "replace", path, value: next });
+ return;
+ }
+ const prevIsArray = Array.isArray(prev);
+ const nextIsArray = Array.isArray(next);
+ const prevIsObject = typeof prev === "object" && !prevIsArray;
+ const nextIsObject = typeof next === "object" && !nextIsArray;
+ if (prevIsArray !== nextIsArray || prevIsObject !== nextIsObject) {
+ out.push({ op: "replace", path, value: next });
+ return;
+ }
+ if (prevIsArray && nextIsArray) {
+ if (!arrayDeepEqual(prev, next)) {
+ out.push({ op: "replace", path, value: next });
+ }
+ return;
+ }
+ if (prevIsObject && nextIsObject) {
+ const prevObj = prev;
+ const nextObj = next;
+ const prevKeys = Object.keys(prevObj);
+ const nextKeys = Object.keys(nextObj);
+ for (const key of prevKeys) {
+ const childPath = path + "/" + escapePathSegment(key);
+ if (!(key in nextObj)) {
+ out.push({ op: "remove", path: childPath });
+ } else {
+ diff(prevObj[key], nextObj[key], childPath, out);
+ }
+ }
+ for (const key of nextKeys) {
+ if (!(key in prevObj)) {
+ out.push({
+ op: "add",
+ path: path + "/" + escapePathSegment(key),
+ value: nextObj[key]
+ });
+ }
+ }
+ return;
+ }
+ out.push({ op: "replace", path, value: next });
+ }
+ function arrayDeepEqual(a, b) {
+ if (a.length !== b.length) return false;
+ for (let i = 0; i < a.length; i++) {
+ if (!deepEqual(a[i], b[i])) return false;
+ }
+ return true;
+ }
+ function deepEqual(a, b) {
+ if (a === b) return true;
+ if (a === null || b === null) return false;
+ if (typeof a !== typeof b) return false;
+ if (typeof a !== "object") return false;
+ const aArray = Array.isArray(a);
+ const bArray = Array.isArray(b);
+ if (aArray !== bArray) return false;
+ if (aArray && bArray) return arrayDeepEqual(a, b);
+ const aObj = a;
+ const bObj = b;
+ const aKeys = Object.keys(aObj);
+ const bKeys = Object.keys(bObj);
+ if (aKeys.length !== bKeys.length) return false;
+ for (const key of aKeys) {
+ if (!(key in bObj)) return false;
+ if (!deepEqual(aObj[key], bObj[key])) return false;
+ }
+ return true;
+ }
+ function applyPatch(base, patch) {
+ let result = clone(base);
+ for (const op of patch) {
+ result = applyOp(result, op);
+ }
+ return result;
+ }
+ function applyOp(doc, op) {
+ if (op.path === "") {
+ if (op.op === "remove") return null;
+ return clone(op.value);
+ }
+ const segments = parsePath(op.path);
+ return walkAndApply(doc, segments, op);
+ }
+ function walkAndApply(doc, segments, op) {
+ if (segments.length === 0) {
+ throw new Error("walkAndApply: empty segments (internal error)");
+ }
+ const [head, ...rest] = segments;
+ if (head === void 0) throw new Error("walkAndApply: undefined segment");
+ if (rest.length === 0) {
+ return applyAtTerminal(doc, head, op);
+ }
+ if (Array.isArray(doc)) {
+ const idx = parseArrayIndex(head, doc.length);
+ const child = doc[idx];
+ const newChild = walkAndApply(child, rest, op);
+ const next = doc.slice();
+ next[idx] = newChild;
+ return next;
+ }
+ if (doc !== null && typeof doc === "object") {
+ const obj = doc;
+ if (!(head in obj)) {
+ throw new Error(`applyPatch: path segment "${head}" not found in object`);
+ }
+ const newChild = walkAndApply(obj[head], rest, op);
+ return { ...obj, [head]: newChild };
+ }
+ throw new Error(
+ `applyPatch: cannot step into ${typeof doc} at segment "${head}"`
+ );
+ }
+ function applyAtTerminal(doc, segment, op) {
+ if (Array.isArray(doc)) {
+ const idx = segment === "-" ? doc.length : parseArrayIndex(segment, doc.length + 1);
+ const next = doc.slice();
+ if (op.op === "remove") {
+ next.splice(idx, 1);
+ return next;
+ }
+ if (op.op === "add") {
+ next.splice(idx, 0, clone(op.value));
+ return next;
+ }
+ if (op.op === "replace") {
+ if (idx >= doc.length) {
+ throw new Error(
+ `applyPatch: replace at out-of-bounds array index ${idx}`
+ );
+ }
+ next[idx] = clone(op.value);
+ return next;
+ }
+ }
+ if (doc !== null && typeof doc === "object") {
+ const obj = doc;
+ if (op.op === "remove") {
+ if (!(segment in obj)) {
+ throw new Error(
+ `applyPatch: remove on missing key "${segment}"`
+ );
+ }
+ const next = { ...obj };
+ delete next[segment];
+ return next;
+ }
+ if (op.op === "add") {
+ return { ...obj, [segment]: clone(op.value) };
+ }
+ if (op.op === "replace") {
+ if (!(segment in obj)) {
+ throw new Error(
+ `applyPatch: replace on missing key "${segment}"`
+ );
+ }
+ return { ...obj, [segment]: clone(op.value) };
+ }
+ }
+ throw new Error(
+ `applyPatch: cannot apply ${op.op} at terminal segment "${segment}"`
+ );
+ }
+ function escapePathSegment(segment) {
+ return segment.replace(/~/g, "~0").replace(/\//g, "~1");
+ }
+ function unescapePathSegment(segment) {
+ return segment.replace(/~1/g, "/").replace(/~0/g, "~");
+ }
+ function parsePath(path) {
+ if (!path.startsWith("/")) {
+ throw new Error(`applyPatch: path must start with '/', got "${path}"`);
+ }
+ return path.slice(1).split("/").map(unescapePathSegment);
+ }
+ function parseArrayIndex(segment, max) {
+ if (!/^\d+$/.test(segment)) {
+ throw new Error(
+ `applyPatch: array index must be a non-negative integer, got "${segment}"`
+ );
+ }
+ const idx = Number.parseInt(segment, 10);
+ if (idx < 0 || idx > max) {
+ throw new Error(
+ `applyPatch: array index ${idx} out of range [0, ${max}]`
+ );
+ }
+ return idx;
+ }
+ function clone(value) {
+ if (value === null || value === void 0) return value;
+ if (typeof value !== "object") return value;
+ return JSON.parse(JSON.stringify(value));
+ }
+
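
// Illustrative sketch (not part of the published bundle): a
// computePatch/applyPatch round trip. Changed arrays are replaced
// wholesale rather than diffed element-by-element.
const prev = { name: "Ada", tags: ["a"] };
const next = { name: "Ada L.", tags: ["a", "b"] };
const patch = computePatch(prev, next);
// [ { op: "replace", path: "/name", value: "Ada L." },
//   { op: "replace", path: "/tags", value: ["a", "b"] } ]
applyPatch(prev, patch); // deep-equals `next`; `prev` itself is untouched
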
+ // src/ledger/store.ts
+ var LEDGER_COLLECTION = "_ledger";
+ var LEDGER_DELTAS_COLLECTION = "_ledger_deltas";
+ var LedgerStore = class {
+ adapter;
+ compartment;
+ encrypted;
+ getDEK;
+ actor;
+ /**
+ * In-memory cache of the chain head — the most recently appended
+ * entry along with its precomputed hash. Without this, every
+ * `append()` would re-load every prior entry to recompute the
+ * prevHash, making N puts O(N²) — a 1K-record stress test goes from
+ * < 100ms to a multi-second timeout.
+ *
+ * The cache is populated on first read (`append`, `head`, `verify`)
+ * and updated in-place on every successful `append`. Single-writer
+ * usage (the v0.4 assumption) keeps it consistent. A second
+ * LedgerStore instance writing to the same compartment would not
+ * see the first instance's appends in its cached state — that's the
+ * concurrency caveat documented at the class level.
+ *
+ * Sentinel `undefined` means "not yet loaded"; an explicit `null`
+ * value means "loaded and confirmed empty" — distinguishing these
+ * matters because an empty ledger is a valid state (genesis prevHash
+ * is the empty string), and we don't want to re-scan the adapter
+ * just because the chain is freshly initialized.
+ */
+ headCache = void 0;
+ constructor(opts) {
+ this.adapter = opts.adapter;
+ this.compartment = opts.compartment;
+ this.encrypted = opts.encrypted;
+ this.getDEK = opts.getDEK;
+ this.actor = opts.actor;
+ }
+ /**
+ * Lazily load (or return cached) the current chain head. The cache
+ * sentinel is `undefined` until first access; after the first call,
+ * the cache holds either a `{ entry, hash }` for non-empty ledgers
+ * or `null` for empty ones.
+ */
+ async getCachedHead() {
+ if (this.headCache !== void 0) return this.headCache;
+ const entries = await this.loadAllEntries();
+ const last = entries[entries.length - 1];
+ if (!last) {
+ this.headCache = null;
+ return null;
+ }
+ this.headCache = { entry: last, hash: await hashEntry(last) };
+ return this.headCache;
+ }
+ /**
+ * Append a new entry to the ledger. Returns the full entry that was
+ * written (with its assigned index and computed prevHash) so the
+ * caller can use the hash for downstream purposes (e.g., embedding
+ * in a verifiable backup).
+ *
+ * This is the **only** way to add entries. Direct adapter writes to
+ * `_ledger/` would bypass the chain math and would be caught by the
+ * next `verify()` call as a divergence.
+ */
+ async append(input) {
+ const cached = await this.getCachedHead();
+ const lastEntry = cached?.entry;
+ const prevHash = cached?.hash ?? "";
+ const nextIndex = lastEntry ? lastEntry.index + 1 : 0;
+ let deltaHash;
+ if (input.delta !== void 0) {
+ const deltaEnvelope = await this.encryptDelta(input.delta);
+ await this.adapter.put(
+ this.compartment,
+ LEDGER_DELTAS_COLLECTION,
+ paddedIndex(nextIndex),
+ deltaEnvelope
+ );
+ deltaHash = await sha256Hex(deltaEnvelope._data);
+ }
+ const entryBase = {
+ index: nextIndex,
+ prevHash,
+ op: input.op,
+ collection: input.collection,
+ id: input.id,
+ version: input.version,
+ ts: (/* @__PURE__ */ new Date()).toISOString(),
+ actor: input.actor === "" ? this.actor : input.actor,
+ payloadHash: input.payloadHash
+ };
+ const entry = deltaHash !== void 0 ? { ...entryBase, deltaHash } : entryBase;
+ const envelope = await this.encryptEntry(entry);
+ await this.adapter.put(
+ this.compartment,
+ LEDGER_COLLECTION,
+ paddedIndex(entry.index),
+ envelope
+ );
+ this.headCache = { entry, hash: await hashEntry(entry) };
+ return entry;
+ }
+ /**
+ * Load a delta payload by its entry index. Returns `null` if the
+ * entry at that index doesn't reference a delta (genesis puts and
+ * deletes leave the slot empty) or if the delta row is missing
+ * (possible after a `pruneHistory` fold).
+ *
+ * The caller is responsible for deciding what to do with a missing
+ * delta — `ledger.reconstruct()` uses it as a "stop walking
+ * backward" signal and falls back to the on-disk current value.
+ */
+ async loadDelta(index) {
+ const envelope = await this.adapter.get(
+ this.compartment,
+ LEDGER_DELTAS_COLLECTION,
+ paddedIndex(index)
+ );
+ if (!envelope) return null;
+ if (!this.encrypted) {
+ return JSON.parse(envelope._data);
+ }
+ const dek = await this.getDEK(LEDGER_COLLECTION);
+ const json = await decrypt(envelope._iv, envelope._data, dek);
+ return JSON.parse(json);
+ }
+ /** Encrypt a JSON Patch into an envelope for storage. Mirrors encryptEntry. */
+ async encryptDelta(patch) {
+ const json = JSON.stringify(patch);
+ if (!this.encrypted) {
+ return {
+ _noydb: NOYDB_FORMAT_VERSION,
+ _v: 1,
+ _ts: (/* @__PURE__ */ new Date()).toISOString(),
+ _iv: "",
+ _data: json,
+ _by: this.actor
+ };
+ }
+ const dek = await this.getDEK(LEDGER_COLLECTION);
+ const { iv, data } = await encrypt(json, dek);
+ return {
+ _noydb: NOYDB_FORMAT_VERSION,
+ _v: 1,
+ _ts: (/* @__PURE__ */ new Date()).toISOString(),
+ _iv: iv,
+ _data: data,
+ _by: this.actor
+ };
+ }
+ /**
+ * Read all entries in ascending-index order. Used internally by
+ * `append()`, `head()`, `verify()`, and `entries()`. Decryption is
+ * serial because the entries are tiny and the overhead of a Promise
+ * pool would dominate at realistic chain lengths (< 100K entries).
+ */
+ async loadAllEntries() {
+ const keys = await this.adapter.list(this.compartment, LEDGER_COLLECTION);
+ keys.sort();
+ const entries = [];
+ for (const key of keys) {
+ const envelope = await this.adapter.get(
+ this.compartment,
+ LEDGER_COLLECTION,
+ key
+ );
+ if (!envelope) continue;
+ entries.push(await this.decryptEntry(envelope));
+ }
+ return entries;
+ }
+ /**
+ * Return the current head of the ledger: the last entry, its hash,
+ * and the total chain length. Returns `null` on an empty ledger, so
+ * callers can tell "no history yet" apart from a populated chain
+ * without treating either as an error.
+ */
+ async head() {
+ const cached = await this.getCachedHead();
+ if (!cached) return null;
+ return {
+ entry: cached.entry,
+ hash: cached.hash,
+ length: cached.entry.index + 1
+ };
+ }
+ /**
+ * Return entries in the requested half-open range `[from, to)`.
+ * Defaults: `from = 0`, `to = length`. The indices are clipped to
+ * the valid range; no error is thrown for out-of-range queries.
+ */
+ async entries(opts = {}) {
+ const all = await this.loadAllEntries();
+ const from = Math.max(0, opts.from ?? 0);
+ const to = Math.min(all.length, opts.to ?? all.length);
+ return all.slice(from, to);
+ }
+ /**
+ * Reconstruct a record's state at a given historical version by
+ * walking the ledger's delta chain backward from the current state.
+ *
+ * ## Algorithm
+ *
+ * Ledger deltas are stored in **reverse** form — each entry's
+ * patch describes how to undo that put, transforming the new
+ * record back into the previous one. `reconstruct` exploits this
+ * by:
+ *
+ * 1. Finding every ledger entry for `(collection, id)` in the
+ * chain, sorted by index ascending.
+ * 2. Starting from `current` (the present value of the record,
+ * as held by the caller — typically fetched via
+ * `Collection.get()`).
+ * 3. Walking entries in **descending** index order and applying
+ * each entry's reverse patch, stopping when we reach the
+ * entry whose version equals `atVersion`.
+ *
+ * The result is the record as it existed immediately AFTER the
+ * put at `atVersion`. To get the state at the genesis put
+ * (version 1), the walk runs all the way back through every put
+ * after the first.
+ *
+ * ## Caveats
+ *
+ * - **Delete entries** break the walk: once we see a delete, the
+ * record didn't exist before that point, so there's nothing to
+ * reconstruct. We return `null` in that case.
+ * - **Missing deltas** (e.g., after `pruneHistory` folds old
+ * entries into a base snapshot) also stop the walk. v0.4 does
+ * not ship pruneHistory, so today this only happens if an entry
+ * was deleted out-of-band.
+ * - The caller MUST pass the correct current value. Passing a
+ * mutated object would corrupt the reconstruction — the patch
+ * chain is only valid against the exact state that was in
+ * effect when the most recent put happened.
+ *
+ * For v0.4, `reconstruct` is the only way to read a historical
+ * version via deltas. The legacy `_history` collection still
+ * holds full snapshots and `Collection.getVersion()` still reads
+ * from there — the two paths coexist until pruneHistory lands in
+ * a follow-up and delta becomes the default.
+ */
+ async reconstruct(collection, id, current, atVersion) {
+ const all = await this.loadAllEntries();
+ const matching = all.filter(
+ (e) => e.collection === collection && e.id === id
+ );
+ if (matching.length === 0) {
+ return null;
+ }
+ let state = current;
+ for (let i = matching.length - 1; i >= 0; i--) {
+ const entry = matching[i];
+ if (!entry) continue;
+ if (entry.version === atVersion && entry.op !== "delete") {
+ return state;
+ }
+ if (entry.op === "delete") {
+ return null;
+ }
+ if (entry.deltaHash === void 0) {
+ if (entry.version === atVersion) return state;
+ return null;
+ }
+ const patch = await this.loadDelta(entry.index);
+ if (!patch) {
+ return null;
+ }
+ if (state === null) {
+ return null;
+ }
+ state = applyPatch(state, patch);
+ }
+ return null;
+ }
+ /**
+ * Walk the chain from genesis forward and verify every link.
+ *
+ * Returns `{ ok: true, head, length }` if every entry's `prevHash`
+ * matches the recomputed hash of its predecessor (and the genesis
+ * entry's `prevHash` is the empty string).
+ *
+ * Returns `{ ok: false, divergedAt, expected, actual }` on the first
+ * mismatch. `divergedAt` is the 0-based index of the BROKEN entry
+ * — entries before that index still verify cleanly; entries at and
+ * after `divergedAt` are untrustworthy.
+ *
+ * This method detects:
+ * - Mutated entry content (fields changed)
+ * - Reordered entries (if any adjacent pair swaps, the prevHash
+ * of the second no longer matches)
+ * - Inserted entries (the inserted entry's prevHash likely fails,
+ * and the following entry's prevHash definitely fails)
+ * - Deleted entries (the entry after the deletion sees a wrong
+ * prevHash)
+ *
+ * It does NOT detect:
+ * - Tampering with the DATA collections that bypassed the ledger
+ * entirely (e.g., an attacker who modifies records without
+ * appending matching ledger entries — this is why we also
+ * plan a `verifyIntegrity()` helper in a follow-up)
+ * - Truncation of the chain at the tail (dropping the last N
+ * entries leaves a shorter but still consistent chain). External
+ * anchoring of `head.hash` to a trusted service is the defense
+ * against this.
+ */
+ async verify() {
+ const entries = await this.loadAllEntries();
+ let expectedPrevHash = "";
+ for (let i = 0; i < entries.length; i++) {
+ const entry = entries[i];
+ if (!entry) continue;
+ if (entry.prevHash !== expectedPrevHash) {
+ return {
+ ok: false,
+ divergedAt: i,
+ expected: expectedPrevHash,
+ actual: entry.prevHash
+ };
+ }
+ if (entry.index !== i) {
+ return {
+ ok: false,
+ divergedAt: i,
+ expected: `index=${i}`,
+ actual: `index=${entry.index}`
+ };
+ }
+ expectedPrevHash = await hashEntry(entry);
+ }
+ return {
+ ok: true,
+ head: expectedPrevHash,
+ length: entries.length
+ };
+ }
+ // ─── Encryption plumbing ─────────────────────────────────────────
+ /**
+ * Serialize + encrypt a ledger entry into an EncryptedEnvelope. The
+ * envelope's `_v` field is set to `entry.index + 1` so the usual
+ * optimistic-concurrency machinery has a reasonable version number
+ * to compare against (the ledger is append-only, so concurrent
+ * writes should always bump the index).
+ */
+ async encryptEntry(entry) {
+ const json = canonicalJson(entry);
+ if (!this.encrypted) {
+ return {
+ _noydb: NOYDB_FORMAT_VERSION,
+ _v: entry.index + 1,
+ _ts: entry.ts,
+ _iv: "",
+ _data: json,
+ _by: entry.actor
+ };
+ }
+ const dek = await this.getDEK(LEDGER_COLLECTION);
+ const { iv, data } = await encrypt(json, dek);
+ return {
+ _noydb: NOYDB_FORMAT_VERSION,
+ _v: entry.index + 1,
+ _ts: entry.ts,
+ _iv: iv,
+ _data: data,
+ _by: entry.actor
+ };
+ }
+ /** Decrypt an envelope into a LedgerEntry. Throws on bad key / tamper. */
+ async decryptEntry(envelope) {
+ if (!this.encrypted) {
+ return JSON.parse(envelope._data);
+ }
+ const dek = await this.getDEK(LEDGER_COLLECTION);
+ const json = await decrypt(envelope._iv, envelope._data, dek);
+ return JSON.parse(json);
+ }
+ };
+ async function envelopePayloadHash(envelope) {
+ if (!envelope) return "";
+ return sha256Hex(envelope._data);
+ }
+
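
// Illustrative sketch (not part of the published bundle): two puts, a
// chain check, and a version-1 reconstruction. `adapter` is a hypothetical
// storage adapter; `encrypted: false` keeps the example short.
const ledger = new LedgerStore({
  adapter,
  compartment: "main",
  encrypted: false,
  getDEK: async () => null, // never consulted when encrypted is false
  actor: "alice"
});
const v1 = { name: "Ada" };
const v2 = { name: "Ada L." };
await ledger.append({ op: "put", collection: "users", id: "u1", version: 1, actor: "", payloadHash: "" });
// The v2 put carries the REVERSE patch (v2 back to v1) as its delta:
await ledger.append({ op: "put", collection: "users", id: "u1", version: 2, actor: "", payloadHash: "", delta: computePatch(v2, v1) });
await ledger.verify();                          // { ok: true, head: "...", length: 2 }
await ledger.reconstruct("users", "u1", v2, 1); // { name: "Ada" }
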
+ // src/refs.ts
+ var RefIntegrityError = class extends NoydbError {
+ collection;
+ id;
+ field;
+ refTo;
+ refId;
+ constructor(opts) {
+ super("REF_INTEGRITY", opts.message);
+ this.name = "RefIntegrityError";
+ this.collection = opts.collection;
+ this.id = opts.id;
+ this.field = opts.field;
+ this.refTo = opts.refTo;
+ this.refId = opts.refId;
+ }
+ };
+ var RefScopeError = class extends NoydbError {
+ constructor(target) {
+ super(
+ "REF_SCOPE",
+ `Cross-compartment references are not supported in v0.4 \u2014 got target "${target}". Use a simple collection name (e.g. "clients"), not a path. Cross-compartment refs are tracked for a future release.`
+ );
+ this.name = "RefScopeError";
+ }
+ };
+ function ref(target, mode = "strict") {
+ if (target.includes("/")) {
+ throw new RefScopeError(target);
+ }
+ if (!target || target.startsWith("_")) {
+ throw new Error(
+ `ref(): target collection name must be non-empty and cannot start with '_' (reserved for internal collections). Got "${target}".`
+ );
+ }
+ return { target, mode };
+ }
+ var RefRegistry = class {
+ outbound = /* @__PURE__ */ new Map();
+ inbound = /* @__PURE__ */ new Map();
+ /**
+ * Register the refs declared by a single collection. Idempotent in
+ * the happy path — calling twice with the same data is a no-op.
+ * Calling twice with DIFFERENT data throws, because silent
+ * overrides would be confusing ("I changed the ref and it doesn't
+ * update" vs "I declared the same collection twice with different
+ * refs and the second call won").
+ */
+ register(collection, refs) {
+ const existing = this.outbound.get(collection);
+ if (existing) {
+ const existingKeys = Object.keys(existing).sort();
+ const newKeys = Object.keys(refs).sort();
+ if (existingKeys.join(",") !== newKeys.join(",")) {
+ throw new Error(
+ `RefRegistry: conflicting ref declarations for collection "${collection}"`
+ );
+ }
+ for (const k of existingKeys) {
+ const a = existing[k];
+ const b = refs[k];
+ if (!a || !b || a.target !== b.target || a.mode !== b.mode) {
+ throw new Error(
+ `RefRegistry: conflicting ref declarations for collection "${collection}" field "${k}"`
+ );
+ }
+ }
+ return;
+ }
+ this.outbound.set(collection, { ...refs });
+ for (const [field, desc] of Object.entries(refs)) {
+ const list = this.inbound.get(desc.target) ?? [];
+ list.push({ collection, field, mode: desc.mode });
+ this.inbound.set(desc.target, list);
+ }
+ }
+ /** Get the outbound refs declared by a collection (or `{}` if none). */
+ getOutbound(collection) {
+ return this.outbound.get(collection) ?? {};
+ }
+ /** Get the inbound refs that target a given collection (or `[]`). */
+ getInbound(target) {
+ return this.inbound.get(target) ?? [];
+ }
+ /**
+ * Iterate every (collection → refs) pair that has at least one
+ * declared reference. Used by `checkIntegrity` to walk the full
+ * universe of outbound refs without needing to track collection
+ * names elsewhere.
+ */
+ entries() {
+ return [...this.outbound.entries()];
+ }
+ /** Clear the registry. Test-only escape hatch; never called from production code. */
+ clear() {
+ this.outbound.clear();
+ this.inbound.clear();
+ }
+ };
+
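
// Illustrative sketch (not part of the published bundle): declaring refs
// and what the registry tracks. Collection and field names are made up.
const registry = new RefRegistry();
registry.register("invoices", { clientId: ref("clients"), parentId: ref("invoices") });
registry.getOutbound("invoices"); // { clientId: { target: "clients", mode: "strict" }, ... }
registry.getInbound("clients");   // [{ collection: "invoices", field: "clientId", mode: "strict" }]
// ref("billing/clients") throws RefScopeError: cross-compartment paths are
// rejected up front, as is any "_"-prefixed internal collection name.
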
  // src/keyring.ts
  var GRANTABLE_BY_ADMIN = ["operator", "viewer", "client"];
  function canGrant(callerRole, targetRole) {
@@ -272,6 +1083,11 @@ async function grant(adapter, compartment, callerKeyring, options) {
  }
  }
  }
+ for (const [collName, dek] of callerKeyring.deks) {
+ if (collName.startsWith("_") && !(collName in wrappedDeks)) {
+ wrappedDeks[collName] = await wrapKey(dek, newKek);
+ }
+ }
  const keyringFile = {
  _noydb_keyring: NOYDB_KEYRING_VERSION,
  user_id: options.userId,
@@ -525,65 +1341,685 @@ async function clearHistory(adapter, compartment, collection, recordId) {
  }

  // src/diff.ts
- function diff(oldObj, newObj, basePath = "") {
+ function diff2(oldObj, newObj, basePath = "") {
  const changes = [];
  if (oldObj === newObj) return changes;
  if (oldObj == null && newObj != null) {
  return [{ path: basePath || "(root)", type: "added", to: newObj }];
  }
- if (oldObj != null && newObj == null) {
- return [{ path: basePath || "(root)", type: "removed", from: oldObj }];
+ if (oldObj != null && newObj == null) {
+ return [{ path: basePath || "(root)", type: "removed", from: oldObj }];
+ }
+ if (typeof oldObj !== typeof newObj) {
+ return [{ path: basePath || "(root)", type: "changed", from: oldObj, to: newObj }];
+ }
+ if (typeof oldObj !== "object") {
+ return [{ path: basePath || "(root)", type: "changed", from: oldObj, to: newObj }];
+ }
+ if (Array.isArray(oldObj) && Array.isArray(newObj)) {
+ const maxLen = Math.max(oldObj.length, newObj.length);
+ for (let i = 0; i < maxLen; i++) {
+ const p = basePath ? `${basePath}[${i}]` : `[${i}]`;
+ if (i >= oldObj.length) {
+ changes.push({ path: p, type: "added", to: newObj[i] });
+ } else if (i >= newObj.length) {
+ changes.push({ path: p, type: "removed", from: oldObj[i] });
+ } else {
+ changes.push(...diff2(oldObj[i], newObj[i], p));
+ }
+ }
+ return changes;
+ }
+ const oldRecord = oldObj;
+ const newRecord = newObj;
+ const allKeys = /* @__PURE__ */ new Set([...Object.keys(oldRecord), ...Object.keys(newRecord)]);
+ for (const key of allKeys) {
+ const p = basePath ? `${basePath}.${key}` : key;
+ if (!(key in oldRecord)) {
+ changes.push({ path: p, type: "added", to: newRecord[key] });
+ } else if (!(key in newRecord)) {
+ changes.push({ path: p, type: "removed", from: oldRecord[key] });
+ } else {
+ changes.push(...diff2(oldRecord[key], newRecord[key], p));
+ }
+ }
+ return changes;
+ }
+ function formatDiff(changes) {
+ if (changes.length === 0) return "(no changes)";
+ return changes.map((c) => {
+ switch (c.type) {
+ case "added":
+ return `+ ${c.path}: ${JSON.stringify(c.to)}`;
+ case "removed":
+ return `- ${c.path}: ${JSON.stringify(c.from)}`;
+ case "changed":
+ return `~ ${c.path}: ${JSON.stringify(c.from)} \u2192 ${JSON.stringify(c.to)}`;
+ }
+ }).join("\n");
+ }
+
+ // src/query/predicate.ts
1403
+ function readPath(record, path) {
1404
+ if (record === null || record === void 0) return void 0;
1405
+ if (!path.includes(".")) {
1406
+ return record[path];
1407
+ }
1408
+ const segments = path.split(".");
1409
+ let cursor = record;
1410
+ for (const segment of segments) {
1411
+ if (cursor === null || cursor === void 0) return void 0;
1412
+ cursor = cursor[segment];
1413
+ }
1414
+ return cursor;
1415
+ }
1416
+ function evaluateFieldClause(record, clause) {
1417
+ const actual = readPath(record, clause.field);
1418
+ const { op, value } = clause;
1419
+ switch (op) {
1420
+ case "==":
1421
+ return actual === value;
1422
+ case "!=":
1423
+ return actual !== value;
1424
+ case "<":
1425
+ return isComparable(actual, value) && actual < value;
1426
+ case "<=":
1427
+ return isComparable(actual, value) && actual <= value;
1428
+ case ">":
1429
+ return isComparable(actual, value) && actual > value;
1430
+ case ">=":
1431
+ return isComparable(actual, value) && actual >= value;
1432
+ case "in":
1433
+ return Array.isArray(value) && value.includes(actual);
1434
+ case "contains":
1435
+ if (typeof actual === "string") return typeof value === "string" && actual.includes(value);
1436
+ if (Array.isArray(actual)) return actual.includes(value);
1437
+ return false;
1438
+ case "startsWith":
1439
+ return typeof actual === "string" && typeof value === "string" && actual.startsWith(value);
1440
+ case "between": {
1441
+ if (!Array.isArray(value) || value.length !== 2) return false;
1442
+ const [lo, hi] = value;
1443
+ if (!isComparable(actual, lo) || !isComparable(actual, hi)) return false;
1444
+ return actual >= lo && actual <= hi;
1445
+ }
1446
+ default: {
1447
+ const _exhaustive = op;
1448
+ void _exhaustive;
1449
+ return false;
1450
+ }
1451
+ }
1452
+ }
1453
+ function isComparable(a, b) {
1454
+ if (typeof a === "number" && typeof b === "number") return true;
1455
+ if (typeof a === "string" && typeof b === "string") return true;
1456
+ if (a instanceof Date && b instanceof Date) return true;
1457
+ return false;
1458
+ }
1459
+ function evaluateClause(record, clause) {
1460
+ switch (clause.type) {
1461
+ case "field":
1462
+ return evaluateFieldClause(record, clause);
1463
+ case "filter":
1464
+ return clause.fn(record);
1465
+ case "group":
1466
+ if (clause.op === "and") {
1467
+ for (const child of clause.clauses) {
1468
+ if (!evaluateClause(record, child)) return false;
1469
+ }
1470
+ return true;
1471
+ } else {
1472
+ for (const child of clause.clauses) {
1473
+ if (evaluateClause(record, child)) return true;
1474
+ }
1475
+ return false;
1476
+ }
1477
+ }
1478
+ }
1479
+
1480
+ // src/query/builder.ts
1481
+ var EMPTY_PLAN = {
1482
+ clauses: [],
1483
+ orderBy: [],
1484
+ limit: void 0,
1485
+ offset: 0
1486
+ };
1487
+ var Query = class _Query {
1488
+ source;
1489
+ plan;
1490
+ constructor(source, plan = EMPTY_PLAN) {
1491
+ this.source = source;
1492
+ this.plan = plan;
1493
+ }
1494
+ /** Add a field comparison. Multiple where() calls are AND-combined. */
1495
+ where(field, op, value) {
1496
+ const clause = { type: "field", field, op, value };
1497
+ return new _Query(this.source, {
1498
+ ...this.plan,
1499
+ clauses: [...this.plan.clauses, clause]
1500
+ });
1501
+ }
1502
+ /**
1503
+ * Logical OR group. Pass a callback that builds a sub-query.
1504
+ * Each clause inside the callback is OR-combined; the group itself
1505
+ * joins the parent plan with AND.
1506
+ */
1507
+ or(builder) {
1508
+ const sub = builder(new _Query(this.source));
1509
+ const group = {
1510
+ type: "group",
1511
+ op: "or",
1512
+ clauses: sub.plan.clauses
1513
+ };
1514
+ return new _Query(this.source, {
1515
+ ...this.plan,
1516
+ clauses: [...this.plan.clauses, group]
1517
+ });
1518
+ }
1519
+ /**
1520
+ * Logical AND group. Same shape as `or()` but every clause inside the group
1521
+ * must match. Useful for explicit grouping inside a larger OR.
1522
+ */
1523
+ and(builder) {
1524
+ const sub = builder(new _Query(this.source));
1525
+ const group = {
1526
+ type: "group",
1527
+ op: "and",
1528
+ clauses: sub.plan.clauses
1529
+ };
1530
+ return new _Query(this.source, {
1531
+ ...this.plan,
1532
+ clauses: [...this.plan.clauses, group]
1533
+ });
1534
+ }
1535
+ /** Escape hatch: add an arbitrary predicate function. Not serializable. */
1536
+ filter(fn) {
1537
+ const clause = {
1538
+ type: "filter",
1539
+ fn
1540
+ };
1541
+ return new _Query(this.source, {
1542
+ ...this.plan,
1543
+ clauses: [...this.plan.clauses, clause]
1544
+ });
1545
+ }
1546
+ /** Sort by a field. Subsequent calls are tie-breakers. */
1547
+ orderBy(field, direction = "asc") {
1548
+ return new _Query(this.source, {
1549
+ ...this.plan,
1550
+ orderBy: [...this.plan.orderBy, { field, direction }]
1551
+ });
1552
+ }
1553
+ /** Cap the result size. */
1554
+ limit(n) {
1555
+ return new _Query(this.source, { ...this.plan, limit: n });
1556
+ }
1557
+ /** Skip the first N matching records (after ordering). */
1558
+ offset(n) {
1559
+ return new _Query(this.source, { ...this.plan, offset: n });
1560
+ }
1561
+ /** Execute the plan and return the matching records. */
1562
+ toArray() {
1563
+ return executePlanWithSource(this.source, this.plan);
1564
+ }
1565
+ /** Return the first matching record, or null. */
1566
+ first() {
1567
+ const result = executePlanWithSource(this.source, { ...this.plan, limit: 1 });
1568
+ return result[0] ?? null;
1569
+ }
1570
+ /** Return the number of matching records (after where/filter, before limit). */
1571
+ count() {
1572
+ const { candidates, remainingClauses } = candidateRecords(this.source, this.plan.clauses);
1573
+ if (remainingClauses.length === 0) return candidates.length;
1574
+ return filterRecords(candidates, remainingClauses).length;
1575
+ }
1576
+ /**
1577
+ * Re-run the query whenever the source notifies of changes.
1578
+ * Returns an unsubscribe function. The callback receives the latest result.
1579
+ * Throws if the source does not support subscriptions.
1580
+ */
1581
+ subscribe(cb) {
1582
+ if (!this.source.subscribe) {
1583
+ throw new Error("Query source does not support subscriptions. Pass a source with a subscribe() method.");
1584
+ }
1585
+ cb(this.toArray());
1586
+ return this.source.subscribe(() => cb(this.toArray()));
1587
+ }
1588
+ /**
1589
+ * Return the plan as a JSON-friendly object. FilterClause entries are
1590
+ * stripped (their `fn` cannot be serialized) and replaced with
1591
+ * { type: 'filter', fn: '[function]' } so devtools can still see them.
1592
+ */
1593
+ toPlan() {
1594
+ return serializePlan(this.plan);
1595
+ }
1596
+ };
1597
+ function executePlanWithSource(source, plan) {
1598
+ const { candidates, remainingClauses } = candidateRecords(source, plan.clauses);
1599
+ let result = remainingClauses.length === 0 ? [...candidates] : filterRecords(candidates, remainingClauses);
1600
+ if (plan.orderBy.length > 0) {
1601
+ result = sortRecords(result, plan.orderBy);
1602
+ }
1603
+ if (plan.offset > 0) {
1604
+ result = result.slice(plan.offset);
1605
+ }
1606
+ if (plan.limit !== void 0) {
1607
+ result = result.slice(0, plan.limit);
1608
+ }
1609
+ return result;
1610
+ }
1611
+ function candidateRecords(source, clauses) {
1612
+ const indexes = source.getIndexes?.();
1613
+ if (!indexes || !source.lookupById || clauses.length === 0) {
1614
+ return { candidates: source.snapshot(), remainingClauses: clauses };
1615
+ }
1616
+ const lookupById = (id) => source.lookupById?.(id);
1617
+ for (let i = 0; i < clauses.length; i++) {
1618
+ const clause = clauses[i];
1619
+ if (clause.type !== "field") continue;
1620
+ if (!indexes.has(clause.field)) continue;
1621
+ let ids = null;
1622
+ if (clause.op === "==") {
1623
+ ids = indexes.lookupEqual(clause.field, clause.value);
1624
+ } else if (clause.op === "in" && Array.isArray(clause.value)) {
1625
+ ids = indexes.lookupIn(clause.field, clause.value);
1626
+ }
1627
+ if (ids !== null) {
1628
+ const remaining = [];
1629
+ for (let j = 0; j < clauses.length; j++) {
1630
+ if (j !== i) remaining.push(clauses[j]);
1631
+ }
1632
+ return {
1633
+ candidates: materializeIds(ids, lookupById),
1634
+ remainingClauses: remaining
1635
+ };
1636
+ }
1637
+ }
1638
+ return { candidates: source.snapshot(), remainingClauses: clauses };
1639
+ }
1640
+ function materializeIds(ids, lookupById) {
1641
+ const out = [];
1642
+ for (const id of ids) {
1643
+ const record = lookupById(id);
1644
+ if (record !== void 0) out.push(record);
1645
+ }
1646
+ return out;
1647
+ }
1648
+ function executePlan(records, plan) {
1649
+ let result = filterRecords(records, plan.clauses);
1650
+ if (plan.orderBy.length > 0) {
1651
+ result = sortRecords(result, plan.orderBy);
1652
+ }
1653
+ if (plan.offset > 0) {
1654
+ result = result.slice(plan.offset);
1655
+ }
1656
+ if (plan.limit !== void 0) {
1657
+ result = result.slice(0, plan.limit);
1658
+ }
1659
+ return result;
1660
+ }
1661
+ function filterRecords(records, clauses) {
1662
+ if (clauses.length === 0) return [...records];
1663
+ const out = [];
1664
+ for (const r of records) {
1665
+ let matches = true;
1666
+ for (const clause of clauses) {
1667
+ if (!evaluateClause(r, clause)) {
1668
+ matches = false;
1669
+ break;
1670
+ }
1671
+ }
1672
+ if (matches) out.push(r);
1673
+ }
1674
+ return out;
1675
+ }
1676
+ function sortRecords(records, orderBy) {
1677
+ return [...records].sort((a, b) => {
1678
+ for (const { field, direction } of orderBy) {
1679
+ const av = readField(a, field);
1680
+ const bv = readField(b, field);
1681
+ const cmp = compareValues(av, bv);
1682
+ if (cmp !== 0) return direction === "asc" ? cmp : -cmp;
1683
+ }
1684
+ return 0;
1685
+ });
1686
+ }
1687
+ function readField(record, field) {
1688
+ if (record === null || record === void 0) return void 0;
1689
+ if (!field.includes(".")) {
1690
+ return record[field];
1691
+ }
1692
+ const segments = field.split(".");
1693
+ let cursor = record;
1694
+ for (const segment of segments) {
1695
+ if (cursor === null || cursor === void 0) return void 0;
1696
+ cursor = cursor[segment];
1697
+ }
1698
+ return cursor;
1699
+ }
1700
+ function compareValues(a, b) {
1701
+ if (a === void 0 || a === null) return b === void 0 || b === null ? 0 : 1;
1702
+ if (b === void 0 || b === null) return -1;
1703
+ if (typeof a === "number" && typeof b === "number") return a - b;
1704
+ if (typeof a === "string" && typeof b === "string") return a < b ? -1 : a > b ? 1 : 0;
1705
+ if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime();
1706
+ return 0;
1707
+ }
1708
+ function serializePlan(plan) {
1709
+ return {
1710
+ clauses: plan.clauses.map(serializeClause),
1711
+ orderBy: plan.orderBy,
1712
+ limit: plan.limit,
1713
+ offset: plan.offset
1714
+ };
1715
+ }
1716
+ function serializeClause(clause) {
1717
+ if (clause.type === "filter") {
1718
+ return { type: "filter", fn: "[function]" };
1719
+ }
1720
+ if (clause.type === "group") {
1721
+ return {
1722
+ type: "group",
1723
+ op: clause.op,
1724
+ clauses: clause.clauses.map(serializeClause)
1725
+ };
1726
+ }
1727
+ return clause;
1728
+ }
1729
+
1730
+ // src/query/indexes.ts
1731
+ var CollectionIndexes = class {
1732
+ indexes = /* @__PURE__ */ new Map();
1733
+ /**
1734
+ * Declare an index. Subsequent record additions are tracked under it.
1735
+ * Calling this twice for the same field is a no-op (idempotent).
1736
+ */
1737
+ declare(field) {
1738
+ if (this.indexes.has(field)) return;
1739
+ this.indexes.set(field, { field, buckets: /* @__PURE__ */ new Map() });
1740
+ }
1741
+ /** True if the given field has a declared index. */
1742
+ has(field) {
1743
+ return this.indexes.has(field);
1744
+ }
1745
+ /** All declared field names, in declaration order. */
1746
+ fields() {
1747
+ return [...this.indexes.keys()];
1748
+ }
1749
+ /**
1750
+ * Build all declared indexes from a snapshot of records.
1751
+ * Called once per hydration. O(N × indexes.size).
1752
+ */
1753
+ build(records) {
1754
+ for (const idx of this.indexes.values()) {
1755
+ idx.buckets.clear();
1756
+ for (const { id, record } of records) {
1757
+ addToIndex(idx, id, record);
1758
+ }
1759
+ }
1760
+ }
1761
+ /**
1762
+ * Insert or update a single record across all indexes.
1763
+ * Called by `Collection.put()` after the encrypted write succeeds.
1764
+ *
1765
+ * If `previousRecord` is provided, the record is removed from any old
1766
+ * buckets first — this is the update path. Pass `null` for fresh adds.
1767
+ */
1768
+ upsert(id, newRecord, previousRecord) {
1769
+ if (this.indexes.size === 0) return;
1770
+ if (previousRecord !== null) {
1771
+ this.remove(id, previousRecord);
1772
+ }
1773
+ for (const idx of this.indexes.values()) {
1774
+ addToIndex(idx, id, newRecord);
1775
+ }
1776
+ }
1777
+ /**
1778
+ * Remove a record from all indexes. Called by `Collection.delete()`
1779
+ * (and as the first half of `upsert` for the update path).
1780
+ */
1781
+ remove(id, record) {
1782
+ if (this.indexes.size === 0) return;
1783
+ for (const idx of this.indexes.values()) {
1784
+ removeFromIndex(idx, id, record);
1785
+ }
1786
+ }
1787
+ /** Drop all index data. Called when the collection is invalidated. */
1788
+ clear() {
1789
+ for (const idx of this.indexes.values()) {
1790
+ idx.buckets.clear();
1791
+ }
1792
+ }
1793
+ /**
1794
+ * Equality lookup: return the set of record ids whose `field` matches
1795
+ * the given value. Returns `null` if no index covers the field — the
1796
+ * caller should fall back to a linear scan.
1797
+ *
1798
+ * The returned Set is a reference to the index's internal storage —
1799
+ * callers must NOT mutate it.
1800
+ */
1801
+ lookupEqual(field, value) {
1802
+ const idx = this.indexes.get(field);
1803
+ if (!idx) return null;
1804
+ const key = stringifyKey(value);
1805
+ return idx.buckets.get(key) ?? EMPTY_SET;
1806
+ }
1807
+ /**
1808
+ * Set lookup: return the union of record ids whose `field` matches any
1809
+ * of the given values. Returns `null` if no index covers the field.
1810
+ */
1811
+ lookupIn(field, values) {
1812
+ const idx = this.indexes.get(field);
1813
+ if (!idx) return null;
1814
+ const out = /* @__PURE__ */ new Set();
1815
+ for (const value of values) {
1816
+ const key = stringifyKey(value);
1817
+ const bucket = idx.buckets.get(key);
1818
+ if (bucket) {
1819
+ for (const id of bucket) out.add(id);
1820
+ }
1821
+ }
1822
+ return out;
1823
+ }
1824
+ };
1825
+ var EMPTY_SET = /* @__PURE__ */ new Set();
1826
+ function stringifyKey(value) {
1827
+ if (value === null || value === void 0) return "\0NULL\0";
1828
+ if (typeof value === "string") return value;
1829
+ if (typeof value === "number" || typeof value === "boolean") return String(value);
1830
+ if (value instanceof Date) return value.toISOString();
1831
+ return "\0OBJECT\0";
1832
+ }
1833
+ function addToIndex(idx, id, record) {
1834
+ const value = readPath(record, idx.field);
1835
+ if (value === null || value === void 0) return;
1836
+ const key = stringifyKey(value);
1837
+ let bucket = idx.buckets.get(key);
1838
+ if (!bucket) {
1839
+ bucket = /* @__PURE__ */ new Set();
1840
+ idx.buckets.set(key, bucket);
1841
+ }
1842
+ bucket.add(id);
1843
+ }
1844
+ function removeFromIndex(idx, id, record) {
1845
+ const value = readPath(record, idx.field);
1846
+ if (value === null || value === void 0) return;
1847
+ const key = stringifyKey(value);
1848
+ const bucket = idx.buckets.get(key);
1849
+ if (!bucket) return;
1850
+ bucket.delete(id);
1851
+ if (bucket.size === 0) idx.buckets.delete(key);
1852
+ }
1853
+
1854
+ // src/cache/lru.ts
1855
+ var Lru = class {
1856
+ entries = /* @__PURE__ */ new Map();
1857
+ maxRecords;
1858
+ maxBytes;
1859
+ currentBytes = 0;
1860
+ hits = 0;
1861
+ misses = 0;
1862
+ evictions = 0;
1863
+ constructor(options) {
1864
+ if (options.maxRecords === void 0 && options.maxBytes === void 0) {
1865
+ throw new Error("Lru: must specify maxRecords, maxBytes, or both");
1866
+ }
1867
+ this.maxRecords = options.maxRecords;
1868
+ this.maxBytes = options.maxBytes;
1869
+ }
1870
+ /**
1871
+ * Look up a key. Hits promote the entry to most-recently-used; misses
1872
+ * return undefined. Both update the running stats counters.
1873
+ */
1874
+ get(key) {
1875
+ const entry = this.entries.get(key);
1876
+ if (!entry) {
1877
+ this.misses++;
1878
+ return void 0;
1879
+ }
1880
+ this.entries.delete(key);
1881
+ this.entries.set(key, entry);
1882
+ this.hits++;
1883
+ return entry.value;
1884
+ }
1885
+ /**
1886
+ * Insert or update a key. If the key already exists, its size is
1887
+ * accounted for and the entry is promoted to MRU. After insertion,
1888
+ * eviction runs to maintain both budgets.
1889
+ */
1890
+ set(key, value, size) {
1891
+ const existing = this.entries.get(key);
1892
+ if (existing) {
1893
+ this.currentBytes -= existing.size;
1894
+ this.entries.delete(key);
1895
+ }
1896
+ this.entries.set(key, { value, size });
1897
+ this.currentBytes += size;
1898
+ this.evictUntilUnderBudget();
1899
+ }
1900
+ /**
1901
+ * Remove a key without affecting hit/miss stats. Used by `Collection.delete()`.
1902
+ * Returns true if the key was present.
1903
+ */
1904
+ remove(key) {
1905
+ const existing = this.entries.get(key);
1906
+ if (!existing) return false;
1907
+ this.currentBytes -= existing.size;
1908
+ this.entries.delete(key);
1909
+ return true;
1910
+ }
1911
+ /** True if the cache currently holds an entry for the given key. */
1912
+ has(key) {
1913
+ return this.entries.has(key);
536
1914
  }
537
- if (typeof oldObj !== typeof newObj) {
538
- return [{ path: basePath || "(root)", type: "changed", from: oldObj, to: newObj }];
+ /**
+ * Drop every entry. Stats counters survive; call `resetStats()` if you
+ * want a clean slate. Used by `Collection.invalidate()` on key rotation.
+ */
+ clear() {
+ this.entries.clear();
+ this.currentBytes = 0;
+ }
+ /** Reset hit/miss/eviction counters to zero. Does NOT touch entries. */
+ resetStats() {
+ this.hits = 0;
+ this.misses = 0;
+ this.evictions = 0;
+ }
+ /** Snapshot of current cache statistics. Cheap — no copying. */
+ stats() {
+ return {
+ hits: this.hits,
+ misses: this.misses,
+ evictions: this.evictions,
+ size: this.entries.size,
+ bytes: this.currentBytes
+ };
  }
- if (typeof oldObj !== "object") {
- return [{ path: basePath || "(root)", type: "changed", from: oldObj, to: newObj }];
+ /**
+ * Iterate over all currently-cached values. Order is least-recently-used
+ * first. Used by tests and devtools — production callers should use
+ * `Collection.scan()` instead.
+ */
+ *values() {
+ for (const entry of this.entries.values()) yield entry.value;
  }
- if (Array.isArray(oldObj) && Array.isArray(newObj)) {
- const maxLen = Math.max(oldObj.length, newObj.length);
- for (let i = 0; i < maxLen; i++) {
- const p = basePath ? `${basePath}[${i}]` : `[${i}]`;
- if (i >= oldObj.length) {
- changes.push({ path: p, type: "added", to: newObj[i] });
- } else if (i >= newObj.length) {
- changes.push({ path: p, type: "removed", from: oldObj[i] });
- } else {
- changes.push(...diff(oldObj[i], newObj[i], p));
- }
+ /**
+ * Walk the cache from the LRU end and drop entries until both budgets
+ * are satisfied. Called after every `set()`. Single pass — entries are
+ * never re-promoted during eviction.
+ */
+ evictUntilUnderBudget() {
+ while (this.overBudget()) {
+ const oldest = this.entries.keys().next();
+ if (oldest.done) return;
+ const key = oldest.value;
+ const entry = this.entries.get(key);
+ if (entry) this.currentBytes -= entry.size;
+ this.entries.delete(key);
+ this.evictions++;
  }
- return changes;
  }
- const oldRecord = oldObj;
- const newRecord = newObj;
- const allKeys = /* @__PURE__ */ new Set([...Object.keys(oldRecord), ...Object.keys(newRecord)]);
- for (const key of allKeys) {
- const p = basePath ? `${basePath}.${key}` : key;
- if (!(key in oldRecord)) {
- changes.push({ path: p, type: "added", to: newRecord[key] });
- } else if (!(key in newRecord)) {
- changes.push({ path: p, type: "removed", from: oldRecord[key] });
- } else {
- changes.push(...diff(oldRecord[key], newRecord[key], p));
+ overBudget() {
+ if (this.maxRecords !== void 0 && this.entries.size > this.maxRecords) return true;
+ if (this.maxBytes !== void 0 && this.currentBytes > this.maxBytes) return true;
+ return false;
+ }
+ };
+
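
// Illustrative sketch (not part of the published bundle): a two-record
// budget. get() promotes to most-recently-used, so the promoted key
// survives the next eviction.
const cache = new Lru({ maxRecords: 2 });
cache.set("a", { n: 1 }, 10);
cache.set("b", { n: 2 }, 10);
cache.get("a");               // hit; "a" becomes MRU
cache.set("c", { n: 3 }, 10); // over budget, evicts the LRU key "b"
cache.stats(); // { hits: 1, misses: 0, evictions: 1, size: 2, bytes: 20 }
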
+ // src/cache/policy.ts
+ var UNITS = {
+ "": 1,
+ "B": 1,
+ "KB": 1024,
+ "MB": 1024 * 1024,
+ "GB": 1024 * 1024 * 1024
+ // 'TB' deliberately not supported — if you need it, you're not using NOYDB.
+ };
+ function parseBytes(input) {
+ if (typeof input === "number") {
+ if (!Number.isFinite(input) || input <= 0) {
+ throw new Error(`parseBytes: numeric input must be a positive finite number, got ${String(input)}`);
  }
+ return Math.floor(input);
  }
- return changes;
+ const trimmed = input.trim();
+ if (trimmed === "") {
+ throw new Error("parseBytes: empty string is not a valid byte budget");
+ }
+ const match = /^([0-9]+(?:\.[0-9]+)?)\s*([A-Za-z]*)$/.exec(trimmed);
+ if (!match) {
+ throw new Error(`parseBytes: invalid byte budget "${input}". Expected format: "1024", "50KB", "50MB", "1GB"`);
+ }
+ const value = parseFloat(match[1]);
+ const unit = (match[2] ?? "").toUpperCase();
+ if (!(unit in UNITS)) {
+ throw new Error(`parseBytes: unknown unit "${match[2]}" in "${input}". Supported: B, KB, MB, GB`);
+ }
+ const bytes = Math.floor(value * UNITS[unit]);
+ if (bytes <= 0) {
+ throw new Error(`parseBytes: byte budget must be > 0, got ${bytes} from "${input}"`);
+ }
+ return bytes;
  }
- function formatDiff(changes) {
- if (changes.length === 0) return "(no changes)";
- return changes.map((c) => {
- switch (c.type) {
- case "added":
- return `+ ${c.path}: ${JSON.stringify(c.to)}`;
- case "removed":
- return `- ${c.path}: ${JSON.stringify(c.from)}`;
- case "changed":
- return `~ ${c.path}: ${JSON.stringify(c.from)} \u2192 ${JSON.stringify(c.to)}`;
- }
- }).join("\n");
+ function estimateRecordBytes(record) {
+ try {
+ return JSON.stringify(record).length;
+ } catch {
+ return 0;
+ }
  }
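
// Illustrative sketch (not part of the published bundle): byte budgets
// accept raw numbers or strings with binary units, flooring to whole bytes.
parseBytes(4096);     // 4096
parseBytes("50KB");   // 51200 (50 * 1024)
parseBytes("1.5 MB"); // 1572864; whitespace before the unit is allowed
parseBytes("50TB");   // throws: unknown unit "TB". Supported: B, KB, MB, GB
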

  // src/collection.ts
+ var fallbackWarned = /* @__PURE__ */ new Set();
+ function warnOnceFallback(adapterName) {
+ if (fallbackWarned.has(adapterName)) return;
+ fallbackWarned.add(adapterName);
+ if (typeof process !== "undefined" && process.env["NODE_ENV"] === "test") return;
+ console.warn(
+ `[noy-db] Adapter "${adapterName}" does not implement listPage(); Collection.scan()/listPage() are using a synthetic fallback (slower). Add a listPage method to opt into the streaming fast path.`
+ );
+ }
  var Collection = class {
  adapter;
  compartment;
@@ -594,9 +2030,91 @@ var Collection = class {
  getDEK;
  onDirty;
  historyConfig;
- // In-memory cache of decrypted records
+ // In-memory cache of decrypted records (eager mode only). Lazy mode
+ // uses `lru` instead. Both fields exist so a single Collection instance
+ // doesn't need a runtime branch on every cache access.
  cache = /* @__PURE__ */ new Map();
  hydrated = false;
+ /**
+ * Lazy mode flag. `true` when constructed with `prefetch: false`.
+ * In lazy mode the cache is bounded by an LRU and `list()`/`query()`
+ * throw — callers must use `scan()` or per-id `get()` instead.
+ */
+ lazy;
+ /**
+ * LRU cache for lazy mode. Only allocated when `prefetch: false` is set.
+ * Stores `{ record, version }` entries the same shape as `this.cache`.
+ * Tree-shaking note: importing Collection without setting `prefetch:false`
+ * still pulls in the Lru class today; future bundle-size work could
+ * lazy-import the cache module.
+ */
+ lru;
+ /**
+ * In-memory secondary indexes for the query DSL.
+ *
+ * Built during `ensureHydrated()` and maintained on every put/delete.
+ * The query executor consults these for `==` and `in` operators on
+ * indexed fields, falling back to a linear scan for unindexed fields
+ * or unsupported operators.
+ *
+ * v0.3 ships in-memory only — persistence as encrypted blobs is a
+ * follow-up. See `query/indexes.ts` for the design rationale.
+ *
+ * Indexes are INCOMPATIBLE with lazy mode in v0.3 — the constructor
+ * rejects the combination because evicted records would silently
+ * disappear from the index without notification.
+ */
+ indexes = new CollectionIndexes();
+ /**
+ * Optional Standard Schema v1 validator. When set, every `put()` runs
+ * the input through `validateSchemaInput` before encryption, and every
+ * record coming OUT of `decryptRecord` runs through
+ * `validateSchemaOutput`. A rejected input throws
+ * `SchemaValidationError` with `direction: 'input'`; drifted stored
+ * data throws with `direction: 'output'`. Both carry the rich issue
+ * list from the validator so UI code can render field-level messages.
+ *
+ * The schema is stored as `StandardSchemaV1<unknown, T>` because the
+ * collection type parameter `T` is the OUTPUT type — whatever the
+ * validator produces after transforms and coercion. Users who pass a
+ * schema to `defineNoydbStore` (or `Collection.constructor`) get their
+ * `T` inferred automatically via `InferOutput<Schema>`.
+ */
+ schema;
+ /**
+ * Optional reference to the compartment-level hash-chained audit
+ * log. When present, every successful `put()` and `delete()` appends
+ * an entry to the ledger AFTER the adapter write succeeds (so a
+ * failed adapter write never produces an orphan ledger entry).
+ *
+ * The ledger is always a compartment-wide singleton — all
+ * collections in the same compartment share the same LedgerStore.
+ * Compartment.ledger() does the lazy init; this field just holds
+ * the reference so Collection doesn't need to reach back up to the
+ * compartment on every mutation.
+ *
+ * `undefined` means "no ledger attached" — supported for tests that
+ * construct a Collection directly without a compartment, and for
+ * future backwards-compat scenarios. Production usage always has a
+ * ledger because Compartment.collection() passes one through.
+ */
+ ledger;
+ /**
+ * Optional back-reference to the owning compartment's ref
+ * enforcer. When present, `Collection.put` calls
+ * `refEnforcer.enforceRefsOnPut(name, record)` before the adapter
+ * write, and `Collection.delete` calls
+ * `refEnforcer.enforceRefsOnDelete(name, id)` before its own
+ * adapter delete. The Compartment handles the actual registry
+ * lookup and cross-collection enforcement — Collection just
+ * notifies it at the right points in the lifecycle.
+ *
+ * Typed as a structural interface rather than `Compartment`
+ * directly to avoid a circular import. Compartment implements
+ * these two methods; any other object with the same shape would
+ * work too (used only in unit tests).
+ */
+ refEnforcer;
600
2118
  constructor(opts) {
601
2119
  this.adapter = opts.adapter;
602
2120
  this.compartment = opts.compartment;
@@ -607,9 +2125,46 @@ var Collection = class {
607
2125
  this.getDEK = opts.getDEK;
608
2126
  this.onDirty = opts.onDirty;
609
2127
  this.historyConfig = opts.historyConfig ?? { enabled: true };
2128
+ this.schema = opts.schema;
2129
+ this.ledger = opts.ledger;
2130
+ this.refEnforcer = opts.refEnforcer;
2131
+ this.lazy = opts.prefetch === false;
2132
+ if (this.lazy) {
2133
+ if (opts.indexes && opts.indexes.length > 0) {
2134
+ throw new Error(
2135
+ `Collection "${this.name}": secondary indexes are not supported in lazy mode (prefetch: false). Either remove the indexes option or use prefetch: true. Index + lazy support is tracked as a v0.4 follow-up.`
2136
+ );
2137
+ }
2138
+ if (!opts.cache || opts.cache.maxRecords === void 0 && opts.cache.maxBytes === void 0) {
2139
+ throw new Error(
2140
+ `Collection "${this.name}": lazy mode (prefetch: false) requires a cache option with maxRecords and/or maxBytes. An unbounded lazy cache defeats the purpose.`
2141
+ );
2142
+ }
2143
+ const lruOptions = {};
2144
+ if (opts.cache.maxRecords !== void 0) lruOptions.maxRecords = opts.cache.maxRecords;
2145
+ if (opts.cache.maxBytes !== void 0) lruOptions.maxBytes = parseBytes(opts.cache.maxBytes);
2146
+ this.lru = new Lru(lruOptions);
2147
+ this.hydrated = true;
2148
+ } else {
2149
+ this.lru = null;
2150
+ if (opts.indexes) {
2151
+ for (const def of opts.indexes) {
2152
+ this.indexes.declare(def);
2153
+ }
2154
+ }
2155
+ }
610
2156
  }
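
Taken together, the constructor rules mean a lazy collection is opened with explicit bounds, while indexes stay on the eager path. A minimal sketch of the lazy side (collection and record names are illustrative):

```ts
const invoices = compartment.collection("invoices", {
  prefetch: false,                                // lazy mode
  cache: { maxRecords: 1_000, maxBytes: "25MB" }, // bounds are mandatory here
});

await invoices.get("inv_0042");     // read-through: adapter, decrypt, then LRU
console.log(invoices.cacheStats()); // { hits, misses, evictions, size, bytes, lazy: true }

// list()/query() throw in lazy mode; scan() streams page-by-page instead:
for await (const invoice of invoices.scan({ pageSize: 200 })) {
  // at most one page of decrypted records held at a time
}
```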
611
2157
  /** Get a single record by ID. Returns null if not found. */
612
2158
  async get(id) {
2159
+ if (this.lazy && this.lru) {
2160
+ const cached = this.lru.get(id);
2161
+ if (cached) return cached.record;
2162
+ const envelope = await this.adapter.get(this.compartment, this.name, id);
2163
+ if (!envelope) return null;
2164
+ const record = await this.decryptRecord(envelope);
2165
+ this.lru.set(id, { record, version: envelope._v }, estimateRecordBytes(record));
2166
+ return record;
2167
+ }
613
2168
  await this.ensureHydrated();
614
2169
  const entry = this.cache.get(id);
615
2170
  return entry ? entry.record : null;
@@ -619,8 +2174,26 @@ var Collection = class {
619
2174
  if (!hasWritePermission(this.keyring, this.name)) {
620
2175
  throw new ReadOnlyError();
621
2176
  }
622
- await this.ensureHydrated();
623
- const existing = this.cache.get(id);
2177
+ if (this.schema !== void 0) {
2178
+ record = await validateSchemaInput(this.schema, record, `put(${id})`);
2179
+ }
2180
+ if (this.refEnforcer !== void 0) {
2181
+ await this.refEnforcer.enforceRefsOnPut(this.name, record);
2182
+ }
2183
+ let existing;
2184
+ if (this.lazy && this.lru) {
2185
+ existing = this.lru.get(id);
2186
+ if (!existing) {
2187
+ const previousEnvelope = await this.adapter.get(this.compartment, this.name, id);
2188
+ if (previousEnvelope) {
2189
+ const previousRecord = await this.decryptRecord(previousEnvelope);
2190
+ existing = { record: previousRecord, version: previousEnvelope._v };
2191
+ }
2192
+ }
2193
+ } else {
2194
+ await this.ensureHydrated();
2195
+ existing = this.cache.get(id);
2196
+ }
624
2197
  const version = existing ? existing.version + 1 : 1;
625
2198
  if (existing && this.historyConfig.enabled !== false) {
626
2199
  const historyEnvelope = await this.encryptRecord(existing.record, existing.version);
@@ -639,7 +2212,26 @@ var Collection = class {
639
2212
  }
640
2213
  const envelope = await this.encryptRecord(record, version);
641
2214
  await this.adapter.put(this.compartment, this.name, id, envelope);
642
- this.cache.set(id, { record, version });
2215
+ if (this.ledger) {
2216
+ const appendInput = {
2217
+ op: "put",
2218
+ collection: this.name,
2219
+ id,
2220
+ version,
2221
+ actor: this.keyring.userId,
2222
+ payloadHash: await envelopePayloadHash(envelope)
2223
+ };
2224
+ if (existing) {
2225
+ appendInput.delta = computePatch(record, existing.record);
2226
+ }
2227
+ await this.ledger.append(appendInput);
2228
+ }
2229
+ if (this.lazy && this.lru) {
2230
+ this.lru.set(id, { record, version }, estimateRecordBytes(record));
2231
+ } else {
2232
+ this.cache.set(id, { record, version });
2233
+ this.indexes.upsert(id, record, existing ? existing.record : null);
2234
+ }
643
2235
  await this.onDirty?.(this.name, id, "put", version);
644
2236
  this.emitter.emit("change", {
645
2237
  compartment: this.compartment,
@@ -653,13 +2245,47 @@ var Collection = class {
653
2245
  if (!hasWritePermission(this.keyring, this.name)) {
654
2246
  throw new ReadOnlyError();
655
2247
  }
656
- const existing = this.cache.get(id);
2248
+ if (this.refEnforcer !== void 0) {
2249
+ await this.refEnforcer.enforceRefsOnDelete(this.name, id);
2250
+ }
2251
+ let existing;
2252
+ if (this.lazy && this.lru) {
2253
+ existing = this.lru.get(id);
2254
+ if (!existing && this.historyConfig.enabled !== false) {
2255
+ const previousEnvelope2 = await this.adapter.get(this.compartment, this.name, id);
2256
+ if (previousEnvelope2) {
2257
+ const previousRecord = await this.decryptRecord(previousEnvelope2);
2258
+ existing = { record: previousRecord, version: previousEnvelope2._v };
2259
+ }
2260
+ }
2261
+ } else {
2262
+ existing = this.cache.get(id);
2263
+ }
657
2264
  if (existing && this.historyConfig.enabled !== false) {
658
2265
  const historyEnvelope = await this.encryptRecord(existing.record, existing.version);
659
2266
  await saveHistory(this.adapter, this.compartment, this.name, id, historyEnvelope);
660
2267
  }
2268
+ const previousEnvelope = this.ledger ? await this.adapter.get(this.compartment, this.name, id) : null;
2269
+ const previousPayloadHash = previousEnvelope ? await envelopePayloadHash(previousEnvelope) : null;
661
2270
  await this.adapter.delete(this.compartment, this.name, id);
662
- this.cache.delete(id);
2271
+ if (this.ledger) {
2272
+ await this.ledger.append({
2273
+ op: "delete",
2274
+ collection: this.name,
2275
+ id,
2276
+ version: existing?.version ?? 0,
2277
+ actor: this.keyring.userId,
2278
+ payloadHash: previousPayloadHash
2279
+ });
2280
+ }
2281
+ if (this.lazy && this.lru) {
2282
+ this.lru.remove(id);
2283
+ } else {
2284
+ this.cache.delete(id);
2285
+ if (existing) {
2286
+ this.indexes.remove(id, existing.record);
2287
+ }
2288
+ }
663
2289
  await this.onDirty?.(this.name, id, "delete", existing?.version ?? 0);
664
2290
  this.emitter.emit("change", {
665
2291
  compartment: this.compartment,
@@ -668,14 +2294,70 @@ var Collection = class {
668
2294
  action: "delete"
669
2295
  });
670
2296
  }
671
- /** List all records in the collection. */
2297
+ /**
2298
+ * List all records in the collection.
2299
+ *
2300
+ * Throws in lazy mode — bulk listing defeats the purpose of lazy
2301
+ * hydration. Use `scan()` to iterate over the full collection
2302
+ * page-by-page without holding more than `pageSize` records in memory.
2303
+ */
672
2304
  async list() {
2305
+ if (this.lazy) {
2306
+ throw new Error(
2307
+ `Collection "${this.name}": list() is not available in lazy mode (prefetch: false). Use collection.scan({ pageSize }) to iterate over the full collection.`
2308
+ );
2309
+ }
673
2310
  await this.ensureHydrated();
674
2311
  return [...this.cache.values()].map((e) => e.record);
675
2312
  }
676
- /** Filter records by a predicate. */
677
2313
  query(predicate) {
678
- return [...this.cache.values()].map((e) => e.record).filter(predicate);
2314
+ if (this.lazy) {
2315
+ throw new Error(
2316
+ `Collection "${this.name}": query() is not available in lazy mode (prefetch: false). Use collection.scan({ pageSize }) and filter the streamed records with a regular for-await loop. Streaming queries land in v0.4.`
2317
+ );
2318
+ }
2319
+ if (predicate !== void 0) {
2320
+ return [...this.cache.values()].map((e) => e.record).filter(predicate);
2321
+ }
2322
+ const source = {
2323
+ snapshot: () => [...this.cache.values()].map((e) => e.record),
2324
+ subscribe: (cb) => {
2325
+ const handler = (event) => {
2326
+ if (event.compartment === this.compartment && event.collection === this.name) {
2327
+ cb();
2328
+ }
2329
+ };
2330
+ this.emitter.on("change", handler);
2331
+ return () => this.emitter.off("change", handler);
2332
+ },
2333
+ // Index-aware fast path for `==` and `in` operators on indexed
2334
+ // fields. The Query builder consults these when present and falls
2335
+ // back to a linear scan otherwise.
2336
+ getIndexes: () => this.getIndexes(),
2337
+ lookupById: (id) => this.cache.get(id)?.record
2338
+ };
2339
+ return new Query(source);
2340
+ }
2341
+ /**
2342
+ * Cache statistics — useful for devtools, monitoring, and verifying
2343
+ * that LRU eviction is happening as expected in lazy mode.
2344
+ *
2345
+ * In eager mode, returns size only (no hits/misses are tracked because
2346
+ * every read is a cache hit by construction). In lazy mode, returns
2347
+ * the full LRU stats: `{ hits, misses, evictions, size, bytes }`.
2348
+ */
2349
+ cacheStats() {
2350
+ if (this.lazy && this.lru) {
2351
+ return { ...this.lru.stats(), lazy: true };
2352
+ }
2353
+ return {
2354
+ hits: 0,
2355
+ misses: 0,
2356
+ evictions: 0,
2357
+ size: this.cache.size,
2358
+ bytes: 0,
2359
+ lazy: false
2360
+ };
679
2361
  }
680
2362
  // ─── History Methods ────────────────────────────────────────────
681
2363
  /** Get version history for a record, newest first. */
@@ -689,7 +2371,7 @@ var Collection = class {
689
2371
  );
690
2372
  const entries = [];
691
2373
  for (const env of envelopes) {
692
- const record = await this.decryptRecord(env);
2374
+ const record = await this.decryptRecord(env, { skipValidation: true });
693
2375
  entries.push({
694
2376
  version: env._v,
695
2377
  timestamp: env._ts,
@@ -699,7 +2381,15 @@ var Collection = class {
699
2381
  }
700
2382
  return entries;
701
2383
  }
702
- /** Get a specific past version of a record. */
2384
+ /**
2385
+ * Get a specific past version of a record.
2386
+ *
2387
+ * History reads intentionally **skip schema validation** — historical
2388
+ * records predate the current schema by definition, so validating them
2389
+ * against today's shape would be a false positive on any schema
2390
+ * evolution. If a caller needs validated history, they should filter
2391
+ * and re-put the records through the normal `put()` path.
2392
+ */
703
2393
  async getVersion(id, version) {
704
2394
  const envelope = await getVersionEnvelope(
705
2395
  this.adapter,
@@ -709,7 +2399,7 @@ var Collection = class {
709
2399
  version
710
2400
  );
711
2401
  if (!envelope) return null;
712
- return this.decryptRecord(envelope);
2402
+ return this.decryptRecord(envelope, { skipValidation: true });
713
2403
  }
714
2404
  /** Revert a record to a past version. Creates a new version with the old content. */
715
2405
  async revert(id, version) {
@@ -727,7 +2417,7 @@ var Collection = class {
727
2417
  async diff(id, versionA, versionB) {
728
2418
  const recordA = versionA === 0 ? null : await this.resolveVersion(id, versionA);
729
2419
  const recordB = versionB === void 0 || versionB === 0 ? versionB === 0 ? null : await this.resolveCurrentOrVersion(id) : await this.resolveVersion(id, versionB);
730
- return diff(recordA, recordB);
2420
+ return diff2(recordA, recordB);
731
2421
  }
732
2422
  /** Resolve a version: try history first, then check if it's the current version. */
733
2423
  async resolveVersion(id, version) {
@@ -766,11 +2456,105 @@ var Collection = class {
766
2456
  return clearHistory(this.adapter, this.compartment, this.name, id);
767
2457
  }
768
2458
  // ─── Core Methods ─────────────────────────────────────────────
769
- /** Count records in the collection. */
2459
+ /**
2460
+ * Count records in the collection.
2461
+ *
2462
+ * In eager mode this returns the in-memory cache size (instant). In
2463
+ * lazy mode it asks the adapter via `list()` to enumerate ids — slower
2464
+ * but still correct, and avoids loading any record bodies into memory.
2465
+ */
770
2466
  async count() {
2467
+ if (this.lazy) {
2468
+ const ids = await this.adapter.list(this.compartment, this.name);
2469
+ return ids.length;
2470
+ }
771
2471
  await this.ensureHydrated();
772
2472
  return this.cache.size;
773
2473
  }
2474
+ // ─── Pagination & Streaming ───────────────────────────────────
2475
+ /**
2476
+ * Fetch a single page of records via the adapter's optional `listPage`
2477
+ * extension. Returns the decrypted records for this page plus an opaque
2478
+ * cursor for the next page.
2479
+ *
2480
+ * Pass `cursor: undefined` (or omit it) to start from the beginning.
2481
+ * The final page returns `nextCursor: null`.
2482
+ *
2483
+ * If the adapter does NOT implement `listPage`, this falls back to a
2484
+ * synthetic implementation: it loads all ids via `list()`, sorts them,
2485
+ * and slices a window. The first call emits a one-time console.warn so
2486
+ * developers can spot adapters that should opt into the fast path.
2487
+ */
2488
+ async listPage(opts = {}) {
2489
+ const limit = opts.limit ?? 100;
2490
+ if (this.adapter.listPage) {
2491
+ const result = await this.adapter.listPage(this.compartment, this.name, opts.cursor, limit);
2492
+ const decrypted = [];
2493
+ for (const { record, version, id } of await this.decryptPage(result.items)) {
2494
+ if (!this.lazy && !this.cache.has(id)) {
2495
+ this.cache.set(id, { record, version });
2496
+ }
2497
+ decrypted.push(record);
2498
+ }
2499
+ return { items: decrypted, nextCursor: result.nextCursor };
2500
+ }
2501
+ warnOnceFallback(this.adapter.name ?? "unknown");
2502
+ const ids = (await this.adapter.list(this.compartment, this.name)).slice().sort();
2503
+ const start = opts.cursor ? parseInt(opts.cursor, 10) : 0;
2504
+ const end = Math.min(start + limit, ids.length);
2505
+ const items = [];
2506
+ for (let i = start; i < end; i++) {
2507
+ const id = ids[i];
2508
+ const envelope = await this.adapter.get(this.compartment, this.name, id);
2509
+ if (envelope) {
2510
+ const record = await this.decryptRecord(envelope);
2511
+ items.push(record);
2512
+ if (!this.lazy && !this.cache.has(id)) {
2513
+ this.cache.set(id, { record, version: envelope._v });
2514
+ }
2515
+ }
2516
+ }
2517
+ return {
2518
+ items,
2519
+ nextCursor: end < ids.length ? String(end) : null
2520
+ };
2521
+ }
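
When a consumer wants explicit pages rather than a stream, the cursor loop is the whole protocol: start with no cursor, feed back `nextCursor`, stop at `null`. A sketch (the `render` hook is a hypothetical consumer):

```ts
let cursor: string | undefined;
do {
  const page = await invoices.listPage({ cursor, limit: 250 });
  render(page.items); // hypothetical: display one page of decrypted records
  cursor = page.nextCursor ?? undefined;
} while (cursor !== undefined);
```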
2522
+ /**
2523
+ * Stream every record in the collection page-by-page, yielding decrypted
2524
+ * records as an `AsyncIterable<T>`. The whole point: process collections
2525
+ * larger than RAM without ever holding more than `pageSize` records
2526
+ * decrypted at once.
2527
+ *
2528
+ * @example
2529
+ * ```ts
2530
+ * for await (const record of invoices.scan({ pageSize: 500 })) {
2531
+ * await processOne(record)
2532
+ * }
2533
+ * ```
2534
+ *
2535
+ * Uses `adapter.listPage` when available; otherwise falls back to the
2536
+ * synthetic pagination path with the same one-time warning.
2537
+ */
2538
+ async *scan(opts = {}) {
2539
+ const pageSize = opts.pageSize ?? 100;
2540
+ let page = await this.listPage({ limit: pageSize });
2541
+ while (true) {
2542
+ for (const item of page.items) {
2543
+ yield item;
2544
+ }
2545
+ if (page.nextCursor === null) return;
2546
+ page = await this.listPage({ cursor: page.nextCursor, limit: pageSize });
2547
+ }
2548
+ }
2549
+ /** Decrypt a page of envelopes returned by `adapter.listPage`. */
2550
+ async decryptPage(items) {
2551
+ const out = [];
2552
+ for (const { id, envelope } of items) {
2553
+ const record = await this.decryptRecord(envelope);
2554
+ out.push({ id, record, version: envelope._v });
2555
+ }
2556
+ return out;
2557
+ }
774
2558
  // ─── Internal ──────────────────────────────────────────────────
775
2559
  /** Load all records from adapter into memory cache. */
776
2560
  async ensureHydrated() {
@@ -784,6 +2568,7 @@ var Collection = class {
784
2568
  }
785
2569
  }
786
2570
  this.hydrated = true;
2571
+ this.rebuildIndexes();
787
2572
  }
788
2573
  /** Hydrate from a pre-loaded snapshot (used by Compartment). */
789
2574
  async hydrateFromSnapshot(records) {
@@ -792,6 +2577,34 @@ var Collection = class {
792
2577
  this.cache.set(id, { record, version: envelope._v });
793
2578
  }
794
2579
  this.hydrated = true;
2580
+ this.rebuildIndexes();
2581
+ }
2582
+ /**
2583
+ * Rebuild secondary indexes from the current in-memory cache.
2584
+ *
2585
+ * Called after any bulk hydration. Incremental put/delete updates
2586
+ * are handled by `indexes.upsert()` / `indexes.remove()` directly,
2587
+ * so this only fires for full reloads.
2588
+ *
2589
+ * Synchronous and O(N × indexes.size); for the v0.3 target scale of
2590
+ * 1K–50K records this completes in single-digit milliseconds.
2591
+ */
2592
+ rebuildIndexes() {
2593
+ if (this.indexes.fields().length === 0) return;
2594
+ const snapshot = [];
2595
+ for (const [id, entry] of this.cache) {
2596
+ snapshot.push({ id, record: entry.record });
2597
+ }
2598
+ this.indexes.build(snapshot);
2599
+ }
2600
+ /**
2601
+ * Get the in-memory index store. Used by `Query` to short-circuit
2602
+ * `==` and `in` lookups when an index covers the where clause.
2603
+ *
2604
+ * Returns `null` if no indexes are declared on this collection.
2605
+ */
2606
+ getIndexes() {
2607
+ return this.indexes.fields().length > 0 ? this.indexes : null;
795
2608
  }
796
2609
  /** Get all records as encrypted envelopes (for dump). */
797
2610
  async dumpEnvelopes() {
@@ -826,13 +2639,38 @@ var Collection = class {
826
2639
  _by: by
827
2640
  };
828
2641
  }
829
- async decryptRecord(envelope) {
2642
+ /**
2643
+ * Decrypt an envelope into a record of type `T`.
2644
+ *
2645
+ * When a schema is attached, the decrypted value is validated before
2646
+ * being returned. A divergence between the stored bytes and the
2647
+ * current schema throws `SchemaValidationError` with
2648
+ * `direction: 'output'` — silently returning drifted data would
2649
+ * propagate garbage into the UI and break the whole point of having
2650
+ * a schema.
2651
+ *
2652
+ * `skipValidation` exists for history reads: when calling
2653
+ * `getVersion()` the caller is explicitly asking for an old snapshot
2654
+ * that may predate a schema change, so validating it would be a
2655
+ * false positive. Every non-history read leaves this flag `false`.
2656
+ */
2657
+ async decryptRecord(envelope, opts = {}) {
2658
+ let record;
830
2659
  if (!this.encrypted) {
831
- return JSON.parse(envelope._data);
2660
+ record = JSON.parse(envelope._data);
2661
+ } else {
2662
+ const dek = await this.getDEK(this.name);
2663
+ const json = await decrypt(envelope._iv, envelope._data, dek);
2664
+ record = JSON.parse(json);
832
2665
  }
833
- const dek = await this.getDEK(this.name);
834
- const json = await decrypt(envelope._iv, envelope._data, dek);
835
- return JSON.parse(json);
2666
+ if (this.schema !== void 0 && !opts.skipValidation) {
2667
+ record = await validateSchemaOutput(
2668
+ this.schema,
2669
+ record,
2670
+ `${this.name}@v${envelope._v}`
2671
+ );
2672
+ }
2673
+ return record;
836
2674
  }
837
2675
  };
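
Both validation directions surface as the same `SchemaValidationError`, distinguishable by `direction`. A sketch using Zod as a stand-in for any Standard Schema v1 validator (the record shape is invented):

```ts
import { z } from "zod";
import { SchemaValidationError } from "@noy-db/core";

const UserSchema = z.object({ id: z.string(), email: z.string().email() });
const users = compartment.collection("users", { schema: UserSchema });

try {
  await users.put("u1", { id: "u1", email: "not-an-email" });
} catch (err) {
  if (err instanceof SchemaValidationError) {
    // direction "input":  the write was rejected before encryption
    // direction "output": stored data no longer matches the schema (drift)
    console.error(err.direction, err.issues);
  }
}
```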
838
2676
 
@@ -840,13 +2678,62 @@ var Collection = class {
840
2678
  var Compartment = class {
841
2679
  adapter;
842
2680
  name;
2681
+ /**
2682
+ * The active in-memory keyring. NOT readonly because `load()`
2683
+ * needs to refresh it after restoring a different keyring file —
2684
+ * otherwise the in-memory DEKs (from the pre-load session) and
2685
+ * the on-disk wrapped DEKs (from the loaded backup) drift apart
2686
+ * and every subsequent decrypt fails with TamperedError.
2687
+ */
843
2688
  keyring;
844
2689
  encrypted;
845
2690
  emitter;
846
2691
  onDirty;
847
2692
  historyConfig;
848
2693
  getDEK;
2694
+ /**
2695
+ * Optional callback that re-derives an UnlockedKeyring from the
2696
+ * adapter using the active user's passphrase. Called by `load()`
2697
+ * after the on-disk keyring file has been replaced — refreshes
2698
+ * `this.keyring` so the next DEK access uses the loaded wrapped
2699
+ * DEKs instead of the stale pre-load ones.
2700
+ *
2701
+ * Provided by Noydb at openCompartment() time. Tests that
2702
+ * construct Compartment directly can pass `undefined`; load()
2703
+ * skips the refresh in that case (which is fine for plaintext
2704
+ * compartments — there's nothing to re-unwrap).
2705
+ */
2706
+ reloadKeyring;
849
2707
  collectionCache = /* @__PURE__ */ new Map();
2708
+ /**
2709
+ * Per-compartment ledger store. Lazy-initialized on first
2710
+ * `collection()` call (which passes it through to the Collection)
2711
+ * or on first `ledger()` call from user code.
2712
+ *
2713
+ * One LedgerStore is shared across all collections in a compartment
2714
+ * because the hash chain is compartment-scoped: the chain head is a
2715
+ * single "what did this compartment do last" identifier, not a
2716
+ * per-collection one. Two collections appending concurrently is the
2717
+ * single-writer concurrency concern documented in the LedgerStore
2718
+ * docstring.
2719
+ */
2720
+ ledgerStore = null;
2721
+ /**
2722
+ * Per-compartment foreign-key reference registry. Collections
2723
+ * register their `refs` option here on construction; the
2724
+ * compartment uses the registry on every put/delete/checkIntegrity
2725
+ * call. One instance lives for the compartment's lifetime.
2726
+ */
2727
+ refRegistry = new RefRegistry();
2728
+ /**
2729
+ * Set of collection record-ids currently being deleted as part of
2730
+ * a cascade. Populated on entry to `enforceRefsOnDelete` and
2731
+ * drained on exit. Used to break mutual-cascade cycles: deleting
2732
+ * A → cascade to B → cascade back to A would otherwise recurse
2733
+ * forever, so we short-circuit when we see an already-in-progress
2734
+ * delete on the same (collection, id) pair.
2735
+ */
2736
+ cascadeInProgress = /* @__PURE__ */ new Set();
850
2737
  constructor(opts) {
851
2738
  this.adapter = opts.adapter;
852
2739
  this.name = opts.name;
@@ -855,19 +2742,53 @@ var Compartment = class {
855
2742
  this.emitter = opts.emitter;
856
2743
  this.onDirty = opts.onDirty;
857
2744
  this.historyConfig = opts.historyConfig ?? { enabled: true };
2745
+ this.reloadKeyring = opts.reloadKeyring;
2746
+ this.getDEK = this.makeGetDEK();
2747
+ }
2748
+ /**
2749
+ * Construct (or reconstruct) the lazy DEK resolver. Captures the
2750
+ * CURRENT value of `this.keyring` and `this.adapter` in a closure,
2751
+ * memoizing the inner getDEKFn after first use so subsequent
2752
+ * lookups are O(1).
2753
+ *
2754
+ * `load()` calls this after refreshing `this.keyring` to discard
2755
+ * the prior session's cached DEKs.
2756
+ */
2757
+ makeGetDEK() {
858
2758
  let getDEKFn = null;
859
- this.getDEK = async (collectionName) => {
2759
+ return async (collectionName) => {
860
2760
  if (!getDEKFn) {
861
2761
  getDEKFn = await ensureCollectionDEK(this.adapter, this.name, this.keyring);
862
2762
  }
863
2763
  return getDEKFn(collectionName);
864
2764
  };
865
2765
  }
866
- /** Open a typed collection within this compartment. */
867
- collection(collectionName) {
2766
+ /**
2767
+ * Open a typed collection within this compartment.
2768
+ *
2769
+ * - `options.indexes` declares secondary indexes for the query DSL.
2770
+ * Indexes are computed in memory after decryption; adapters never
2771
+ * see plaintext index data.
2772
+ * - `options.prefetch` (default `true`) controls hydration. Eager mode
2773
+ * loads everything on first access; lazy mode (`prefetch: false`)
2774
+ * loads records on demand and bounds memory via the LRU cache.
2775
+ * - `options.cache` configures the LRU bounds. Required in lazy mode.
2776
+ * Accepts `{ maxRecords, maxBytes: '50MB' | 1024 }`.
2777
+ * - `options.schema` attaches a Standard Schema v1 validator (Zod,
2778
+ * Valibot, ArkType, Effect Schema, etc.). Every `put()` is validated
2779
+ * before encryption; every read is validated after decryption.
2780
+ * Failing records throw `SchemaValidationError`.
2781
+ *
2782
+ * Lazy mode + indexes is rejected at construction time — see the
2783
+ * Collection constructor for the rationale.
2784
+ */
2785
+ collection(collectionName, options) {
868
2786
  let coll = this.collectionCache.get(collectionName);
869
2787
  if (!coll) {
870
- coll = new Collection({
2788
+ if (options?.refs) {
2789
+ this.refRegistry.register(collectionName, options.refs);
2790
+ }
2791
+ const collOpts = {
871
2792
  adapter: this.adapter,
872
2793
  compartment: this.name,
873
2794
  name: collectionName,
@@ -876,18 +2797,205 @@ var Compartment = class {
876
2797
  emitter: this.emitter,
877
2798
  getDEK: this.getDEK,
878
2799
  onDirty: this.onDirty,
879
- historyConfig: this.historyConfig
880
- });
2800
+ historyConfig: this.historyConfig,
2801
+ ledger: this.ledger(),
2802
+ refEnforcer: this
2803
+ };
2804
+ if (options?.indexes !== void 0) collOpts.indexes = options.indexes;
2805
+ if (options?.prefetch !== void 0) collOpts.prefetch = options.prefetch;
2806
+ if (options?.cache !== void 0) collOpts.cache = options.cache;
2807
+ if (options?.schema !== void 0) collOpts.schema = options.schema;
2808
+ coll = new Collection(collOpts);
881
2809
  this.collectionCache.set(collectionName, coll);
882
2810
  }
883
2811
  return coll;
884
2812
  }
2813
+ /**
2814
+ * Enforce strict outbound refs on a `put()`. Called by Collection
2815
+ * just before it writes to the adapter. For every strict ref
2816
+ * declared on the collection, check that the target id exists in
2817
+ * the target collection; throw `RefIntegrityError` if not.
2818
+ *
2819
+ * `warn` and `cascade` modes don't affect put semantics — they're
2820
+ * enforced at delete time or via `checkIntegrity()`.
2821
+ */
2822
+ async enforceRefsOnPut(collectionName, record) {
2823
+ const outbound = this.refRegistry.getOutbound(collectionName);
2824
+ if (Object.keys(outbound).length === 0) return;
2825
+ if (!record || typeof record !== "object") return;
2826
+ const obj = record;
2827
+ for (const [field, descriptor] of Object.entries(outbound)) {
2828
+ if (descriptor.mode !== "strict") continue;
2829
+ const rawId = obj[field];
2830
+ if (rawId === null || rawId === void 0) continue;
2831
+ if (typeof rawId !== "string" && typeof rawId !== "number") {
2832
+ throw new RefIntegrityError({
2833
+ collection: collectionName,
2834
+ id: obj["id"] ?? "<unknown>",
2835
+ field,
2836
+ refTo: descriptor.target,
2837
+ refId: null,
2838
+ message: `Ref field "${collectionName}.${field}" must be a string or number, got ${typeof rawId}.`
2839
+ });
2840
+ }
2841
+ const refId = String(rawId);
2842
+ const target = this.collection(descriptor.target);
2843
+ const exists = await target.get(refId);
2844
+ if (!exists) {
2845
+ throw new RefIntegrityError({
2846
+ collection: collectionName,
2847
+ id: obj["id"] ?? "<unknown>",
2848
+ field,
2849
+ refTo: descriptor.target,
2850
+ refId,
2851
+ message: `Strict ref "${collectionName}.${field}" \u2192 "${descriptor.target}" cannot be satisfied: target id "${refId}" not found in "${descriptor.target}".`
2852
+ });
2853
+ }
2854
+ }
2855
+ }
2856
+ /**
2857
+ * Enforce inbound ref modes on a `delete()`. Called by Collection
2858
+ * just before it deletes from the adapter. Walks every inbound
2859
+ * ref that targets this (collection, id) and:
2860
+ *
2861
+ * - `strict`: throws if any referencing records exist
2862
+ * - `cascade`: deletes every referencing record
2863
+ * - `warn`: no-op (checkIntegrity picks it up)
2864
+ *
2865
+ * Cascade cycles are broken via `cascadeInProgress` — re-entering
2866
+ * for the same (collection, id) returns immediately so two
2867
+ * mutually-cascading collections don't recurse forever.
2868
+ */
2869
+ async enforceRefsOnDelete(collectionName, id) {
2870
+ const key = `${collectionName}/${id}`;
2871
+ if (this.cascadeInProgress.has(key)) return;
2872
+ this.cascadeInProgress.add(key);
2873
+ try {
2874
+ const inbound = this.refRegistry.getInbound(collectionName);
2875
+ for (const rule of inbound) {
2876
+ const fromCollection = this.collection(rule.collection);
2877
+ const allRecords = await fromCollection.list();
2878
+ const matches = allRecords.filter((rec) => {
2879
+ const raw = rec[rule.field];
2880
+ if (typeof raw !== "string" && typeof raw !== "number") return false;
2881
+ return String(raw) === id;
2882
+ });
2883
+ if (matches.length === 0) continue;
2884
+ if (rule.mode === "strict") {
2885
+ const first = matches[0];
2886
+ throw new RefIntegrityError({
2887
+ collection: rule.collection,
2888
+ id: first?.["id"] ?? "<unknown>",
2889
+ field: rule.field,
2890
+ refTo: collectionName,
2891
+ refId: id,
2892
+ message: `Cannot delete "${collectionName}"/"${id}": ${matches.length} record(s) in "${rule.collection}" still reference it via strict ref "${rule.field}".`
2893
+ });
2894
+ }
2895
+ if (rule.mode === "cascade") {
2896
+ for (const match of matches) {
2897
+ const matchId = match["id"] ?? null;
2898
+ if (matchId === null) continue;
2899
+ await fromCollection.delete(matchId);
2900
+ }
2901
+ }
2902
+ }
2903
+ } finally {
2904
+ this.cascadeInProgress.delete(key);
2905
+ }
2906
+ }
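
Declared refs drive both hooks: strict outbound refs veto a `put()`, and inbound rules fan out on `delete()` with the cycle guard above. One caveat visible in the code: inbound enforcement walks `fromCollection.list()`, so referencing collections need to be eager in v0.3. The `refs` option and the exported `ref()` helper are real, but their exact call shape isn't visible in this diff, so treat the builder below as an assumption:

```ts
import { ref, RefIntegrityError } from "@noy-db/core";

// Assumed declaration shape: field name mapped to a target + mode.
const posts = compartment.collection("posts", {
  refs: { authorId: ref("users", "strict") },
});
const comments = compartment.collection("comments", {
  refs: { postId: ref("posts", "cascade") },
});

await posts.put("p1", { id: "p1", authorId: "ghost" }); // throws RefIntegrityError
await posts.delete("p1"); // cascades: removes every comment whose postId is "p1"
```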
2907
+ /**
2908
+ * Walk every collection that has declared refs, load its records,
2909
+ * and report any reference whose target id is missing. Modes are
2910
+ * reported alongside each violation so the caller can distinguish
2911
+ * "this is a warning the user asked for" from "this should never
2912
+ * have happened" (strict violations produced by out-of-band
2913
+ * writes).
2914
+ *
2915
+ * Returns `{ violations: [...] }` instead of throwing — the whole
2916
+ * point of `checkIntegrity()` is to surface a list for display
2917
+ * or repair, not to fail noisily.
2918
+ */
2919
+ async checkIntegrity() {
2920
+ const violations = [];
2921
+ for (const [collectionName, refs] of this.refRegistry.entries()) {
2922
+ const coll = this.collection(collectionName);
2923
+ const records = await coll.list();
2924
+ for (const record of records) {
2925
+ const recId = record["id"] ?? "<unknown>";
2926
+ for (const [field, descriptor] of Object.entries(refs)) {
2927
+ const rawId = record[field];
2928
+ if (rawId === null || rawId === void 0) continue;
2929
+ if (typeof rawId !== "string" && typeof rawId !== "number") {
2930
+ violations.push({
2931
+ collection: collectionName,
2932
+ id: recId,
2933
+ field,
2934
+ refTo: descriptor.target,
2935
+ refId: rawId,
2936
+ mode: descriptor.mode
2937
+ });
2938
+ continue;
2939
+ }
2940
+ const refId = String(rawId);
2941
+ const target = this.collection(descriptor.target);
2942
+ const exists = await target.get(refId);
2943
+ if (!exists) {
2944
+ violations.push({
2945
+ collection: collectionName,
2946
+ id: recId,
2947
+ field,
2948
+ refTo: descriptor.target,
2949
+ refId: rawId,
2950
+ mode: descriptor.mode
2951
+ });
2952
+ }
2953
+ }
2954
+ }
2955
+ }
2956
+ return { violations };
2957
+ }
2958
+ /**
2959
+ * Return this compartment's hash-chained audit log.
2960
+ *
2961
+ * The ledger is lazy-initialized on first access and cached for the
2962
+ * lifetime of the Compartment instance. Every LedgerStore instance
2963
+ * shares the same adapter and DEK resolver, so `compartment.ledger()`
2964
+ * can be called repeatedly without performance cost.
2965
+ *
2966
+ * The LedgerStore itself is the public API: consumers call
2967
+ * `.append()` (via Collection internals), `.head()`, `.verify()`,
2968
+ * and `.entries({ from, to })`. See the LedgerStore docstring for
2969
+ * the full surface and the concurrency caveats.
2970
+ */
2971
+ ledger() {
2972
+ if (!this.ledgerStore) {
2973
+ this.ledgerStore = new LedgerStore({
2974
+ adapter: this.adapter,
2975
+ compartment: this.name,
2976
+ encrypted: this.encrypted,
2977
+ getDEK: this.getDEK,
2978
+ actor: this.keyring.userId
2979
+ });
2980
+ }
2981
+ return this.ledgerStore;
2982
+ }
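
The surface named in the docstring covers the common audit workflow: read the head, verify the chain, page a range. A sketch; the `head()` and `verify()` result shapes are taken from their uses in `dump()` and `verifyBackupIntegrity()` below, while the `entries()` return type is assumed to be an array:

```ts
const ledger = compartment.ledger();

const head = await ledger.head(); // { hash, entry }, nullable on an empty ledger
const check = await ledger.verify();
if (!check.ok) {
  console.error(`ledger chain diverged at index ${check.divergedAt}`);
}

for (const entry of await ledger.entries({ from: 0, to: 100 })) {
  console.log(entry.index, entry.op, entry.collection, entry.id, entry.actor);
}
```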
885
2983
  /** List all collection names in this compartment. */
886
2984
  async collections() {
887
2985
  const snapshot = await this.adapter.loadAll(this.name);
888
2986
  return Object.keys(snapshot);
889
2987
  }
890
- /** Dump compartment as encrypted JSON backup string. */
2988
+ /**
2989
+ * Dump compartment as a verifiable encrypted JSON backup string.
2990
+ *
2991
+ * v0.4 backups embed the current ledger head and the full
2992
+ * `_ledger` + `_ledger_deltas` internal collections so the
2993
+ * receiver can run `verifyBackupIntegrity()` after `load()` and
2994
+ * detect any tampering between dump and restore. Pre-v0.4 callers
2995
+ * who didn't have a ledger get a backup without these fields, and
2996
+ * the corresponding `load()` skips the integrity check with a
2997
+ * warning — both modes round-trip cleanly.
2998
+ */
891
2999
  async dump() {
892
3000
  const snapshot = await this.adapter.loadAll(this.name);
893
3001
  const keyringIds = await this.adapter.list(this.name, "_keyring");
@@ -898,17 +3006,58 @@ var Compartment = class {
898
3006
  keyrings[keyringId] = JSON.parse(envelope._data);
899
3007
  }
900
3008
  }
3009
+ const internalSnapshot = {};
3010
+ for (const internalName of [LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION]) {
3011
+ const ids = await this.adapter.list(this.name, internalName);
3012
+ if (ids.length === 0) continue;
3013
+ const records = {};
3014
+ for (const id of ids) {
3015
+ const envelope = await this.adapter.get(this.name, internalName, id);
3016
+ if (envelope) records[id] = envelope;
3017
+ }
3018
+ internalSnapshot[internalName] = records;
3019
+ }
3020
+ const head = await this.ledger().head();
901
3021
  const backup = {
902
3022
  _noydb_backup: NOYDB_BACKUP_VERSION,
903
3023
  _compartment: this.name,
904
3024
  _exported_at: (/* @__PURE__ */ new Date()).toISOString(),
905
3025
  _exported_by: this.keyring.userId,
906
3026
  keyrings,
907
- collections: snapshot
3027
+ collections: snapshot,
3028
+ ...Object.keys(internalSnapshot).length > 0 ? { _internal: internalSnapshot } : {},
3029
+ ...head ? {
3030
+ ledgerHead: {
3031
+ hash: head.hash,
3032
+ index: head.entry.index,
3033
+ ts: head.entry.ts
3034
+ }
3035
+ } : {}
908
3036
  };
909
3037
  return JSON.stringify(backup);
910
3038
  }
911
- /** Restore compartment from an encrypted JSON backup string. */
3039
+ /**
3040
+ * Restore a compartment from a verifiable backup.
3041
+ *
3042
+ * After loading, runs `verifyBackupIntegrity()` to confirm:
3043
+ * 1. The hash chain is intact (no `prevHash` mismatches)
3044
+ * 2. The chain head matches the embedded `ledgerHead.hash`
3045
+ * from the backup
3046
+ * 3. Every data envelope's `payloadHash` matches the
3047
+ * corresponding ledger entry — i.e. nobody swapped
3048
+ * ciphertext between dump and restore
3049
+ *
3050
+ * On any failure, throws `BackupLedgerError` (chain or head
3051
+ * mismatch) or `BackupCorruptedError` (data envelope mismatch).
3052
+ * The compartment state on the adapter has already been written
3053
+ * by the time we throw, so the caller is responsible for either
3054
+ * accepting the suspect state or wiping it and trying a different
3055
+ * backup.
3056
+ *
3057
+ * Pre-v0.4 backups (no `ledgerHead` field, no `_internal`) load
3058
+ * with a console warning and skip the integrity check entirely
3059
+ * — there's no chain to verify against.
3060
+ */
912
3061
  async load(backupJson) {
913
3062
  const backup = JSON.parse(backupJson);
914
3063
  await this.adapter.saveAll(this.name, backup.collections);
@@ -922,7 +3071,124 @@ var Compartment = class {
922
3071
  };
923
3072
  await this.adapter.put(this.name, "_keyring", userId, envelope);
924
3073
  }
3074
+ if (backup._internal) {
3075
+ for (const [internalName, records] of Object.entries(backup._internal)) {
3076
+ for (const [id, envelope] of Object.entries(records)) {
3077
+ await this.adapter.put(this.name, internalName, id, envelope);
3078
+ }
3079
+ }
3080
+ }
3081
+ if (this.reloadKeyring) {
3082
+ this.keyring = await this.reloadKeyring();
3083
+ this.getDEK = this.makeGetDEK();
3084
+ }
925
3085
  this.collectionCache.clear();
3086
+ this.ledgerStore = null;
3087
+ if (!backup.ledgerHead) {
3088
+ console.warn(
3089
+ `[noy-db] Loaded a legacy backup with no ledgerHead \u2014 verifiable-backup integrity check skipped. Re-export with v0.4+ to get tamper detection.`
3090
+ );
3091
+ return;
3092
+ }
3093
+ const result = await this.verifyBackupIntegrity();
3094
+ if (!result.ok) {
3095
+ if (result.kind === "data") {
3096
+ throw new BackupCorruptedError(
3097
+ result.collection,
3098
+ result.id,
3099
+ result.message
3100
+ );
3101
+ }
3102
+ throw new BackupLedgerError(result.message, result.divergedAt);
3103
+ }
3104
+ if (result.head !== backup.ledgerHead.hash) {
3105
+ throw new BackupLedgerError(
3106
+ `Backup ledger head mismatch: embedded "${backup.ledgerHead.hash}" but reconstructed "${result.head}".`
3107
+ );
3108
+ }
3109
+ }
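
The dump-to-load round trip and its two failure modes, as a sketch (`restored` is a second compartment opened elsewhere; whether to wipe or keep the suspect state after a throw is the caller's call, per the docstring):

```ts
import { BackupCorruptedError, BackupLedgerError } from "@noy-db/core";

const backupJson = await compartment.dump(); // embeds ledgerHead + _ledger collections

try {
  await restored.load(backupJson);
} catch (err) {
  if (err instanceof BackupCorruptedError) {
    // a data envelope's payloadHash no longer matches its ledger entry
    console.error(`tampered envelope: ${err.collection}/${err.id}`);
  } else if (err instanceof BackupLedgerError) {
    // broken chain or head mismatch; err.divergedAt is set when known
    console.error(err.message);
  }
}
```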
3110
+ /**
3111
+ * End-to-end backup integrity check. Runs both:
3112
+ *
3113
+ * 1. `ledger.verify()` — walks the hash chain and confirms
3114
+ * every `prevHash` matches the recomputed hash of its
3115
+ * predecessor.
3116
+ *
3117
+ * 2. **Data envelope cross-check** — for every (collection, id)
3118
+ * that has a current value, find the most recent ledger
3119
+ * entry recording a `put` for that pair, recompute the
3120
+ * sha256 of the stored envelope's `_data`, and compare to
3121
+ * the entry's `payloadHash`. Any mismatch means an
3122
+ * out-of-band write modified the data without updating the
3123
+ * ledger.
3124
+ *
3125
+ * Returns a discriminated union so callers can handle the two
3126
+ * failure modes differently:
3127
+ * - `{ ok: true, head, length }` — chain verified and all
3128
+ * data matches; safe to use.
3129
+ * - `{ ok: false, kind: 'chain', divergedAt, message }` — the
3130
+ * chain itself is broken at the given index.
3131
+ * - `{ ok: false, kind: 'data', collection, id, message }` —
3132
+ * a specific data envelope doesn't match its ledger entry.
3133
+ *
3134
+ * This method is exposed so users can call it any time, not just
3135
+ * during `load()`. A scheduled background check is the simplest
3136
+ * way to detect tampering of an in-place compartment.
3137
+ */
3138
+ async verifyBackupIntegrity() {
3139
+ const chainResult = await this.ledger().verify();
3140
+ if (!chainResult.ok) {
3141
+ return {
3142
+ ok: false,
3143
+ kind: "chain",
3144
+ divergedAt: chainResult.divergedAt,
3145
+ message: `Ledger chain diverged at index ${chainResult.divergedAt}: expected prevHash "${chainResult.expected}" but found "${chainResult.actual}".`
3146
+ };
3147
+ }
3148
+ const ledger = this.ledger();
3149
+ const allEntries = await ledger.loadAllEntries();
3150
+ const seen = /* @__PURE__ */ new Set();
3151
+ const latest = /* @__PURE__ */ new Map();
3152
+ for (let i = allEntries.length - 1; i >= 0; i--) {
3153
+ const entry = allEntries[i];
3154
+ if (!entry) continue;
3155
+ const key = `${entry.collection}/${entry.id}`;
3156
+ if (seen.has(key)) continue;
3157
+ seen.add(key);
3158
+ if (entry.op === "delete") continue;
3159
+ latest.set(key, {
3160
+ collection: entry.collection,
3161
+ id: entry.id,
3162
+ expectedHash: entry.payloadHash
3163
+ });
3164
+ }
3165
+ for (const { collection, id, expectedHash } of latest.values()) {
3166
+ const envelope = await this.adapter.get(this.name, collection, id);
3167
+ if (!envelope) {
3168
+ return {
3169
+ ok: false,
3170
+ kind: "data",
3171
+ collection,
3172
+ id,
3173
+ message: `Ledger expects data record "${collection}/${id}" to exist, but the adapter has no envelope for it.`
3174
+ };
3175
+ }
3176
+ const actualHash = await sha256Hex(envelope._data);
3177
+ if (actualHash !== expectedHash) {
3178
+ return {
3179
+ ok: false,
3180
+ kind: "data",
3181
+ collection,
3182
+ id,
3183
+ message: `Data envelope "${collection}/${id}" has been tampered with: expected payloadHash "${expectedHash}", got "${actualHash}".`
3184
+ };
3185
+ }
3186
+ }
3187
+ return {
3188
+ ok: true,
3189
+ head: chainResult.head,
3190
+ length: chainResult.length
3191
+ };
926
3192
  }
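
The discriminated union turns the scheduled background check suggested above into a small loop. A sketch with a plain interval timer (`alertOps` is a hypothetical sink; pick your own cadence):

```ts
setInterval(async () => {
  const result = await compartment.verifyBackupIntegrity();
  if (result.ok) return; // { ok: true, head, length }: nothing to do
  if (result.kind === "chain") {
    alertOps(`ledger chain broke at index ${result.divergedAt}: ${result.message}`);
  } else {
    alertOps(`envelope mismatch on ${result.collection}/${result.id}: ${result.message}`);
  }
}, 60 * 60 * 1000); // hourly
```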
927
3193
  /** Export compartment as decrypted JSON (owner only). */
928
3194
  async export() {
@@ -1288,7 +3554,23 @@ var Noydb = class {
1288
3554
  encrypted: this.options.encrypt !== false,
1289
3555
  emitter: this.emitter,
1290
3556
  onDirty: syncEngine ? (coll, id, action, version) => syncEngine.trackChange(coll, id, action, version) : void 0,
1291
- historyConfig: this.options.history
3557
+ historyConfig: this.options.history,
3558
+ // Refresh callback used by Compartment.load() to re-derive
3559
+ // the in-memory keyring from a freshly-loaded keyring file.
3560
+ // Encrypted compartments need this so post-load decrypts work
3561
+ // against the loaded session's wrapped DEKs; plaintext
3562
+ // compartments leave it undefined and load() skips the refresh.
3563
+ reloadKeyring: this.options.encrypt !== false && this.options.secret ? async () => {
3564
+ this.keyringCache.delete(name);
3565
+ const refreshed = await loadKeyring(
3566
+ this.options.adapter,
3567
+ name,
3568
+ this.options.user,
3569
+ this.options.secret
3570
+ );
3571
+ this.keyringCache.set(name, refreshed);
3572
+ return refreshed;
3573
+ } : void 0
1292
3574
  });
1293
3575
  this.compartmentCache.set(name, comp);
1294
3576
  return comp;
@@ -1576,11 +3858,18 @@ function estimateEntropy(passphrase) {
1576
3858
  return Math.floor(passphrase.length * Math.log2(charsetSize));
1577
3859
  }
1578
3860
  export {
3861
+ BackupCorruptedError,
3862
+ BackupLedgerError,
1579
3863
  Collection,
3864
+ CollectionIndexes,
1580
3865
  Compartment,
1581
3866
  ConflictError,
1582
3867
  DecryptionError,
1583
3868
  InvalidKeyError,
3869
+ LEDGER_COLLECTION,
3870
+ LEDGER_DELTAS_COLLECTION,
3871
+ LedgerStore,
3872
+ Lru,
1584
3873
  NOYDB_BACKUP_VERSION,
1585
3874
  NOYDB_FORMAT_VERSION,
1586
3875
  NOYDB_KEYRING_VERSION,
@@ -1591,21 +3880,43 @@ export {
1591
3880
  Noydb,
1592
3881
  NoydbError,
1593
3882
  PermissionDeniedError,
3883
+ Query,
1594
3884
  ReadOnlyError,
3885
+ RefIntegrityError,
3886
+ RefRegistry,
3887
+ RefScopeError,
3888
+ SchemaValidationError,
1595
3889
  SyncEngine,
1596
3890
  TamperedError,
1597
3891
  ValidationError,
3892
+ applyPatch,
3893
+ canonicalJson,
3894
+ computePatch,
1598
3895
  createNoydb,
1599
3896
  defineAdapter,
1600
- diff,
3897
+ diff2 as diff,
1601
3898
  enrollBiometric,
3899
+ envelopePayloadHash,
1602
3900
  estimateEntropy,
3901
+ estimateRecordBytes,
3902
+ evaluateClause,
3903
+ evaluateFieldClause,
3904
+ executePlan,
1603
3905
  formatDiff,
3906
+ hashEntry,
1604
3907
  isBiometricAvailable,
1605
3908
  loadBiometric,
3909
+ paddedIndex,
3910
+ parseBytes,
3911
+ parseIndex,
3912
+ readPath,
3913
+ ref,
1606
3914
  removeBiometric,
1607
3915
  saveBiometric,
3916
+ sha256Hex,
1608
3917
  unlockBiometric,
1609
- validatePassphrase
3918
+ validatePassphrase,
3919
+ validateSchemaInput,
3920
+ validateSchemaOutput
1610
3921
  };
1611
3922
  //# sourceMappingURL=index.js.map