@noy-db/hub 0.1.0-pre.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +197 -0
- package/dist/aggregate/index.cjs +476 -0
- package/dist/aggregate/index.cjs.map +1 -0
- package/dist/aggregate/index.d.cts +38 -0
- package/dist/aggregate/index.d.ts +38 -0
- package/dist/aggregate/index.js +53 -0
- package/dist/aggregate/index.js.map +1 -0
- package/dist/blobs/index.cjs +1480 -0
- package/dist/blobs/index.cjs.map +1 -0
- package/dist/blobs/index.d.cts +45 -0
- package/dist/blobs/index.d.ts +45 -0
- package/dist/blobs/index.js +48 -0
- package/dist/blobs/index.js.map +1 -0
- package/dist/bundle/index.cjs +436 -0
- package/dist/bundle/index.cjs.map +1 -0
- package/dist/bundle/index.d.cts +7 -0
- package/dist/bundle/index.d.ts +7 -0
- package/dist/bundle/index.js +40 -0
- package/dist/bundle/index.js.map +1 -0
- package/dist/chunk-2QR2PQTT.js +217 -0
- package/dist/chunk-2QR2PQTT.js.map +1 -0
- package/dist/chunk-4OWFYIDQ.js +79 -0
- package/dist/chunk-4OWFYIDQ.js.map +1 -0
- package/dist/chunk-5AATM2M2.js +90 -0
- package/dist/chunk-5AATM2M2.js.map +1 -0
- package/dist/chunk-ACLDOTNQ.js +543 -0
- package/dist/chunk-ACLDOTNQ.js.map +1 -0
- package/dist/chunk-BTDCBVJW.js +160 -0
- package/dist/chunk-BTDCBVJW.js.map +1 -0
- package/dist/chunk-CIMZBAZB.js +72 -0
- package/dist/chunk-CIMZBAZB.js.map +1 -0
- package/dist/chunk-E445ICYI.js +365 -0
- package/dist/chunk-E445ICYI.js.map +1 -0
- package/dist/chunk-EXQRC2L4.js +722 -0
- package/dist/chunk-EXQRC2L4.js.map +1 -0
- package/dist/chunk-FZU343FL.js +32 -0
- package/dist/chunk-FZU343FL.js.map +1 -0
- package/dist/chunk-GJILMRPO.js +354 -0
- package/dist/chunk-GJILMRPO.js.map +1 -0
- package/dist/chunk-GOUT6DND.js +1285 -0
- package/dist/chunk-GOUT6DND.js.map +1 -0
- package/dist/chunk-J66GRPNH.js +111 -0
- package/dist/chunk-J66GRPNH.js.map +1 -0
- package/dist/chunk-M2F2JAWB.js +464 -0
- package/dist/chunk-M2F2JAWB.js.map +1 -0
- package/dist/chunk-M5INGEFC.js +84 -0
- package/dist/chunk-M5INGEFC.js.map +1 -0
- package/dist/chunk-M62XNWRA.js +72 -0
- package/dist/chunk-M62XNWRA.js.map +1 -0
- package/dist/chunk-MR4424N3.js +275 -0
- package/dist/chunk-MR4424N3.js.map +1 -0
- package/dist/chunk-NPC4LFV5.js +132 -0
- package/dist/chunk-NPC4LFV5.js.map +1 -0
- package/dist/chunk-NXFEYLVG.js +311 -0
- package/dist/chunk-NXFEYLVG.js.map +1 -0
- package/dist/chunk-R36SIKES.js +79 -0
- package/dist/chunk-R36SIKES.js.map +1 -0
- package/dist/chunk-TDR6T5CJ.js +381 -0
- package/dist/chunk-TDR6T5CJ.js.map +1 -0
- package/dist/chunk-UF3BUNQZ.js +1 -0
- package/dist/chunk-UF3BUNQZ.js.map +1 -0
- package/dist/chunk-UQFSPSWG.js +1109 -0
- package/dist/chunk-UQFSPSWG.js.map +1 -0
- package/dist/chunk-USKYUS74.js +793 -0
- package/dist/chunk-USKYUS74.js.map +1 -0
- package/dist/chunk-XCL3WP6J.js +121 -0
- package/dist/chunk-XCL3WP6J.js.map +1 -0
- package/dist/chunk-XHFOENR2.js +680 -0
- package/dist/chunk-XHFOENR2.js.map +1 -0
- package/dist/chunk-ZFKD4QMV.js +430 -0
- package/dist/chunk-ZFKD4QMV.js.map +1 -0
- package/dist/chunk-ZLMV3TUA.js +490 -0
- package/dist/chunk-ZLMV3TUA.js.map +1 -0
- package/dist/chunk-ZRG4V3F5.js +17 -0
- package/dist/chunk-ZRG4V3F5.js.map +1 -0
- package/dist/consent/index.cjs +204 -0
- package/dist/consent/index.cjs.map +1 -0
- package/dist/consent/index.d.cts +24 -0
- package/dist/consent/index.d.ts +24 -0
- package/dist/consent/index.js +23 -0
- package/dist/consent/index.js.map +1 -0
- package/dist/crdt/index.cjs +152 -0
- package/dist/crdt/index.cjs.map +1 -0
- package/dist/crdt/index.d.cts +30 -0
- package/dist/crdt/index.d.ts +30 -0
- package/dist/crdt/index.js +24 -0
- package/dist/crdt/index.js.map +1 -0
- package/dist/crypto-IVKU7YTT.js +44 -0
- package/dist/crypto-IVKU7YTT.js.map +1 -0
- package/dist/delegation-XDJCBTI2.js +16 -0
- package/dist/delegation-XDJCBTI2.js.map +1 -0
- package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
- package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
- package/dist/hash-9KO1BGxh.d.cts +63 -0
- package/dist/hash-ChfJjRjQ.d.ts +63 -0
- package/dist/history/index.cjs +1215 -0
- package/dist/history/index.cjs.map +1 -0
- package/dist/history/index.d.cts +62 -0
- package/dist/history/index.d.ts +62 -0
- package/dist/history/index.js +79 -0
- package/dist/history/index.js.map +1 -0
- package/dist/i18n/index.cjs +746 -0
- package/dist/i18n/index.cjs.map +1 -0
- package/dist/i18n/index.d.cts +38 -0
- package/dist/i18n/index.d.ts +38 -0
- package/dist/i18n/index.js +55 -0
- package/dist/i18n/index.js.map +1 -0
- package/dist/index-BRHBCmLt.d.ts +1940 -0
- package/dist/index-C8kQtmOk.d.ts +380 -0
- package/dist/index-DN-J-5wT.d.cts +1940 -0
- package/dist/index-DhjMjz7L.d.cts +380 -0
- package/dist/index.cjs +14756 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +269 -0
- package/dist/index.d.ts +269 -0
- package/dist/index.js +6085 -0
- package/dist/index.js.map +1 -0
- package/dist/indexing/index.cjs +736 -0
- package/dist/indexing/index.cjs.map +1 -0
- package/dist/indexing/index.d.cts +36 -0
- package/dist/indexing/index.d.ts +36 -0
- package/dist/indexing/index.js +77 -0
- package/dist/indexing/index.js.map +1 -0
- package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
- package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
- package/dist/ledger-2NX4L7PN.js +33 -0
- package/dist/ledger-2NX4L7PN.js.map +1 -0
- package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
- package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
- package/dist/periods/index.cjs +1035 -0
- package/dist/periods/index.cjs.map +1 -0
- package/dist/periods/index.d.cts +21 -0
- package/dist/periods/index.d.ts +21 -0
- package/dist/periods/index.js +25 -0
- package/dist/periods/index.js.map +1 -0
- package/dist/predicate-SBHmi6D0.d.cts +161 -0
- package/dist/predicate-SBHmi6D0.d.ts +161 -0
- package/dist/query/index.cjs +1957 -0
- package/dist/query/index.cjs.map +1 -0
- package/dist/query/index.d.cts +3 -0
- package/dist/query/index.d.ts +3 -0
- package/dist/query/index.js +62 -0
- package/dist/query/index.js.map +1 -0
- package/dist/session/index.cjs +487 -0
- package/dist/session/index.cjs.map +1 -0
- package/dist/session/index.d.cts +45 -0
- package/dist/session/index.d.ts +45 -0
- package/dist/session/index.js +44 -0
- package/dist/session/index.js.map +1 -0
- package/dist/shadow/index.cjs +133 -0
- package/dist/shadow/index.cjs.map +1 -0
- package/dist/shadow/index.d.cts +16 -0
- package/dist/shadow/index.d.ts +16 -0
- package/dist/shadow/index.js +20 -0
- package/dist/shadow/index.js.map +1 -0
- package/dist/store/index.cjs +1069 -0
- package/dist/store/index.cjs.map +1 -0
- package/dist/store/index.d.cts +491 -0
- package/dist/store/index.d.ts +491 -0
- package/dist/store/index.js +34 -0
- package/dist/store/index.js.map +1 -0
- package/dist/strategy-BSxFXGzb.d.cts +110 -0
- package/dist/strategy-BSxFXGzb.d.ts +110 -0
- package/dist/strategy-D-SrOLCl.d.cts +548 -0
- package/dist/strategy-D-SrOLCl.d.ts +548 -0
- package/dist/sync/index.cjs +1062 -0
- package/dist/sync/index.cjs.map +1 -0
- package/dist/sync/index.d.cts +42 -0
- package/dist/sync/index.d.ts +42 -0
- package/dist/sync/index.js +28 -0
- package/dist/sync/index.js.map +1 -0
- package/dist/team/index.cjs +1233 -0
- package/dist/team/index.cjs.map +1 -0
- package/dist/team/index.d.cts +117 -0
- package/dist/team/index.d.ts +117 -0
- package/dist/team/index.js +39 -0
- package/dist/team/index.js.map +1 -0
- package/dist/tx/index.cjs +212 -0
- package/dist/tx/index.cjs.map +1 -0
- package/dist/tx/index.d.cts +20 -0
- package/dist/tx/index.d.ts +20 -0
- package/dist/tx/index.js +20 -0
- package/dist/tx/index.js.map +1 -0
- package/dist/types-BZpCZB8N.d.ts +7526 -0
- package/dist/types-Bfs0qr5F.d.cts +7526 -0
- package/dist/ulid-COREQ2RQ.js +9 -0
- package/dist/ulid-COREQ2RQ.js.map +1 -0
- package/dist/util/index.cjs +230 -0
- package/dist/util/index.cjs.map +1 -0
- package/dist/util/index.d.cts +77 -0
- package/dist/util/index.d.ts +77 -0
- package/dist/util/index.js +190 -0
- package/dist/util/index.js.map +1 -0
- package/package.json +244 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/history/ledger/patch.ts","../src/history/ledger/constants.ts","../src/history/ledger/store.ts"],"sourcesContent":["/**\n * RFC 6902 JSON Patch — compute + apply.\n *\n * This module is the \"delta history\" primitive: instead of\n * snapshotting the full record on every put (the behavior),\n * `Collection.put` computes a JSON Patch from the previous version to\n * the new version and stores only the patch in the ledger. To\n * reconstruct version N, we walk from the genesis snapshot forward\n * applying patches. Storage scales with **edit size**, not record\n * size — a 10 KB record edited 1000 times costs ~10 KB of deltas\n * instead of ~10 MB of snapshots.\n *\n * ## Why hand-roll instead of using a library?\n *\n * RFC 6902 has good libraries (`fast-json-patch`, `rfc6902`) but every\n * single one of them adds a runtime dependency to `@noy-db/core`. The\n * \"zero runtime dependencies\" promise is one of the core's load-bearing\n * features, and the patch surface we actually need is small enough\n * (~150 LoC) that vendoring is the right call.\n *\n * What we implement:\n * - `add` — insert a value at a path\n * - `remove` — delete the value at a path\n * - `replace` — overwrite the value at a path\n *\n * What we deliberately skip (out of scope for the ledger use):\n * - `move` and `copy` — optimizations; the diff algorithm doesn't\n * emit them, so the apply path doesn't need them\n * - `test` — used for transactional patches; we already have\n * optimistic concurrency via `_v` at the envelope layer\n * - Sophisticated array diffing (LCS, edit distance) — we treat\n * arrays as atomic values and emit a single `replace` op when\n * they differ. The accounting domain has small arrays where this\n * is fine; if we ever need patch-level array diffing we can add\n * it without changing the storage format.\n *\n * ## Path encoding (RFC 6902 §3)\n *\n * Paths look like `/foo/bar/0`. 
Each path segment is either an object\n * key or a numeric array index. Two characters need escaping inside\n * keys: `~` becomes `~0` and `/` becomes `~1`. We implement both.\n *\n * Empty path (`\"\"`) refers to the root document. Only `replace` makes\n * sense at the root, and our diff function emits it as a top-level\n * `replace` when `prev` and `next` differ in shape (object vs array,\n * primitive vs object, etc.).\n */\n\n/** A single JSON Patch operation. Subset of RFC 6902 — see file docstring. */\nexport type JsonPatchOp =\n | { readonly op: 'add'; readonly path: string; readonly value: unknown }\n | { readonly op: 'remove'; readonly path: string }\n | { readonly op: 'replace'; readonly path: string; readonly value: unknown }\n\n/** A complete JSON Patch document — an array of operations. */\nexport type JsonPatch = readonly JsonPatchOp[]\n\n// ─── Compute (diff) ──────────────────────────────────────────────────\n\n/**\n * Compute a JSON Patch that, when applied to `prev`, produces `next`.\n *\n * The algorithm is a straightforward recursive object walk:\n *\n * 1. If both inputs are plain objects (and not arrays/null):\n * - For each key in `prev`, recurse if `next` has it, else emit `remove`\n * - For each key in `next` not in `prev`, emit `add`\n * 2. If both inputs are arrays AND structurally equal, no-op.\n * Otherwise emit a single `replace` for the whole array.\n * 3. If both inputs are deeply equal primitives, no-op.\n * 4. 
Otherwise emit a `replace` at the current path.\n *\n * We do not minimize patches across move-like rearrangements — every\n * generated patch is straightforward enough to apply by hand if you\n * had to debug it.\n */\nexport function computePatch(prev: unknown, next: unknown): JsonPatch {\n const ops: JsonPatchOp[] = []\n diff(prev, next, '', ops)\n return ops\n}\n\nfunction diff(\n prev: unknown,\n next: unknown,\n path: string,\n out: JsonPatchOp[],\n): void {\n // Both null / both undefined → no-op (we don't differentiate them\n // in JSON terms; canonicalJson would reject undefined anyway).\n if (prev === next) return\n\n // One side null, the other not → straight replace.\n if (prev === null || next === null) {\n out.push({ op: 'replace', path, value: next })\n return\n }\n\n const prevIsArray = Array.isArray(prev)\n const nextIsArray = Array.isArray(next)\n const prevIsObject = typeof prev === 'object' && !prevIsArray\n const nextIsObject = typeof next === 'object' && !nextIsArray\n\n // Type changed (e.g., object → primitive, array → object). Replace.\n if (prevIsArray !== nextIsArray || prevIsObject !== nextIsObject) {\n out.push({ op: 'replace', path, value: next })\n return\n }\n\n // Both arrays. We don't do clever LCS-based diffing — emit a single\n // replace for the whole array if they differ. See file docstring for\n // the rationale.\n if (prevIsArray && nextIsArray) {\n if (!arrayDeepEqual(prev as unknown[], next as unknown[])) {\n out.push({ op: 'replace', path, value: next })\n }\n return\n }\n\n // Both plain objects. 
Recurse key by key.\n if (prevIsObject && nextIsObject) {\n const prevObj = prev as Record<string, unknown>\n const nextObj = next as Record<string, unknown>\n const prevKeys = Object.keys(prevObj)\n const nextKeys = Object.keys(nextObj)\n\n // Handle removes and overlapping recursions in one pass over prev.\n for (const key of prevKeys) {\n const childPath = path + '/' + escapePathSegment(key)\n if (!(key in nextObj)) {\n out.push({ op: 'remove', path: childPath })\n } else {\n diff(prevObj[key], nextObj[key], childPath, out)\n }\n }\n // Handle adds.\n for (const key of nextKeys) {\n if (!(key in prevObj)) {\n out.push({\n op: 'add',\n path: path + '/' + escapePathSegment(key),\n value: nextObj[key],\n })\n }\n }\n return\n }\n\n // Two primitives that aren't strictly equal — replace.\n out.push({ op: 'replace', path, value: next })\n}\n\nfunction arrayDeepEqual(a: unknown[], b: unknown[]): boolean {\n if (a.length !== b.length) return false\n for (let i = 0; i < a.length; i++) {\n if (!deepEqual(a[i], b[i])) return false\n }\n return true\n}\n\nfunction deepEqual(a: unknown, b: unknown): boolean {\n if (a === b) return true\n if (a === null || b === null) return false\n if (typeof a !== typeof b) return false\n if (typeof a !== 'object') return false\n const aArray = Array.isArray(a)\n const bArray = Array.isArray(b)\n if (aArray !== bArray) return false\n if (aArray && bArray) return arrayDeepEqual(a, b as unknown[])\n const aObj = a as Record<string, unknown>\n const bObj = b as Record<string, unknown>\n const aKeys = Object.keys(aObj)\n const bKeys = Object.keys(bObj)\n if (aKeys.length !== bKeys.length) return false\n for (const key of aKeys) {\n if (!(key in bObj)) return false\n if (!deepEqual(aObj[key], bObj[key])) return false\n }\n return true\n}\n\n// ─── Apply ──────────────────────────────────────────────────────────\n\n/**\n * Apply a JSON Patch to a base document and return the result.\n *\n * The base document is **not mutated** — every op clones 
the parent\n * container before writing to it, so the caller's reference to `base`\n * stays untouched. This costs an extra allocation per op but makes\n * the apply pipeline reorderable and safe to interrupt.\n *\n * Throws on:\n * - Removing a path that doesn't exist\n * - Adding to a path whose parent doesn't exist\n * - A path component that doesn't match the document shape (e.g.,\n * trying to step into a primitive)\n *\n * Throwing is the right behavior for the ledger use case: a failed\n * apply means the chain is corrupted, which should be loud rather\n * than silently producing a wrong reconstruction.\n */\nexport function applyPatch<T = unknown>(base: T, patch: JsonPatch): T {\n let result: unknown = clone(base)\n for (const op of patch) {\n result = applyOp(result, op)\n }\n return result as T\n}\n\nfunction applyOp(doc: unknown, op: JsonPatchOp): unknown {\n // Empty path → operation targets the root. Only `replace` and `add`\n // make sense at the root, but we handle `remove` for completeness\n // (root removal returns null).\n if (op.path === '') {\n if (op.op === 'remove') return null\n return clone(op.value)\n }\n\n const segments = parsePath(op.path)\n return walkAndApply(doc, segments, op)\n}\n\nfunction walkAndApply(\n doc: unknown,\n segments: string[],\n op: JsonPatchOp,\n): unknown {\n if (segments.length === 0) {\n // Should never happen — empty path is handled in applyOp().\n throw new Error('walkAndApply: empty segments (internal error)')\n }\n\n const [head, ...rest] = segments\n if (head === undefined) throw new Error('walkAndApply: undefined segment')\n\n if (rest.length === 0) {\n return applyAtTerminal(doc, head, op)\n }\n\n // Recurse into the child container, then rebuild the parent with\n // the modified child.\n if (Array.isArray(doc)) {\n const idx = parseArrayIndex(head, doc.length)\n const child = doc[idx]\n const newChild = walkAndApply(child, rest, op)\n const next = doc.slice()\n next[idx] = newChild\n return next\n }\n if 
(doc !== null && typeof doc === 'object') {\n const obj = doc as Record<string, unknown>\n if (!(head in obj)) {\n throw new Error(`applyPatch: path segment \"${head}\" not found in object`)\n }\n const newChild = walkAndApply(obj[head], rest, op)\n return { ...obj, [head]: newChild }\n }\n throw new Error(\n `applyPatch: cannot step into ${typeof doc} at segment \"${head}\"`,\n )\n}\n\nfunction applyAtTerminal(\n doc: unknown,\n segment: string,\n op: JsonPatchOp,\n): unknown {\n if (Array.isArray(doc)) {\n const idx =\n segment === '-' ? doc.length : parseArrayIndex(segment, doc.length + 1)\n const next = doc.slice()\n if (op.op === 'remove') {\n next.splice(idx, 1)\n return next\n }\n if (op.op === 'add') {\n next.splice(idx, 0, clone(op.value))\n return next\n }\n if (op.op === 'replace') {\n if (idx >= doc.length) {\n throw new Error(\n `applyPatch: replace at out-of-bounds array index ${idx}`,\n )\n }\n next[idx] = clone(op.value)\n return next\n }\n }\n if (doc !== null && typeof doc === 'object') {\n const obj = doc as Record<string, unknown>\n if (op.op === 'remove') {\n if (!(segment in obj)) {\n throw new Error(\n `applyPatch: remove on missing key \"${segment}\"`,\n )\n }\n const next = { ...obj }\n delete next[segment]\n return next\n }\n if (op.op === 'add') {\n // RFC 6902: `add` on an existing key replaces it.\n return { ...obj, [segment]: clone(op.value) }\n }\n if (op.op === 'replace') {\n if (!(segment in obj)) {\n throw new Error(\n `applyPatch: replace on missing key \"${segment}\"`,\n )\n }\n return { ...obj, [segment]: clone(op.value) }\n }\n }\n throw new Error(\n `applyPatch: cannot apply ${op.op} at terminal segment \"${segment}\"`,\n )\n}\n\n// ─── Path encoding (RFC 6902 §3) ─────────────────────────────────────\n\n/**\n * Escape a single path segment per RFC 6902 §3:\n * `~` → `~0`\n * `/` → `~1`\n *\n * Order matters: `~` must be escaped first, otherwise the `~1` we\n * just emitted would be re-escaped to `~01`.\n */\nfunction 
escapePathSegment(segment: string): string {\n return segment.replace(/~/g, '~0').replace(/\\//g, '~1')\n}\n\nfunction unescapePathSegment(segment: string): string {\n return segment.replace(/~1/g, '/').replace(/~0/g, '~')\n}\n\nfunction parsePath(path: string): string[] {\n if (!path.startsWith('/')) {\n throw new Error(`applyPatch: path must start with '/', got \"${path}\"`)\n }\n return path\n .slice(1)\n .split('/')\n .map(unescapePathSegment)\n}\n\nfunction parseArrayIndex(segment: string, max: number): number {\n if (!/^\\d+$/.test(segment)) {\n throw new Error(\n `applyPatch: array index must be a non-negative integer, got \"${segment}\"`,\n )\n }\n const idx = Number.parseInt(segment, 10)\n if (idx < 0 || idx > max) {\n throw new Error(\n `applyPatch: array index ${idx} out of range [0, ${max}]`,\n )\n }\n return idx\n}\n\n// ─── Cheap structural clone ─────────────────────────────────────────\n\n/**\n * Plain-JSON clone via JSON.parse(JSON.stringify(value)).\n *\n * Faster than `structuredClone` for our use because (a) we know our\n * inputs are JSON-compatible (no Dates, Maps, or BigInts — anything\n * else gets rejected by canonicalJson upstream), and (b) `structuredClone`\n * has overhead for handling arbitrary structured data we don't need.\n *\n * For tiny ledger entries (< 1 KB), the JSON round-trip is in the\n * single-digit microsecond range.\n */\nfunction clone<T>(value: T): T {\n if (value === null || value === undefined) return value\n if (typeof value !== 'object') return value\n return JSON.parse(JSON.stringify(value)) as T\n}\n","/**\n * Ledger storage constants — pinned in their own leaf module so\n * always-on core code (vault.ts, dictionary.ts) can import them\n * without dragging the `LedgerStore` class into the bundle.\n *\n * `splitting: true` in tsup is not enough on its own: when a\n * source file exports both pure constants and a heavyweight class,\n * the bundler keeps the entire chunk reachable from any importer.\n * Extracting 
the constants lets the floor scenario import them\n * without paying for the class.\n *\n * @internal\n */\n\n/** The internal collection name used for ledger entry storage. */\nexport const LEDGER_COLLECTION = '_ledger'\n\n/**\n * The internal collection name used for delta payload storage.\n *\n * Deltas live in a sibling collection (not inside `_ledger`) for two\n * reasons:\n *\n * 1. **Listing efficiency.** `ledger.loadAllEntries()` calls\n * `adapter.list(_ledger)` which would otherwise return every\n * delta key alongside every entry key. Splitting them keeps the\n * list small (one key per ledger entry) and the delta reads\n * keyed by the entry's index.\n *\n * 2. **Prune-friendliness.** A future `pruneHistory()` will delete\n * old deltas while keeping the ledger chain intact (folding old\n * deltas into a base snapshot). Separating the storage makes\n * that deletion a targeted operation on one collection instead\n * of a filter across a mixed list.\n *\n * Both collections share the same ledger DEK — one DEK, two\n * internal collections, same zero-knowledge guarantees.\n */\nexport const LEDGER_DELTAS_COLLECTION = '_ledger_deltas'\n","/**\n * `LedgerStore` — read/write access to a compartment's hash-chained\n * audit log.\n *\n * The store is a thin wrapper around the adapter's `_ledger/` internal\n * collection. Every append:\n *\n * 1. Loads the current head (or treats an empty ledger as head = -1)\n * 2. Computes `prevHash` = sha256(canonicalJson(head))\n * 3. Builds the new entry with `index = head.index + 1`\n * 4. Encrypts the entry with the compartment's ledger DEK\n * 5. Writes the encrypted envelope to `_ledger/<paddedIndex>`\n *\n * `verify()` walks the chain from genesis forward and returns\n * `{ ok: true, head }` on success or `{ ok: false, divergedAt }` on the\n * first broken link.\n *\n * ## Thread / concurrency model\n *\n * For we assume a **single writer per vault**. 
Two\n * concurrent `append()` calls would race on the \"read head, write\n * head+1\" cycle and could produce a broken chain. The sync engine\n * is the primary concurrent-writer scenario, and it uses\n * optimistic-concurrency via `expectedVersion` on the adapter — but\n * the ledger path has no such guard today. Multi-writer hardening is a\n * follow-up.\n *\n * Single-writer usage IS safe, including across process restarts:\n * `head()` reads the adapter fresh each call, so a crash between the\n * adapter.put of a data record and the ledger append just means the\n * ledger is missing an entry for that record. `verify()` still\n * succeeds; a future `verifyIntegrity()` helper can cross-check the\n * ledger against the data collections to catch the gap.\n *\n * ## Why hide the ledger from `vault.collection()`?\n *\n * The `_ledger` name starts with `_`, matching the existing prefix\n * convention for internal collections (`_keyring`, `_sync`,\n * `_history`). The Vault's public `collection()` method already\n * returns entries for any name, but `loadAll()` filters out\n * underscore-prefixed collections so backups and exports don't leak\n * ledger metadata. We keep the ledger accessible ONLY via\n * `vault.ledger()` to enforce the hash-chain invariants — direct\n * puts via `collection('_ledger')` would bypass the `append()` logic.\n */\n\nimport type { NoydbStore, EncryptedEnvelope } from '../../types.js'\nimport { NOYDB_FORMAT_VERSION } from '../../types.js'\nimport { encrypt, decrypt } from '../../crypto.js'\nimport { ConflictError, LedgerContentionError } from '../../errors.js'\nimport {\n canonicalJson,\n hashEntry,\n paddedIndex,\n sha256Hex,\n type LedgerEntry,\n} from './entry.js'\nimport type { JsonPatch } from './patch.js'\nimport { applyPatch } from './patch.js'\nimport { LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION } from './constants.js'\nimport { envelopePayloadHash } from './hash.js'\n\n/**\n * Maximum optimistic-CAS retries on the ledger head. 
Each failed\n * attempt invalidates the head cache, re-reads, and retries with a\n * fresh next-index. After N failures we surface\n * `LedgerContentionError` so the caller can decide whether to retry,\n * queue, or alert.\n */\nconst MAX_APPEND_ATTEMPTS = 8\n\n// — re-export the constants + helper so any existing\n// `import { LEDGER_COLLECTION } from '...store.js'` paths keep\n// working. Internal core paths (vault.ts) import from the leaf\n// modules directly to avoid pulling this file's class into the\n// floor bundle.\nexport { LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION, envelopePayloadHash }\n\n/**\n * Input shape for `LedgerStore.append()`. The caller supplies the\n * operation metadata; the store fills in `index` and `prevHash`.\n */\nexport interface AppendInput {\n op: LedgerEntry['op']\n collection: string\n id: string\n version: number\n actor: string\n payloadHash: string\n /**\n * Optional JSON Patch representing the delta from the previous\n * version to the new version. Present only for `put` operations\n * that had a previous version; omitted for genesis puts and for\n * deletes. When present, `LedgerStore.append` persists the patch\n * in `_ledger_deltas/<paddedIndex>` and records its sha256 hash\n * as the entry's `deltaHash` field.\n */\n delta?: JsonPatch\n}\n\n/**\n * Result of `LedgerStore.verify()`. On success, `head` is the hash of\n * the last entry — the same value that should be published to any\n * external anchoring service (blockchain, OpenTimestamps, etc.). On\n * failure, `divergedAt` is the 0-based index of the first entry whose\n * recorded `prevHash` does not match the recomputed hash of its\n * predecessor. 
Entries at `divergedAt` and later are untrustworthy;\n * entries before that index are still valid.\n */\nexport type VerifyResult =\n | { readonly ok: true; readonly head: string; readonly length: number }\n | {\n readonly ok: false\n readonly divergedAt: number\n readonly expected: string\n readonly actual: string\n }\n\n/**\n * A LedgerStore is bound to a single vault. Callers obtain one\n * via `vault.ledger()` — there is no public constructor to keep\n * the hash-chain invariants in one place.\n *\n * The class holds no mutable state beyond its dependencies (adapter,\n * vault name, DEK resolver, actor id). Every method reads the\n * adapter fresh so multiple instances against the same vault\n * see each other's writes immediately (at the cost of re-parsing the\n * ledger on every head() / verify() call; acceptable at scale).\n */\nexport class LedgerStore {\n private readonly adapter: NoydbStore\n private readonly vault: string\n private readonly encrypted: boolean\n private readonly getDEK: (collectionName: string) => Promise<CryptoKey>\n private readonly actor: string\n\n /**\n * In-memory cache of the chain head — the most recently appended\n * entry along with its precomputed hash. Without this, every\n * `append()` would re-load every prior entry to recompute the\n * prevHash, making N puts O(N²) — a 1K-record stress test goes from\n * < 100ms to a multi-second timeout.\n *\n * The cache is populated on first read (`append`, `head`, `verify`)\n * and updated in-place on every successful `append`. Single-writer\n * usage (the assumption) keeps it consistent. 
A second\n * LedgerStore instance writing to the same vault would not\n * see the first instance's appends in its cached state — that's the\n * concurrency caveat documented at the class level.\n *\n * Sentinel `undefined` means \"not yet loaded\"; an explicit `null`\n * value means \"loaded and confirmed empty\" — distinguishing these\n * matters because an empty ledger is a valid state (genesis prevHash\n * is the empty string), and we don't want to re-scan the adapter\n * just because the chain is freshly initialized.\n */\n private headCache: { entry: LedgerEntry; hash: string } | null | undefined = undefined\n\n constructor(opts: {\n adapter: NoydbStore\n vault: string\n encrypted: boolean\n getDEK: (collectionName: string) => Promise<CryptoKey>\n actor: string\n }) {\n this.adapter = opts.adapter\n this.vault = opts.vault\n this.encrypted = opts.encrypted\n this.getDEK = opts.getDEK\n this.actor = opts.actor\n }\n\n /**\n * Lazily load (or return cached) the current chain head. The cache\n * sentinel is `undefined` until first access; after the first call,\n * the cache holds either a `{ entry, hash }` for non-empty ledgers\n * or `null` for empty ones.\n */\n private async getCachedHead(): Promise<{ entry: LedgerEntry; hash: string } | null> {\n if (this.headCache !== undefined) return this.headCache\n const entries = await this.loadAllEntries()\n const last = entries[entries.length - 1]\n if (!last) {\n this.headCache = null\n return null\n }\n this.headCache = { entry: last, hash: await hashEntry(last) }\n return this.headCache\n }\n\n /**\n * Append a new entry to the ledger. Returns the full entry that was\n * written (with its assigned index and computed prevHash) so the\n * caller can use the hash for downstream purposes (e.g., embedding\n * in a verifiable backup).\n *\n * This is the **only** way to add entries. 
Direct adapter writes to\n * `_ledger/` would bypass the chain math and would be caught by the\n * next `verify()` call as a divergence.\n *\n * ## Multi-writer correctness\n *\n * Append is implemented as an optimistic-CAS retry loop. On every\n * attempt:\n *\n * 1. Read fresh head (cache invalidated on retry).\n * 2. Compute `nextIndex = head.index + 1`, `prevHash = hash(head)`.\n * 3. Encrypt delta payload IN MEMORY (no adapter write yet) so we\n * can compute `deltaHash` before claiming the chain slot.\n * 4. Build + encrypt the entry envelope.\n * 5. `adapter.put(_ledger, paddedIndex, envelope, expectedVersion: 0)`\n * — the `expectedVersion: 0` asserts \"this slot must not exist.\"\n * Stores with `casAtomic: true` honor the CAS check; under\n * contention the second writer's put throws `ConflictError`.\n * 6. On `ConflictError`: invalidate the head cache, sleep with\n * bounded backoff + jitter, retry. After `MAX_APPEND_ATTEMPTS`\n * retries throw {@link LedgerContentionError}.\n * 7. On success: write the delta envelope (if any) at the same\n * index. Update the head cache.\n *\n * Entry-first ordering matters: writing the delta first under\n * contention would orphan delta records at indices the writer never\n * actually claimed. The deltaHash is computed off the encrypted\n * envelope's `_data` field, which doesn't require the envelope to\n * be persisted.\n *\n * Stores with `casAtomic: false` (file, s3, r2 by default) silently\n * accept the `expectedVersion: 0` argument and proceed without a\n * CAS check. Concurrent appends against those stores remain\n * best-effort — pair them with an advisory lock or with sync\n * single-writer discipline.\n */\n async append(input: AppendInput): Promise<LedgerEntry> {\n let lastConflict: ConflictError | undefined\n for (let attempt = 0; attempt < MAX_APPEND_ATTEMPTS; attempt++) {\n // Force a fresh head read on every retry. 
The first attempt may\n // hit the cache; subsequent attempts must re-scan the adapter\n // because the prior conflict means our cached state is stale.\n if (attempt > 0) {\n this.headCache = undefined\n }\n try {\n return await this.appendOnce(input)\n } catch (err) {\n if (err instanceof ConflictError) {\n lastConflict = err\n if (attempt < MAX_APPEND_ATTEMPTS - 1) {\n await sleepBackoff(attempt)\n }\n continue\n }\n throw err\n }\n }\n void lastConflict\n throw new LedgerContentionError(MAX_APPEND_ATTEMPTS)\n }\n\n /**\n * One attempt at the append cycle. Throws `ConflictError` when the\n * CAS check on the entry put fails — `append()` catches that and\n * retries. Any other error propagates to the caller.\n */\n private async appendOnce(input: AppendInput): Promise<LedgerEntry> {\n const cached = await this.getCachedHead()\n const lastEntry = cached?.entry\n const prevHash = cached?.hash ?? ''\n const nextIndex = lastEntry ? lastEntry.index + 1 : 0\n\n // Encrypt the delta in memory so we can compute deltaHash WITHOUT\n // claiming the deltas slot yet — entry-put is the chain claim.\n let deltaEnvelope: EncryptedEnvelope | undefined\n let deltaHash: string | undefined\n if (input.delta !== undefined) {\n deltaEnvelope = await this.encryptDelta(input.delta)\n deltaHash = await sha256Hex(deltaEnvelope._data)\n }\n\n // Build the entry. Conditionally include `deltaHash` so\n // canonicalJson (which rejects undefined) never sees it when\n // there's no delta.\n const entryBase = {\n index: nextIndex,\n prevHash,\n op: input.op,\n collection: input.collection,\n id: input.id,\n version: input.version,\n ts: new Date().toISOString(),\n actor: input.actor === '' ? this.actor : input.actor,\n payloadHash: input.payloadHash,\n } as const\n const entry: LedgerEntry =\n deltaHash !== undefined\n ? 
{ ...entryBase, deltaHash }\n : entryBase\n\n const envelope = await this.encryptEntry(entry)\n // expectedVersion: 0 ≡ \"the slot must not yet exist.\" Honored by\n // casAtomic stores; silently passed through by non-CAS stores.\n await this.adapter.put(\n this.vault,\n LEDGER_COLLECTION,\n paddedIndex(entry.index),\n envelope,\n 0,\n )\n\n // Chain slot claimed. Now write the delta record (if any).\n if (deltaEnvelope) {\n await this.adapter.put(\n this.vault,\n LEDGER_DELTAS_COLLECTION,\n paddedIndex(entry.index),\n deltaEnvelope,\n 0,\n )\n }\n\n // Update the head cache so the next append() doesn't re-scan the\n // adapter.\n this.headCache = { entry, hash: await hashEntry(entry) }\n return entry\n }\n\n /**\n * Load a delta payload by its entry index. Returns `null` if the\n * entry at that index doesn't reference a delta (genesis puts and\n * deletes leave the slot empty) or if the delta row is missing\n * (possible after a `pruneHistory` fold).\n *\n * The caller is responsible for deciding what to do with a missing\n * delta — `ledger.reconstruct()` uses it as a \"stop walking\n * backward\" signal and falls back to the on-disk current value.\n */\n async loadDelta(index: number): Promise<JsonPatch | null> {\n const envelope = await this.adapter.get(\n this.vault,\n LEDGER_DELTAS_COLLECTION,\n paddedIndex(index),\n )\n if (!envelope) return null\n if (!this.encrypted) {\n return JSON.parse(envelope._data) as JsonPatch\n }\n const dek = await this.getDEK(LEDGER_COLLECTION)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return JSON.parse(json) as JsonPatch\n }\n\n /** Encrypt a JSON Patch into an envelope for storage. Mirrors encryptEntry. 
*/\n private async encryptDelta(patch: JsonPatch): Promise<EncryptedEnvelope> {\n const json = JSON.stringify(patch)\n if (!this.encrypted) {\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: '',\n _data: json,\n _by: this.actor,\n }\n }\n const dek = await this.getDEK(LEDGER_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: iv,\n _data: data,\n _by: this.actor,\n }\n }\n\n /**\n * Read all entries in ascending-index order. Used internally by\n * `append()`, `head()`, `verify()`, and `entries()`. Decryption is\n * serial because the entries are tiny and the overhead of a Promise\n * pool would dominate at realistic chain lengths (< 100K entries).\n */\n async loadAllEntries(): Promise<LedgerEntry[]> {\n const keys = await this.adapter.list(this.vault, LEDGER_COLLECTION)\n // Sort lexicographically, which matches numeric order because\n // keys are zero-padded to 10 digits.\n keys.sort()\n const entries: LedgerEntry[] = []\n for (const key of keys) {\n const envelope = await this.adapter.get(\n this.vault,\n LEDGER_COLLECTION,\n key,\n )\n if (!envelope) continue\n entries.push(await this.decryptEntry(envelope))\n }\n return entries\n }\n\n /**\n * Return the current head of the ledger: the last entry, its hash,\n * and the total chain length. `null` on an empty ledger so callers\n * can distinguish \"no history yet\" from \"empty history\".\n */\n async head(): Promise<\n | { readonly entry: LedgerEntry; readonly hash: string; readonly length: number }\n | null\n > {\n const cached = await this.getCachedHead()\n if (!cached) return null\n // `length` is `entry.index + 1` because indices are zero-based and\n // contiguous. 
We don't need to re-scan the adapter to compute it.\n return {\n entry: cached.entry,\n hash: cached.hash,\n length: cached.entry.index + 1,\n }\n }\n\n /**\n * Return entries in the requested half-open range `[from, to)`.\n * Defaults: `from = 0`, `to = length`. The indices are clipped to\n * the valid range; no error is thrown for out-of-range queries.\n */\n async entries(opts: { from?: number; to?: number } = {}): Promise<LedgerEntry[]> {\n const all = await this.loadAllEntries()\n const from = Math.max(0, opts.from ?? 0)\n const to = Math.min(all.length, opts.to ?? all.length)\n return all.slice(from, to)\n }\n\n /**\n * Reconstruct a record's state at a given historical version by\n * walking the ledger's delta chain backward from the current state.\n *\n * ## Algorithm\n *\n * Ledger deltas are stored in **reverse** form — each entry's\n * patch describes how to undo that put, transforming the new\n * record back into the previous one. `reconstruct` exploits this\n * by:\n *\n * 1. Finding every ledger entry for `(collection, id)` in the\n * chain, sorted by index ascending.\n * 2. Starting from `current` (the present value of the record,\n * as held by the caller — typically fetched via\n * `Collection.get()`).\n * 3. Walking entries in **descending** index order and applying\n * each entry's reverse patch, stopping when we reach the\n * entry whose version equals `atVersion`.\n *\n * The result is the record as it existed immediately AFTER the\n * put at `atVersion`. To get the state at the genesis put\n * (version 1), the walk runs all the way back through every put\n * after the first.\n *\n * ## Caveats\n *\n * - **Delete entries** break the walk: once we see a delete, the\n * record didn't exist before that point, so there's nothing to\n * reconstruct. We return `null` in that case.\n * - **Missing deltas** (e.g., after `pruneHistory` folds old\n * entries into a base snapshot) also stop the walk. 
does\n * not ship pruneHistory, so today this only happens if an entry\n * was deleted out-of-band.\n * - The caller MUST pass the correct current value. Passing a\n * mutated object would corrupt the reconstruction — the patch\n * chain is only valid against the exact state that was in\n * effect when the most recent put happened.\n *\n * For, `reconstruct` is the only way to read a historical\n * version via deltas. The legacy `_history` collection still\n * holds full snapshots and `Collection.getVersion()` still reads\n * from there — the two paths coexist until pruneHistory lands in\n * a follow-up and delta becomes the default.\n */\n async reconstruct<T>(\n collection: string,\n id: string,\n current: T,\n atVersion: number,\n ): Promise<T | null> {\n const all = await this.loadAllEntries()\n // Filter to entries for this (collection, id), in ascending index.\n const matching = all.filter(\n (e) => e.collection === collection && e.id === id,\n )\n if (matching.length === 0) {\n // No ledger history at all; the current state IS version 1\n // (or there's nothing), so the only valid atVersion is the\n // current record's version. We can't verify that here, so\n // return current if atVersion is plausible, null otherwise.\n return null\n }\n\n // Walk entries in descending index order, applying each reverse\n // delta until we reach the target version.\n let state: T | null = current\n for (let i = matching.length - 1; i >= 0; i--) {\n const entry = matching[i]\n if (!entry) continue\n\n // Match check FIRST — before applying this entry's reverse\n // patch. `state` at this point is the record state immediately\n // after this entry's put (or before this entry's delete), so\n // if the caller asked for this exact version, we're done.\n if (entry.version === atVersion && entry.op !== 'delete') {\n return state\n }\n\n if (entry.op === 'delete') {\n // A delete erases the live state. 
If the caller asks for a\n // version older than the delete we should continue walking\n // (state becomes null and the next put resets it). But we\n // can't reconstruct that pre-delete state from the current\n // in-memory `state` — the delete has no reverse patch. So\n // anything past this point is unreachable; return null.\n return null\n }\n\n if (entry.deltaHash === undefined) {\n // Genesis put — the earliest state for this lifecycle. We\n // can't walk further back. If the caller asked for exactly\n // this version, return the current state (we already failed\n // the match check above because a fresh genesis after a\n // delete can have version === atVersion). Otherwise the\n // target is unreachable from here.\n if (entry.version === atVersion) return state\n return null\n }\n\n const patch = await this.loadDelta(entry.index)\n if (!patch) {\n // Delta row is missing (probably pruned). Stop walking.\n return null\n }\n\n if (state === null) {\n // We're trying to walk back across a delete range and there's\n // nothing to apply a reverse patch to. Bail.\n return null\n }\n\n state = applyPatch(state, patch)\n }\n\n // Ran off the end of the walk without matching. The target\n // version doesn't exist in this record's chain.\n return null\n }\n\n /**\n * Walk the chain from genesis forward and verify every link.\n *\n * Returns `{ ok: true, head, length }` if every entry's `prevHash`\n * matches the recomputed hash of its predecessor (and the genesis\n * entry's `prevHash` is the empty string).\n *\n * Returns `{ ok: false, divergedAt, expected, actual }` on the first\n * mismatch. 
`divergedAt` is the 0-based index of the BROKEN entry\n * — entries before that index still verify cleanly; entries at and\n * after `divergedAt` are untrustworthy.\n *\n * This method detects:\n * - Mutated entry content (fields changed)\n * - Reordered entries (if any adjacent pair swaps, the prevHash\n * of the second no longer matches)\n * - Inserted entries (the inserted entry's prevHash likely fails,\n * and the following entry's prevHash definitely fails)\n * - Deleted entries (the entry after the deletion sees a wrong\n * prevHash)\n *\n * It does NOT detect:\n * - Tampering with the DATA collections that bypassed the ledger\n * entirely (e.g., an attacker who modifies records without\n * appending matching ledger entries — this is why we also\n * plan a `verifyIntegrity()` helper in a follow-up)\n * - Truncation of the chain at the tail (dropping the last N\n * entries leaves a shorter but still consistent chain). External\n * anchoring of `head.hash` to a trusted service is the defense\n * against this.\n */\n async verify(): Promise<VerifyResult> {\n const entries = await this.loadAllEntries()\n let expectedPrevHash = ''\n for (let i = 0; i < entries.length; i++) {\n const entry = entries[i]\n if (!entry) continue\n if (entry.prevHash !== expectedPrevHash) {\n return {\n ok: false,\n divergedAt: i,\n expected: expectedPrevHash,\n actual: entry.prevHash,\n }\n }\n if (entry.index !== i) {\n // An entry whose stored index doesn't match its position in\n // the sorted list means someone rewrote the adapter keys.\n // Treat as divergence.\n return {\n ok: false,\n divergedAt: i,\n expected: `index=${i}`,\n actual: `index=${entry.index}`,\n }\n }\n expectedPrevHash = await hashEntry(entry)\n }\n return {\n ok: true,\n head: expectedPrevHash,\n length: entries.length,\n }\n }\n\n // ─── Encryption plumbing ─────────────────────────────────────────\n\n /**\n * Serialize + encrypt a ledger entry into an EncryptedEnvelope. 
The\n * envelope's `_v` field is set to `entry.index + 1` so the usual\n * optimistic-concurrency machinery has a reasonable version number\n * to compare against (the ledger is append-only, so concurrent\n * writes should always bump the index).\n */\n private async encryptEntry(entry: LedgerEntry): Promise<EncryptedEnvelope> {\n const json = canonicalJson(entry)\n if (!this.encrypted) {\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: entry.index + 1,\n _ts: entry.ts,\n _iv: '',\n _data: json,\n _by: entry.actor,\n }\n }\n const dek = await this.getDEK(LEDGER_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: entry.index + 1,\n _ts: entry.ts,\n _iv: iv,\n _data: data,\n _by: entry.actor,\n }\n }\n\n /** Decrypt an envelope into a LedgerEntry. Throws on bad key / tamper. */\n private async decryptEntry(envelope: EncryptedEnvelope): Promise<LedgerEntry> {\n if (!this.encrypted) {\n return JSON.parse(envelope._data) as LedgerEntry\n }\n const dek = await this.getDEK(LEDGER_COLLECTION)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return JSON.parse(json) as LedgerEntry\n }\n}\n\n// `envelopePayloadHash` was moved to `./hash.ts` so it can be\n// imported by core code without dragging this file's `LedgerStore`\n// class into the floor bundle. The re-export at the top of this\n// file keeps the original `import { envelopePayloadHash } from '.../store.js'`\n// path working.\n\n/**\n * Exponential backoff with jitter for the append CAS retry loop.\n * Attempt 0 → ~5–10 ms, attempt 7 → ~640–1280 ms. 
Jitter avoids the\n * thundering-herd problem when multiple writers collide repeatedly.\n */\nfunction sleepBackoff(attempt: number): Promise<void> {\n const base = 5 * Math.pow(2, attempt)\n const jitter = Math.random() * base\n return new Promise((resolve) => setTimeout(resolve, base + jitter))\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AA4EO,SAAS,aAAa,MAAe,MAA0B;AACpE,QAAM,MAAqB,CAAC;AAC5B,OAAK,MAAM,MAAM,IAAI,GAAG;AACxB,SAAO;AACT;AAEA,SAAS,KACP,MACA,MACA,MACA,KACM;AAGN,MAAI,SAAS,KAAM;AAGnB,MAAI,SAAS,QAAQ,SAAS,MAAM;AAClC,QAAI,KAAK,EAAE,IAAI,WAAW,MAAM,OAAO,KAAK,CAAC;AAC7C;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,QAAQ,IAAI;AACtC,QAAM,cAAc,MAAM,QAAQ,IAAI;AACtC,QAAM,eAAe,OAAO,SAAS,YAAY,CAAC;AAClD,QAAM,eAAe,OAAO,SAAS,YAAY,CAAC;AAGlD,MAAI,gBAAgB,eAAe,iBAAiB,cAAc;AAChE,QAAI,KAAK,EAAE,IAAI,WAAW,MAAM,OAAO,KAAK,CAAC;AAC7C;AAAA,EACF;AAKA,MAAI,eAAe,aAAa;AAC9B,QAAI,CAAC,eAAe,MAAmB,IAAiB,GAAG;AACzD,UAAI,KAAK,EAAE,IAAI,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,IAC/C;AACA;AAAA,EACF;AAGA,MAAI,gBAAgB,cAAc;AAChC,UAAM,UAAU;AAChB,UAAM,UAAU;AAChB,UAAM,WAAW,OAAO,KAAK,OAAO;AACpC,UAAM,WAAW,OAAO,KAAK,OAAO;AAGpC,eAAW,OAAO,UAAU;AAC1B,YAAM,YAAY,OAAO,MAAM,kBAAkB,GAAG;AACpD,UAAI,EAAE,OAAO,UAAU;AACrB,YAAI,KAAK,EAAE,IAAI,UAAU,MAAM,UAAU,CAAC;AAAA,MAC5C,OAAO;AACL,aAAK,QAAQ,GAAG,GAAG,QAAQ,GAAG,GAAG,WAAW,GAAG;AAAA,MACjD;AAAA,IACF;AAEA,eAAW,OAAO,UAAU;AAC1B,UAAI,EAAE,OAAO,UAAU;AACrB,YAAI,KAAK;AAAA,UACP,IAAI;AAAA,UACJ,MAAM,OAAO,MAAM,kBAAkB,GAAG;AAAA,UACxC,OAAO,QAAQ,GAAG;AAAA,QACpB,CAAC;AAAA,MACH;AAAA,IACF;AACA;AAAA,EACF;AAGA,MAAI,KAAK,EAAE,IAAI,WAAW,MAAM,OAAO,KAAK,CAAC;AAC/C;AAEA,SAAS,eAAe,GAAc,GAAuB;AAC3D,MAAI,EAAE,WAAW,EAAE,OAAQ,QAAO;AAClC,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,QAAI,CAAC,UAAU,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,EAAG,QAAO;AAAA,EACrC;AACA,SAAO;AACT;AAEA,SAAS,UAAU,GAAY,GAAqB;AAClD,MAAI,MAAM,EAAG,QAAO;AACpB,MAAI,MAAM,QAAQ,MAAM,KAAM,QAAO;AACrC,MAAI,OAAO,MAAM,OAAO,EAAG,QAAO;AAClC,MAAI,OAAO,MAAM,SAAU,QAAO;AAClC,QAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,QAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,MAAI,WAAW,OAAQ,QAAO;AAC9B,MAAI,UAAU,OAAQ,QAAO,eAAe,
GAAG,CAAc;AAC7D,QAAM,OAAO;AACb,QAAM,OAAO;AACb,QAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,QAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,MAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAC1C,aAAW,OAAO,OAAO;AACvB,QAAI,EAAE,OAAO,MAAO,QAAO;AAC3B,QAAI,CAAC,UAAU,KAAK,GAAG,GAAG,KAAK,GAAG,CAAC,EAAG,QAAO;AAAA,EAC/C;AACA,SAAO;AACT;AAsBO,SAAS,WAAwB,MAAS,OAAqB;AACpE,MAAI,SAAkB,MAAM,IAAI;AAChC,aAAW,MAAM,OAAO;AACtB,aAAS,QAAQ,QAAQ,EAAE;AAAA,EAC7B;AACA,SAAO;AACT;AAEA,SAAS,QAAQ,KAAc,IAA0B;AAIvD,MAAI,GAAG,SAAS,IAAI;AAClB,QAAI,GAAG,OAAO,SAAU,QAAO;AAC/B,WAAO,MAAM,GAAG,KAAK;AAAA,EACvB;AAEA,QAAM,WAAW,UAAU,GAAG,IAAI;AAClC,SAAO,aAAa,KAAK,UAAU,EAAE;AACvC;AAEA,SAAS,aACP,KACA,UACA,IACS;AACT,MAAI,SAAS,WAAW,GAAG;AAEzB,UAAM,IAAI,MAAM,+CAA+C;AAAA,EACjE;AAEA,QAAM,CAAC,MAAM,GAAG,IAAI,IAAI;AACxB,MAAI,SAAS,OAAW,OAAM,IAAI,MAAM,iCAAiC;AAEzE,MAAI,KAAK,WAAW,GAAG;AACrB,WAAO,gBAAgB,KAAK,MAAM,EAAE;AAAA,EACtC;AAIA,MAAI,MAAM,QAAQ,GAAG,GAAG;AACtB,UAAM,MAAM,gBAAgB,MAAM,IAAI,MAAM;AAC5C,UAAM,QAAQ,IAAI,GAAG;AACrB,UAAM,WAAW,aAAa,OAAO,MAAM,EAAE;AAC7C,UAAM,OAAO,IAAI,MAAM;AACvB,SAAK,GAAG,IAAI;AACZ,WAAO;AAAA,EACT;AACA,MAAI,QAAQ,QAAQ,OAAO,QAAQ,UAAU;AAC3C,UAAM,MAAM;AACZ,QAAI,EAAE,QAAQ,MAAM;AAClB,YAAM,IAAI,MAAM,6BAA6B,IAAI,uBAAuB;AAAA,IAC1E;AACA,UAAM,WAAW,aAAa,IAAI,IAAI,GAAG,MAAM,EAAE;AACjD,WAAO,EAAE,GAAG,KAAK,CAAC,IAAI,GAAG,SAAS;AAAA,EACpC;AACA,QAAM,IAAI;AAAA,IACR,gCAAgC,OAAO,GAAG,gBAAgB,IAAI;AAAA,EAChE;AACF;AAEA,SAAS,gBACP,KACA,SACA,IACS;AACT,MAAI,MAAM,QAAQ,GAAG,GAAG;AACtB,UAAM,MACJ,YAAY,MAAM,IAAI,SAAS,gBAAgB,SAAS,IAAI,SAAS,CAAC;AACxE,UAAM,OAAO,IAAI,MAAM;AACvB,QAAI,GAAG,OAAO,UAAU;AACtB,WAAK,OAAO,KAAK,CAAC;AAClB,aAAO;AAAA,IACT;AACA,QAAI,GAAG,OAAO,OAAO;AACnB,WAAK,OAAO,KAAK,GAAG,MAAM,GAAG,KAAK,CAAC;AACnC,aAAO;AAAA,IACT;AACA,QAAI,GAAG,OAAO,WAAW;AACvB,UAAI,OAAO,IAAI,QAAQ;AACrB,cAAM,IAAI;AAAA,UACR,oDAAoD,GAAG;AAAA,QACzD;AAAA,MACF;AACA,WAAK,GAAG,IAAI,MAAM,GAAG,KAAK;AAC1B,aAAO;AAAA,IACT;AAAA,EACF;AACA,MAAI,QAAQ,QAAQ,OAAO,QAAQ,UAAU;AAC3C,UAAM,MAAM;AACZ,QAAI,GAAG,OAAO,UAAU;AACtB,UAAI,EAAE,WAAW,MAAM;AACrB,cAAM,IAAI;AAAA,UACR,sCAAsC,OAAO;AAAA,QAC/C;AAAA,MACF;AACA,YAAM,OAAO,EAA
E,GAAG,IAAI;AACtB,aAAO,KAAK,OAAO;AACnB,aAAO;AAAA,IACT;AACA,QAAI,GAAG,OAAO,OAAO;AAEnB,aAAO,EAAE,GAAG,KAAK,CAAC,OAAO,GAAG,MAAM,GAAG,KAAK,EAAE;AAAA,IAC9C;AACA,QAAI,GAAG,OAAO,WAAW;AACvB,UAAI,EAAE,WAAW,MAAM;AACrB,cAAM,IAAI;AAAA,UACR,uCAAuC,OAAO;AAAA,QAChD;AAAA,MACF;AACA,aAAO,EAAE,GAAG,KAAK,CAAC,OAAO,GAAG,MAAM,GAAG,KAAK,EAAE;AAAA,IAC9C;AAAA,EACF;AACA,QAAM,IAAI;AAAA,IACR,4BAA4B,GAAG,EAAE,yBAAyB,OAAO;AAAA,EACnE;AACF;AAYA,SAAS,kBAAkB,SAAyB;AAClD,SAAO,QAAQ,QAAQ,MAAM,IAAI,EAAE,QAAQ,OAAO,IAAI;AACxD;AAEA,SAAS,oBAAoB,SAAyB;AACpD,SAAO,QAAQ,QAAQ,OAAO,GAAG,EAAE,QAAQ,OAAO,GAAG;AACvD;AAEA,SAAS,UAAU,MAAwB;AACzC,MAAI,CAAC,KAAK,WAAW,GAAG,GAAG;AACzB,UAAM,IAAI,MAAM,8CAA8C,IAAI,GAAG;AAAA,EACvE;AACA,SAAO,KACJ,MAAM,CAAC,EACP,MAAM,GAAG,EACT,IAAI,mBAAmB;AAC5B;AAEA,SAAS,gBAAgB,SAAiB,KAAqB;AAC7D,MAAI,CAAC,QAAQ,KAAK,OAAO,GAAG;AAC1B,UAAM,IAAI;AAAA,MACR,gEAAgE,OAAO;AAAA,IACzE;AAAA,EACF;AACA,QAAM,MAAM,OAAO,SAAS,SAAS,EAAE;AACvC,MAAI,MAAM,KAAK,MAAM,KAAK;AACxB,UAAM,IAAI;AAAA,MACR,2BAA2B,GAAG,qBAAqB,GAAG;AAAA,IACxD;AAAA,EACF;AACA,SAAO;AACT;AAeA,SAAS,MAAS,OAAa;AAC7B,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,OAAO,UAAU,SAAU,QAAO;AACtC,SAAO,KAAK,MAAM,KAAK,UAAU,KAAK,CAAC;AACzC;;;AC5WO,IAAM,oBAAoB;AAuB1B,IAAM,2BAA2B;;;AC+BxC,IAAM,sBAAsB;AA4DrB,IAAM,cAAN,MAAkB;AAAA,EACN;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBT,YAAqE;AAAA,EAE7E,YAAY,MAMT;AACD,SAAK,UAAU,KAAK;AACpB,SAAK,QAAQ,KAAK;AAClB,SAAK,YAAY,KAAK;AACtB,SAAK,SAAS,KAAK;AACnB,SAAK,QAAQ,KAAK;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,gBAAsE;AAClF,QAAI,KAAK,cAAc,OAAW,QAAO,KAAK;AAC9C,UAAM,UAAU,MAAM,KAAK,eAAe;AAC1C,UAAM,OAAO,QAAQ,QAAQ,SAAS,CAAC;AACvC,QAAI,CAAC,MAAM;AACT,WAAK,YAAY;AACjB,aAAO;AAAA,IACT;AACA,SAAK,YAAY,EAAE,OAAO,MAAM,MAAM,MAAM,UAAU,IAAI,EAAE;AAC5D,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA4CA,MAAM,OAAO,OAA0C;AACrD,QAAI;AACJ,aAAS,UAAU,GAAG,UAAU,qBAAqB,WAAW;AAI9D,UAAI,UAAU,GAAG;AACf,aAAK,YAAY;AAAA,MACnB;AACA,UAAI;AACF,eAAO,MAAM,KAAK,WAAW,KAAK;AAAA,MACpC,SAAS,KAAK;AACZ,YAAI,eAAe,eAAe;AAChC,yBAAe;AACf,cAAI,UAAU,sBAAsB,GAAG;AACrC,kBAAM,aAAa,OAAO;AAAA,UAC5B;AACA;AAAA,QACF;AACA,cAAM;AAAA,MACR;AAAA,IACF;AACA,SAAK;AACL,UAAM,IAAI,sBAAsB,mBAAmB;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,WAAW,OAA0C;AACjE,UAAM,SAAS,MAAM,KAAK,cAAc;AACxC,UAAM,YAAY,QAAQ;AAC1B,UAAM,WAAW,QAAQ,QAAQ;AACjC,UAAM,YAAY,YAAY,UAAU,QAAQ,IAAI;AAIpD,QAAI;AACJ,QAAI;AACJ,QAAI,MAAM,UAAU,QAAW;AAC7B,sBAAgB,MAAM,KAAK,aAAa,MAAM,KAAK;AACnD,kBAAY,MAAM,UAAU,cAAc,KAAK;AAAA,IACjD;AAKA,UAAM,YAAY;AAAA,MAChB,OAAO;AAAA,MACP;AAAA,MACA,IAAI,MAAM;AAAA,MACV,YAAY,MAAM;AAAA,MAClB,IAAI,MAAM;AAAA,MACV,SAAS,MAAM;AAAA,MACf,KAAI,oBAAI,KAAK,GAAE,YAAY;AAAA,MAC3B,OAAO,MAAM,UAAU,KAAK,KAAK,QAAQ,MAAM;AAAA,MAC/C,aAAa,MAAM;AAAA,IACrB;AACA,UAAM,QACJ,cAAc,SACV,EAAE,GAAG,WAAW,UAAU,IAC1B;AAEN,UAAM,WAAW,MAAM,KAAK,aAAa,KAAK;AAG9C,UAAM,KAAK,QAAQ;AAAA,MACjB,KAAK;AAAA,MACL;AAAA,MACA,YAAY,MAAM,KAAK;AAAA,MACvB;AAAA,MACA;AAAA,IACF;AAGA,QAAI,eAAe;AACjB,YAAM,KAAK,QAAQ;AAAA,QACjB,KAAK;AAAA,QACL;AAAA,QACA,YAAY,MAAM,KAAK;AAAA,QACvB;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAIA,SAAK,YAAY,EAAE,OAAO,MAAM,MAAM,UAAU,KAAK,EAAE;AACvD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,UAAU,OAA0C;AACxD,UAAM,WAAW,MAAM,KAAK,QAAQ;AAAA,MAClC,KAAK;AAAA,MACL;AAAA,MACA,YAAY,KAAK;AAAA,IACnB;AACA,QAAI,CAAC,SAAU,QAAO;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO,KAAK,MAAM,SAAS,KAAK;AAAA,IAClC;AACA,UAAM,MAAM,MAAM,KAAK,OAAO,iBAAiB;AAC/C,UAAM,OAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAC5D,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB;AAAA;AAAA,EAGA,MAAc,aAAa,OAA8C;AACvE,UAAM,OAAO,KAAK,UAAU,KAAK;AACjC,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,IAAI;AAAA,QACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,QAC5B,KAAK;AAAA,QACL,OAAO;AAAA,QACP,KAAK,KAAK;AAAA,MACZ;AAAA,IACF;AACA,UAAM,MAAM,MAAM,KAAK,OAAO,iBAAiB;AAC/C,UAAM,EAAE,IAA
I,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,MAC5B,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK,KAAK;AAAA,IACZ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,iBAAyC;AAC7C,UAAM,OAAO,MAAM,KAAK,QAAQ,KAAK,KAAK,OAAO,iBAAiB;AAGlE,SAAK,KAAK;AACV,UAAM,UAAyB,CAAC;AAChC,eAAW,OAAO,MAAM;AACtB,YAAM,WAAW,MAAM,KAAK,QAAQ;AAAA,QAClC,KAAK;AAAA,QACL;AAAA,QACA;AAAA,MACF;AACA,UAAI,CAAC,SAAU;AACf,cAAQ,KAAK,MAAM,KAAK,aAAa,QAAQ,CAAC;AAAA,IAChD;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,OAGJ;AACA,UAAM,SAAS,MAAM,KAAK,cAAc;AACxC,QAAI,CAAC,OAAQ,QAAO;AAGpB,WAAO;AAAA,MACL,OAAO,OAAO;AAAA,MACd,MAAM,OAAO;AAAA,MACb,QAAQ,OAAO,MAAM,QAAQ;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,QAAQ,OAAuC,CAAC,GAA2B;AAC/E,UAAM,MAAM,MAAM,KAAK,eAAe;AACtC,UAAM,OAAO,KAAK,IAAI,GAAG,KAAK,QAAQ,CAAC;AACvC,UAAM,KAAK,KAAK,IAAI,IAAI,QAAQ,KAAK,MAAM,IAAI,MAAM;AACrD,WAAO,IAAI,MAAM,MAAM,EAAE;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA+CA,MAAM,YACJ,YACA,IACA,SACA,WACmB;AACnB,UAAM,MAAM,MAAM,KAAK,eAAe;AAEtC,UAAM,WAAW,IAAI;AAAA,MACnB,CAAC,MAAM,EAAE,eAAe,cAAc,EAAE,OAAO;AAAA,IACjD;AACA,QAAI,SAAS,WAAW,GAAG;AAKzB,aAAO;AAAA,IACT;AAIA,QAAI,QAAkB;AACtB,aAAS,IAAI,SAAS,SAAS,GAAG,KAAK,GAAG,KAAK;AAC7C,YAAM,QAAQ,SAAS,CAAC;AACxB,UAAI,CAAC,MAAO;AAMZ,UAAI,MAAM,YAAY,aAAa,MAAM,OAAO,UAAU;AACxD,eAAO;AAAA,MACT;AAEA,UAAI,MAAM,OAAO,UAAU;AAOzB,eAAO;AAAA,MACT;AAEA,UAAI,MAAM,cAAc,QAAW;AAOjC,YAAI,MAAM,YAAY,UAAW,QAAO;AACxC,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,MAAM,KAAK,UAAU,MAAM,KAAK;AAC9C,UAAI,CAAC,OAAO;AAEV,eAAO;AAAA,MACT;AAEA,UAAI,UAAU,MAAM;AAGlB,eAAO;AAAA,MACT;AAEA,cAAQ,WAAW,OAAO,KAAK;AAAA,IACjC;AAIA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA,EAiCA,MAAM,SAAgC;AACpC,UAAM,UAAU,MAAM,KAAK,eAAe;AAC1C,QAAI,mBAAmB;AACvB,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,YAAM,QAAQ,QAAQ,CAAC;AACvB,UAAI,CAAC,MAAO;AACZ,UAAI,MAAM,aAAa,kBAAkB;AACvC,eAAO;AAAA,UACL,IAAI;AAAA,UACJ,YAAY;AAAA,UACZ,UAAU;AAAA,UACV,QAAQ,MAAM;AAAA,QAChB;AAAA,MACF;AACA,UAAI,MAAM,UAAU,GAAG;AAIrB,eAAO;AAAA,UACL,IAAI;AAAA,UACJ,YAAY;AAAA,UACZ,UAAU,SAAS,CAAC;AAAA,UACpB,QAAQ,SAAS,MAAM,KAAK;AAAA,QAC9B;AAAA,MACF;AACA,yBAAmB,MAAM,UAAU,KAAK;AAAA,IAC1C;AACA,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,MAAM;AAAA,MACN,QAAQ,QAAQ;AAAA,IAClB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,aAAa,OAAgD;AACzE,UAAM,OAAO,cAAc,KAAK;AAChC,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,IAAI,MAAM,QAAQ;AAAA,QAClB,KAAK,MAAM;AAAA,QACX,KAAK;AAAA,QACL,OAAO;AAAA,QACP,KAAK,MAAM;AAAA,MACb;AAAA,IACF;AACA,UAAM,MAAM,MAAM,KAAK,OAAO,iBAAiB;AAC/C,UAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,IAAI,MAAM,QAAQ;AAAA,MAClB,KAAK,MAAM;AAAA,MACX,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK,MAAM;AAAA,IACb;AAAA,EACF;AAAA;AAAA,EAGA,MAAc,aAAa,UAAmD;AAC5E,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO,KAAK,MAAM,SAAS,KAAK;AAAA,IAClC;AACA,UAAM,MAAM,MAAM,KAAK,OAAO,iBAAiB;AAC/C,UAAM,OAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAC5D,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB;AACF;AAaA,SAAS,aAAa,SAAgC;AACpD,QAAM,OAAO,IAAI,KAAK,IAAI,GAAG,OAAO;AACpC,QAAM,SAAS,KAAK,OAAO,IAAI;AAC/B,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,OAAO,MAAM,CAAC;AACpE;","names":[]}
|
|
@@ -0,0 +1,430 @@
|
|
|
1
|
+
import {
|
|
2
|
+
evaluateClause,
|
|
3
|
+
readPath
|
|
4
|
+
} from "./chunk-M5INGEFC.js";
|
|
5
|
+
import {
|
|
6
|
+
IndexRequiredError
|
|
7
|
+
} from "./chunk-ACLDOTNQ.js";
|
|
8
|
+
|
|
9
|
+
// src/indexing/persisted-indexes.ts
|
|
10
|
+
var IDX_PREFIX = "_idx/";
|
|
11
|
+
function encodeIdxId(field, recordId) {
|
|
12
|
+
return `${IDX_PREFIX}${field}/${recordId}`;
|
|
13
|
+
}
|
|
14
|
+
function decodeIdxId(id) {
|
|
15
|
+
if (!id.startsWith(IDX_PREFIX)) return null;
|
|
16
|
+
const rest = id.slice(IDX_PREFIX.length);
|
|
17
|
+
const firstSlash = rest.indexOf("/");
|
|
18
|
+
if (firstSlash <= 0) return null;
|
|
19
|
+
const field = rest.slice(0, firstSlash);
|
|
20
|
+
const recordId = rest.slice(firstSlash + 1);
|
|
21
|
+
if (recordId.length === 0) return null;
|
|
22
|
+
return { field, recordId };
|
|
23
|
+
}
|
|
24
|
+
function isIdxId(id) {
|
|
25
|
+
return decodeIdxId(id) !== null;
|
|
26
|
+
}
|
|
27
|
+
var COMPOSITE_DELIMITER = "|";
|
|
28
|
+
function compositeKey(fields) {
|
|
29
|
+
return fields.join(COMPOSITE_DELIMITER);
|
|
30
|
+
}
|
|
31
|
+
var PersistedCollectionIndex = class {
|
|
32
|
+
indexes = /* @__PURE__ */ new Map();
|
|
33
|
+
defs = /* @__PURE__ */ new Map();
|
|
34
|
+
/**
|
|
35
|
+
* Declare a single-field index. Subsequent `upsert` / `ingest` calls
|
|
36
|
+
* populate the in-memory mirror; calls before `declare` are no-ops
|
|
37
|
+
* (tolerant bulk-load ordering). Idempotent.
|
|
38
|
+
*/
|
|
39
|
+
declare(field) {
|
|
40
|
+
if (this.indexes.has(field)) return;
|
|
41
|
+
this.indexes.set(field, { buckets: /* @__PURE__ */ new Map(), values: /* @__PURE__ */ new Map() });
|
|
42
|
+
this.defs.set(field, { kind: "single", field, key: field });
|
|
43
|
+
}
|
|
44
|
+
/**
|
|
45
|
+
* Declare a composite (multi-field) index. The synthetic
|
|
46
|
+
* key is `fields.join('|')`; it doubles as the in-memory map key and
|
|
47
|
+
* the `_idx/<key>/<recordId>` side-car field segment. Callers upsert
|
|
48
|
+
* and lookup via the same `key` as single-field indexes, just with a
|
|
49
|
+
* tuple value (JSON-stringified for bucketing).
|
|
50
|
+
*/
|
|
51
|
+
declareComposite(fields) {
|
|
52
|
+
if (fields.length === 0) {
|
|
53
|
+
throw new Error("declareComposite: fields array must be non-empty");
|
|
54
|
+
}
|
|
55
|
+
for (const f of fields) {
|
|
56
|
+
if (f.includes(COMPOSITE_DELIMITER)) {
|
|
57
|
+
throw new Error(
|
|
58
|
+
`declareComposite: field "${f}" contains the composite delimiter "${COMPOSITE_DELIMITER}" \u2014 pick a different field name or open an issue to add hash-based composite keys.`
|
|
59
|
+
);
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
const key = compositeKey(fields);
|
|
63
|
+
if (this.indexes.has(key)) return;
|
|
64
|
+
this.indexes.set(key, { buckets: /* @__PURE__ */ new Map(), values: /* @__PURE__ */ new Map() });
|
|
65
|
+
this.defs.set(key, { kind: "composite", fields: [...fields], key });
|
|
66
|
+
}
|
|
67
|
+
/**
|
|
68
|
+
* Every declared index's structured definition. Collection walks this
|
|
69
|
+
* when materialising side-cars on put/delete so it can extract a
|
|
70
|
+
* single-field value or a composite tuple appropriately.
|
|
71
|
+
*/
|
|
72
|
+
definitions() {
|
|
73
|
+
return [...this.defs.values()];
|
|
74
|
+
}
|
|
75
|
+
/** True if `field` has been declared as indexable on this mirror. */
|
|
76
|
+
has(field) {
|
|
77
|
+
return this.indexes.has(field);
|
|
78
|
+
}
|
|
79
|
+
/** All declared field names, in declaration order. */
|
|
80
|
+
fields() {
|
|
81
|
+
return [...this.indexes.keys()];
|
|
82
|
+
}
|
|
83
|
+
/**
|
|
84
|
+
* Bulk-load the mirror from decrypted index bodies. Intended to be
|
|
85
|
+
* called once per field after reading the collection's `_idx/<field>/*`
|
|
86
|
+
* side-cars. Safe to call twice with the same rows — bucket Sets
|
|
87
|
+
* deduplicate recordIds. If `field` is not declared, this is a no-op
|
|
88
|
+
* (tolerates the case where bulk-load runs before `declare()` lands).
|
|
89
|
+
*/
|
|
90
|
+
ingest(field, rows) {
|
|
91
|
+
const state = this.indexes.get(field);
|
|
92
|
+
if (!state) return;
|
|
93
|
+
for (const row of rows) {
|
|
94
|
+
addToState(state, row.recordId, row.value);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
/**
|
|
98
|
+
* Incrementally update a record's index entry for one field. Called by
|
|
99
|
+
* `Collection.put()` after the main write succeeds. If
|
|
100
|
+
* `previousValue` is non-null, the record is removed from the old
|
|
101
|
+
* bucket first — this is the update path. Pass `null` for fresh adds.
|
|
102
|
+
* No-op if the field is not declared.
|
|
103
|
+
*/
|
|
104
|
+
upsert(recordId, field, newValue, previousValue) {
|
|
105
|
+
const state = this.indexes.get(field);
|
|
106
|
+
if (!state) return;
|
|
107
|
+
if (previousValue !== null && previousValue !== void 0) {
|
|
108
|
+
removeFromState(state, recordId, previousValue);
|
|
109
|
+
}
|
|
110
|
+
addToState(state, recordId, newValue);
|
|
111
|
+
}
|
|
112
|
+
/**
|
|
113
|
+
* Remove a record from the index for one field. Called by
|
|
114
|
+
* `Collection.delete()`. No-op if the field is not declared or
|
|
115
|
+
* the record isn't in the bucket. Empty buckets are dropped to keep
|
|
116
|
+
* the Map clean.
|
|
117
|
+
*/
|
|
118
|
+
remove(recordId, field, value) {
|
|
119
|
+
const state = this.indexes.get(field);
|
|
120
|
+
if (!state) return;
|
|
121
|
+
removeFromState(state, recordId, value);
|
|
122
|
+
}
|
|
123
|
+
/**
|
|
124
|
+
* Drop all bucket data while preserving field declarations. Called on
|
|
125
|
+
* invalidation (incoming sync changes, keyring rotation) — the next
|
|
126
|
+
* query re-populates via `ingest`.
|
|
127
|
+
*/
|
|
128
|
+
clear() {
|
|
129
|
+
for (const state of this.indexes.values()) {
|
|
130
|
+
state.buckets.clear();
|
|
131
|
+
state.values.clear();
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
/**
|
|
135
|
+
* Equality lookup — return the set of record ids whose `field` matches
|
|
136
|
+
* `value`. Returns `null` if the field is not declared (caller falls
|
|
137
|
+
* back to scan or throws `IndexRequiredError`). Returns a shared empty
|
|
138
|
+
* set if the field is declared but no record matches — that set MUST
|
|
139
|
+
* NOT be mutated by the caller.
|
|
140
|
+
*/
|
|
141
|
+
lookupEqual(field, value) {
|
|
142
|
+
const state = this.indexes.get(field);
|
|
143
|
+
if (!state) return null;
|
|
144
|
+
const key = stringifyKey(value);
|
|
145
|
+
return state.buckets.get(key) ?? EMPTY_SET;
|
|
146
|
+
}
|
|
147
|
+
/**
|
|
148
|
+
* Set lookup — return the union of record ids whose `field` matches any
|
|
149
|
+
* of `values`. Returns `null` if the field is not declared. Returns a
|
|
150
|
+
* fresh (non-shared) Set — safe for the caller to mutate.
|
|
151
|
+
*/
|
|
152
|
+
lookupIn(field, values) {
|
|
153
|
+
const state = this.indexes.get(field);
|
|
154
|
+
if (!state) return null;
|
|
155
|
+
const out = /* @__PURE__ */ new Set();
|
|
156
|
+
for (const value of values) {
|
|
157
|
+
const bucket = state.buckets.get(stringifyKey(value));
|
|
158
|
+
if (bucket) for (const id of bucket) out.add(id);
|
|
159
|
+
}
|
|
160
|
+
return out;
|
|
161
|
+
}
|
|
162
|
+
/**
|
|
163
|
+
* Range lookup. Return record ids whose indexed value
|
|
164
|
+
* satisfies the predicate. Comparison happens on the ORIGINAL TYPED
|
|
165
|
+
* value carried in `state.values` — so numeric `<` sorts numerically,
|
|
166
|
+
* not lexicographically on `String(n)`. Returns `null` if the field
|
|
167
|
+
* is not declared.
|
|
168
|
+
*
|
|
169
|
+
* Supported ops: `'<'`, `'<='`, `'>'`, `'>='`, `'between'`. For
|
|
170
|
+
* `'between'`, `value` is `[lo, hi]` and both bounds are inclusive
|
|
171
|
+
* (matches the eager-mode operator contract in `predicate.ts`).
|
|
172
|
+
*/
|
|
173
|
+
lookupRange(field, op, value) {
|
|
174
|
+
const state = this.indexes.get(field);
|
|
175
|
+
if (!state) return null;
|
|
176
|
+
const out = /* @__PURE__ */ new Set();
|
|
177
|
+
for (const [recordId, live] of state.values) {
|
|
178
|
+
if (live === void 0 || live === null) continue;
|
|
179
|
+
if (matchesRange(live, op, value)) out.add(recordId);
|
|
180
|
+
}
|
|
181
|
+
return out;
|
|
182
|
+
}
|
|
183
|
+
/**
|
|
184
|
+
* Sorted iteration — return every entry on `field` as an
|
|
185
|
+
* `OrderedEntry[]`, sorted by the ORIGINAL TYPED value (#275: no more
|
|
186
|
+
* `'10' < '2'` surprises on numeric fields). Consumers paginate with
|
|
187
|
+
* a numeric offset. `OrderedEntry.value` is the typed value.
|
|
188
|
+
*/
|
|
189
|
+
orderedBy(field, dir) {
|
|
190
|
+
const state = this.indexes.get(field);
|
|
191
|
+
if (!state) return null;
|
|
192
|
+
const entries = [];
|
|
193
|
+
for (const [recordId, value] of state.values) {
|
|
194
|
+
entries.push({ recordId, value });
|
|
195
|
+
}
|
|
196
|
+
entries.sort((a, b) => compareTyped(a.value, b.value));
|
|
197
|
+
if (dir === "desc") entries.reverse();
|
|
198
|
+
return entries;
|
|
199
|
+
}
|
|
200
|
+
};
|
|
201
|
+
// Shared sentinel returned by lookupEqual() for declared-but-empty
// matches; callers must never mutate it.
var EMPTY_SET = /* @__PURE__ */ new Set();
|
|
202
|
+
/**
 * Encode an index value into its in-memory bucket key.
 *
 * Fix: the previous encoding mapped non-string scalars through plain
 * `String(value)`, so `1` / `"1"`, `true` / `"true"`, and a `Date` /
 * its ISO string all collided into one bucket. Non-string scalars are
 * now tagged with a `\0<type>\0` prefix (raw strings cannot start with
 * `\0` in practice, matching the existing NULL/OBJECT sentinels). Keys
 * are only held in the in-memory mirror (rebuilt via `ingest` after
 * `clear()`), so changing the encoding is safe.
 *
 * Arrays encode element-wise (recursively) via JSON; plain objects all
 * collapse into one opaque bucket, as before.
 */
function stringifyKey(value) {
  if (value === null || value === void 0) return "\0NULL\0";
  if (typeof value === "string") return value;
  // Tag numbers/booleans so they cannot collide with raw strings.
  if (typeof value === "number" || typeof value === "boolean") {
    return "\0" + typeof value + "\0" + String(value);
  }
  if (value instanceof Date) return "\0date\0" + value.toISOString();
  if (Array.isArray(value)) {
    const parts = [];
    for (const el of value) parts.push(stringifyKey(el));
    return JSON.stringify(parts);
  }
  return "\0OBJECT\0";
}
|
|
214
|
+
/**
 * Insert `recordId` into the bucket keyed by `value` and mirror the
 * typed value for range/sort lookups. Null/undefined values are not
 * indexed at all.
 */
function addToState(state, recordId, value) {
  if (value === null || value === void 0) return;
  const bucketKey = stringifyKey(value);
  let bucket = state.buckets.get(bucketKey);
  if (bucket === void 0) {
    bucket = new Set();
    state.buckets.set(bucketKey, bucket);
  }
  bucket.add(recordId);
  state.values.set(recordId, value);
}
|
|
225
|
+
/**
 * Undo addToState for one record: drop it from its bucket (discarding
 * the bucket entirely once empty) and from the typed-value mirror.
 * Null/undefined values were never indexed, so those are a no-op.
 */
function removeFromState(state, recordId, value) {
  if (value === null || value === void 0) return;
  const bucketKey = stringifyKey(value);
  const bucket = state.buckets.get(bucketKey);
  if (bucket !== void 0) {
    bucket.delete(recordId);
    if (bucket.size === 0) state.buckets.delete(bucketKey);
  }
  state.values.delete(recordId);
}
|
|
235
|
+
/**
 * Evaluate one range predicate against a live typed value.
 *
 * `'between'` takes `bound = [lo, hi]`, both inclusive; a malformed
 * bound matches nothing. All other ops compare via `compareTyped`.
 *
 * Fix: the switch previously had no default, so an unrecognized op
 * returned `undefined`; it now explicitly matches nothing (`false`),
 * keeping the function's boolean contract.
 */
function matchesRange(live, op, bound) {
  if (op === "between") {
    if (!Array.isArray(bound) || bound.length !== 2) return false;
    return compareTyped(live, bound[0]) >= 0 && compareTyped(live, bound[1]) <= 0;
  }
  const cmp = compareTyped(live, bound);
  switch (op) {
    case "<":
      return cmp < 0;
    case "<=":
      return cmp <= 0;
    case ">":
      return cmp > 0;
    case ">=":
      return cmp >= 0;
    default:
      // Unknown operator: match nothing instead of returning undefined.
      return false;
  }
}
|
|
252
|
+
/**
 * Ordering over typed index values. null/undefined sort after
 * everything (and equal to each other); numbers, Dates, strings, and
 * booleans compare within their own type (false < true); mixed or
 * unknown types compare equal, leaving their relative order to the
 * caller.
 */
function compareTyped(a, b) {
  const aNullish = a === null || a === void 0;
  const bNullish = b === null || b === void 0;
  if (aNullish) return bNullish ? 0 : 1;
  if (bNullish) return -1;
  if (typeof a === "number" && typeof b === "number") return a - b;
  if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime();
  if (typeof a === "string" && typeof b === "string") {
    if (a === b) return 0;
    return a < b ? -1 : 1;
  }
  if (typeof a === "boolean" && typeof b === "boolean") {
    if (a === b) return 0;
    return a ? 1 : -1;
  }
  return 0;
}
|
|
263
|
+
|
|
264
|
+
// src/indexing/lazy-builder.ts
|
|
265
|
+
// Shared zero-state plan used as the LazyQuery constructor default.
// Builder methods never mutate a plan in place (each spreads into a
// fresh object), so sharing one instance across queries is safe.
var EMPTY_PLAN = {
  clauses: [],
  orderBy: [],
  limit: void 0,
  offset: 0
};
|
|
271
|
+
var LazyQuery = class _LazyQuery {
  // Backing collection handle: supplies `persistedIndexes`,
  // `ensurePersistedIndexesLoaded()`, `getRecord(id)` and
  // `collectionName` (used in errors).
  source;
  // Immutable query plan; every builder method returns a NEW LazyQuery
  // with a copied plan, so instances are safely shareable.
  plan;
  constructor(source, plan = EMPTY_PLAN) {
    this.source = source;
    this.plan = plan;
  }
  /**
   * Append a field clause. `op` may be `'=='`, `'in'`, or a range op
   * (see `resolveCandidateIds`). Returns a new builder; `this` is
   * left unchanged.
   */
  where(field, op, value) {
    const clause = { type: "field", field, op, value };
    return new _LazyQuery(this.source, {
      ...this.plan,
      clauses: [...this.plan.clauses, clause]
    });
  }
  /** Append a sort key (default ascending). Returns a new builder. */
  orderBy(field, direction = "asc") {
    return new _LazyQuery(this.source, {
      ...this.plan,
      orderBy: [...this.plan.orderBy, { field, direction }]
    });
  }
  /** Cap the number of results. Returns a new builder. */
  limit(n) {
    return new _LazyQuery(this.source, { ...this.plan, limit: n });
  }
  /** Skip the first `n` results (applied after sorting). */
  offset(n) {
    return new _LazyQuery(this.source, { ...this.plan, offset: n });
  }
  /**
   * Execute the plan: load persisted indexes, verify every touched
   * field is indexed (throws `IndexRequiredError` otherwise), resolve a
   * candidate id set from the index, decrypt those records, post-filter
   * with the FULL clause list, then sort / offset / limit in memory.
   */
  async toArray() {
    await this.source.ensurePersistedIndexesLoaded();
    const touchedFields = collectTouchedFields(this.plan);
    const missingFields = touchedFields.filter((f) => !isFieldIndexed(f, this.source.persistedIndexes));
    if (missingFields.length > 0) {
      throw new IndexRequiredError({
        collection: this.source.collectionName,
        touchedFields,
        missingFields
      });
    }
    const candidateIds = this.resolveCandidateIds();
    if (candidateIds === null) {
      // All touched fields are indexed, but no clause or orderBy could
      // drive the lookup — report every touched field as missing.
      throw new IndexRequiredError({
        collection: this.source.collectionName,
        touchedFields,
        missingFields: touchedFields
      });
    }
    const records = [];
    for (const id of candidateIds) {
      const record = await this.source.getRecord(id);
      if (record === null) continue;
      // Candidates may over-match (only one clause drives the index
      // lookup), so re-check every clause on the decrypted record.
      if (!matchesAll(record, this.plan.clauses)) continue;
      records.push(record);
    }
    const sorted = this.plan.orderBy.length > 0 ? sortRecords(records, this.plan.orderBy) : records;
    const offset = this.plan.offset > 0 ? this.plan.offset : 0;
    const limited = this.plan.limit === void 0 ? sorted.slice(offset) : sorted.slice(offset, offset + this.plan.limit);
    return limited;
  }
  /** First matching record, or `null` when nothing matches. */
  async first() {
    const out = await this.limit(1).toArray();
    return out.length > 0 ? out[0] : null;
  }
  // NOTE(review): counts by materialising (and decrypting) every match;
  // acceptable today, candidate for an index-only count later.
  async count() {
    const out = await this.toArray();
    return out.length;
  }
  /**
   * Resolve the candidate record-id set to decrypt. Returns null when the
   * query has no usable driver — no `==`/`in` clause and no `orderBy`
   * clause that can scope the scan. Callers interpret null as
   * IndexRequiredError (see `toArray`).
   */
  resolveCandidateIds() {
    const idx = this.source.persistedIndexes;
    const eqMap = /* @__PURE__ */ new Map();
    for (const clause of this.plan.clauses) {
      if (clause.op === "==") eqMap.set(clause.field, clause.value);
    }
    // Prefer a composite index when 2+ equality clauses cover all of
    // its fields — the narrowest candidate set available.
    if (eqMap.size >= 2) {
      for (const def of idx.definitions()) {
        if (def.kind !== "composite") continue;
        if (def.fields.every((f) => eqMap.has(f))) {
          const tuple = def.fields.map((f) => eqMap.get(f));
          const ids = idx.lookupEqual(def.key, tuple);
          if (ids) return [...ids];
        }
      }
    }
    // Otherwise the first clause (in declaration order) backed by a
    // declared single-field index drives the lookup.
    for (const clause of this.plan.clauses) {
      if (clause.op === "==") {
        const ids = idx.lookupEqual(clause.field, clause.value);
        if (ids) return [...ids];
      } else if (clause.op === "in" && Array.isArray(clause.value)) {
        const ids = idx.lookupIn(clause.field, clause.value);
        if (ids) return [...ids];
      } else if (isRangeOp(clause.op)) {
        const ids = idx.lookupRange(clause.field, clause.op, clause.value);
        if (ids) return [...ids];
      }
    }
    // No clause drove the lookup: fall back to iterating the primary
    // orderBy field's index in sorted order.
    if (this.plan.orderBy.length > 0) {
      const primary = this.plan.orderBy[0];
      const entries = idx.orderedBy(primary.field, primary.direction);
      if (entries) return entries.map((e) => e.recordId);
    }
    return null;
  }
};
|
|
378
|
+
/**
 * True when `field` is covered by a declared index: either directly
 * (single-field) or as a member of any composite definition.
 */
function isFieldIndexed(field, idx) {
  if (idx.has(field)) return true;
  for (const def of idx.definitions()) {
    if (def.kind !== "composite") continue;
    if (def.fields.includes(field)) return true;
  }
  return false;
}
|
|
385
|
+
/** True for the comparison operators `lookupRange` understands. */
function isRangeOp(op) {
  switch (op) {
    case "<":
    case "<=":
    case ">":
    case ">=":
    case "between":
      return true;
    default:
      return false;
  }
}
|
|
388
|
+
/**
 * Deduplicated list of every field referenced by the plan's where
 * clauses and orderBy keys, in first-seen order.
 */
function collectTouchedFields(plan) {
  const fields = new Set([
    ...plan.clauses.map((clause) => clause.field),
    ...plan.orderBy.map((order) => order.field)
  ]);
  return [...fields];
}
|
|
394
|
+
/** AND-combine: true only when `record` satisfies every clause. */
function matchesAll(record, clauses) {
  return clauses.every((clause) => evaluateClause(record, clause));
}
|
|
400
|
+
/**
 * Non-mutating multi-key sort: compare by each orderBy key in turn,
 * negating the result for descending keys; ties fall through to the
 * next key.
 */
function sortRecords(records, orderBy) {
  const copy = [...records];
  copy.sort((left, right) => {
    for (const { field, direction } of orderBy) {
      const result = compareValues(readPath(left, field), readPath(right, field));
      if (result === 0) continue;
      return direction === "asc" ? result : -result;
    }
    return 0;
  });
  return copy;
}
|
|
411
|
+
/**
 * Comparator used by `sortRecords`. null/undefined sort last (equal to
 * each other); numbers, strings, Dates, and booleans compare within
 * their own type; mixed/unknown types compare equal.
 */
function compareValues(a, b) {
  if (a === void 0 || a === null) return b === void 0 || b === null ? 0 : 1;
  if (b === void 0 || b === null) return -1;
  if (typeof a === "number" && typeof b === "number") return a - b;
  if (typeof a === "string" && typeof b === "string") return a < b ? -1 : a > b ? 1 : 0;
  if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime();
  // Consistency fix: compareTyped (the persisted-index comparator in
  // this chunk) orders booleans false < true, but this comparator
  // treated them as equal — so an in-memory orderBy on a boolean field
  // disagreed with the index's own ordering. Align the two.
  if (typeof a === "boolean" && typeof b === "boolean") {
    return a === b ? 0 : a ? 1 : -1;
  }
  return 0;
}
|
|
419
|
+
|
|
420
|
+
export {
|
|
421
|
+
IDX_PREFIX,
|
|
422
|
+
encodeIdxId,
|
|
423
|
+
decodeIdxId,
|
|
424
|
+
isIdxId,
|
|
425
|
+
COMPOSITE_DELIMITER,
|
|
426
|
+
compositeKey,
|
|
427
|
+
PersistedCollectionIndex,
|
|
428
|
+
LazyQuery
|
|
429
|
+
};
|
|
430
|
+
//# sourceMappingURL=chunk-ZFKD4QMV.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/indexing/persisted-indexes.ts","../src/indexing/lazy-builder.ts"],"sourcesContent":["/**\n * Persistent, encrypted secondary indexes for lazy-mode collections.\n *\n * Parallel to the in-memory `CollectionIndexes` used by eager mode (see\n * `packages/hub/src/query/indexes.ts`): same logical surface, but entries\n * are materialised as encrypted side-car records (`_idx/<field>/<recordId>`)\n * and bulk-loaded into an in-memory mirror on first query.\n *\n * This module only owns the id-namespace convention, the in-memory mirror,\n * and the typed errors. Write-path integration (PR 2 / ), query-planner\n * dispatch (PR 3 / , PR 4 / ), and the rebuild/reconcile utilities\n * (PR 5 / ) live in other files.\n *\n * See the design spec for the full architecture + threat model.\n */\n\n/**\n * Reserved id prefix for encrypted index side-car records.\n * Matches the existing `_keyring`, `_ledger_deltas/…`, `_meta/handle`\n * conventions inside a collection's id namespace.\n */\nexport const IDX_PREFIX = '_idx/' as const\n\n/**\n * Encode the side-car record id for a (field, recordId) pair.\n *\n * Format: `_idx/<field>/<recordId>` — no escaping. Field names may contain\n * dots (for dotted-path access consistent with eager-mode `readPath`);\n * record ids may contain slashes. The first two slash-separated segments\n * are `_idx` and the field; everything after the *second* slash is the\n * record id verbatim.\n */\nexport function encodeIdxId(field: string, recordId: string): string {\n return `${IDX_PREFIX}${field}/${recordId}`\n}\n\n/**\n * Decode a side-car id back into `{ field, recordId }`, or `null` if the\n * input is not a well-formed idx id. 
A well-formed id is:\n * - prefixed with `_idx/`\n * - contains a field segment (non-empty, no slashes)\n * - contains a record-id segment (non-empty, may contain slashes)\n */\nexport function decodeIdxId(id: string): { field: string; recordId: string } | null {\n if (!id.startsWith(IDX_PREFIX)) return null\n const rest = id.slice(IDX_PREFIX.length)\n const firstSlash = rest.indexOf('/')\n if (firstSlash <= 0) return null\n const field = rest.slice(0, firstSlash)\n const recordId = rest.slice(firstSlash + 1)\n if (recordId.length === 0) return null\n return { field, recordId }\n}\n\n/**\n * Fast-path predicate for discriminating side-car ids from regular record\n * ids and other reserved namespaces. Used by the hub to filter `list()`\n * results during bulk-load of the in-memory mirror.\n */\nexport function isIdxId(id: string): boolean {\n return decodeIdxId(id) !== null\n}\n\n/**\n * Sorted-value entry returned by `orderedBy()`. Mirrors the body shape\n * used by the write path — but `orderedBy` emits them already sorted by\n * `value` in the requested direction. Consumers (PR 4 / ) treat the\n * array as immutable and paginate via a numeric offset.\n *\n * **Note on `value`:** as of, this is the ORIGINAL TYPED\n * value (number, Date, boolean, etc.), not the stringified bucket key.\n * That's what lets range predicates and `orderedBy` compare numerically\n * instead of stumbling into `'10' < '2'` on `String(n)`.\n */\nexport interface OrderedEntry {\n readonly recordId: string\n readonly value: unknown\n}\n\n/**\n * Bulk-load row shape accepted by `ingest()`. The `value` field is the\n * decrypted index body's `value` field verbatim.\n */\nexport interface IngestRow {\n readonly recordId: string\n readonly value: unknown\n}\n\n/**\n * In-memory mirror of the persisted index side-car records for a single\n * collection. 
Populated by bulk-loading `_idx/<field>/*` ids on first\n * query and maintained incrementally by `Collection.put()` / `.delete()`\n * via `upsert()` / `remove()`.\n *\n * API surface is deliberately parallel to `CollectionIndexes` (eager mode)\n * so the query planner in PR 3/4 can dispatch to either polymorphically.\n *\n * Lifecycle:\n * - `declare(field)` — accept the field as indexable (idempotent)\n * - `ingest(field, rows[])` — bulk-load from decrypted index bodies\n * - `upsert(recordId, field, newValue, previousValue)` — incremental update\n * - `remove(recordId, field, value)` — incremental remove\n * - `lookupEqual(field, value)` / `lookupIn(field, values)` — equality reads\n * - `orderedBy(field, dir)` — sorted iteration for orderBy\n * - `clear()` — drop all buckets (invalidation / rotation)\n */\n/**\n * Per-field storage: the equality bucket map AND a parallel table of typed\n * values keyed by recordId. The typed table exists so range predicates\n * and `orderedBy` can compare on the original typed value rather\n * than the stringified bucket key — String(10) < String(2) is the classic\n * landmine `stringifyKey` introduces for numeric fields.\n */\ninterface PersistedFieldState {\n readonly buckets: Map<string, Set<string>>\n readonly values: Map<string, unknown>\n}\n\n/**\n * Structured index definition. Single-field indexes carry just a field\n * name; composite indexes carry the ordered list of fields and\n * the synthetic `key` (= fields joined by `COMPOSITE_DELIMITER`) used\n * as the bucket-map key and side-car envelope id segment.\n */\nexport type PersistedIndexDef =\n | { readonly kind: 'single'; readonly field: string; readonly key: string }\n | { readonly kind: 'composite'; readonly fields: readonly string[]; readonly key: string }\n\n/**\n * Delimiter used to synthesize a composite-index key from an ordered\n * field list. 
Intentionally a character that is extremely unusual in\n * JavaScript object keys (`|`) so collision with a literal field name\n * is vanishingly rare in practice. Composite declarations whose field\n * names contain `|` are rejected at declare-time with an explicit\n * error.\n */\nexport const COMPOSITE_DELIMITER = '|'\n\nexport function compositeKey(fields: readonly string[]): string {\n return fields.join(COMPOSITE_DELIMITER)\n}\n\nexport class PersistedCollectionIndex {\n private readonly indexes = new Map<string, PersistedFieldState>()\n private readonly defs = new Map<string, PersistedIndexDef>()\n\n /**\n * Declare a single-field index. Subsequent `upsert` / `ingest` calls\n * populate the in-memory mirror; calls before `declare` are no-ops\n * (tolerant bulk-load ordering). Idempotent.\n */\n declare(field: string): void {\n if (this.indexes.has(field)) return\n this.indexes.set(field, { buckets: new Map(), values: new Map() })\n this.defs.set(field, { kind: 'single', field, key: field })\n }\n\n /**\n * Declare a composite (multi-field) index. The synthetic\n * key is `fields.join('|')`; it doubles as the in-memory map key and\n * the `_idx/<key>/<recordId>` side-car field segment. 
Callers upsert\n * and lookup via the same `key` as single-field indexes, just with a\n * tuple value (JSON-stringified for bucketing).\n */\n declareComposite(fields: readonly string[]): void {\n if (fields.length === 0) {\n throw new Error('declareComposite: fields array must be non-empty')\n }\n for (const f of fields) {\n if (f.includes(COMPOSITE_DELIMITER)) {\n throw new Error(\n `declareComposite: field \"${f}\" contains the composite delimiter ` +\n `\"${COMPOSITE_DELIMITER}\" — pick a different field name or open an ` +\n `issue to add hash-based composite keys.`,\n )\n }\n }\n const key = compositeKey(fields)\n if (this.indexes.has(key)) return\n this.indexes.set(key, { buckets: new Map(), values: new Map() })\n this.defs.set(key, { kind: 'composite', fields: [...fields], key })\n }\n\n /**\n * Every declared index's structured definition. Collection walks this\n * when materialising side-cars on put/delete so it can extract a\n * single-field value or a composite tuple appropriately.\n */\n definitions(): PersistedIndexDef[] {\n return [...this.defs.values()]\n }\n\n /** True if `field` has been declared as indexable on this mirror. */\n has(field: string): boolean {\n return this.indexes.has(field)\n }\n\n /** All declared field names, in declaration order. */\n fields(): string[] {\n return [...this.indexes.keys()]\n }\n\n /**\n * Bulk-load the mirror from decrypted index bodies. Intended to be\n * called once per field after reading the collection's `_idx/<field>/*`\n * side-cars. Safe to call twice with the same rows — bucket Sets\n * deduplicate recordIds. If `field` is not declared, this is a no-op\n * (tolerates the case where bulk-load runs before `declare()` lands).\n */\n ingest(field: string, rows: readonly IngestRow[]): void {\n const state = this.indexes.get(field)\n if (!state) return\n for (const row of rows) {\n addToState(state, row.recordId, row.value)\n }\n }\n\n /**\n * Incrementally update a record's index entry for one field. 
Called by\n * `Collection.put()` after the main write succeeds. If\n * `previousValue` is non-null, the record is removed from the old\n * bucket first — this is the update path. Pass `null` for fresh adds.\n * No-op if the field is not declared.\n */\n upsert(recordId: string, field: string, newValue: unknown, previousValue: unknown): void {\n const state = this.indexes.get(field)\n if (!state) return\n if (previousValue !== null && previousValue !== undefined) {\n removeFromState(state, recordId, previousValue)\n }\n addToState(state, recordId, newValue)\n }\n\n /**\n * Remove a record from the index for one field. Called by\n * `Collection.delete()`. No-op if the field is not declared or\n * the record isn't in the bucket. Empty buckets are dropped to keep\n * the Map clean.\n */\n remove(recordId: string, field: string, value: unknown): void {\n const state = this.indexes.get(field)\n if (!state) return\n removeFromState(state, recordId, value)\n }\n\n /**\n * Drop all bucket data while preserving field declarations. Called on\n * invalidation (incoming sync changes, keyring rotation) — the next\n * query re-populates via `ingest`.\n */\n clear(): void {\n for (const state of this.indexes.values()) {\n state.buckets.clear()\n state.values.clear()\n }\n }\n\n /**\n * Equality lookup — return the set of record ids whose `field` matches\n * `value`. Returns `null` if the field is not declared (caller falls\n * back to scan or throws `IndexRequiredError`). Returns a shared empty\n * set if the field is declared but no record matches — that set MUST\n * NOT be mutated by the caller.\n */\n lookupEqual(field: string, value: unknown): ReadonlySet<string> | null {\n const state = this.indexes.get(field)\n if (!state) return null\n const key = stringifyKey(value)\n return state.buckets.get(key) ?? EMPTY_SET\n }\n\n /**\n * Set lookup — return the union of record ids whose `field` matches any\n * of `values`. Returns `null` if the field is not declared. 
Returns a\n * fresh (non-shared) Set — safe for the caller to mutate.\n */\n lookupIn(field: string, values: readonly unknown[]): ReadonlySet<string> | null {\n const state = this.indexes.get(field)\n if (!state) return null\n const out = new Set<string>()\n for (const value of values) {\n const bucket = state.buckets.get(stringifyKey(value))\n if (bucket) for (const id of bucket) out.add(id)\n }\n return out\n }\n\n /**\n * Range lookup. Return record ids whose indexed value\n * satisfies the predicate. Comparison happens on the ORIGINAL TYPED\n * value carried in `state.values` — so numeric `<` sorts numerically,\n * not lexicographically on `String(n)`. Returns `null` if the field\n * is not declared.\n *\n * Supported ops: `'<'`, `'<='`, `'>'`, `'>='`, `'between'`. For\n * `'between'`, `value` is `[lo, hi]` and both bounds are inclusive\n * (matches the eager-mode operator contract in `predicate.ts`).\n */\n lookupRange(\n field: string,\n op: '<' | '<=' | '>' | '>=' | 'between',\n value: unknown,\n ): ReadonlySet<string> | null {\n const state = this.indexes.get(field)\n if (!state) return null\n const out = new Set<string>()\n for (const [recordId, live] of state.values) {\n if (live === undefined || live === null) continue\n if (matchesRange(live, op, value)) out.add(recordId)\n }\n return out\n }\n\n /**\n * Sorted iteration — return every entry on `field` as an\n * `OrderedEntry[]`, sorted by the ORIGINAL TYPED value (#275: no more\n * `'10' < '2'` surprises on numeric fields). Consumers paginate with\n * a numeric offset. 
`OrderedEntry.value` is the typed value.\n */\n orderedBy(field: string, dir: 'asc' | 'desc'): readonly OrderedEntry[] | null {\n const state = this.indexes.get(field)\n if (!state) return null\n const entries: OrderedEntry[] = []\n for (const [recordId, value] of state.values) {\n entries.push({ recordId, value })\n }\n entries.sort((a, b) => compareTyped(a.value, b.value))\n if (dir === 'desc') entries.reverse()\n return entries\n }\n}\n\nconst EMPTY_SET: ReadonlySet<string> = new Set()\n\n/**\n * Canonicalize a value into a bucket key. Deliberately identical to the\n * eager-mode `stringifyKey` in `query/indexes.ts` so semantics match. When\n * `query/indexes.ts` changes its coercion rules, update this in lockstep.\n *\n * null / undefined values are NOT indexed — callers who pass them to\n * `upsert` / `remove` short-circuit before reaching this function; the\n * sentinel here exists only to make `lookupEqual(field, null)` return\n * an empty bucket (rather than matching some arbitrary record).\n */\nfunction stringifyKey(value: unknown): string {\n if (value === null || value === undefined) return '\\0NULL\\0'\n if (typeof value === 'string') return value\n if (typeof value === 'number' || typeof value === 'boolean') return String(value)\n if (value instanceof Date) return value.toISOString()\n // composite index values are tuple arrays. 
JSON.stringify\n // gives a delimiter-safe, order-preserving canonical form so buckets\n // for `['c-A', '2026-Q1']` and `['c-A', '2026-Q2']` never collide.\n if (Array.isArray(value)) {\n const parts: string[] = []\n for (const el of value) parts.push(stringifyKey(el))\n return JSON.stringify(parts)\n }\n return '\\0OBJECT\\0'\n}\n\nfunction addToState(state: PersistedFieldState, recordId: string, value: unknown): void {\n if (value === null || value === undefined) return\n const key = stringifyKey(value)\n let bucket = state.buckets.get(key)\n if (!bucket) {\n bucket = new Set()\n state.buckets.set(key, bucket)\n }\n bucket.add(recordId)\n state.values.set(recordId, value)\n}\n\nfunction removeFromState(state: PersistedFieldState, recordId: string, value: unknown): void {\n if (value === null || value === undefined) return\n const key = stringifyKey(value)\n const bucket = state.buckets.get(key)\n if (bucket) {\n bucket.delete(recordId)\n if (bucket.size === 0) state.buckets.delete(key)\n }\n state.values.delete(recordId)\n}\n\n/**\n * Range-predicate comparator. Runs on the ORIGINAL TYPED value so numeric\n * fields sort numerically (not lexicographically on `String(n)`). 
ISO-8601\n * date strings already sort correctly lexicographically; Date instances\n * compare via `getTime()` before the string branch to keep the contract\n * honest regardless of which form survived serialization.\n */\nfunction matchesRange(\n live: unknown,\n op: '<' | '<=' | '>' | '>=' | 'between',\n bound: unknown,\n): boolean {\n if (op === 'between') {\n if (!Array.isArray(bound) || bound.length !== 2) return false\n return compareTyped(live, bound[0]) >= 0 && compareTyped(live, bound[1]) <= 0\n }\n const cmp = compareTyped(live, bound)\n switch (op) {\n case '<': return cmp < 0\n case '<=': return cmp <= 0\n case '>': return cmp > 0\n case '>=': return cmp >= 0\n }\n}\n\nfunction compareTyped(a: unknown, b: unknown): number {\n if (a === undefined || a === null) return b === undefined || b === null ? 0 : 1\n if (b === undefined || b === null) return -1\n if (typeof a === 'number' && typeof b === 'number') return a - b\n if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime()\n if (typeof a === 'string' && typeof b === 'string') return a < b ? -1 : a > b ? 1 : 0\n if (typeof a === 'boolean' && typeof b === 'boolean') {\n return a === b ? 0 : a ? 1 : -1\n }\n // Mixed/unsupported types: deliberately treat as equal so sort stays\n // stable. Matches the eager-mode `compareValues` contract in\n // builder.ts — we don't silently coerce arbitrary objects to strings\n // (which would be meaningless) nor throw (which would be hostile).\n return 0\n}\n","/**\n * Lazy-mode query builder.\n *\n * Companion to `Query<T>` in `builder.ts`, but built for collections in lazy\n * mode where `snapshot()` is unavailable — records live in the adapter and\n * are pulled on demand. 
Dispatches through `PersistedCollectionIndex` to\n * resolve a candidate record-id set, then decrypts only those records.\n *\n * Scope:\n * - `.where(field, '==' | 'in', value)` — dispatched through the index\n * - `.where(field, other-op, value)` — evaluated against the decrypted\n * candidate set (non-indexed ops still require the field to be indexed\n * — we need SOMETHING to scope the candidate set)\n * - `.orderBy(field, dir?)` — dispatched through `orderedBy` when no\n * `==`/`in` clause is present; otherwise applied as an in-memory sort\n * over the candidate set\n * - `.limit(n)` / `.offset(n)` — page slice after filtering\n * - `.toArray()` / `.first()` / `.count()` — terminals\n *\n * Every field referenced by a where or orderBy clause MUST be indexed;\n * otherwise `toArray()` throws `IndexRequiredError`. This is deliberate:\n * silent scan-fallback would hide the very performance cliff that lazy-mode\n * indexes exist to prevent (see `docs/architecture.md` §indexes).\n */\n\nimport type { Clause, FieldClause, Operator } from '../query/predicate.js'\nimport { evaluateClause, readPath } from '../query/predicate.js'\nimport type { PersistedCollectionIndex } from './persisted-indexes.js'\nimport { IndexRequiredError } from '../errors.js'\n\nexport interface LazyOrderBy {\n readonly field: string\n readonly direction: 'asc' | 'desc'\n}\n\n/**\n * Source abstraction the LazyQuery runs against. Collection implements it.\n * Kept minimal so the builder stays test-friendly.\n */\nexport interface LazyQuerySource<T> {\n readonly collectionName: string\n readonly persistedIndexes: PersistedCollectionIndex\n /** Ensure `_idx/<field>/*` side-cars have been bulk-loaded into the mirror. */\n ensurePersistedIndexesLoaded(): Promise<void>\n /** Decrypt one record by id, or return null if it's gone. 
*/\n getRecord(id: string): Promise<T | null>\n}\n\ninterface LazyPlan {\n readonly clauses: readonly FieldClause[]\n readonly orderBy: readonly LazyOrderBy[]\n readonly limit: number | undefined\n readonly offset: number\n}\n\nconst EMPTY_PLAN: LazyPlan = {\n clauses: [],\n orderBy: [],\n limit: undefined,\n offset: 0,\n}\n\nexport class LazyQuery<T> {\n private readonly source: LazyQuerySource<T>\n private readonly plan: LazyPlan\n\n constructor(source: LazyQuerySource<T>, plan: LazyPlan = EMPTY_PLAN) {\n this.source = source\n this.plan = plan\n }\n\n where<V>(field: string, op: Operator, value: V): LazyQuery<T> {\n const clause: FieldClause = { type: 'field', field, op, value }\n return new LazyQuery<T>(this.source, {\n ...this.plan,\n clauses: [...this.plan.clauses, clause],\n })\n }\n\n orderBy(field: string, direction: 'asc' | 'desc' = 'asc'): LazyQuery<T> {\n return new LazyQuery<T>(this.source, {\n ...this.plan,\n orderBy: [...this.plan.orderBy, { field, direction }],\n })\n }\n\n limit(n: number): LazyQuery<T> {\n return new LazyQuery<T>(this.source, { ...this.plan, limit: n })\n }\n\n offset(n: number): LazyQuery<T> {\n return new LazyQuery<T>(this.source, { ...this.plan, offset: n })\n }\n\n async toArray(): Promise<T[]> {\n await this.source.ensurePersistedIndexesLoaded()\n\n const touchedFields = collectTouchedFields(this.plan)\n const missingFields = touchedFields.filter(f => !isFieldIndexed(f, this.source.persistedIndexes))\n if (missingFields.length > 0) {\n throw new IndexRequiredError({\n collection: this.source.collectionName,\n touchedFields,\n missingFields,\n })\n }\n\n const candidateIds = this.resolveCandidateIds()\n if (candidateIds === null) {\n // No usable driver — every touched field is indexed but no clause\n // pins the candidate set. 
This happens when a query only uses\n // operators other than `==`/`in` and no `orderBy` clause is\n // present — we refuse to enumerate the whole index, because that\n // defeats the purpose of lazy mode.\n throw new IndexRequiredError({\n collection: this.source.collectionName,\n touchedFields,\n missingFields: touchedFields,\n })\n }\n\n const records: T[] = []\n for (const id of candidateIds) {\n const record = await this.source.getRecord(id)\n if (record === null) continue\n if (!matchesAll(record, this.plan.clauses)) continue\n records.push(record)\n }\n\n const sorted = this.plan.orderBy.length > 0\n ? sortRecords(records, this.plan.orderBy)\n : records\n\n const offset = this.plan.offset > 0 ? this.plan.offset : 0\n const limited = this.plan.limit === undefined\n ? sorted.slice(offset)\n : sorted.slice(offset, offset + this.plan.limit)\n\n return limited\n }\n\n async first(): Promise<T | null> {\n const out = await this.limit(1).toArray()\n return out.length > 0 ? out[0]! : null\n }\n\n async count(): Promise<number> {\n const out = await this.toArray()\n return out.length\n }\n\n /**\n * Resolve the candidate record-id set to decrypt. Returns null when the\n * query has no usable driver — no `==`/`in` clause and no `orderBy`\n * clause that can scope the scan. Callers interpret null as\n * IndexRequiredError (see `toArray`).\n */\n private resolveCandidateIds(): readonly string[] | null {\n const idx = this.source.persistedIndexes\n\n // prefer a composite index when the query's `==`\n // clauses cover every field of one declared composite. 
The\n // composite mirror lookup is O(matches) vs single-field +\n // post-filter on the decrypted candidate set.\n const eqMap = new Map<string, unknown>()\n for (const clause of this.plan.clauses) {\n if (clause.op === '==') eqMap.set(clause.field, clause.value)\n }\n if (eqMap.size >= 2) {\n for (const def of idx.definitions()) {\n if (def.kind !== 'composite') continue\n if (def.fields.every(f => eqMap.has(f))) {\n const tuple = def.fields.map(f => eqMap.get(f))\n const ids = idx.lookupEqual(def.key, tuple)\n if (ids) return [...ids]\n }\n }\n }\n\n for (const clause of this.plan.clauses) {\n if (clause.op === '==') {\n const ids = idx.lookupEqual(clause.field, clause.value)\n if (ids) return [...ids]\n } else if (clause.op === 'in' && Array.isArray(clause.value)) {\n const ids = idx.lookupIn(clause.field, clause.value as readonly unknown[])\n if (ids) return [...ids]\n } else if (isRangeOp(clause.op)) {\n // range predicates on an indexed field dispatch\n // through `lookupRange`, which compares on the original typed\n // value (no numeric-lexicographic landmines).\n const ids = idx.lookupRange(clause.field, clause.op, clause.value)\n if (ids) return [...ids]\n }\n }\n\n // No equality/range driver — try to scope via orderBy.\n if (this.plan.orderBy.length > 0) {\n const primary = this.plan.orderBy[0]!\n const entries = idx.orderedBy(primary.field, primary.direction)\n if (entries) return entries.map(e => e.recordId)\n }\n\n return null\n }\n}\n\n/**\n * True if the given field name is covered by either a single-field\n * index or appears as a component of a declared composite index.\n * Composite coverage is sufficient for the missing-field check because\n * composite writes also maintain the in-memory mirror — the range /\n * orderBy / single-equality lookup paths fall through to decrypted\n * candidates that still get post-filtered by the composite clause.\n */\nfunction isFieldIndexed(field: string, idx: PersistedCollectionIndex): boolean {\n if 
(idx.has(field)) return true\n for (const def of idx.definitions()) {\n if (def.kind === 'composite' && def.fields.includes(field)) return true\n }\n return false\n}\n\nfunction isRangeOp(op: Operator): op is '<' | '<=' | '>' | '>=' | 'between' {\n return op === '<' || op === '<=' || op === '>' || op === '>=' || op === 'between'\n}\n\nfunction collectTouchedFields(plan: LazyPlan): string[] {\n const seen = new Set<string>()\n for (const c of plan.clauses) seen.add(c.field)\n for (const o of plan.orderBy) seen.add(o.field)\n return [...seen]\n}\n\nfunction matchesAll(record: unknown, clauses: readonly Clause[]): boolean {\n for (const c of clauses) {\n if (!evaluateClause(record, c)) return false\n }\n return true\n}\n\nfunction sortRecords<T>(records: T[], orderBy: readonly LazyOrderBy[]): T[] {\n return [...records].sort((a, b) => {\n for (const { field, direction } of orderBy) {\n const av = readPath(a, field)\n const bv = readPath(b, field)\n const cmp = compareValues(av, bv)\n if (cmp !== 0) return direction === 'asc' ? cmp : -cmp\n }\n return 0\n })\n}\n\nfunction compareValues(a: unknown, b: unknown): number {\n if (a === undefined || a === null) return b === undefined || b === null ? 0 : 1\n if (b === undefined || b === null) return -1\n if (typeof a === 'number' && typeof b === 'number') return a - b\n if (typeof a === 'string' && typeof b === 'string') return a < b ? -1 : a > b ? 
1 : 0\n if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime()\n return 0\n}\n"],"mappings":";;;;;;;;;AAqBO,IAAM,aAAa;AAWnB,SAAS,YAAY,OAAe,UAA0B;AACnE,SAAO,GAAG,UAAU,GAAG,KAAK,IAAI,QAAQ;AAC1C;AASO,SAAS,YAAY,IAAwD;AAClF,MAAI,CAAC,GAAG,WAAW,UAAU,EAAG,QAAO;AACvC,QAAM,OAAO,GAAG,MAAM,WAAW,MAAM;AACvC,QAAM,aAAa,KAAK,QAAQ,GAAG;AACnC,MAAI,cAAc,EAAG,QAAO;AAC5B,QAAM,QAAQ,KAAK,MAAM,GAAG,UAAU;AACtC,QAAM,WAAW,KAAK,MAAM,aAAa,CAAC;AAC1C,MAAI,SAAS,WAAW,EAAG,QAAO;AAClC,SAAO,EAAE,OAAO,SAAS;AAC3B;AAOO,SAAS,QAAQ,IAAqB;AAC3C,SAAO,YAAY,EAAE,MAAM;AAC7B;AA2EO,IAAM,sBAAsB;AAE5B,SAAS,aAAa,QAAmC;AAC9D,SAAO,OAAO,KAAK,mBAAmB;AACxC;AAEO,IAAM,2BAAN,MAA+B;AAAA,EACnB,UAAU,oBAAI,IAAiC;AAAA,EAC/C,OAAO,oBAAI,IAA+B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAO3D,QAAQ,OAAqB;AAC3B,QAAI,KAAK,QAAQ,IAAI,KAAK,EAAG;AAC7B,SAAK,QAAQ,IAAI,OAAO,EAAE,SAAS,oBAAI,IAAI,GAAG,QAAQ,oBAAI,IAAI,EAAE,CAAC;AACjE,SAAK,KAAK,IAAI,OAAO,EAAE,MAAM,UAAU,OAAO,KAAK,MAAM,CAAC;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,iBAAiB,QAAiC;AAChD,QAAI,OAAO,WAAW,GAAG;AACvB,YAAM,IAAI,MAAM,kDAAkD;AAAA,IACpE;AACA,eAAW,KAAK,QAAQ;AACtB,UAAI,EAAE,SAAS,mBAAmB,GAAG;AACnC,cAAM,IAAI;AAAA,UACR,4BAA4B,CAAC,uCACzB,mBAAmB;AAAA,QAEzB;AAAA,MACF;AAAA,IACF;AACA,UAAM,MAAM,aAAa,MAAM;AAC/B,QAAI,KAAK,QAAQ,IAAI,GAAG,EAAG;AAC3B,SAAK,QAAQ,IAAI,KAAK,EAAE,SAAS,oBAAI,IAAI,GAAG,QAAQ,oBAAI,IAAI,EAAE,CAAC;AAC/D,SAAK,KAAK,IAAI,KAAK,EAAE,MAAM,aAAa,QAAQ,CAAC,GAAG,MAAM,GAAG,IAAI,CAAC;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAmC;AACjC,WAAO,CAAC,GAAG,KAAK,KAAK,OAAO,CAAC;AAAA,EAC/B;AAAA;AAAA,EAGA,IAAI,OAAwB;AAC1B,WAAO,KAAK,QAAQ,IAAI,KAAK;AAAA,EAC/B;AAAA;AAAA,EAGA,SAAmB;AACjB,WAAO,CAAC,GAAG,KAAK,QAAQ,KAAK,CAAC;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAO,OAAe,MAAkC;AACtD,UAAM,QAAQ,KAAK,QAAQ,IAAI,KAAK;AACpC,QAAI,CAAC,MAAO;AACZ,eAAW,OAAO,MAAM;AACtB,iBAAW,OAAO,IAAI,UAAU,IAAI,KAAK;AAAA,IAC3C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAO,UAAkB,OAAe,UAAmB,eAA8B;AACvF,UAAM,QAAQ,KAAK,QAAQ,IAAI,KAAK;AACpC,QAAI,CAAC,MAAO;AACZ,QAAI,kBA
AkB,QAAQ,kBAAkB,QAAW;AACzD,sBAAgB,OAAO,UAAU,aAAa;AAAA,IAChD;AACA,eAAW,OAAO,UAAU,QAAQ;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,UAAkB,OAAe,OAAsB;AAC5D,UAAM,QAAQ,KAAK,QAAQ,IAAI,KAAK;AACpC,QAAI,CAAC,MAAO;AACZ,oBAAgB,OAAO,UAAU,KAAK;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAc;AACZ,eAAW,SAAS,KAAK,QAAQ,OAAO,GAAG;AACzC,YAAM,QAAQ,MAAM;AACpB,YAAM,OAAO,MAAM;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,YAAY,OAAe,OAA4C;AACrE,UAAM,QAAQ,KAAK,QAAQ,IAAI,KAAK;AACpC,QAAI,CAAC,MAAO,QAAO;AACnB,UAAM,MAAM,aAAa,KAAK;AAC9B,WAAO,MAAM,QAAQ,IAAI,GAAG,KAAK;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAS,OAAe,QAAwD;AAC9E,UAAM,QAAQ,KAAK,QAAQ,IAAI,KAAK;AACpC,QAAI,CAAC,MAAO,QAAO;AACnB,UAAM,MAAM,oBAAI,IAAY;AAC5B,eAAW,SAAS,QAAQ;AAC1B,YAAM,SAAS,MAAM,QAAQ,IAAI,aAAa,KAAK,CAAC;AACpD,UAAI,OAAQ,YAAW,MAAM,OAAQ,KAAI,IAAI,EAAE;AAAA,IACjD;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,YACE,OACA,IACA,OAC4B;AAC5B,UAAM,QAAQ,KAAK,QAAQ,IAAI,KAAK;AACpC,QAAI,CAAC,MAAO,QAAO;AACnB,UAAM,MAAM,oBAAI,IAAY;AAC5B,eAAW,CAAC,UAAU,IAAI,KAAK,MAAM,QAAQ;AAC3C,UAAI,SAAS,UAAa,SAAS,KAAM;AACzC,UAAI,aAAa,MAAM,IAAI,KAAK,EAAG,KAAI,IAAI,QAAQ;AAAA,IACrD;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAe,KAAqD;AAC5E,UAAM,QAAQ,KAAK,QAAQ,IAAI,KAAK;AACpC,QAAI,CAAC,MAAO,QAAO;AACnB,UAAM,UAA0B,CAAC;AACjC,eAAW,CAAC,UAAU,KAAK,KAAK,MAAM,QAAQ;AAC5C,cAAQ,KAAK,EAAE,UAAU,MAAM,CAAC;AAAA,IAClC;AACA,YAAQ,KAAK,CAAC,GAAG,MAAM,aAAa,EAAE,OAAO,EAAE,KAAK,CAAC;AACrD,QAAI,QAAQ,OAAQ,SAAQ,QAAQ;AACpC,WAAO;AAAA,EACT;AACF;AAEA,IAAM,YAAiC,oBAAI,IAAI;AAY/C,SAAS,aAAa,OAAwB;AAC5C,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,OAAO,UAAU,SAAU,QAAO;AACtC,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAAW,QAAO,OAAO,KAAK;AAChF,MAAI,iBAAiB,KAAM,QAAO,MAAM,YAAY;AAIpD,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,UAAM,QAAkB,CAAC;AACzB,eAAW,MAAM,MAAO,OAAM,KAAK,aAAa,EAAE,CAAC;AACnD,WAAO,KAAK,UAAU,KAAK;AAAA,EAC7B;AACA,SAAO;AACT;AAEA,SAAS,WAAW,OAA4B,UAAkB,OAAsB;AACtF,MAAI,UAAU,QAAQ,UAAU,OAAW;AAC3C,QAAM,MAAM,aAAa,KAAK;AAC9
B,MAAI,SAAS,MAAM,QAAQ,IAAI,GAAG;AAClC,MAAI,CAAC,QAAQ;AACX,aAAS,oBAAI,IAAI;AACjB,UAAM,QAAQ,IAAI,KAAK,MAAM;AAAA,EAC/B;AACA,SAAO,IAAI,QAAQ;AACnB,QAAM,OAAO,IAAI,UAAU,KAAK;AAClC;AAEA,SAAS,gBAAgB,OAA4B,UAAkB,OAAsB;AAC3F,MAAI,UAAU,QAAQ,UAAU,OAAW;AAC3C,QAAM,MAAM,aAAa,KAAK;AAC9B,QAAM,SAAS,MAAM,QAAQ,IAAI,GAAG;AACpC,MAAI,QAAQ;AACV,WAAO,OAAO,QAAQ;AACtB,QAAI,OAAO,SAAS,EAAG,OAAM,QAAQ,OAAO,GAAG;AAAA,EACjD;AACA,QAAM,OAAO,OAAO,QAAQ;AAC9B;AASA,SAAS,aACP,MACA,IACA,OACS;AACT,MAAI,OAAO,WAAW;AACpB,QAAI,CAAC,MAAM,QAAQ,KAAK,KAAK,MAAM,WAAW,EAAG,QAAO;AACxD,WAAO,aAAa,MAAM,MAAM,CAAC,CAAC,KAAK,KAAK,aAAa,MAAM,MAAM,CAAC,CAAC,KAAK;AAAA,EAC9E;AACA,QAAM,MAAM,aAAa,MAAM,KAAK;AACpC,UAAQ,IAAI;AAAA,IACV,KAAK;AAAM,aAAO,MAAM;AAAA,IACxB,KAAK;AAAM,aAAO,OAAO;AAAA,IACzB,KAAK;AAAM,aAAO,MAAM;AAAA,IACxB,KAAK;AAAM,aAAO,OAAO;AAAA,EAC3B;AACF;AAEA,SAAS,aAAa,GAAY,GAAoB;AACpD,MAAI,MAAM,UAAa,MAAM,KAAM,QAAO,MAAM,UAAa,MAAM,OAAO,IAAI;AAC9E,MAAI,MAAM,UAAa,MAAM,KAAM,QAAO;AAC1C,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI;AAC/D,MAAI,aAAa,QAAQ,aAAa,KAAM,QAAO,EAAE,QAAQ,IAAI,EAAE,QAAQ;AAC3E,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI;AACpF,MAAI,OAAO,MAAM,aAAa,OAAO,MAAM,WAAW;AACpD,WAAO,MAAM,IAAI,IAAI,IAAI,IAAI;AAAA,EAC/B;AAKA,SAAO;AACT;;;AC/WA,IAAM,aAAuB;AAAA,EAC3B,SAAS,CAAC;AAAA,EACV,SAAS,CAAC;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AACV;AAEO,IAAM,YAAN,MAAM,WAAa;AAAA,EACP;AAAA,EACA;AAAA,EAEjB,YAAY,QAA4B,OAAiB,YAAY;AACnE,SAAK,SAAS;AACd,SAAK,OAAO;AAAA,EACd;AAAA,EAEA,MAAS,OAAe,IAAc,OAAwB;AAC5D,UAAM,SAAsB,EAAE,MAAM,SAAS,OAAO,IAAI,MAAM;AAC9D,WAAO,IAAI,WAAa,KAAK,QAAQ;AAAA,MACnC,GAAG,KAAK;AAAA,MACR,SAAS,CAAC,GAAG,KAAK,KAAK,SAAS,MAAM;AAAA,IACxC,CAAC;AAAA,EACH;AAAA,EAEA,QAAQ,OAAe,YAA4B,OAAqB;AACtE,WAAO,IAAI,WAAa,KAAK,QAAQ;AAAA,MACnC,GAAG,KAAK;AAAA,MACR,SAAS,CAAC,GAAG,KAAK,KAAK,SAAS,EAAE,OAAO,UAAU,CAAC;AAAA,IACtD,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,GAAyB;AAC7B,WAAO,IAAI,WAAa,KAAK,QAAQ,EAAE,GAAG,KAAK,MAAM,OAAO,EAAE,CAAC;AAAA,EACjE;AAAA,EAEA,OAAO,GAAyB;AAC9B,WAAO,IAAI,WAAa,KAAK,QAAQ,EAAE,GAAG,KAAK,MAAM,QAAQ,EAAE,CAAC;AAAA,EAClE;AA
AA,EAEA,MAAM,UAAwB;AAC5B,UAAM,KAAK,OAAO,6BAA6B;AAE/C,UAAM,gBAAgB,qBAAqB,KAAK,IAAI;AACpD,UAAM,gBAAgB,cAAc,OAAO,OAAK,CAAC,eAAe,GAAG,KAAK,OAAO,gBAAgB,CAAC;AAChG,QAAI,cAAc,SAAS,GAAG;AAC5B,YAAM,IAAI,mBAAmB;AAAA,QAC3B,YAAY,KAAK,OAAO;AAAA,QACxB;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,eAAe,KAAK,oBAAoB;AAC9C,QAAI,iBAAiB,MAAM;AAMzB,YAAM,IAAI,mBAAmB;AAAA,QAC3B,YAAY,KAAK,OAAO;AAAA,QACxB;AAAA,QACA,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAEA,UAAM,UAAe,CAAC;AACtB,eAAW,MAAM,cAAc;AAC7B,YAAM,SAAS,MAAM,KAAK,OAAO,UAAU,EAAE;AAC7C,UAAI,WAAW,KAAM;AACrB,UAAI,CAAC,WAAW,QAAQ,KAAK,KAAK,OAAO,EAAG;AAC5C,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,UAAM,SAAS,KAAK,KAAK,QAAQ,SAAS,IACtC,YAAY,SAAS,KAAK,KAAK,OAAO,IACtC;AAEJ,UAAM,SAAS,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;AACzD,UAAM,UAAU,KAAK,KAAK,UAAU,SAChC,OAAO,MAAM,MAAM,IACnB,OAAO,MAAM,QAAQ,SAAS,KAAK,KAAK,KAAK;AAEjD,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,QAA2B;AAC/B,UAAM,MAAM,MAAM,KAAK,MAAM,CAAC,EAAE,QAAQ;AACxC,WAAO,IAAI,SAAS,IAAI,IAAI,CAAC,IAAK;AAAA,EACpC;AAAA,EAEA,MAAM,QAAyB;AAC7B,UAAM,MAAM,MAAM,KAAK,QAAQ;AAC/B,WAAO,IAAI;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,sBAAgD;AACtD,UAAM,MAAM,KAAK,OAAO;AAMxB,UAAM,QAAQ,oBAAI,IAAqB;AACvC,eAAW,UAAU,KAAK,KAAK,SAAS;AACtC,UAAI,OAAO,OAAO,KAAM,OAAM,IAAI,OAAO,OAAO,OAAO,KAAK;AAAA,IAC9D;AACA,QAAI,MAAM,QAAQ,GAAG;AACnB,iBAAW,OAAO,IAAI,YAAY,GAAG;AACnC,YAAI,IAAI,SAAS,YAAa;AAC9B,YAAI,IAAI,OAAO,MAAM,OAAK,MAAM,IAAI,CAAC,CAAC,GAAG;AACvC,gBAAM,QAAQ,IAAI,OAAO,IAAI,OAAK,MAAM,IAAI,CAAC,CAAC;AAC9C,gBAAM,MAAM,IAAI,YAAY,IAAI,KAAK,KAAK;AAC1C,cAAI,IAAK,QAAO,CAAC,GAAG,GAAG;AAAA,QACzB;AAAA,MACF;AAAA,IACF;AAEA,eAAW,UAAU,KAAK,KAAK,SAAS;AACtC,UAAI,OAAO,OAAO,MAAM;AACtB,cAAM,MAAM,IAAI,YAAY,OAAO,OAAO,OAAO,KAAK;AACtD,YAAI,IAAK,QAAO,CAAC,GAAG,GAAG;AAAA,MACzB,WAAW,OAAO,OAAO,QAAQ,MAAM,QAAQ,OAAO,KAAK,GAAG;AAC5D,cAAM,MAAM,IAAI,SAAS,OAAO,OAAO,OAAO,KAA2B;AACzE,YAAI,IAAK,QAAO,CAAC,GAAG,GAAG;AAAA,MACzB,WAAW,UAAU,OAAO,EAAE,GAAG;AAI/B,cAAM,MAAM,IAAI,YAAY,OAAO,OAAO,OAAO,IAAI,OAAO,KAAK;AACjE,YAAI,IAAK,QAAO,CAAC,GAAG,GAAG;AAAA,MACzB;AAAA,IACF;AAGA,QAAI,KAAK,KAAK,QAAQ,SAAS,GA
AG;AAChC,YAAM,UAAU,KAAK,KAAK,QAAQ,CAAC;AACnC,YAAM,UAAU,IAAI,UAAU,QAAQ,OAAO,QAAQ,SAAS;AAC9D,UAAI,QAAS,QAAO,QAAQ,IAAI,OAAK,EAAE,QAAQ;AAAA,IACjD;AAEA,WAAO;AAAA,EACT;AACF;AAUA,SAAS,eAAe,OAAe,KAAwC;AAC7E,MAAI,IAAI,IAAI,KAAK,EAAG,QAAO;AAC3B,aAAW,OAAO,IAAI,YAAY,GAAG;AACnC,QAAI,IAAI,SAAS,eAAe,IAAI,OAAO,SAAS,KAAK,EAAG,QAAO;AAAA,EACrE;AACA,SAAO;AACT;AAEA,SAAS,UAAU,IAAyD;AAC1E,SAAO,OAAO,OAAO,OAAO,QAAQ,OAAO,OAAO,OAAO,QAAQ,OAAO;AAC1E;AAEA,SAAS,qBAAqB,MAA0B;AACtD,QAAM,OAAO,oBAAI,IAAY;AAC7B,aAAW,KAAK,KAAK,QAAS,MAAK,IAAI,EAAE,KAAK;AAC9C,aAAW,KAAK,KAAK,QAAS,MAAK,IAAI,EAAE,KAAK;AAC9C,SAAO,CAAC,GAAG,IAAI;AACjB;AAEA,SAAS,WAAW,QAAiB,SAAqC;AACxE,aAAW,KAAK,SAAS;AACvB,QAAI,CAAC,eAAe,QAAQ,CAAC,EAAG,QAAO;AAAA,EACzC;AACA,SAAO;AACT;AAEA,SAAS,YAAe,SAAc,SAAsC;AAC1E,SAAO,CAAC,GAAG,OAAO,EAAE,KAAK,CAAC,GAAG,MAAM;AACjC,eAAW,EAAE,OAAO,UAAU,KAAK,SAAS;AAC1C,YAAM,KAAK,SAAS,GAAG,KAAK;AAC5B,YAAM,KAAK,SAAS,GAAG,KAAK;AAC5B,YAAM,MAAM,cAAc,IAAI,EAAE;AAChC,UAAI,QAAQ,EAAG,QAAO,cAAc,QAAQ,MAAM,CAAC;AAAA,IACrD;AACA,WAAO;AAAA,EACT,CAAC;AACH;AAEA,SAAS,cAAc,GAAY,GAAoB;AACrD,MAAI,MAAM,UAAa,MAAM,KAAM,QAAO,MAAM,UAAa,MAAM,OAAO,IAAI;AAC9E,MAAI,MAAM,UAAa,MAAM,KAAM,QAAO;AAC1C,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI;AAC/D,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI;AACpF,MAAI,aAAa,QAAQ,aAAa,KAAM,QAAO,EAAE,QAAQ,IAAI,EAAE,QAAQ;AAC3E,SAAO;AACT;","names":[]}
|