@noy-db/hub 0.1.0-pre.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +197 -0
- package/dist/aggregate/index.cjs +476 -0
- package/dist/aggregate/index.cjs.map +1 -0
- package/dist/aggregate/index.d.cts +38 -0
- package/dist/aggregate/index.d.ts +38 -0
- package/dist/aggregate/index.js +53 -0
- package/dist/aggregate/index.js.map +1 -0
- package/dist/blobs/index.cjs +1480 -0
- package/dist/blobs/index.cjs.map +1 -0
- package/dist/blobs/index.d.cts +45 -0
- package/dist/blobs/index.d.ts +45 -0
- package/dist/blobs/index.js +48 -0
- package/dist/blobs/index.js.map +1 -0
- package/dist/bundle/index.cjs +436 -0
- package/dist/bundle/index.cjs.map +1 -0
- package/dist/bundle/index.d.cts +7 -0
- package/dist/bundle/index.d.ts +7 -0
- package/dist/bundle/index.js +40 -0
- package/dist/bundle/index.js.map +1 -0
- package/dist/chunk-2QR2PQTT.js +217 -0
- package/dist/chunk-2QR2PQTT.js.map +1 -0
- package/dist/chunk-4OWFYIDQ.js +79 -0
- package/dist/chunk-4OWFYIDQ.js.map +1 -0
- package/dist/chunk-5AATM2M2.js +90 -0
- package/dist/chunk-5AATM2M2.js.map +1 -0
- package/dist/chunk-ACLDOTNQ.js +543 -0
- package/dist/chunk-ACLDOTNQ.js.map +1 -0
- package/dist/chunk-BTDCBVJW.js +160 -0
- package/dist/chunk-BTDCBVJW.js.map +1 -0
- package/dist/chunk-CIMZBAZB.js +72 -0
- package/dist/chunk-CIMZBAZB.js.map +1 -0
- package/dist/chunk-E445ICYI.js +365 -0
- package/dist/chunk-E445ICYI.js.map +1 -0
- package/dist/chunk-EXQRC2L4.js +722 -0
- package/dist/chunk-EXQRC2L4.js.map +1 -0
- package/dist/chunk-FZU343FL.js +32 -0
- package/dist/chunk-FZU343FL.js.map +1 -0
- package/dist/chunk-GJILMRPO.js +354 -0
- package/dist/chunk-GJILMRPO.js.map +1 -0
- package/dist/chunk-GOUT6DND.js +1285 -0
- package/dist/chunk-GOUT6DND.js.map +1 -0
- package/dist/chunk-J66GRPNH.js +111 -0
- package/dist/chunk-J66GRPNH.js.map +1 -0
- package/dist/chunk-M2F2JAWB.js +464 -0
- package/dist/chunk-M2F2JAWB.js.map +1 -0
- package/dist/chunk-M5INGEFC.js +84 -0
- package/dist/chunk-M5INGEFC.js.map +1 -0
- package/dist/chunk-M62XNWRA.js +72 -0
- package/dist/chunk-M62XNWRA.js.map +1 -0
- package/dist/chunk-MR4424N3.js +275 -0
- package/dist/chunk-MR4424N3.js.map +1 -0
- package/dist/chunk-NPC4LFV5.js +132 -0
- package/dist/chunk-NPC4LFV5.js.map +1 -0
- package/dist/chunk-NXFEYLVG.js +311 -0
- package/dist/chunk-NXFEYLVG.js.map +1 -0
- package/dist/chunk-R36SIKES.js +79 -0
- package/dist/chunk-R36SIKES.js.map +1 -0
- package/dist/chunk-TDR6T5CJ.js +381 -0
- package/dist/chunk-TDR6T5CJ.js.map +1 -0
- package/dist/chunk-UF3BUNQZ.js +1 -0
- package/dist/chunk-UF3BUNQZ.js.map +1 -0
- package/dist/chunk-UQFSPSWG.js +1109 -0
- package/dist/chunk-UQFSPSWG.js.map +1 -0
- package/dist/chunk-USKYUS74.js +793 -0
- package/dist/chunk-USKYUS74.js.map +1 -0
- package/dist/chunk-XCL3WP6J.js +121 -0
- package/dist/chunk-XCL3WP6J.js.map +1 -0
- package/dist/chunk-XHFOENR2.js +680 -0
- package/dist/chunk-XHFOENR2.js.map +1 -0
- package/dist/chunk-ZFKD4QMV.js +430 -0
- package/dist/chunk-ZFKD4QMV.js.map +1 -0
- package/dist/chunk-ZLMV3TUA.js +490 -0
- package/dist/chunk-ZLMV3TUA.js.map +1 -0
- package/dist/chunk-ZRG4V3F5.js +17 -0
- package/dist/chunk-ZRG4V3F5.js.map +1 -0
- package/dist/consent/index.cjs +204 -0
- package/dist/consent/index.cjs.map +1 -0
- package/dist/consent/index.d.cts +24 -0
- package/dist/consent/index.d.ts +24 -0
- package/dist/consent/index.js +23 -0
- package/dist/consent/index.js.map +1 -0
- package/dist/crdt/index.cjs +152 -0
- package/dist/crdt/index.cjs.map +1 -0
- package/dist/crdt/index.d.cts +30 -0
- package/dist/crdt/index.d.ts +30 -0
- package/dist/crdt/index.js +24 -0
- package/dist/crdt/index.js.map +1 -0
- package/dist/crypto-IVKU7YTT.js +44 -0
- package/dist/crypto-IVKU7YTT.js.map +1 -0
- package/dist/delegation-XDJCBTI2.js +16 -0
- package/dist/delegation-XDJCBTI2.js.map +1 -0
- package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
- package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
- package/dist/hash-9KO1BGxh.d.cts +63 -0
- package/dist/hash-ChfJjRjQ.d.ts +63 -0
- package/dist/history/index.cjs +1215 -0
- package/dist/history/index.cjs.map +1 -0
- package/dist/history/index.d.cts +62 -0
- package/dist/history/index.d.ts +62 -0
- package/dist/history/index.js +79 -0
- package/dist/history/index.js.map +1 -0
- package/dist/i18n/index.cjs +746 -0
- package/dist/i18n/index.cjs.map +1 -0
- package/dist/i18n/index.d.cts +38 -0
- package/dist/i18n/index.d.ts +38 -0
- package/dist/i18n/index.js +55 -0
- package/dist/i18n/index.js.map +1 -0
- package/dist/index-BRHBCmLt.d.ts +1940 -0
- package/dist/index-C8kQtmOk.d.ts +380 -0
- package/dist/index-DN-J-5wT.d.cts +1940 -0
- package/dist/index-DhjMjz7L.d.cts +380 -0
- package/dist/index.cjs +14756 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +269 -0
- package/dist/index.d.ts +269 -0
- package/dist/index.js +6085 -0
- package/dist/index.js.map +1 -0
- package/dist/indexing/index.cjs +736 -0
- package/dist/indexing/index.cjs.map +1 -0
- package/dist/indexing/index.d.cts +36 -0
- package/dist/indexing/index.d.ts +36 -0
- package/dist/indexing/index.js +77 -0
- package/dist/indexing/index.js.map +1 -0
- package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
- package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
- package/dist/ledger-2NX4L7PN.js +33 -0
- package/dist/ledger-2NX4L7PN.js.map +1 -0
- package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
- package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
- package/dist/periods/index.cjs +1035 -0
- package/dist/periods/index.cjs.map +1 -0
- package/dist/periods/index.d.cts +21 -0
- package/dist/periods/index.d.ts +21 -0
- package/dist/periods/index.js +25 -0
- package/dist/periods/index.js.map +1 -0
- package/dist/predicate-SBHmi6D0.d.cts +161 -0
- package/dist/predicate-SBHmi6D0.d.ts +161 -0
- package/dist/query/index.cjs +1957 -0
- package/dist/query/index.cjs.map +1 -0
- package/dist/query/index.d.cts +3 -0
- package/dist/query/index.d.ts +3 -0
- package/dist/query/index.js +62 -0
- package/dist/query/index.js.map +1 -0
- package/dist/session/index.cjs +487 -0
- package/dist/session/index.cjs.map +1 -0
- package/dist/session/index.d.cts +45 -0
- package/dist/session/index.d.ts +45 -0
- package/dist/session/index.js +44 -0
- package/dist/session/index.js.map +1 -0
- package/dist/shadow/index.cjs +133 -0
- package/dist/shadow/index.cjs.map +1 -0
- package/dist/shadow/index.d.cts +16 -0
- package/dist/shadow/index.d.ts +16 -0
- package/dist/shadow/index.js +20 -0
- package/dist/shadow/index.js.map +1 -0
- package/dist/store/index.cjs +1069 -0
- package/dist/store/index.cjs.map +1 -0
- package/dist/store/index.d.cts +491 -0
- package/dist/store/index.d.ts +491 -0
- package/dist/store/index.js +34 -0
- package/dist/store/index.js.map +1 -0
- package/dist/strategy-BSxFXGzb.d.cts +110 -0
- package/dist/strategy-BSxFXGzb.d.ts +110 -0
- package/dist/strategy-D-SrOLCl.d.cts +548 -0
- package/dist/strategy-D-SrOLCl.d.ts +548 -0
- package/dist/sync/index.cjs +1062 -0
- package/dist/sync/index.cjs.map +1 -0
- package/dist/sync/index.d.cts +42 -0
- package/dist/sync/index.d.ts +42 -0
- package/dist/sync/index.js +28 -0
- package/dist/sync/index.js.map +1 -0
- package/dist/team/index.cjs +1233 -0
- package/dist/team/index.cjs.map +1 -0
- package/dist/team/index.d.cts +117 -0
- package/dist/team/index.d.ts +117 -0
- package/dist/team/index.js +39 -0
- package/dist/team/index.js.map +1 -0
- package/dist/tx/index.cjs +212 -0
- package/dist/tx/index.cjs.map +1 -0
- package/dist/tx/index.d.cts +20 -0
- package/dist/tx/index.d.ts +20 -0
- package/dist/tx/index.js +20 -0
- package/dist/tx/index.js.map +1 -0
- package/dist/types-BZpCZB8N.d.ts +7526 -0
- package/dist/types-Bfs0qr5F.d.cts +7526 -0
- package/dist/ulid-COREQ2RQ.js +9 -0
- package/dist/ulid-COREQ2RQ.js.map +1 -0
- package/dist/util/index.cjs +230 -0
- package/dist/util/index.cjs.map +1 -0
- package/dist/util/index.d.cts +77 -0
- package/dist/util/index.d.ts +77 -0
- package/dist/util/index.js +190 -0
- package/dist/util/index.js.map +1 -0
- package/package.json +244 -0
@@ -0,0 +1 @@
{"version":3,"sources":["../src/types.ts","../src/errors.ts","../src/crypto.ts","../src/bundle/ulid.ts","../src/history/ledger/entry.ts","../src/history/ledger/patch.ts","../src/history/ledger/constants.ts","../src/history/ledger/hash.ts","../src/history/ledger/store.ts","../src/history/ledger/index.ts","../src/team/tiers.ts","../src/team/delegation.ts","../src/index.ts","../src/blobs/blob-set.ts","../src/blobs/mime-magic.ts","../src/store/bundle-store.ts","../src/store/sync-policy.ts","../src/store/route-store.ts","../src/store/store-middleware.ts","../src/bundle/format.ts","../src/bundle/bundle.ts","../src/schema.ts","../src/history/history.ts","../src/history/time-machine.ts","../src/shadow/vault-frame.ts","../src/consent/consent.ts","../src/refs.ts","../src/team/keyring.ts","../src/noydb.ts","../src/vault.ts","../src/collection.ts","../src/crdt/strategy.ts","../src/i18n/strategy.ts","../src/history/strategy.ts","../src/query/predicate.ts","../src/query/join.ts","../src/query/live.ts","../src/aggregate/strategy.ts","../src/query/builder.ts","../src/indexing/eager-indexes.ts","../src/aggregate/reducers.ts","../src/aggregate/aggregation.ts","../src/aggregate/groupby.ts","../src/query/scan-builder.ts","../src/indexing/persisted-indexes.ts","../src/indexing/lazy-builder.ts","../src/indexing/strategy.ts","../src/cache/lru.ts","../src/cache/policy.ts","../src/team/sync-strategy.ts","../src/blobs/strategy.ts","../src/shadow/strategy.ts","../src/consent/strategy.ts","../src/periods/strategy.ts","../src/i18n/dictionary.ts","../src/periods/periods.ts","../src/blobs/export-blobs.ts","../src/blobs/blob-compaction.ts","../src/team/magic-link-grant.ts","../src/events.ts","../src/tx/strategy.ts","../src/session/strategy.ts","../src/crdt/crdt.ts","../src/team/presence.ts","../src/team/sync.ts","../src/team/sync-transaction.ts","../src/tx/transaction.ts","../src/i18n/core.ts","../src/team/sync-credentials.ts","../src/session/session-policy.ts","../src/session/session.ts","../src/session/dev-unlock.ts","../src/history/diff.ts","../src/vault-diff.ts","../src/validation.ts"],"sourcesContent":["/**\n * Core types — the {@link NoydbStore} interface, envelope format, roles, and\n * all configuration shapes consumed by {@link createNoydb}.\n *\n * ## What lives here\n *\n * - **{@link NoydbStore}** — the 6-method contract every backend must implement\n * (`get`, `put`, `delete`, `list`, `loadAll`, `saveAll`).\n * - **{@link EncryptedEnvelope}** — the wire format stored by backends:\n * `{ _noydb, _v, _ts, _iv, _data }`. Backends only ever see this shape.\n * - **{@link Role} / {@link Permission}** — the access-control vocabulary\n * (`owner`, `admin`, `operator`, `viewer`, `client`).\n * - **{@link NoydbOptions}** — the full configuration object passed to\n * {@link createNoydb}.\n *\n * ## Extending the store interface\n *\n * All optional store capabilities (`ping`, `listPage`, `listSince`,\n * `presencePublish`, `presenceSubscribe`, `listVaults`) are additive extensions\n * discovered via `'method' in store`. 
Implementing them unlocks features but\n * is never required — core always falls back to the 6-method baseline.\n *\n * @module\n */\n\nimport type { StandardSchemaV1 } from './schema.js'\nimport type { SyncPolicy } from './store/sync-policy.js'\nimport type { BlobStrategy } from './blobs/strategy.js'\nimport type { IndexStrategy } from './indexing/strategy.js'\nimport type { AggregateStrategy } from './aggregate/strategy.js'\nimport type { CrdtStrategy } from './crdt/strategy.js'\nimport type { ConsentStrategy } from './consent/strategy.js'\nimport type { PeriodsStrategy } from './periods/strategy.js'\nimport type { ShadowStrategy } from './shadow/strategy.js'\nimport type { TxStrategy } from './tx/strategy.js'\nimport type { HistoryStrategy } from './history/strategy.js'\nimport type { I18nStrategy } from './i18n/strategy.js'\nimport type { SessionStrategy } from './session/strategy.js'\nimport type { SyncStrategy } from './team/sync-strategy.js'\n\n/** Format version for encrypted record envelopes. */\nexport const NOYDB_FORMAT_VERSION = 1 as const\n\n/** Format version for keyring files. */\nexport const NOYDB_KEYRING_VERSION = 1 as const\n\n/** Format version for backup files. */\nexport const NOYDB_BACKUP_VERSION = 1 as const\n\n/** Format version for sync metadata. */\nexport const NOYDB_SYNC_VERSION = 1 as const\n\n// ─── Roles & Permissions ───────────────────────────────────────────────\n\n/**\n * Access role assigned to a user within a vault.\n *\n * Roles control both the operations a user can perform and which DEKs\n * they receive in their keyring:\n *\n * | Role | Collections | Can grant/revoke | Can export |\n * |------------|-----------------|:----------------:|:----------:|\n * | `owner` | all (rw) | Yes (all roles) | Yes |\n * | `admin` | all (rw) | Yes (≤ admin) | Yes |\n * | `operator` | explicit (rw) | No | ACL-scoped |\n * | `viewer` | all (ro) | No | Yes |\n * | `client` | explicit (ro) | No | ACL-scoped |\n */\nexport type Role = 'owner' | 'admin' | 'operator' | 'viewer' | 'client'\n\n/**\n * Read-write or read-only access on a collection.\n * Stored per-collection in the user's keyring.\n */\nexport type Permission = 'rw' | 'ro'\n\n/**\n * Map of collection name → permission level for a user's keyring entry.\n * `'*'` is the wildcard collection matching all collections in the vault.\n */\nexport type Permissions = Record<string, Permission>\n\n// ─── Encrypted Envelope ────────────────────────────────────────────────\n\n/** The encrypted wrapper stored by adapters. Adapters only ever see this. */\nexport interface EncryptedEnvelope {\n readonly _noydb: typeof NOYDB_FORMAT_VERSION\n readonly _v: number\n readonly _ts: string\n readonly _iv: string\n readonly _data: string\n /** User who created this version (unencrypted metadata). */\n readonly _by?: string\n /**\n * Hierarchical access tier. Omitted → tier 0.\n *\n * Unencrypted on purpose — the store reads it to route the envelope\n * to the right DEK slot without having to try-decrypt against every\n * tier. Only leaks the tier of each record, not any value\n * equivalence.\n */\n readonly _tier?: number\n /**\n * User id who last elevated this record. Used by\n * `demote()` to gate the reverse operation: only the original\n * elevator or an owner can demote a record back down. Cleared on\n * every successful demote so a later re-elevate requires the new\n * actor to own the demotion right.\n */\n readonly _elevatedBy?: string\n /**\n * Deterministic-encryption index. 
Map of field name →\n * base64 deterministic ciphertext. Present only when the collection\n * declares `deterministicFields` and the feature is acknowledged. The\n * field names are unencrypted (they're the index keys); the values\n * are AES-GCM ciphertext with an HKDF-derived deterministic IV.\n *\n * Enables blind equality search (`collection.findByDet(field,\n * value)`) without decrypting every record. Leaks equality as a known\n * side channel.\n */\n readonly _det?: Record<string, string>\n}\n\n/**\n * Placeholder returned by `getAtTier()` in `'ghost'` mode when a\n * record is at a tier the caller cannot decrypt. Record existence is\n * advertised — the id and tier are visible — but contents are\n * withheld. `canElevateFrom` lists user ids authorized to elevate\n * access for this caller when known; absent when the workflow is\n * not configured.\n */\nexport interface GhostRecord {\n readonly _ghost: true\n readonly _tier: number\n readonly canElevateFrom?: readonly string[]\n}\n\n/** Control what lower-tier reads see above their clearance. */\nexport type TierMode = 'invisibility' | 'ghost'\n\n/**\n * Event emitted when a record at a tier above the caller's inherent\n * clearance is read or written successfully (via elevation or\n * delegation). Always written to the ledger; subscribers get a\n * real-time feed.\n */\nexport interface CrossTierAccessEvent {\n readonly actor: string\n readonly collection: string\n readonly id: string\n readonly tier: number\n /** How the caller gained tier access: they elevated it, or a delegation is active. */\n readonly authorization: 'elevation' | 'delegation' | 'inherent'\n readonly op: 'get' | 'put' | 'elevate' | 'demote'\n readonly ts: string\n /**\n * When `authorization === 'elevation'`, the audit reason string the\n * caller passed to `vault.elevate(...)`. Empty for inherent /\n * delegation paths.\n */\n readonly reason?: string\n /**\n * When `authorization === 'elevation'`, the tier the caller's\n * keyring effectively held BEFORE elevation. Useful for audit\n * dashboards distinguishing \"operator elevating to 2\" from\n * \"inherent tier-2 write.\"\n */\n readonly elevatedFrom?: number\n}\n\n/**\n * A single deterministic-ciphertext index slot on an envelope. Stored\n * as `iv:data` (both base64, colon-separated) so a single string per\n * field keeps the envelope compact.\n */\nexport type DeterministicCipher = string\n\n// ─── Vault Snapshot ──────────────────────────────────────────────\n\n/** All records across all collections for a compartment. */\nexport type VaultSnapshot = Record<string, Record<string, EncryptedEnvelope>>\n\n/**\n * Result of a single page fetch via the optional `listPage` adapter extension.\n *\n * `items` carries the actual encrypted envelopes (not just ids) so the\n * caller can decrypt and emit a single record without an extra `get()`\n * round-trip per id. `nextCursor` is `null` on the final page.\n */\nexport interface ListPageResult {\n /** Encrypted envelopes for this page, in adapter-defined order. */\n items: Array<{ id: string; envelope: EncryptedEnvelope }>\n /** Opaque cursor for the next page, or `null` if this was the last page. */\n nextCursor: string | null\n}\n\n// ─── Store Interface ───────────────────────────────────────────────────\n\nexport interface NoydbStore {\n /**\n * Optional human-readable adapter name (e.g. 'memory', 'file', 'dynamo').\n * Used in diagnostic messages and the listPage fallback warning. 
Adapters\n * are encouraged to set this so logs are clearer about which backend is\n * involved when something goes wrong.\n */\n name?: string\n\n /** Get a single record. Returns null if not found. */\n get(vault: string, collection: string, id: string): Promise<EncryptedEnvelope | null>\n\n /** Put a record. Throws ConflictError if expectedVersion doesn't match. */\n put(\n vault: string,\n collection: string,\n id: string,\n envelope: EncryptedEnvelope,\n expectedVersion?: number,\n ): Promise<void>\n\n /** Delete a record. */\n delete(vault: string, collection: string, id: string): Promise<void>\n\n /** List all record IDs in a collection. */\n list(vault: string, collection: string): Promise<string[]>\n\n /** Load all records for a vault (initial hydration). */\n loadAll(vault: string): Promise<VaultSnapshot>\n\n /** Save all records for a vault (bulk write / restore). */\n saveAll(vault: string, data: VaultSnapshot): Promise<void>\n\n /** Optional connectivity check for sync engine. */\n ping?(): Promise<boolean>\n\n /**\n * Optional: list record IDs in a collection that have `_ts` after `since`.\n * Used by partial sync (`pull({ modifiedSince })`). Adapters that omit this\n * fall back to a full `loadAll` + client-side timestamp filter.\n */\n listSince?(vault: string, collection: string, since: string): Promise<string[]>\n\n /**\n * Optional pagination extension. Adapters that implement `listPage` get\n * the streaming `Collection.scan()` fast path; adapters that don't are\n * silently fallen back to a full `loadAll()` + slice (with a one-time\n * console.warn).\n *\n * `cursor` is opaque to the core — each adapter encodes its own paging\n * state (DynamoDB: base64 LastEvaluatedKey JSON; S3: ContinuationToken;\n * memory/file/browser: numeric offset of a sorted id list). Pass\n * `undefined` to start from the beginning.\n *\n * `limit` is a soft upper bound on `items.length`. Adapters MAY return\n * fewer items even when more exist (e.g. if the underlying store has\n * its own page size cap), and MUST signal \"no more pages\" by returning\n * `nextCursor: null`.\n *\n * The 6-method core contract is unchanged — this is an additive\n * extension discovered via `'listPage' in adapter`.\n */\n listPage?(\n vault: string,\n collection: string,\n cursor?: string,\n limit?: number,\n ): Promise<ListPageResult>\n\n /**\n * Optional pub/sub for real-time presence.\n * Publish an encrypted payload to a presence channel.\n * Falls back to storage-based polling when absent.\n */\n presencePublish?(channel: string, payload: string): Promise<void>\n\n /**\n * Optional pub/sub for real-time presence.\n * Subscribe to a presence channel. Returns an unsubscribe function.\n * Falls back to storage-based polling when absent.\n */\n presenceSubscribe?(channel: string, callback: (payload: string) => void): () => void\n\n /**\n * Optional cross-vault enumeration extension.\n *\n * Returns the names of every top-level vault the store\n * currently stores. Used by `Noydb.listAccessibleVaults()` to\n * enumerate the universe of vaults before filtering down to\n * the ones the calling principal can actually unwrap.\n *\n * **Why this is optional:** the storage shape of compartments\n * differs across backends. Memory and file stores store\n * vaults as top-level keys / directories and can enumerate\n * them in O(1) calls. 
DynamoDB stores everything in a single table\n * keyed by `(compartment#collection, id)` — enumerating compartments\n * requires either a Scan (expensive, eventually consistent, leaks\n * ciphertext metadata) or a dedicated GSI that the consumer\n * provisioned. S3 needs a prefix list (cheap if enabled, ACL-sensitive\n * otherwise). Browser localStorage can scan keys by prefix.\n *\n * Stores that cannot implement `listVaults` cheaply or\n * cleanly should omit it. Core surfaces a `StoreCapabilityError`\n * with a clear message when a caller invokes\n * `listAccessibleVaults()` against a store that doesn't\n * provide this method, so consumers know to either upgrade their\n * store, provide a candidate list explicitly to `queryAcross()`,\n * or fall back to maintaining the compartment index out of band.\n *\n * **Privacy note:** `listVaults` returns *every* compartment\n * the store has, not just the ones the caller can access. The\n * existence-leak filtering (returning only compartments whose\n * keyring the caller can unwrap) happens in core, not in the\n * store. The store is trusted to know its own contents — that\n * is not a leak in the threat model. The leak the API guards\n * against is the *return value* of `listAccessibleVaults()`\n * exposing existence to a downstream observer who only sees that\n * function's output.\n *\n * The 6-method core contract is unchanged — this is an additive\n * extension discovered via `'listVaults' in store`.\n */\n listVaults?(): Promise<string[]>\n\n /**\n * Optional: generate a presigned URL for direct client download.\n * Only meaningful for object stores (S3, GCS) that support URL signing.\n * Returns a time-limited URL that fetches the encrypted envelope directly.\n * The caller must decrypt client-side (the URL returns ciphertext).\n */\n presignUrl?(vault: string, collection: string, id: string, expiresInSeconds?: number): Promise<string>\n\n /**\n * Optional: estimate current storage usage.\n * Returns `{ usedBytes, quotaBytes }` or null if the store cannot estimate.\n * Used by quota-aware routing to detect overflow conditions.\n */\n estimateUsage?(): Promise<{ usedBytes: number; quotaBytes: number } | null>\n\n /**\n * Optional multi-record atomic write.\n *\n * When present, `db.transaction(async (tx) => { ... })` uses this to\n * commit every staged op in one storage-layer transaction — either\n * all ops land or none do, regardless of which records they touch.\n * Every `TxOp.expectedVersion` (when set) must be honored atomically\n * alongside the write; any violation throws `ConflictError` and the\n * whole batch fails.\n *\n * Stores that omit this fall through to the hub's per-record OCC\n * fallback: pre-flight CAS check, then sequential `put`/`delete`\n * with best-effort unwind on mid-batch failure (see\n * `runTransaction` for the exact semantics and crash window).\n *\n * Native implementations: `to-memory` (single Map mutation),\n * `to-dynamo` (`TransactWriteItems`), `to-browser-idb` (one\n * `readwrite` transaction). File / S3 cannot implement this\n * atomically and should omit the method.\n */\n tx?(ops: readonly TxOp[]): Promise<void>\n}\n\n/**\n * A single staged operation inside a `db.transaction(fn)` commit. The\n * hub assembles `TxOp[]` from the user's `tx.collection().put/delete`\n * calls, encrypts any `record` values into `envelope`, and hands the\n * array to `NoydbStore.tx()` when the store supports atomic batch\n * writes. 
Stores that implement `tx()` MUST honor every\n * `expectedVersion` atomically against the stored envelope version.\n */\nexport interface TxOp {\n readonly type: 'put' | 'delete'\n readonly vault: string\n readonly collection: string\n readonly id: string\n /** Populated for `type: 'put'` — the encrypted envelope to write. */\n readonly envelope?: EncryptedEnvelope\n /** Optional per-record CAS. Mismatch must throw `ConflictError`. */\n readonly expectedVersion?: number\n}\n\n// ─── Store Factory Helper ──────────────────────────────────────────────\n\n/** Type-safe helper for creating store factories. */\nexport function createStore<TOptions>(\n factory: (options: TOptions) => NoydbStore,\n): (options: TOptions) => NoydbStore {\n return factory\n}\n\n// ─── Keyring ───────────────────────────────────────────────────────────\n\n/**\n * Interchange formats `@noy-db/as-*` packages can produce. `'*'` is a\n * wildcard granting every current + future plaintext format.\n */\nexport type ExportFormat =\n | 'xlsx'\n | 'csv'\n | 'json'\n | 'ndjson'\n | 'xml'\n | 'sql'\n | 'pdf'\n | 'blob'\n | 'zip'\n | '*'\n\n/**\n * Owner-granted export capability on a keyring.\n *\n * Two independent dimensions:\n *\n * - `plaintext` — per-format allowlist for record formatters + blob\n * extractors that emit plaintext bytes (`as-xlsx`, `as-csv`,\n * `as-blob`, `as-zip`, …). **Defaults to empty** for every role;\n * the owner/admin must positively grant per-format (or `'*'`).\n * - `bundle` — boolean for `.noydb` encrypted container export\n * (`as-noydb`). **Default policy: on for owner/admin, off for\n * operator/viewer/client** — applied when the field is absent or\n * undefined (see `hasExportCapability`).\n */\nexport interface ExportCapability {\n readonly plaintext?: readonly ExportFormat[]\n readonly bundle?: boolean\n}\n\n/**\n * Owner-granted import capability on a keyring (sibling of\n * `ExportCapability`, issue ).\n *\n * Two independent dimensions:\n *\n * - `plaintext` — per-format allowlist for `as-*` readers that ingest\n * plaintext bytes (`as-csv`, `as-json`, `as-ndjson`, `as-zip`, …).\n * Defaults to empty for every role; the owner/admin must positively\n * grant per-format (or `'*'`).\n * - `bundle` — boolean gate for `.noydb` bundle import. **Defaults to\n * `false` for every role**, including owner/admin. Import is more\n * dangerous than export (corrupts vs leaks), so the policy is\n * default-closed across the board — the owner explicitly opts a\n * keyring in via `db.grant({ importCapability: { bundle: true } })`.\n */\nexport interface ImportCapability {\n readonly plaintext?: readonly ExportFormat[]\n readonly bundle?: boolean\n}\n\nexport interface KeyringFile {\n readonly _noydb_keyring: typeof NOYDB_KEYRING_VERSION\n readonly user_id: string\n readonly display_name: string\n readonly role: Role\n readonly permissions: Permissions\n readonly deks: Record<string, string>\n readonly salt: string\n readonly created_at: string\n readonly granted_by: string\n /**\n * Optional — authorization spec capability bits. Absent on keyrings written\n * before the RFC implementation. Loading falls back to role-based\n * defaults (owner/admin get bundle-on, everyone else off).\n */\n readonly export_capability?: ExportCapability\n /**\n * Optional bundle-slot expiry. ISO-8601 timestamp; past\n * the cutoff `loadKeyring` throws `KeyringExpiredError` before any\n * DEK unwrap is attempted. 
Useful for time-boxed audit access:\n * \"this slot works for 30 days then becomes opaque to its holder.\"\n *\n * Absent on live keyrings written via `db.grant()` — the field is\n * meaningful for `BundleRecipient` slots produced by\n * `writeNoydbBundle({ recipients: [...] })`. Setting it on a live\n * keyring is allowed but unusual.\n */\n readonly expires_at?: string\n /**\n * Optional — issue import-capability bits. Absent on keyrings\n * written before landed. Loading falls back to default-closed\n * for every role and every format.\n */\n readonly import_capability?: ImportCapability\n /**\n * hierarchical access clearance. Absent → 0 (advisory;\n * the real check is whether the DEK map carries a `collection#tier`\n * entry for the requested tier). Owners and admins default to the\n * highest tier they have DEKs for at grant time.\n */\n readonly clearance?: number\n}\n\n// ─── Backup ────────────────────────────────────────────────────────────\n\nexport interface VaultBackup {\n readonly _noydb_backup: typeof NOYDB_BACKUP_VERSION\n readonly _compartment: string\n readonly _exported_at: string\n readonly _exported_by: string\n readonly keyrings: Record<string, KeyringFile>\n readonly collections: VaultSnapshot\n /**\n * Internal collections (`_ledger`, `_ledger_deltas`, `_history`, `_sync`, …)\n * captured alongside the data collections. Optional for backwards\n * compat with backups, which only stored data collections —\n * loading a backup leaves the ledger empty (and `verifyBackupIntegrity`\n * skips the chain check, surfacing only a console warning).\n */\n readonly _internal?: VaultSnapshot\n /**\n * Verifiable-backup metadata. Embeds the ledger head at\n * dump time so `load()` can cross-check that the loaded chain matches\n * exactly what was exported. A backup whose chain has been tampered\n * with — either by modifying ledger entries or by modifying data\n * envelopes that the chain references — fails this check.\n *\n * Optional for backwards compat with backups; missing means\n * \"legacy backup, load with a warning, no integrity check\".\n */\n readonly ledgerHead?: {\n /** Hex sha256 of the canonical JSON of the last ledger entry. */\n readonly hash: string\n /** Sequential index of the last ledger entry. */\n readonly index: number\n /** ISO timestamp captured at dump time. */\n readonly ts: string\n }\n}\n\n// ─── Export ────────────────────────────────────────────────────────────\n\n/**\n * Options for `Vault.exportStream()` and `Vault.exportJSON()`.\n *\n * The defaults match the most common consumer pattern: one chunk per\n * collection, no ledger metadata. Per-record streaming and ledger-head\n * inclusion are opt-in because both add structure most consumers don't\n * need.\n */\nexport interface ExportStreamOptions {\n /**\n * `'collection'` (default) yields one chunk per collection with all\n * records bundled in `chunk.records`. `'record'` yields one chunk per\n * record, useful for arbitrarily large collections that should never\n * be materialized as a single array.\n */\n readonly granularity?: 'collection' | 'record'\n\n /**\n * When `true`, every chunk includes the current compartment ledger\n * head under `chunk.ledgerHead`. The value is identical across every\n * chunk in a single export (one ledger per compartment). Forward-\n * compatible with future partition work where the head would become\n * per-partition. Default: `false`.\n */\n readonly withLedgerHead?: boolean\n /**\n * When set to a BCP 47 locale string (e.g. 
`'th'`), `exportJSON()`\n * resolves all `dictKey` labels to that locale and omits the raw\n * `dictionaries` snapshot from the output. Has no effect\n * on `exportStream()` — format packages use the `chunk.dictionaries`\n * snapshot directly and apply their own locale strategy.\n *\n * Default: `undefined` — embed the raw snapshot under `_dictionaries`.\n */\n readonly resolveLabels?: string\n}\n\n/**\n * One chunk yielded by `Vault.exportStream()`.\n *\n * `granularity: 'collection'` yields one chunk per collection with the\n * full record array in `records`. `granularity: 'record'` yields one\n * chunk per record with `records` containing exactly one element — the\n * `schema` and `refs` metadata is repeated on every chunk so consumers\n * doing per-record streaming don't have to thread state across yields.\n */\nexport interface ExportChunk<T = unknown> {\n /** Collection name (no leading underscore — internal collections are filtered out). */\n readonly collection: string\n\n /**\n * Standard Schema validator attached to the collection at `collection()`\n * construction time, or `null` if no schema was provided. Surfaced so\n * downstream serializers (`@noy-db/as-*` packages, custom\n * exporters) can produce schema-aware output (typed CSV headers, XSD\n * generation, etc.) without poking at collection internals.\n */\n readonly schema: StandardSchemaV1<unknown, T> | null\n\n /**\n * Foreign-key references declared on the collection via the `refs`\n * option, as the `{ field → { target, mode } }` map produced by\n * `RefRegistry.getOutbound`. Empty object when no refs were declared.\n */\n readonly refs: Record<string, { readonly target: string; readonly mode: 'strict' | 'warn' | 'cascade' }>\n\n /**\n * Decrypted, ACL-scoped, schema-validated records. Length 1 in\n * `granularity: 'record'` mode, full collection in `granularity: 'collection'`\n * mode. Records are returned by reference from the collection's eager\n * cache where applicable — consumers must treat them as immutable.\n */\n readonly records: T[]\n\n /**\n * Dictionary snapshots for every `dictKey` field declared on this\n * collection. Captured once at stream-start and held\n * constant across all chunks within the same export — a rename\n * mid-export does not change the snapshot. `undefined` when the\n * collection has no `dictKeyFields`.\n *\n * Shape: `{ [fieldName]: { [stableKey]: { [locale]: label } } }`\n *\n * @example\n * ```ts\n * chunk.dictionaries?.status?.paid?.th // → 'ชำระแล้ว'\n * ```\n */\n readonly dictionaries?: Record<\n string, // field name\n Record<string, Record<string, string>> // stable key → locale → label\n >\n\n /**\n * Vault ledger head at export time. Present only when\n * `exportStream({ withLedgerHead: true })` was called. 
Identical\n * across every chunk in the same export — included on every chunk\n * for forward-compatibility with future per-partition ledgers, where\n * the value will differ per chunk.\n */\n readonly ledgerHead?: {\n readonly hash: string\n readonly index: number\n readonly ts: string\n }\n}\n\n// ─── Sync ──────────────────────────────────────────────────────────────\n\nexport interface DirtyEntry {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly action: 'put' | 'delete'\n readonly version: number\n readonly timestamp: string\n}\n\nexport interface SyncMetadata {\n readonly _noydb_sync: typeof NOYDB_SYNC_VERSION\n readonly last_push: string | null\n readonly last_pull: string | null\n readonly dirty: DirtyEntry[]\n}\n\nexport interface Conflict {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly local: EncryptedEnvelope\n readonly remote: EncryptedEnvelope\n readonly localVersion: number\n readonly remoteVersion: number\n /**\n * Present only when the collection uses `conflictPolicy: 'manual'`.\n * Call `resolve(winner)` to commit the winning envelope, or\n * `resolve(null)` to defer (conflict stays queued for the next sync).\n * Called synchronously inside the `sync:conflict` event handler.\n */\n readonly resolve?: (winner: EncryptedEnvelope | null) => void\n}\n\nexport type ConflictStrategy =\n | 'local-wins'\n | 'remote-wins'\n | 'version'\n | ((conflict: Conflict) => 'local' | 'remote')\n\n/**\n * Collection-level conflict policy.\n * Overrides the db-level `conflict` option for the specific collection.\n *\n * - `'last-writer-wins'` — higher `_ts` wins (timestamp LWW).\n * - `'first-writer-wins'` — lower `_v` wins (earlier version is preserved).\n * - `'manual'` — emits `sync:conflict` with a `resolve` callback. Call\n * `resolve(winner)` synchronously to commit or `resolve(null)` to defer.\n * - Custom fn — synchronous `(local: T, remote: T) => T`. Must be pure.\n */\nexport type ConflictPolicy<T> =\n | 'last-writer-wins'\n | 'first-writer-wins'\n | 'manual'\n | ((local: T, remote: T) => T)\n\n/**\n * Envelope-level resolver registered per collection with the SyncEngine.\n * Receives the `id` of the conflicting record and both envelopes.\n * Returns the winning envelope, or `null` to defer resolution.\n * @internal\n */\nexport type CollectionConflictResolver = (\n id: string,\n local: EncryptedEnvelope,\n remote: EncryptedEnvelope,\n) => Promise<EncryptedEnvelope | null>\n\n/** Options for targeted push operations. */\nexport interface PushOptions {\n /** Only push records belonging to these collections. Omit to push all dirty. */\n collections?: string[]\n}\n\n/** Options for targeted pull operations. */\nexport interface PullOptions {\n /** Only pull these collections. Omit to pull all. */\n collections?: string[]\n /**\n * Only pull records with `_ts` strictly after this ISO timestamp.\n * Adapters that implement `listSince` use it directly; others fall back\n * to a full scan with client-side filtering.\n */\n modifiedSince?: string\n}\n\nexport interface PushResult {\n readonly pushed: number\n readonly conflicts: Conflict[]\n readonly errors: Error[]\n}\n\nexport interface PullResult {\n readonly pulled: number\n readonly conflicts: Conflict[]\n readonly errors: Error[]\n}\n\n/** Result of a sync transaction commit. 
*/\nexport interface SyncTransactionResult {\n readonly status: 'committed' | 'conflict'\n readonly pushed: number\n readonly conflicts: Conflict[]\n}\n\nexport interface SyncStatus {\n readonly dirty: number\n readonly lastPush: string | null\n readonly lastPull: string | null\n readonly online: boolean\n}\n\n// ─── Sync Target ─────────────────────────────────────────\n\nexport type SyncTargetRole = 'sync-peer' | 'backup' | 'archive'\n\n/**\n * A sync target with role and optional per-target policy.\n *\n * | Role | Direction | Conflict resolution | Typical use |\n * |-------------|---------------|---------------------|--------------------------|\n * | `sync-peer` | Bidirectional | ConflictStrategy | DynamoDB live sync |\n * | `backup` | Push-only | N/A (receives merged)| S3 dump, Google Drive |\n * | `archive` | Push-only | N/A | IPFS, Git tags, S3 Lock |\n */\nexport interface SyncTarget {\n /** The store to sync with. */\n readonly store: NoydbStore\n /** Role determines sync direction and conflict handling. */\n readonly role: SyncTargetRole\n /** Per-target sync policy. Inherits store-category default when absent. */\n readonly policy?: SyncPolicy\n /** Human-readable label for DevTools and audit logs. */\n readonly label?: string\n}\n\n// ─── Events ────────────────────────────────────────────────────────────\n\nexport interface ChangeEvent {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly action: 'put' | 'delete'\n}\n\nexport interface NoydbEventMap {\n 'change': ChangeEvent\n 'error': Error\n 'sync:push': PushResult\n 'sync:pull': PullResult\n 'sync:conflict': Conflict\n 'sync:online': void\n 'sync:offline': void\n 'sync:backup-error': { vault: string; target: string; error: Error }\n 'history:save': { vault: string; collection: string; id: string; version: number }\n 'history:prune': { vault: string; collection: string; id: string; pruned: number }\n /**\n * Emitted when a persisted-index side-car put/delete fails after the\n * main record write already succeeded. The main record is durable; the\n * index mirror may have drifted. Operators reconcile via\n * `collection.reconcileIndex(field)`.\n */\n 'index:write-partial': {\n vault: string\n collection: string\n id: string\n action: 'put' | 'delete'\n error: Error\n }\n /**\n * emitted by `Collection.ensurePersistedIndexesLoaded()`\n * once per field on first lazy-mode query when\n * `reconcileOnOpen: 'auto' | 'dry-run'` is configured. `applied` is\n * `0` in `'dry-run'` mode. `skipped` is reserved for a future\n * drift-stamp optimization that short-circuits the reconcile when\n * the mirror version matches what's on disk — currently always\n * `false` (the full reconcile runs every session).\n */\n 'index:reconciled': {\n vault: string\n collection: string\n field: string\n missing: readonly string[]\n stale: readonly string[]\n applied: number\n skipped: boolean\n }\n}\n\n// ─── Grant / Revoke ────────────────────────────────────────────────────\n\nexport interface GrantOptions {\n readonly userId: string\n readonly displayName: string\n readonly role: Role\n readonly passphrase: string\n readonly permissions?: Permissions\n /**\n * Optional `@noy-db/as-*` export capability. Omit or\n * leave undefined to apply role-based defaults (see\n * `hasExportCapability` and `ExportCapability`).\n */\n readonly exportCapability?: ExportCapability\n /**\n * Optional `@noy-db/as-*` import capability (issue ). 
Omit or\n * leave undefined for default-closed semantics — no plaintext format\n * is grantable until positively listed; bundle import is denied.\n */\n readonly importCapability?: ImportCapability\n}\n\nexport interface RevokeOptions {\n readonly userId: string\n readonly rotateKeys?: boolean\n\n /**\n * Cascade behavior when the revoked user is an admin who has granted\n * other admins.\n *\n * - `'strict'` (default) — recursively revoke every admin that the\n * target (transitively) granted. The cascade walks the\n * `granted_by` field on each keyring file and stops at non-admin\n * leaves. All affected collections are accumulated and rotated in\n * a single pass at the end, so cascade cost is O(records in\n * affected collections), not O(records × cascade depth).\n *\n * - `'warn'` — leave the descendant admins in place but emit a\n * `console.warn` listing them. Useful for diagnostic dry runs and\n * for environments where the operator wants to clean up the\n * delegation tree manually.\n *\n * No effect when the target is not an admin (operators, viewers, and\n * clients cannot grant other users, so they have no delegation\n * subtree to cascade through). Defaults to `'strict'`.\n */\n readonly cascade?: 'strict' | 'warn'\n}\n\n// ─── Cross-vault queries ──────────────────────────────\n\n/**\n * One entry returned by `Noydb.listAccessibleVaults()`. Carries\n * the compartment id and the role the calling principal holds in it,\n * so the consumer can decide how to fan out without re-checking\n * permissions per vault.\n */\nexport interface AccessibleVault {\n readonly id: string\n readonly role: Role\n}\n\n/**\n * Options for `Noydb.listAccessibleVaults()`.\n */\nexport interface ListAccessibleVaultsOptions {\n /**\n * Minimum role the caller must hold to include a compartment in the\n * result. Compartments where the caller's role is strictly *below*\n * this threshold are silently excluded. Defaults to `'client'`,\n * which means \"every vault I can unwrap is returned.\" Set to\n * `'admin'` for \"vaults where I can grant/revoke,\" or\n * `'owner'` for \"vaults I own.\"\n *\n * The privilege ordering used:\n * `client (1) < viewer (2) < operator (3) < admin (4) < owner (5)`\n *\n * Note: `viewer` and `client` are conceptually peers in the ACL\n * (neither can grant), but `viewer` has read-all access while\n * `client` has only explicit-collection read. The numeric order\n * reflects \"how much can this principal see,\" not \"how much can\n * this principal modify.\"\n */\n readonly minRole?: Role\n}\n\n/**\n * Options for `Noydb.queryAcross()`.\n */\nexport interface QueryAcrossOptions {\n /**\n * Maximum number of compartments to process in parallel. Defaults\n * to `1` (sequential) — conservative because the per-compartment\n * callback typically does its own I/O and an unbounded fan-out can\n * exhaust adapter connections (DynamoDB throughput, S3 socket\n * limits, browser fetch concurrency).\n *\n * Set to `4` or `8` for cloud-backed compartments where parallelism\n * is the whole point of fanning out. Set to `1` (default) for local\n * adapters where the disk I/O serializes anyway.\n */\n readonly concurrency?: number\n}\n\n/**\n * One entry in the array returned by `Noydb.queryAcross()`. 
Either\n * `result` is set (callback succeeded for this compartment) or\n * `error` is set (callback threw, or compartment failed to open).\n *\n * Per-compartment errors do **not** abort the overall fan-out — every\n * compartment is given a chance to run its callback, and the\n * partition between success and failure is exposed in the return\n * value. Consumers that want fail-fast semantics can check\n * `r.error !== undefined` and short-circuit themselves.\n */\nexport type QueryAcrossResult<T> =\n | { readonly vault: string; readonly result: T; readonly error?: undefined }\n | { readonly vault: string; readonly result?: undefined; readonly error: Error }\n\n// ─── User Info ─────────────────────────────────────────────────────────\n\nexport interface UserInfo {\n readonly userId: string\n readonly displayName: string\n readonly role: Role\n readonly permissions: Permissions\n readonly createdAt: string\n readonly grantedBy: string\n}\n\n// ─── Session ───────────────────────────────────────────────\n\n/**\n * Operations that a session policy can require re-authentication for.\n * Passed as the `requireReAuthFor` array in `SessionPolicy`.\n */\nexport type ReAuthOperation = 'export' | 'grant' | 'revoke' | 'rotate' | 'changeSecret'\n\n/**\n * Session policy controlling lifetime, re-auth requirements, and\n * background-lock behavior.\n *\n * All timeout values are in milliseconds. `undefined` means \"no limit.\"\n * The policy is evaluated lazily — it does not start timers itself;\n * enforcement happens at the Noydb call site.\n */\nexport interface SessionPolicy {\n /**\n * Idle timeout in ms. If no NOYDB operation is performed for this\n * duration, the session is revoked on the next operation attempt\n * (which will throw `SessionExpiredError`). The idle clock resets\n * on every successful operation.\n *\n * Default: `undefined` (no idle timeout).\n */\n readonly idleTimeoutMs?: number\n\n /**\n * Absolute timeout in ms from session creation. After this duration\n * the session is unconditionally revoked regardless of activity.\n *\n * Default: `undefined` (no absolute timeout).\n */\n readonly absoluteTimeoutMs?: number\n\n /**\n * Operations that require the user to re-authenticate (re-enter their\n * passphrase or perform a fresh WebAuthn assertion) before proceeding,\n * even if the session is still alive.\n *\n * Common pattern: `requireReAuthFor: ['export', 'grant']` — allow\n * read/write operations in the background but demand a fresh credential\n * for high-risk mutations.\n *\n * Default: `[]` (no extra re-auth requirements).\n */\n readonly requireReAuthFor?: readonly ReAuthOperation[]\n\n /**\n * If `true`, the session is revoked when the page goes to the background\n * (visibilitychange event, `document.hidden === true`). Useful for\n * high-sensitivity deployments where leaving the tab is treated as\n * a session boundary.\n *\n * No-op in non-browser environments (Node.js, workers without document).\n * Default: `false`.\n */\n readonly lockOnBackground?: boolean\n}\n\n// ─── i18n / Locale ─────────────────────────────────────\n\n/**\n * Locale-aware read options. Pass to `Collection.get()`, `list()`,\n * `query()`, and `scan()` to trigger per-record locale resolution for\n * `dictKey` and `i18nText` fields.\n *\n * - **`locale: 'raw'`** — skip resolution for `i18nText` fields and\n * return the full `{ [locale]: string }` map. Dict key fields still\n * return the stable key (no `<field>Label` added).\n * - **`fallback`** — single locale code or ordered list. 
Use `'any'` as\n * the last element to fall back to any present translation.\n *\n * When neither the call-level locale nor the compartment's default locale\n * is set, reading a record with `i18nText` fields throws\n * `LocaleNotSpecifiedError`.\n */\nexport interface LocaleReadOptions {\n /**\n * The target locale code (e.g. `'th'`), or `'raw'` to return the full\n * language map without resolution.\n */\n readonly locale?: string\n /**\n * Fallback locale or ordered fallback chain. Use `'any'` as the last\n * element to fall back to any present translation.\n */\n readonly fallback?: string | readonly string[]\n}\n\n// ─── plaintextTranslator hook ──────────────────────────────\n\n/**\n * Context passed to the consumer-supplied `plaintextTranslator` function.\n * The hook receives the source text plus enough metadata to route it to the\n * right translation service and record what it did.\n */\nexport interface PlaintextTranslatorContext {\n /** The plaintext string to translate. */\n readonly text: string\n /** BCP 47 source locale (the locale the text is written in). */\n readonly from: string\n /** BCP 47 target locale to translate into. */\n readonly to: string\n /** The schema field name that triggered the translation. */\n readonly field: string\n /** The collection the record is being put into. */\n readonly collection: string\n}\n\n/**\n * A consumer-supplied async function that translates a single string\n * from one locale to another. noy-db ships no built-in translator.\n *\n * **Security:** this function receives plaintext. The consumer is\n * responsible for the data policy of whatever service it calls. See\n * `NOYDB_SPEC.md § Zero-Knowledge Storage` and the `plaintextTranslator`\n * JSDoc on `NoydbOptions` for the full invariant statement.\n */\nexport type PlaintextTranslatorFn = (\n ctx: PlaintextTranslatorContext,\n) => Promise<string>\n\n/**\n * One entry in the in-process translator audit log. Cleared when\n * `db.close()` is called — same lifetime as the KEK and DEKs.\n *\n * Deliberately omits any content hash or translated-text fingerprint\n * to prevent correlation attacks on the audit trail.\n */\nexport interface TranslatorAuditEntry {\n readonly type: 'translator-invocation'\n /** Schema field name that was translated. */\n readonly field: string\n /** Collection the record belongs to. */\n readonly collection: string\n /** Source locale. */\n readonly fromLocale: string\n /** Target locale. */\n readonly toLocale: string\n /**\n * Consumer-provided translator name from\n * `NoydbOptions.plaintextTranslatorName`. Defaults to `'anonymous'`\n * when not supplied.\n */\n readonly translatorName: string\n /** ISO 8601 timestamp of the invocation. */\n readonly timestamp: string\n /**\n * `true` when the result was served from the in-process cache rather\n * than by calling the translator function. Present only on cache hits\n * so the absence of the field also communicates a cache miss.\n */\n readonly cached?: true\n}\n\n// ─── Presence ─────────────────────────────────────────────\n\n/**\n * A presence peer entry. `lastSeen` is an ISO timestamp set by core on each\n * `update()` call. 
Stale entries (lastSeen older than `staleMs`) are filtered\n * before delivering to the subscriber callback.\n */\nexport interface PresencePeer<P> {\n readonly userId: string\n readonly payload: P\n readonly lastSeen: string\n}\n\n// ─── CRDT ─────────────────────────────────────────────────\n\n// Re-exported from crdt.ts so consumers only need one import path.\nexport type { CrdtMode, CrdtState, LwwMapState, RgaState, YjsState } from './crdt/crdt.js'\n\n// ─── Blob / Attachment Store ────────────────────────\n\n/**\n * Second store shape for blob-store backends (Drive, WebDAV, Git, iCloud)\n * that operate on whole-vault bundles rather than per-record KV.\n *\n * Implement `readBundle` / `writeBundle` instead of the six-method KV\n * contract. Use `wrapBundleStore()` from `@noy-db/hub` to convert to a\n * `NoydbStore` that the rest of the API consumes transparently.\n *\n * Named `NoydbBundleStore` (not `NoydbBundleAdapter`) for consistency\n * with the hub / to-* / in-* rename. Concrete implementations ship\n * in `@noy-db/to-*` packages starting in.\n */\nexport interface NoydbBundleStore {\n /** Discriminant for engine auto-detection of store shape. */\n readonly kind: 'bundle'\n /** Human-readable name for diagnostics (e.g. `'drive'`, `'webdav'`). */\n readonly name?: string\n /**\n * Read the entire vault as raw bytes. Returns `null` if no bundle exists\n * yet (first open of a brand-new vault).\n */\n readBundle(vaultId: string): Promise<{ bytes: Uint8Array; version: string } | null>\n /**\n * Write the entire vault as raw bytes. `expectedVersion` is the version\n * token from the last `readBundle` (or `null` for a first write).\n * Implementations MUST reject the write if the stored version has advanced\n * past `expectedVersion` — throw `BundleVersionConflictError`.\n * Returns the new version token on success.\n */\n writeBundle(\n vaultId: string,\n bytes: Uint8Array,\n expectedVersion: string | null,\n ): Promise<{ version: string }>\n /** Delete a vault bundle. Idempotent — no-op if the bundle does not exist. */\n deleteBundle(vaultId: string): Promise<void>\n /** List all vault bundles managed by this store. */\n listBundles(): Promise<Array<{ vaultId: string; version: string; size: number }>>\n}\n\n/**\n * Content-addressed blob object stored in the vault-level blob index.\n * Identified by HMAC-SHA-256(blobDEK, plaintext) — opaque to the store.\n *\n * Shared across all collections within a vault for deduplication: two\n * records that attach identical byte content reference the same `eTag`\n * and share a single set of encrypted chunks in `_blob_chunks`.\n */\nexport interface BlobObject {\n /** HMAC-SHA-256 hex of the original plaintext bytes, keyed by `_blob` DEK. */\n readonly eTag: string\n /** Original uncompressed size in bytes. */\n readonly size: number\n /** Compressed size in bytes (the payload that is actually encrypted and chunked). */\n readonly compressedSize: number\n /** Compression algorithm applied before encryption. */\n readonly compression: 'gzip' | 'none'\n /** Raw chunk size in bytes used at write time. Readers MUST use this value. */\n readonly chunkSize: number\n /** Total number of chunks written. Reader expects exactly this many. */\n readonly chunkCount: number\n /** MIME type if provided or auto-detected at upload time. */\n readonly mimeType?: string\n /** ISO timestamp of first upload. */\n readonly createdAt: string\n /** Live reference count — slots + published versions pointing to this blob. 
*/\n readonly refCount: number\n /**\n * Hint indicating which store holds the chunk data.\n * Used by `routeStore` size-tiered routing: `'default'` for small blobs\n * stored inline (e.g. DynamoDB), `'blobs'` for large blobs in the overflow\n * store (e.g. S3). Absent when no routing is configured.\n */\n readonly storeHint?: 'default' | 'blobs'\n}\n\n// ─── Attachment types ─────────────────────────────────────────\n\n/** Single attachment metadata entry stored inside a record's attachment envelope. */\nexport interface AttachmentEntry {\n /** Content-addressed identifier (HMAC-SHA-256 of plaintext). */\n readonly eTag: string\n /** User-visible filename for the slot. */\n readonly filename: string\n /** Original uncompressed size in bytes. */\n readonly size: number\n /** MIME type, if provided or auto-detected at upload time. */\n readonly mimeType?: string\n /** ISO timestamp of the upload. */\n readonly uploadedAt: string\n /** User ID of the uploader, if available. */\n readonly uploadedBy?: string\n}\n\n/** Attachment entry annotated with its slot name, as returned by `AttachmentHandle.list()`. */\nexport type AttachmentInfo = AttachmentEntry & { readonly name: string }\n\n/** Options for `AttachmentHandle.put()`. */\nexport interface AttachmentPutOptions {\n /** Compress the attachment with gzip before encryption. Default: `true`. */\n compress?: boolean\n /** Chunk size in bytes. Default: `DEFAULT_CHUNK_SIZE` (256 KB). */\n chunkSize?: number\n /** MIME type to store with the attachment. Auto-detected from magic bytes if omitted. */\n mimeType?: string\n /** User ID to record as the uploader. Falls back to the active user's ID. */\n uploadedBy?: string\n}\n\n/** Options for `AttachmentHandle.response()`. */\nexport interface AttachmentResponseOptions {\n /**\n * Set `Content-Disposition: inline` so the browser renders the file\n * instead of downloading it. Default: `false` (attachment disposition).\n */\n inline?: boolean\n}\n\n/**\n * Slot record — mutable metadata linking a named slot on a record\n * to a `BlobObject` via its eTag.\n *\n * Multiple slots (even across different records) may reference the same\n * `eTag` — the underlying chunks are shared. Updating metadata creates\n * a new envelope version (`_v++`) while the blob data is unchanged.\n */\nexport interface SlotRecord {\n /** Reference to the `BlobObject` in `_blob_index`. */\n readonly eTag: string\n /** User-visible filename for the slot. */\n readonly filename: string\n /** Original uncompressed size in bytes (denormalized from `BlobObject`). */\n readonly size: number\n /** MIME type. Takes precedence over the MIME type stored in `BlobObject`. */\n readonly mimeType?: string\n /** ISO timestamp of the upload that set this slot. */\n readonly uploadedAt: string\n /** User ID of the uploader, if available. */\n readonly uploadedBy?: string\n}\n\n/** Result of `BlobSet.list()` — slot record plus its named slot key. */\nexport interface SlotInfo extends SlotRecord {\n /** The slot name (key in the record's slot map). */\n readonly name: string\n}\n\n/**\n * Explicitly published version snapshot — an independent reference to a\n * blob at a specific point in time.\n */\nexport interface VersionRecord {\n /** User-defined label (e.g. `'issued-2025-01'`, `'amendment-2025-02'`). */\n readonly label: string\n /** eTag of the blob snapshot at publish time — independent of the current slot. */\n readonly eTag: string\n /** ISO timestamp when the version was published. 
*/\n readonly publishedAt: string\n /** User ID of the publisher, if available. */\n readonly publishedBy?: string\n}\n\n/** Options for `BlobSet.put()`. */\nexport interface BlobPutOptions {\n /** MIME type hint. If omitted, auto-detected from magic bytes. */\n mimeType?: string\n /**\n * Raw chunk size in bytes. Priority: this value > store.maxBlobBytes > 256 KB.\n */\n chunkSize?: number\n /**\n * Whether to gzip-compress bytes before encrypting. Default: `true`.\n * Auto-set to `false` for pre-compressed MIME types (JPEG, PNG, ZIP, etc.).\n */\n compress?: boolean\n /** User ID to record as `uploadedBy`. Defaults to the Noydb session user. */\n uploadedBy?: string\n}\n\n/** Options for `BlobSet.response()` and `BlobSet.responseVersion()`. */\nexport interface BlobResponseOptions {\n /**\n * When `true`, sets `Content-Disposition: inline; filename=\"...\"` so\n * the browser renders the file in the tab. Default (`false`) sets\n * `attachment; filename=\"...\"` which triggers a download.\n */\n inline?: boolean\n /** Override the filename in the Content-Disposition header. */\n filename?: string\n}\n\n// ─── Store Capabilities ─────────────────────────────\n\nexport type StoreAuthKind =\n | 'none'\n | 'filesystem'\n | 'api-key'\n | 'iam'\n | 'oauth'\n | 'kerberos'\n | 'browser-origin'\n\nexport interface StoreAuth {\n kind: StoreAuthKind | StoreAuthKind[]\n required: boolean\n flow: 'static' | 'oauth' | 'kerberos' | 'implicit'\n}\n\nexport interface StoreCapabilities {\n /**\n * true — the store's expectedVersion check and write are atomic at the\n * storage layer. Two concurrent puts with the same expectedVersion will\n * produce exactly one success and one ConflictError.\n * false — check and write are separate operations with a race window.\n */\n casAtomic: boolean\n auth: StoreAuth\n /**\n * true — the store implements {@link NoydbStore.tx} and commits\n * every op atomically at the storage layer. The hub's\n * `db.transaction(fn)` will delegate to `tx(ops)` and surface a\n * single pass/fail outcome. false (or absent) — no native\n * multi-record atomicity; the hub falls back to per-record OCC\n * with best-effort unwind on partial failure.\n */\n txAtomic?: boolean\n /**\n * Maximum raw bytes per blob chunk record.\n * `undefined` — no limit (S3, file, IDB); blob stored as single chunk.\n * `256 * 1024` — DynamoDB (400 KB item limit minus envelope overhead).\n * `5 * 1024 * 1024` — localStorage quota safety.\n */\n maxBlobBytes?: number\n}\n\n// ─── Factory Options ───────────────────────────────────────────────────\n\nexport interface NoydbOptions {\n /** Primary store (local storage). */\n readonly store: NoydbStore\n /**\n * tree-shake seam — optional blob strategy. Pass `withBlobs()`\n * from `@noy-db/hub/blobs` to enable `collection.blob(id)` storage.\n * When omitted, hub's blob machinery stays out of the bundle (ESM\n * tree-shaking) and `collection.blob(id)` throws with a pointer at\n * the subpath. `BlobStrategy` is `@internal` — users only construct\n * it via the subpath factory.\n *\n * @internal\n */\n readonly blobStrategy?: BlobStrategy\n /**\n * tree-shake seam — optional indexing strategy. Pass\n * `withIndexing()` from `@noy-db/hub/indexing` to enable eager-mode\n * `==/in` fast-paths, lazy-mode `.lazyQuery()`, rebuild/reconcile,\n * and auto-reconcile. 
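\n *\n * For illustration, a minimal wiring sketch (the `myStore`, `'alice'`, and\n * `passphrase` values are placeholders, and the `await` on `createNoydb()`\n * is assumed):\n *\n * ```ts\n * import { createNoydb } from '@noy-db/hub'\n * import { withIndexing } from '@noy-db/hub/indexing'\n *\n * const db = await createNoydb({\n * store: myStore,\n * user: 'alice',\n * secret: passphrase,\n * indexStrategy: withIndexing(),\n * })\n * ```\n *\n * 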
When omitted, indexing code never reaches the\n * bundle; `.lazyQuery()` throws with a pointer at the subpath, and\n * eager-mode collections fall back to linear scans regardless of\n * `indexes: [...]` declarations. `IndexStrategy` is `@internal` —\n * users only construct it via the subpath factory.\n *\n * @internal\n */\n readonly indexStrategy?: IndexStrategy\n /**\n * tree-shake seam — optional aggregate strategy. Pass\n * `withAggregate()` from `@noy-db/hub/aggregate` to enable\n * `.aggregate()` and `.groupBy()` on Query. When omitted, those\n * methods throw with a pointer at the subpath; the ~886 LOC of\n * Aggregation + GroupedQuery machinery never reaches the bundle.\n * Streaming `scan().aggregate()` works independently of this\n * strategy — it doesn't use the `Aggregation` class.\n *\n * @internal\n */\n readonly aggregateStrategy?: AggregateStrategy\n /**\n * tree-shake seam — optional CRDT strategy. Required when\n * any collection is declared with `crdt: 'lww-map' | 'rga' | 'yjs'`;\n * otherwise the first put/sync-merge hitting the CRDT path throws.\n * When omitted, ~221 LOC of LWW-Map / RGA / merge helpers never\n * reach the bundle.\n *\n * @internal\n */\n readonly crdtStrategy?: CrdtStrategy\n /**\n * tree-shake seam — optional consent-audit strategy. Pass\n * `withConsent()` from `@noy-db/hub/consent` to enable per-op audit\n * writes into `_consent_audit` when a consent scope is active.\n * When omitted, `vault.consentAudit()` returns `[]` and writes are\n * no-ops; the consent module's ~194 LOC never reaches the bundle.\n *\n * @internal\n */\n readonly consentStrategy?: ConsentStrategy\n /**\n * tree-shake seam — optional periods strategy. Pass\n * `withPeriods()` from `@noy-db/hub/periods` to enable\n * `vault.closePeriod()` / `.openPeriod()` / write-guard on closed\n * periods. When omitted, `vault.listPeriods()` returns `[]` and\n * the write-guard is a no-op; the ~363 LOC of period validation +\n * ledger appending stay out of the bundle.\n *\n * @internal\n */\n readonly periodsStrategy?: PeriodsStrategy\n /**\n * tree-shake seam — optional VaultFrame strategy. Pass\n * `withShadow()` from `@noy-db/hub/shadow` to enable\n * `vault.frame()`. Without it, calling `vault.frame()` throws.\n *\n * @internal\n */\n readonly shadowStrategy?: ShadowStrategy\n /**\n * tree-shake seam — optional multi-record transactions. Pass\n * `withTransactions()` from `@noy-db/hub/tx` to enable\n * `db.transaction(fn)`. Without it, calling the method throws.\n *\n * @internal\n */\n readonly txStrategy?: TxStrategy\n /**\n * tree-shake seam — optional history + ledger + time-machine.\n * Pass `withHistory()` from `@noy-db/hub/history` to enable\n * per-record version snapshots, the hash-chained audit ledger, JSON\n * Patch deltas, `vault.ledger()`, `vault.at()`, and the\n * `collection.history()` / `getVersion()` / `revert()` / `diff()` /\n * `clearHistory()` / `pruneRecordHistory()` read APIs. When omitted,\n * snapshots/prune/clear are silent no-ops, the read APIs throw with\n * a pointer at the subpath, and ~1,880 LOC stay out of the bundle.\n *\n * @internal\n */\n readonly historyStrategy?: HistoryStrategy\n /**\n * tree-shake seam — optional i18n strategy. Pass `withI18n()`\n * from `@noy-db/hub/i18n` to enable `i18nText`/`dictKey` field\n * resolution on reads, `i18nText` validation on writes, and\n * `vault.dictionary(name)`. 
When omitted, locale resolution is the\n * identity (raw values returned), the validators throw with a\n * pointer to the subpath, and ~854 LOC of dictionary + locale\n * machinery stay out of the bundle.\n *\n * @internal\n */\n readonly i18nStrategy?: I18nStrategy\n /**\n * tree-shake seam — optional session-policy strategy. Pass\n * `withSession()` from `@noy-db/hub/session` to enable\n * `sessionPolicy` validation, `PolicyEnforcer` lifecycle (idle /\n * absolute timeouts, lockOnBackground), and global session-token\n * revocation. When omitted, setting `sessionPolicy` throws at\n * `createNoydb()` time, and ~495 LOC of policy + token machinery\n * stay out of the bundle.\n *\n * @internal\n */\n readonly sessionStrategy?: SessionStrategy\n /**\n * tree-shake seam — optional sync engine + presence strategy.\n * Pass `withSync()` from `@noy-db/hub/sync` to enable\n * `db.push()` / `pull()` / replication, `db.transaction(vault)`\n * for sync-aware transactions, and `collection.presence()`. When\n * omitted, configuring `sync` / calling these surfaces throws with\n * a pointer at the subpath, and ~856 LOC of replication + presence\n * machinery stay out of the bundle. Keyring stays core; grant/\n * revoke/magic-link/delegation tree-shake via direct imports.\n *\n * @internal\n */\n readonly syncStrategy?: SyncStrategy\n /** Optional remote store(s) for sync. Accepts a single store, a SyncTarget, or an array. */\n readonly sync?: NoydbStore | SyncTarget | SyncTarget[]\n /** User identifier. */\n readonly user: string\n /** Passphrase for key derivation. Required unless encrypt is false. */\n readonly secret?: string\n /** Auth method. Default: 'passphrase'. */\n readonly auth?: 'passphrase' | 'biometric'\n /** Enable encryption. Default: true. */\n readonly encrypt?: boolean\n /** Conflict resolution strategy. Default: 'version'. */\n readonly conflict?: ConflictStrategy\n /**\n * Sync scheduling policy. Controls when push/pull fire.\n * Default inferred from store category: per-record → `on-change`,\n * bundle → `debounce 30s`.\n */\n readonly syncPolicy?: SyncPolicy\n /**\n * @deprecated Use `syncPolicy` instead. Kept for backward compatibility.\n * When both are supplied, `syncPolicy` takes precedence.\n */\n readonly autoSync?: boolean\n /**\n * @deprecated Use `syncPolicy` instead. Kept for backward compatibility.\n */\n readonly syncInterval?: number\n /**\n * Session timeout in ms. Clears keys after inactivity. Default: none.\n * @deprecated Use `sessionPolicy.idleTimeoutMs` instead. This field is\n * still honored for backwards compatibility but `sessionPolicy` takes\n * precedence when both are supplied.\n */\n readonly sessionTimeout?: number\n /**\n * Session policy controlling lifetime, re-auth requirements, and\n * background-lock behavior. When supplied, replaces the\n * legacy `sessionTimeout` field.\n */\n readonly sessionPolicy?: SessionPolicy\n /** Validate passphrase strength on creation. Default: true. */\n readonly validatePassphrase?: boolean\n /** Audit history configuration. */\n readonly history?: HistoryConfig\n /**\n * Consumer-supplied translation function for `i18nText` fields with\n * `autoTranslate: true`.\n *\n * ⚠ **`plaintextTranslator` receives unencrypted text.** Configuring\n * this hook causes plaintext to leave noy-db's zero-knowledge boundary\n * over whatever channel the consumer's implementation uses. 
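\n *\n * A deliberately hedged sketch; the argument list of `PlaintextTranslatorFn`\n * used here (source text plus a locale pair) is an assumption, not the\n * confirmed signature, and `myTranslateService` is hypothetical:\n *\n * ```ts\n * const db = await createNoydb({\n * store: myStore,\n * user: 'alice',\n * secret: passphrase,\n * // assumption: (text, fromLocale, toLocale) => Promise<string>\n * plaintextTranslator: (text, from, to) => myTranslateService(text, from, to),\n * plaintextTranslatorName: 'self-hosted-llama-7b',\n * })\n * ```\n *\n * 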
noy-db ships\n * no built-in translator and adds no translator SDKs as dependencies.\n * The consumer chooses and owns the data policy of the external service.\n *\n * Per-field opt-in via `autoTranslate: true` on `i18nText()`. Calling\n * `put()` on a collection with `autoTranslate: true` fields while this\n * option is absent throws `TranslatorNotConfiguredError`.\n *\n * See `NOYDB_SPEC.md § Zero-Knowledge Storage` for the invariant text.\n */\n readonly plaintextTranslator?: PlaintextTranslatorFn\n /**\n * Human-readable name for the translator, recorded in the in-process\n * audit log (e.g. `'deepl-pro-with-dpa'`, `'self-hosted-llama-7b'`).\n * Defaults to `'anonymous'` when not supplied.\n */\n readonly plaintextTranslatorName?: string\n}\n\n// ─── History / Audit Trail ─────────────────────────────────────────────\n\n/** History configuration. */\nexport interface HistoryConfig {\n /** Enable history tracking. Default: true. */\n readonly enabled?: boolean\n /** Maximum history entries per record. Oldest pruned on overflow. Default: unlimited. */\n readonly maxVersions?: number\n}\n\n/** Options for querying history. */\nexport interface HistoryOptions {\n /** Start date (inclusive), ISO 8601. */\n readonly from?: string\n /** End date (inclusive), ISO 8601. */\n readonly to?: string\n /** Maximum entries to return. */\n readonly limit?: number\n}\n\n/** Options for pruning history. */\nexport interface PruneOptions {\n /** Keep only the N most recent versions. */\n readonly keepVersions?: number\n /** Delete versions older than this date, ISO 8601. */\n readonly beforeDate?: string\n}\n\n/** A decrypted history entry. */\nexport interface HistoryEntry<T> {\n readonly version: number\n readonly timestamp: string\n readonly userId: string\n readonly record: T\n}\n\n// ─── Bulk operations ──────────────────────────────────────\n\n/** Per-item options for `Collection.putMany()`. */\nexport interface PutManyItemOptions {\n /**\n * Optimistic-concurrency check: fail this item if the stored version\n * is not `expectedVersion`. Honored only in `atomic: true` mode;\n * ignored in the default best-effort loop.\n */\n readonly expectedVersion?: number\n}\n\n/**\n * Batch-level options for `Collection.putMany()` and `deleteMany()`.\n *\n * `atomic: true` switches the call from best-effort loop\n * to all-or-nothing: a pre-flight CAS check runs first, then every op\n * is executed; any mid-batch failure triggers a best-effort revert.\n * On failure in atomic mode the whole call throws — you won't get a\n * partial `PutManyResult`. On success the result mirrors the default\n * loop's shape.\n */\nexport interface PutManyOptions {\n readonly atomic?: boolean\n}\n\n/** Result of `Collection.putMany()`. */\nexport interface PutManyResult {\n /** `true` iff every entry succeeded. */\n readonly ok: boolean\n /** IDs that were successfully written. */\n readonly success: readonly string[]\n /** Entries that failed, with the error that prevented each write. */\n readonly failures: ReadonlyArray<{ readonly id: string; readonly error: Error }>\n}\n\n/** Result of `Collection.deleteMany()`. Same shape as `PutManyResult`. 
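\n *\n * A hedged usage sketch (the exact `deleteMany()` argument shape is an\n * assumption):\n *\n * ```ts\n * const res = await invoices.deleteMany(staleIds)\n * if (!res.ok) {\n * for (const f of res.failures) console.warn(f.id, f.error.message)\n * }\n * ```\n 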
*/\nexport interface DeleteManyResult {\n readonly ok: boolean\n readonly success: readonly string[]\n readonly failures: ReadonlyArray<{ readonly id: string; readonly error: Error }>\n}\n","/**\n * All NOYDB error classes — a single import surface for `catch` blocks and\n * `instanceof` checks.\n *\n * ## Class hierarchy\n *\n * ```\n * Error\n * └─ NoydbError (code: string)\n * ├─ Crypto errors\n * │ ├─ DecryptionError — AES-GCM tag failure\n * │ ├─ TamperedError — ciphertext modified after write\n * │ └─ InvalidKeyError — wrong passphrase / corrupt keyring\n * ├─ Access errors\n * │ ├─ NoAccessError — no DEK for this collection\n * │ ├─ ReadOnlyError — ro permission, write attempted\n * │ ├─ PermissionDeniedError — role too low for operation\n * │ ├─ PrivilegeEscalationError — grant wider than grantor holds\n * │ └─ StoreCapabilityError — optional store method missing\n * ├─ Sync errors\n * │ ├─ ConflictError — optimistic-lock version mismatch\n * │ ├─ BundleVersionConflictError — bundle push rejected by remote\n * │ └─ NetworkError — push/pull network failure\n * ├─ Data errors\n * │ ├─ NotFoundError — get(id) on missing record\n * │ ├─ ValidationError — application-level guard failed\n * │ └─ SchemaValidationError — Standard Schema v1 rejection\n * ├─ Query errors\n * │ ├─ JoinTooLargeError — join row ceiling exceeded\n * │ ├─ DanglingReferenceError — strict ref() points at nothing\n * │ ├─ GroupCardinalityError — groupBy bucket cap exceeded\n * │ ├─ IndexRequiredError — lazy-mode query touches unindexed field\n * │ └─ IndexWriteFailureError — index side-car put/delete failed post-main\n * ├─ i18n / Dictionary errors\n * │ ├─ ReservedCollectionNameError\n * │ ├─ DictKeyMissingError\n * │ ├─ DictKeyInUseError\n * │ ├─ MissingTranslationError\n * │ ├─ LocaleNotSpecifiedError\n * │ └─ TranslatorNotConfiguredError\n * ├─ Backup errors\n * │ ├─ BackupLedgerError — hash-chain verification failed\n * │ └─ BackupCorruptedError — envelope hash mismatch in dump\n * ├─ Bundle errors\n * │ └─ BundleIntegrityError — .noydb body sha256 mismatch\n * └─ Session errors\n * ├─ SessionExpiredError\n * ├─ SessionNotFoundError\n * └─ SessionPolicyError\n * ```\n *\n * ## Catching all NOYDB errors\n *\n * ```ts\n * import { NoydbError, InvalidKeyError, ConflictError } from '@noy-db/hub'\n *\n * try {\n * await vault.unlock(passphrase)\n * } catch (e) {\n * if (e instanceof InvalidKeyError) { showBadPassphraseUI(); return }\n * if (e instanceof NoydbError) { logToSentry(e.code, e); return }\n * throw e // unexpected — re-throw\n * }\n * ```\n *\n * @module\n */\n\n/**\n * Base class for all NOYDB errors.\n *\n * Every error thrown by `@noy-db/hub` extends this class, so consumers can\n * catch all NOYDB errors in a single `catch (e) { if (e instanceof NoydbError) ... }`\n * block. The `code` field is a machine-readable string (e.g. `'DECRYPTION_FAILED'`)\n * suitable for `switch` statements and logging pipelines.\n */\nexport class NoydbError extends Error {\n /** Machine-readable error code. Stable across library versions. 
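\n *\n * For example (the handlers are hypothetical; the codes come from the\n * hierarchy above):\n *\n * ```ts\n * if (e instanceof NoydbError) {\n * switch (e.code) {\n * case 'CONFLICT': retryWrite(); break\n * case 'NOT_FOUND': showMissingRecordUI(); break\n * default: logToSentry(e.code, e)\n * }\n * }\n * ```\n 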
*/\n readonly code: string\n\n constructor(code: string, message: string) {\n super(message)\n this.name = 'NoydbError'\n this.code = code\n }\n}\n\n// ─── Crypto Errors ─────────────────────────────────────────────────────\n\n/**\n * Thrown when AES-GCM decryption fails.\n *\n * The most common cause is a wrong passphrase or a corrupted ciphertext.\n * A `DecryptionError` caused by a wrong passphrase is caught internally\n * and re-thrown as `InvalidKeyError` — so in practice this surfaces for\n * per-record corruption rather than authentication failures.\n */\nexport class DecryptionError extends NoydbError {\n constructor(message = 'Decryption failed') {\n super('DECRYPTION_FAILED', message)\n this.name = 'DecryptionError'\n }\n}\n\n/**\n * Thrown when GCM tag verification fails, indicating the ciphertext was\n * modified after encryption.\n *\n * AES-256-GCM is authenticated encryption — the tag over the ciphertext\n * is checked on every decrypt. If any byte was flipped (accidental\n * corruption or deliberate tampering), decryption throws this error.\n * Treat it as a security alert: the stored bytes are not what NOYDB wrote.\n */\nexport class TamperedError extends NoydbError {\n constructor(message = 'Data integrity check failed — record may have been tampered with') {\n super('TAMPERED', message)\n this.name = 'TamperedError'\n }\n}\n\n/**\n * Thrown when key unwrapping fails, typically because the passphrase is wrong\n * or the keyring file is corrupted.\n *\n * NOYDB uses AES-KW (RFC 3394) to wrap DEKs with the KEK. If AES-KW\n * unwrapping fails, it means either the KEK was derived from the wrong\n * passphrase (PBKDF2 with 600K iterations) or the keyring bytes are\n * corrupted. This is the error shown to the user on a failed unlock attempt.\n */\nexport class InvalidKeyError extends NoydbError {\n constructor(message = 'Invalid key — wrong passphrase or corrupted keyring') {\n super('INVALID_KEY', message)\n this.name = 'InvalidKeyError'\n }\n}\n\n// ─── Access Errors ─────────────────────────────────────────────────────\n\n/**\n * Thrown when the authenticated user does not have a DEK for the requested\n * collection — i.e. the collection is not in their keyring at all.\n *\n * This is the \"no key for this door\" error. It is different from\n * `ReadOnlyError` (user has a key but it only grants ro) and from\n * `PermissionDeniedError` (user's role doesn't allow the operation).\n */\nexport class NoAccessError extends NoydbError {\n constructor(message = 'No access — user does not have a key for this collection') {\n super('NO_ACCESS', message)\n this.name = 'NoAccessError'\n }\n}\n\n/**\n * Thrown when a user with read-only (`ro`) permission attempts a write\n * operation (`put` or `delete`) on a collection.\n *\n * The user has a DEK for the collection (they can decrypt and read), but\n * their keyring grants only `ro`. To fix: re-grant the user with `rw`\n * permission, or do not attempt writes as a viewer/client role.\n */\nexport class ReadOnlyError extends NoydbError {\n constructor(message = 'Read-only — user has ro permission on this collection') {\n super('READ_ONLY', message)\n this.name = 'ReadOnlyError'\n }\n}\n\n/**\n * Thrown when a write is attempted against a historical view produced\n * by `vault.at(timestamp)`. 
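\n *\n * A sketch (the collection name and the `put()` argument shape are\n * illustrative, not confirmed):\n *\n * ```ts\n * const past = vault.at('2026-01-01T00:00:00Z')\n * await past.collection('invoices').put(id, changes) // throws ReadOnlyAtInstantError\n * ```\n *\n * 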
Time-machine views are read-only by\n * contract — mutating the past would require either the shadow-vault\n * mechanism or a ledger-history rewrite (which breaks\n * the tamper-evidence guarantee).\n *\n * Distinct from {@link ReadOnlyError} (keyring-level) and\n * {@link PermissionDeniedError} (role-level): this error is about the\n * *view* being historical, independent of the caller's permissions.\n */\nexport class ReadOnlyAtInstantError extends NoydbError {\n constructor(operation: string, timestamp: string) {\n super(\n 'READ_ONLY_AT_INSTANT',\n `Cannot ${operation}() on a vault view anchored at ${timestamp} — time-machine views are read-only`,\n )\n this.name = 'ReadOnlyAtInstantError'\n }\n}\n\n/**\n * Thrown when a write is attempted against a shadow-vault frame\n * produced by `vault.frame()`. Frames are read-only by contract —\n * the use case is screen-sharing / demos / compliance review where\n * the operator wants to prevent accidental edits.\n *\n * Behavioural enforcement only — the underlying keyring still holds\n * write-capable DEKs. See {@link VaultFrame} for the full caveat.\n */\nexport class ReadOnlyFrameError extends NoydbError {\n constructor(operation: string) {\n super(\n 'READ_ONLY_FRAME',\n `Cannot ${operation}() on a vault frame — frames are read-only presentations of the current vault`,\n )\n this.name = 'ReadOnlyFrameError'\n }\n}\n\n/**\n * Thrown when the authenticated user's role does not permit the requested\n * operation — e.g. a `viewer` calling `grantAccess()`, or an `operator`\n * calling `rotateKeys()`.\n *\n * This is a role-level check (what the user's role allows), distinct from\n * `NoAccessError` (collection not in keyring) and `ReadOnlyError` (in\n * keyring, but write not allowed).\n */\nexport class PermissionDeniedError extends NoydbError {\n constructor(message = 'Permission denied — insufficient role for this operation') {\n super('PERMISSION_DENIED', message)\n this.name = 'PermissionDeniedError'\n }\n}\n\n/**\n * Thrown when an `@noy-db/as-*` export is attempted without the\n * required capability bit on the invoking keyring.\n *\n * Two sub-cases discriminated by the `tier` field:\n *\n * - `tier: 'plaintext'` — a plaintext-tier export (`as-xlsx`,\n * `as-csv`, `as-blob`, `as-zip`, …) was attempted but the\n * keyring's `exportCapability.plaintext` does not include the\n * requested `format` (nor the `'*'` wildcard). Default for every\n * role is `plaintext: []` — the owner must positively grant.\n * - `tier: 'bundle'` — an encrypted `as-noydb` bundle export was\n * attempted but the keyring's `exportCapability.bundle` is\n * `false`. Default for `owner`/`admin` is `true`; for\n * `operator`/`viewer`/`client` it is `false`.\n *\n * Distinct from `PermissionDeniedError` (role-level check) and\n * `NoAccessError` (collection not readable). Surfaces separately so\n * UI layers can show a \"request the export capability from your\n * admin\" flow rather than a generic permission error.\n */\nexport class ExportCapabilityError extends NoydbError {\n readonly tier: 'plaintext' | 'bundle'\n readonly format?: string\n readonly userId: string\n\n constructor(opts: {\n tier: 'plaintext' | 'bundle'\n userId: string\n format?: string\n message?: string\n }) {\n const msg =\n opts.message ??\n (opts.tier === 'plaintext'\n ? `Export capability denied — keyring \"${opts.userId}\" is not granted plaintext-export capability for format \"${opts.format ?? '<unknown>'}\". 
Ask a vault owner or admin to grant it via vault.grant({ exportCapability: { plaintext: ['${opts.format ?? '<format>'}'] } }).`\n : `Export capability denied — keyring \"${opts.userId}\" is not granted encrypted-bundle export capability. Ask a vault owner or admin to grant it via vault.grant({ exportCapability: { bundle: true } }).`)\n super('EXPORT_CAPABILITY', msg)\n this.name = 'ExportCapabilityError'\n this.tier = opts.tier\n this.userId = opts.userId\n if (opts.format !== undefined) this.format = opts.format\n }\n}\n\n/**\n * Thrown when a keyring file's `expires_at` cutoff has passed.\n * Surfaced by `loadKeyring` before any DEK unwrap is attempted —\n * past the cutoff the slot refuses to open even with the right\n * passphrase. Distinct from PBKDF2 / unwrap errors so consumer code\n * can show a precise \"this bundle slot has expired\" message instead\n * of the generic decryption-failure UX.\n *\n * Used predominantly on `BundleRecipient` slots produced by\n * `writeNoydbBundle({ recipients: [...] })` to time-box audit access.\n */\nexport class KeyringExpiredError extends NoydbError {\n readonly userId: string\n readonly expiresAt: string\n constructor(opts: { userId: string; expiresAt: string }) {\n super(\n 'KEYRING_EXPIRED',\n `Keyring \"${opts.userId}\" expired at ${opts.expiresAt}. ` +\n 'The slot refuses to unlock past its expiry timestamp.',\n )\n this.name = 'KeyringExpiredError'\n this.userId = opts.userId\n this.expiresAt = opts.expiresAt\n }\n}\n\n/**\n * Thrown when an `@noy-db/as-*` import is attempted but the invoking\n * keyring lacks the required import-capability bit.\n *\n * - `tier: 'plaintext'` — a plaintext-tier import (`as-csv`, `as-json`,\n * `as-ndjson`, `as-zip`, …) was attempted but the keyring's\n * `importCapability.plaintext` does not include the requested\n * `format` (nor the `'*'` wildcard).\n * - `tier: 'bundle'` — a `.noydb` bundle import was attempted but the\n * keyring's `importCapability.bundle` is not `true`.\n *\n * Default for every role on every dimension is closed — owners and\n * admins must positively grant the capability. Distinct from\n * `PermissionDeniedError` and `NoAccessError` so UI layers can show a\n * specific \"request the import capability\" flow.\n */\nexport class ImportCapabilityError extends NoydbError {\n readonly tier: 'plaintext' | 'bundle'\n readonly format?: string\n readonly userId: string\n\n constructor(opts: {\n tier: 'plaintext' | 'bundle'\n userId: string\n format?: string\n message?: string\n }) {\n const msg =\n opts.message ??\n (opts.tier === 'plaintext'\n ? `Import capability denied — keyring \"${opts.userId}\" is not granted plaintext-import capability for format \"${opts.format ?? '<unknown>'}\". Ask a vault owner or admin to grant it via vault.grant({ importCapability: { plaintext: ['${opts.format ?? '<format>'}'] } }).`\n : `Import capability denied — keyring \"${opts.userId}\" is not granted encrypted-bundle import capability. 
Ask a vault owner or admin to grant it via vault.grant({ importCapability: { bundle: true } }).`)\n super('IMPORT_CAPABILITY', msg)\n this.name = 'ImportCapabilityError'\n this.tier = opts.tier\n this.userId = opts.userId\n if (opts.format !== undefined) this.format = opts.format\n }\n}\n\n/**\n * Thrown when a caller invokes an API that requires an optional\n * store capability the active store does not implement.\n *\n * Today the only call site is `Noydb.listAccessibleVaults()`,\n * which depends on the optional `NoydbStore.listVaults()`\n * method. The error message names the missing method and the calling\n * API so consumers know exactly which combination is unsupported,\n * and the `capability` field is machine-readable so library code can\n * pattern-match in catch blocks (e.g. fall back to a candidate-list\n * shape).\n *\n * The class lives in `errors.ts` rather than as a generic\n * `ValidationError` because the diagnostic shape is different: a\n * `ValidationError` says \"the inputs you passed are wrong\"; this\n * error says \"the inputs are fine, but the store you wired up\n * doesn't support what you're asking for.\" Different fix, different\n * documentation.\n */\nexport class StoreCapabilityError extends NoydbError {\n /** The store method/capability that was missing. */\n readonly capability: string\n\n constructor(capability: string, callerApi: string, storeName?: string) {\n super(\n 'STORE_CAPABILITY',\n `${callerApi} requires the optional store capability \"${capability}\" ` +\n `but the active store${storeName ? ` (${storeName})` : ''} does not implement it. 
` +\n `Use a store that supports \"${capability}\" (store-memory, store-file) or pass an explicit ` +\n `vault list to bypass enumeration.`,\n )\n this.name = 'StoreCapabilityError'\n this.capability = capability\n }\n}\n\n/**\n * Thrown when a grant would give the grantee a permission the grantor\n * does not themselves hold — the \"admin cannot grant what admin cannot\n * do\" rule from the admin-delegation work.\n *\n * Distinct from `PermissionDeniedError` so callers can tell the two\n * cases apart in logs and tests:\n *\n * - `PermissionDeniedError` — \"you are not allowed to perform this\n * operation at all\" (wrong role).\n * - `PrivilegeEscalationError` — \"you are allowed to grant, but not\n * with these specific permissions\" (widening attempt).\n *\n * Under the admin model the grantee of an admin-grants-admin call\n * inherits the caller's entire DEK set by construction, so this error\n * is structurally unreachable in typical flows. The check and error\n * class exist so that future per-collection admin scoping cannot\n * accidentally bypass the subset rule — the guard is already wired in.\n *\n * `offendingCollection` carries the first collection name that failed\n * the subset check, to make the violation actionable in error output.\n */\nexport class PrivilegeEscalationError extends NoydbError {\n readonly offendingCollection: string\n\n constructor(offendingCollection: string, message?: string) {\n super(\n 'PRIVILEGE_ESCALATION',\n message ??\n `Privilege escalation: grantor has no DEK for collection \"${offendingCollection}\" and cannot grant access to it.`,\n )\n this.name = 'PrivilegeEscalationError'\n this.offendingCollection = offendingCollection\n }\n}\n\n/**\n * Thrown by `Collection.put` / `.delete` when the target record's\n * envelope `_ts` falls within a closed accounting period.\n *\n * Distinct from `ReadOnlyError` (keyring-level), `ReadOnlyAtInstantError`\n * (historical view), and `ReadOnlyFrameError` (shadow vault): this\n * error is about the STORED RECORD being sealed by an operator call\n * to `vault.closePeriod()`, independent of caller permissions or\n * view type. The `periodName` and `endDate` fields name the sealing\n * period so audit UIs can surface a \"this record is locked in\n * FY2026-Q1 (closed 2026-03-31)\" message without parsing the error\n * string.\n *\n * To apply a correction after close, book a compensating entry in a\n * new period rather than unlocking the old one. Re-opening a closed\n * period is deliberately unsupported.\n */\nexport class PeriodClosedError extends NoydbError {\n readonly periodName: string\n readonly endDate: string\n readonly recordTs: string\n\n constructor(periodName: string, endDate: string, recordTs: string) {\n super(\n 'PERIOD_CLOSED',\n `Cannot modify record (last written ${recordTs}) — sealed by closed period ` +\n `\"${periodName}\" (endDate: ${endDate}). Post a compensating entry in a ` +\n `new period instead.`,\n )\n this.name = 'PeriodClosedError'\n this.periodName = periodName\n this.endDate = endDate\n this.recordTs = recordTs\n }\n}\n\n// ─── Hierarchical Access Errors ─────────────────────\n\n/**\n * Thrown when a user tries to act at a tier they are not cleared for.\n *\n * This is the umbrella error for tier write refusals:\n * - `put({ tier: N })` when the user's keyring lacks tier-N DEK.\n * - `elevate(id, N)` when the caller cannot reach tier N.\n *\n * Distinct from `TierAccessDeniedError` which covers *read* refusals on\n * the invisibility/ghost path.\n */\nexport class TierNotGrantedError extends NoydbError {\n readonly tier: number\n readonly collection: string\n\n constructor(collection: string, tier: number) {\n super(\n 'TIER_NOT_GRANTED',\n `User has no DEK for tier ${tier} in collection \"${collection}\"`,\n )\n this.name = 'TierNotGrantedError'\n this.collection = collection\n this.tier = tier\n }\n}\n\n/**\n * Thrown when an elevated-handle operation runs after the elevation's\n * TTL expired. Reads continue at the original tier; only writes\n * through the scoped handle flip to throwing once expired.\n */\nexport class ElevationExpiredError extends NoydbError {\n readonly tier: number\n readonly expiresAt: number\n\n constructor(opts: { tier: number; expiresAt: number }) {\n super(\n 'ELEVATION_EXPIRED',\n `Elevation to tier ${opts.tier} expired at ${new Date(opts.expiresAt).toISOString()}`,\n )\n this.name = 'ElevationExpiredError'\n this.tier = opts.tier\n this.expiresAt = opts.expiresAt\n }\n}\n\n/**\n * Thrown by `vault.elevate(...)` when an elevation is already active\n * on the vault. 
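\n *\n * A hedged sketch of the handle lifecycle (whether `elevate()` is async is\n * assumed):\n *\n * ```ts\n * const handle = await vault.elevate(2)\n * // ... writes through the scoped handle ...\n * handle.release()\n * const next = await vault.elevate(3) // ok once the first handle is released\n * ```\n *\n * 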
Adopters must `release()` the existing handle before\n * starting a new elevation.\n */\nexport class AlreadyElevatedError extends NoydbError {\n readonly activeTier: number\n\n constructor(activeTier: number) {\n super(\n 'ALREADY_ELEVATED',\n `Vault is already elevated to tier ${activeTier}; release the existing handle first`,\n )\n this.name = 'AlreadyElevatedError'\n this.activeTier = activeTier\n }\n}\n\n/**\n * Thrown when `demote()` is called by someone who is not the original\n * elevator and not an owner.\n */\nexport class TierDemoteDeniedError extends NoydbError {\n constructor(id: string, tier: number) {\n super(\n 'TIER_DEMOTE_DENIED',\n `Only the original elevator or an owner can demote record \"${id}\" from tier ${tier}`,\n )\n this.name = 'TierDemoteDeniedError'\n }\n}\n\n/**\n * Thrown when `db.delegate()` is called against a user that has no\n * keyring in the target vault — the delegation token cannot be\n * constructed without the target user's KEK wrap.\n */\nexport class DelegationTargetMissingError extends NoydbError {\n readonly toUser: string\n\n constructor(toUser: string) {\n super(\n 'DELEGATION_TARGET_MISSING',\n `Delegation target user \"${toUser}\" has no keyring in this vault`,\n )\n this.name = 'DelegationTargetMissingError'\n this.toUser = toUser\n }\n}\n\n// ─── Sync Errors ───────────────────────────────────────────────────────\n\n/**\n * Thrown when a `put()` detects an optimistic concurrency conflict.\n *\n * NOYDB uses version numbers (`_v`) for optimistic locking. If a `put()`\n * is called with `expectedVersion: N` but the stored record is at version\n * `M ≠ N`, the write is rejected and the caller must re-read, re-apply their\n * change, and retry. The `version` field carries the actual stored version\n * so callers can decide whether to retry or surface the conflict to the user.\n */\nexport class ConflictError extends NoydbError {\n /** The actual stored version at the time of conflict. */\n readonly version: number\n\n constructor(version: number, message = 'Version conflict') {\n super('CONFLICT', message)\n this.name = 'ConflictError'\n this.version = version\n }\n}\n\n/**\n * Thrown by `LedgerStore.append()` after exhausting its CAS retry\n * budget under multi-writer contention. Two browser tabs, a\n * web app + an offline mobile peer, or a server worker pool all\n * producing ledger entries against the same vault can race on the\n * \"read head, write head+1\" cycle; the optimistic-CAS retry loop\n * resolves the race for `casAtomic: true` stores, but pathological\n * contention (or a buggy peer) can still exhaust the budget. When\n * that happens, the chain is intact — the failed writer simply\n * couldn't claim a slot. Caller's choice whether to retry, queue,\n * or surface the failure to the user.\n */\nexport class LedgerContentionError extends NoydbError {\n readonly attempts: number\n\n constructor(attempts: number) {\n super(\n 'LEDGER_CONTENTION',\n `LedgerStore.append: failed to claim a chain slot after ${attempts} optimistic-CAS retries`,\n )\n this.name = 'LedgerContentionError'\n this.attempts = attempts\n }\n}\n\n/**\n * Thrown when a bundle push is rejected because the remote has been updated\n * since the local bundle was last pulled.\n *\n * Unlike `ConflictError` (per-record), this is a whole-bundle conflict —\n * the remote's bundle handle has changed. The caller must pull the new\n * bundle, merge, and re-push. 
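\n *\n * A hedged recovery sketch (`db.push()` / `db.pull()` as named in the sync\n * docs; the exact merge behavior is assumed):\n *\n * ```ts\n * try {\n * await db.push()\n * } catch (e) {\n * if (!(e instanceof BundleVersionConflictError)) throw e\n * await db.pull() // fetch and merge the newer remote bundle\n * await db.push() // retry once on top of it\n * }\n * ```\n *\n * 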
`remoteVersion` is the handle of the newer\n * remote bundle for use in diagnostics.\n */\nexport class BundleVersionConflictError extends NoydbError {\n /** The bundle handle of the newer remote version that rejected the push. */\n readonly remoteVersion: string\n\n constructor(remoteVersion: string, message = 'Bundle version conflict — remote has been updated') {\n super('BUNDLE_VERSION_CONFLICT', message)\n this.name = 'BundleVersionConflictError'\n this.remoteVersion = remoteVersion\n }\n}\n\n/**\n * Thrown when a sync operation (push or pull) fails due to a network error.\n *\n * NOYDB's offline-first design means network errors are expected during sync.\n * Callers should catch `NetworkError`, surface connectivity status in the UI,\n * and rely on the `SyncScheduler` to retry when connectivity is restored.\n */\nexport class NetworkError extends NoydbError {\n constructor(message = 'Network error') {\n super('NETWORK_ERROR', message)\n this.name = 'NetworkError'\n }\n}\n\n// ─── Data Errors ───────────────────────────────────────────────────────\n\n/**\n * Thrown when `collection.get(id)` is called with an ID that does not exist.\n *\n * NOYDB collections are memory-first, so this error is synchronous and cheap —\n * it does not make a network round-trip. Callers that expect the record to be\n * absent should use `collection.getOrNull(id)` instead.\n */\nexport class NotFoundError extends NoydbError {\n constructor(message = 'Record not found') {\n super('NOT_FOUND', message)\n this.name = 'NotFoundError'\n }\n}\n\n/**\n * Thrown when application-level validation fails before encryption.\n *\n * Distinct from `SchemaValidationError` (Standard Schema v1 validator)\n * and `MissingTranslationError` (i18nText). `ValidationError` is the\n * general-purpose validation base — use it for custom guards in `put()`\n * hooks or store middleware.\n */\nexport class ValidationError extends NoydbError {\n constructor(message = 'Validation error') {\n super('VALIDATION_ERROR', message)\n this.name = 'ValidationError'\n }\n}\n\n/**\n * Thrown when a Standard Schema v1 validator rejects a record on\n * `put()` (input validation) or on read (output validation). Carries\n * the raw issue list so callers can render field-level errors.\n *\n * `direction` distinguishes the two cases:\n * - `'input'`: the user passed bad data into `put()`. This is a\n * normal error case that application code should handle — typically\n * by showing validation messages in the UI.\n * - `'output'`: stored data does not match the current schema. This\n * indicates a schema drift (the schema was changed without\n * migrating the existing records) and should be treated as a bug\n * — the application should not swallow it silently.\n *\n * The `issues` type is deliberately `readonly unknown[]` on this class\n * so that `errors.ts` doesn't need to import from `schema.ts` (and\n * create a dependency cycle). 
Callers who know they're holding a\n * `SchemaValidationError` can cast to the more precise\n * `readonly StandardSchemaV1Issue[]` from `schema.ts`.\n */\nexport class SchemaValidationError extends NoydbError {\n readonly issues: readonly unknown[]\n readonly direction: 'input' | 'output'\n\n constructor(\n message: string,\n issues: readonly unknown[],\n direction: 'input' | 'output',\n ) {\n super('SCHEMA_VALIDATION_FAILED', message)\n this.name = 'SchemaValidationError'\n this.issues = issues\n this.direction = direction\n }\n}\n\n// ─── Query DSL Errors ─────────────────────────────────────────────────\n\n/**\n * Thrown when `.groupBy().aggregate()` produces more distinct groups than\n * the hard cardinality cap (default 100_000).\n *\n * The cap exists because `.groupBy()` materializes one bucket per\n * distinct key value in memory, and runaway cardinality — a groupBy\n * on a high-uniqueness field like `id` or `createdAt` — is almost\n * always a query mistake rather than legitimate use. A hard error is\n * better than silent OOM: the consumer sees an actionable message\n * naming the field and the observed cardinality, with guidance to\n * narrow the query with `.where()` or group on a lower-cardinality\n * field.\n *\n * A separate one-shot warning fires at 10% of the cap (10_000\n * groups) so consumers get a heads-up before the hard error — same\n * pattern as `JoinTooLargeError` and the `.join()` row ceiling.\n *\n * **Not overridable.** The 100k cap is a fixed constant so\n * the failure mode is consistent across the codebase; a\n * `{ maxGroups }` override can be added later without a break if a\n * real consumer asks.\n */\nexport class GroupCardinalityError extends NoydbError {\n /** The field being grouped on. */\n readonly field: string\n /** Observed number of distinct groups at the moment the cap tripped. */\n readonly cardinality: number\n /** The cap that was exceeded. */\n readonly maxGroups: number\n\n constructor(field: string, cardinality: number, maxGroups: number) {\n super(\n 'GROUP_CARDINALITY',\n `.groupBy(\"${field}\") produced ${cardinality} distinct groups, ` +\n `exceeding the ${maxGroups}-group ceiling. This is almost always a ` +\n `query mistake — grouping on a high-uniqueness field like \"id\" or ` +\n `\"createdAt\" produces one bucket per record. Narrow the query with ` +\n `.where() before grouping, or group on a lower-cardinality field ` +\n `(status, category, clientId). 
If you genuinely need high-cardinality ` +\n `grouping, file an issue with your use case.`,\n )\n this.name = 'GroupCardinalityError'\n this.field = field\n this.cardinality = cardinality\n this.maxGroups = maxGroups\n }\n}\n\n/**\n * Thrown in lazy mode when a `.query()` / `.where()` / `.orderBy()` clause\n * references a field that does not have a declared index.\n *\n * Lazy-mode queries only work when every touched field is indexed.\n * This is deliberate — silent scan-fallback would hide the performance\n * cliff that lazy-mode indexes exist to prevent.\n *\n * Payload:\n * - `collection` — name of the collection queried\n * - `touchedFields` — every field referenced by the query (filter + order)\n * - `missingFields` — subset of `touchedFields` that have no declared index\n */\nexport class IndexRequiredError extends NoydbError {\n readonly collection: string\n readonly touchedFields: readonly string[]\n readonly missingFields: readonly string[]\n\n constructor(args: { collection: string; touchedFields: readonly string[]; missingFields: readonly string[] }) {\n super(\n 'INDEX_REQUIRED',\n `Collection \"${args.collection}\": query references unindexed fields in lazy mode ` +\n `(missing: ${args.missingFields.join(', ')}). ` +\n `Declare an index on each field, or use collection.scan() for non-indexed iteration.`,\n )\n this.name = 'IndexRequiredError'\n this.collection = args.collection\n this.touchedFields = [...args.touchedFields]\n this.missingFields = [...args.missingFields]\n }\n}\n\n/**\n * Thrown (or surfaced via the `index:write-partial` event) when one or more\n * per-indexed-field side-car writes fail after the main record write has\n * already succeeded.\n *\n * Not thrown out of `.put()` / `.delete()` directly — those succeed when the\n * main record succeeds. Instead, `IndexWriteFailureError` instances are collected\n * into the session-scoped reconcile queue and emitted on the Collection\n * emitter as `index:write-partial`.\n *\n * Payload:\n * - `recordId` — the id of the main record whose side-car writes failed\n * - `field` — the indexed field whose side-car write failed\n * - `op` — `'put'` or `'delete'`, indicating which mutation was in flight\n * - `cause` — the underlying error from the store\n */\nexport class IndexWriteFailureError extends NoydbError {\n readonly recordId: string\n readonly field: string\n readonly op: 'put' | 'delete'\n override readonly cause: unknown\n\n constructor(args: { recordId: string; field: string; op: 'put' | 'delete'; cause: unknown }) {\n super(\n 'INDEX_WRITE_FAILURE',\n `Index side-car ${args.op} failed for field \"${args.field}\" on record \"${args.recordId}\"`,\n )\n this.name = 'IndexWriteFailureError'\n this.recordId = args.recordId\n this.field = args.field\n this.op = args.op\n this.cause = args.cause\n }\n}\n\n// ─── Bundle Format Errors ─────────────────────────────────\n\n/**\n * Thrown by `readNoydbBundle()` when the body bytes don't match\n * the integrity hash declared in the bundle header — i.e. someone\n * modified the bytes between write and read.\n *\n * Distinct from a generic `Error` (which would be thrown for\n * format violations like a missing magic prefix or malformed\n * header JSON) so consumers can pattern-match the corruption case\n * and handle it differently from a producer bug. 
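\n *\n * A sketch (the dump shape returned by `readNoydbBundle()` is not shown in\n * this excerpt, and the quarantine handler is hypothetical):\n *\n * ```ts\n * try {\n * const dump = await readNoydbBundle(bytes)\n * } catch (e) {\n * if (e instanceof BundleIntegrityError) quarantineFile(path)\n * else throw e // malformed bundle, likely a producer bug\n * }\n * ```\n *\n * 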
A\n * `BundleIntegrityError` indicates \"the bytes you got are not\n * what was written\"; a plain `Error` from `parsePrefixAndHeader`\n * indicates \"what was written wasn't a valid bundle in the first\n * place.\"\n *\n * Also thrown when decompression fails after the integrity hash\n * passed — that's a producer bug (the wrong algorithm byte was\n * written) but it surfaces with the same error class because the\n * end result is \"the body cannot be turned back into a dump.\"\n */\nexport class BundleIntegrityError extends NoydbError {\n constructor(message: string) {\n super('BUNDLE_INTEGRITY', `.noydb bundle integrity check failed: ${message}`)\n this.name = 'BundleIntegrityError'\n }\n}\n\n// ─── i18n / Dictionary Errors ──────────────────────────\n\n/**\n * Thrown when `vault.collection()` is called with a name that is\n * reserved for NOYDB internal use (any name starting with `_dict_`).\n *\n * Dictionary collections are accessed exclusively via\n * `vault.dictionary(name)` — attempting to open one as a regular\n * collection would bypass the dictionary invariants (ACL, rename\n * tracking, reserved-name policy).\n */\nexport class ReservedCollectionNameError extends NoydbError {\n /** The rejected collection name. */\n readonly collectionName: string\n\n constructor(collectionName: string) {\n super(\n 'RESERVED_COLLECTION_NAME',\n `\"${collectionName}\" is a reserved collection name. ` +\n `Use vault.dictionary(\"${collectionName.replace(/^_dict_/, '')}\") ` +\n `to access dictionary collections.`,\n )\n this.name = 'ReservedCollectionNameError'\n this.collectionName = collectionName\n }\n}\n\n/**\n * Thrown by `DictionaryHandle.get()` and `DictionaryHandle.delete()` when\n * the requested key does not exist in the dictionary.\n *\n * Distinct from `NotFoundError` (which is for data records) so callers\n * can distinguish \"data record missing\" from \"dictionary key missing\"\n * without inspecting error messages.\n */\nexport class DictKeyMissingError extends NoydbError {\n /** The dictionary name. */\n readonly dictionaryName: string\n /** The key that was not found. */\n readonly key: string\n\n constructor(dictionaryName: string, key: string) {\n super(\n 'DICT_KEY_MISSING',\n `Dictionary \"${dictionaryName}\" has no entry for key \"${key}\".`,\n )\n this.name = 'DictKeyMissingError'\n this.dictionaryName = dictionaryName\n this.key = key\n }\n}\n\n/**\n * Thrown by `DictionaryHandle.delete()` in strict mode when the key to\n * be deleted is still referenced by one or more records.\n *\n * The caller must either rename the key first (the only sanctioned\n * mass-mutation path) or pass `{ mode: 'warn' }` to skip the check\n * (development only).\n */\nexport class DictKeyInUseError extends NoydbError {\n /** The dictionary name. */\n readonly dictionaryName: string\n /** The key that is still referenced. */\n readonly key: string\n /** Name of the first collection found to reference this key. */\n readonly usedBy: string\n /** Number of records in `usedBy` that reference this key. */\n readonly count: number\n\n constructor(\n dictionaryName: string,\n key: string,\n usedBy: string,\n count: number,\n ) {\n super(\n 'DICT_KEY_IN_USE',\n `Cannot delete key \"${key}\" from dictionary \"${dictionaryName}\": ` +\n `${count} record(s) in \"${usedBy}\" still reference it. 
` +\n `Use dictionary.rename(\"${key}\", newKey) to rewrite references first.`,\n )\n this.name = 'DictKeyInUseError'\n this.dictionaryName = dictionaryName\n this.key = key\n this.usedBy = usedBy\n this.count = count\n }\n}\n\n/**\n * Thrown by `Collection.put()` when an `i18nText` field is missing one\n * or more required translations.\n *\n * The `missing` array names each locale code that was absent from the\n * field value. The `field` property names the field so callers can\n * render a field-level error message without parsing the string.\n */\nexport class MissingTranslationError extends NoydbError {\n /** The field name whose translation(s) are missing. */\n readonly field: string\n /** Locale codes that were required but absent. */\n readonly missing: readonly string[]\n\n constructor(field: string, missing: readonly string[], message?: string) {\n super(\n 'MISSING_TRANSLATION',\n message ??\n `Field \"${field}\": missing required translation(s): ${missing.join(', ')}.`,\n )\n this.name = 'MissingTranslationError'\n this.field = field\n this.missing = missing\n }\n}\n\n/**\n * Thrown when reading an `i18nText` field without specifying a locale —\n * either at the call site (`get(id, { locale })`) or on the vault\n * (`openVault(name, { locale })`).\n *\n * Also thrown when `resolveI18nText()` exhausts the fallback chain and\n * no translation is available for the requested locale.\n *\n * The `field` property names the field that triggered the error so the\n * caller can surface it in the UI.\n */\nexport class LocaleNotSpecifiedError extends NoydbError {\n /** The field name that required a locale. */\n readonly field: string\n\n constructor(field: string, message?: string) {\n super(\n 'LOCALE_NOT_SPECIFIED',\n message ??\n `Cannot read i18nText field \"${field}\" without a locale. ` +\n `Pass { locale } to get()/list()/query() or set a default via ` +\n `openVault(name, { locale }).`,\n )\n this.name = 'LocaleNotSpecifiedError'\n this.field = field\n }\n}\n\n// ─── Translator Errors ─────────────────────────────────────\n\n/**\n * Thrown when a collection has an `i18nText` field with\n * `autoTranslate: true` but no `plaintextTranslator` was configured\n * on `createNoydb()`.\n *\n * The error is raised at `put()` time (not at schema construction) so\n * the mis-configuration is surfaced by the first write rather than\n * silently at startup.\n */\nexport class TranslatorNotConfiguredError extends NoydbError {\n /** The field that requested auto-translation. */\n readonly field: string\n /** The collection the put was targeting. */\n readonly collection: string\n\n constructor(field: string, collection: string) {\n super(\n 'TRANSLATOR_NOT_CONFIGURED',\n `Field \"${field}\" in collection \"${collection}\" has autoTranslate: true, ` +\n `but no plaintextTranslator was configured on createNoydb(). 
` +\n `Either configure a plaintextTranslator or remove autoTranslate from the schema.`,\n )\n this.name = 'TranslatorNotConfiguredError'\n this.field = field\n this.collection = collection\n }\n}\n\n// ─── Backup Errors ─────────────────────────────────────────\n\n/**\n * Thrown when `Vault.load()` finds that a backup's hash chain\n * doesn't verify, or that its embedded `ledgerHead.hash` doesn't\n * match the chain head reconstructed from the loaded entries.\n *\n * Distinct from `BackupCorruptedError` so callers can choose to\n * recover from one but not the other (e.g., a corrupted JSON file is\n * unrecoverable; a chain mismatch might mean the backup is from an\n * incompatible noy-db version).\n */\nexport class BackupLedgerError extends NoydbError {\n /** First-broken-entry index, if known. */\n readonly divergedAt?: number\n\n constructor(message: string, divergedAt?: number) {\n super('BACKUP_LEDGER', message)\n this.name = 'BackupLedgerError'\n if (divergedAt !== undefined) this.divergedAt = divergedAt\n }\n}\n\n/**\n * Thrown when `Vault.load()` finds that the backup's data\n * collection content doesn't match the ledger's recorded\n * `payloadHash`es. This is the \"envelope was tampered with after\n * dump\" detection — the chain itself can be intact, but if any\n * encrypted record bytes were swapped, this check catches it.\n */\nexport class BackupCorruptedError extends NoydbError {\n /** The (collection, id) pair whose envelope failed the hash check. */\n readonly collection: string\n readonly id: string\n\n constructor(collection: string, id: string, message: string) {\n super('BACKUP_CORRUPTED', message)\n this.name = 'BackupCorruptedError'\n this.collection = collection\n this.id = id\n }\n}\n\n// ─── Session Errors ───────────────────────────────────────\n\n/**\n * Thrown by `resolveSession()` when the session token's `expiresAt`\n * timestamp is in the past. The session key is also removed from the\n * in-memory store when this is thrown, so retrying with the same sessionId\n * will produce `SessionNotFoundError`.\n *\n * Separate from `SessionNotFoundError` so callers can distinguish between\n * \"session is gone\" (key store cleared, tab reloaded) and \"session is\n * still in the store but has exceeded its lifetime\" (idle timeout, absolute\n * timeout, policy-driven expiry). The remediation differs: expired sessions\n * should prompt a fresh unlock; not-found sessions may indicate a bug or a\n * cross-tab scenario where the session was never established.\n */\nexport class SessionExpiredError extends NoydbError {\n readonly sessionId: string\n\n constructor(sessionId: string) {\n super('SESSION_EXPIRED', `Session \"${sessionId}\" has expired. Re-unlock to continue.`)\n this.name = 'SessionExpiredError'\n this.sessionId = sessionId\n }\n}\n\n/**\n * Thrown by `resolveSession()` when the session key cannot be found in\n * the module-level store. 
This happens when:\n * - The session was explicitly revoked via `revokeSession()`.\n * - The JS context was reloaded (tab navigation, page refresh, worker restart).\n * - `Noydb.close()` was called (which calls `revokeAllSessions()`).\n * - The sessionId is wrong or was generated by a different JS context.\n *\n * The session token (if the caller holds it) is permanently useless after\n * this error — the key is gone and cannot be recovered.\n */\nexport class SessionNotFoundError extends NoydbError {\n readonly sessionId: string\n\n constructor(sessionId: string) {\n super('SESSION_NOT_FOUND', `Session key for \"${sessionId}\" not found. The session may have been revoked or the page reloaded.`)\n this.name = 'SessionNotFoundError'\n this.sessionId = sessionId\n }\n}\n\n/**\n * Thrown when a session policy blocks an operation — for example,\n * `requireReAuthFor: ['export']` is set and the caller attempts to\n * call `exportStream()` without re-authenticating for this session.\n *\n * The `operation` field names the specific operation that was blocked\n * (e.g. `'export'`, `'grant'`, `'rotate'`) so the caller can surface\n * a targeted prompt (\"Please re-enter your passphrase to export data\").\n */\nexport class SessionPolicyError extends NoydbError {\n readonly operation: string\n\n constructor(operation: string, message?: string) {\n super(\n 'SESSION_POLICY',\n message ?? `Operation \"${operation}\" requires re-authentication per the active session policy.`,\n )\n this.name = 'SessionPolicyError'\n this.operation = operation\n }\n}\n\n// ─── Query / Join Errors ────────────────────────────────────\n\n/**\n * Thrown when a `.join()` would exceed its configured row ceiling on\n * either side. The ceiling defaults to 50,000 per side and can be\n * overridden via the `{ maxRows }` option on `.join()`.\n *\n * Carries both row counts so the error message can show which side\n * tripped the limit (e.g. \"left had 60,000 rows, right had 1,200,\n * max was 50,000\"). The `side` field is machine-readable so test\n * code and devtools can match on it without regex-parsing the\n * message.\n *\n * The row ceiling exists because joins are bounded in-memory\n * operations over materialized record sets. Consumers whose\n * collections genuinely exceed the ceiling should track the\n * streaming-join work (joins over `scan()`) or filter the left side further\n * with `where()` / `limit()` before joining.\n */\nexport class JoinTooLargeError extends NoydbError {\n readonly leftRows: number\n readonly rightRows: number\n readonly maxRows: number\n readonly side: 'left' | 'right'\n\n constructor(opts: {\n leftRows: number\n rightRows: number\n maxRows: number\n side: 'left' | 'right'\n message: string\n }) {\n super('JOIN_TOO_LARGE', opts.message)\n this.name = 'JoinTooLargeError'\n this.leftRows = opts.leftRows\n this.rightRows = opts.rightRows\n this.maxRows = opts.maxRows\n this.side = opts.side\n }\n}\n\n/**\n * Thrown by `.join()` in strict `ref()` mode when a left-side record\n * points at a right-side id that does not exist in the target\n * collection.\n *\n * Distinct from `RefIntegrityError` so test code can pattern-match\n * on the *read-time* dangling case without catching *write-time*\n * integrity violations. 
Both indicate \"ref points at nothing\" but\n * happen at different lifecycle phases and deserve different\n * remediation in documentation: a RefIntegrityError on `put()`\n * means the input is invalid; a DanglingReferenceError on `.join()`\n * means stored data has drifted and `vault.checkIntegrity()`\n * is the right tool to find the full set of orphans.\n */\nexport class DanglingReferenceError extends NoydbError {\n readonly field: string\n readonly target: string\n readonly refId: string\n\n constructor(opts: {\n field: string\n target: string\n refId: string\n message: string\n }) {\n super('DANGLING_REFERENCE', opts.message)\n this.name = 'DanglingReferenceError'\n this.field = opts.field\n this.target = opts.target\n this.refId = opts.refId\n }\n}\n\n/**\n * Thrown by {@link sanitizeFilename} when an input filename cannot be\n * made safe — NUL byte, empty after normalization, missing\n * `opaqueId` for the opaque profile, `..` segment, or a `maxBytes`\n * cap too small to hold a single code point.\n */\nexport class FilenameSanitizationError extends NoydbError {\n constructor(message: string) {\n super('FILENAME_SANITIZATION', message)\n this.name = 'FilenameSanitizationError'\n }\n}\n\n/**\n * Thrown when a write target resolves OUTSIDE the requested\n * directory after sanitization — the canonical Zip-Slip class. The\n * sanitizer's job is to strip path-traversal segments; this error\n * is the defense-in-depth fallback at the FS write site.\n */\nexport class PathEscapeError extends NoydbError {\n readonly attempted: string\n readonly targetDir: string\n\n constructor(opts: { attempted: string; targetDir: string }) {\n super(\n 'PATH_ESCAPE',\n `Sanitized filename \"${opts.attempted}\" resolves outside target dir \"${opts.targetDir}\"`,\n )\n this.name = 'PathEscapeError'\n this.attempted = opts.attempted\n this.targetDir = opts.targetDir\n }\n}\n","/**\n * Cryptographic primitives — thin wrappers around the Web Crypto API.\n *\n * ## Design principle\n *\n * **Zero npm crypto dependencies.** Every operation uses `globalThis.crypto.subtle`,\n * which is available natively in Node.js ≥ 18, all modern browsers, and\n * Deno/Bun. 
This avoids supply-chain risk from third-party crypto packages and\n * ensures the library stays auditable.\n *\n * ## Algorithms\n *\n * | Use case | Algorithm | Parameters |\n * |----------|-----------|------------|\n * | Key derivation | PBKDF2-SHA256 | 600,000 iterations, 32-byte salt |\n * | Record encryption | AES-256-GCM | 12-byte random IV per operation |\n * | DEK wrapping | AES-KW (RFC 3394) | 256-bit KEK |\n * | Binary encrypt | AES-256-GCM | same as record encryption |\n * | Integrity | HMAC-SHA256 | for presence channels |\n * | Content hash | SHA-256 | for ledger and bundle integrity |\n *\n * ## Key lifecycle\n *\n * ```\n * passphrase + salt\n * └─► deriveKey() → KEK (CryptoKey, extractable: false)\n * └─► wrapKey() → wrapped DEK bytes [stored in keyring]\n * └─► unwrapKey() → DEK (CryptoKey) [memory only during session]\n * └─► encrypt() / decrypt() → ciphertext / plaintext\n * ```\n *\n * IVs are generated fresh by {@link generateIV} on every encrypt call.\n * Reusing an IV with the same key would break GCM's authentication guarantee —\n * this function should be the only place IVs are produced.\n *\n * @module\n */\n\nimport { DecryptionError, InvalidKeyError, TamperedError } from './errors.js'\n\nconst PBKDF2_ITERATIONS = 600_000\nconst SALT_BYTES = 32\nconst IV_BYTES = 12\nconst KEY_BITS = 256\n\nconst subtle = globalThis.crypto.subtle\n\n// ─── Key Derivation ────────────────────────────────────────────────────\n\n/** Derive a KEK from a passphrase and salt using PBKDF2-SHA256. */\nexport async function deriveKey(\n passphrase: string,\n salt: Uint8Array,\n): Promise<CryptoKey> {\n const keyMaterial = await subtle.importKey(\n 'raw',\n new TextEncoder().encode(passphrase),\n 'PBKDF2',\n false,\n ['deriveKey'],\n )\n\n return subtle.deriveKey(\n {\n name: 'PBKDF2',\n salt: salt as BufferSource,\n iterations: PBKDF2_ITERATIONS,\n hash: 'SHA-256',\n },\n keyMaterial,\n { name: 'AES-KW', length: KEY_BITS },\n false,\n ['wrapKey', 'unwrapKey'],\n )\n}\n\n// ─── DEK Generation ────────────────────────────────────────────────────\n\n/** Generate a random AES-256-GCM data encryption key. */\nexport async function generateDEK(): Promise<CryptoKey> {\n return subtle.generateKey(\n { name: 'AES-GCM', length: KEY_BITS },\n true, // extractable — needed for AES-KW wrapping\n ['encrypt', 'decrypt'],\n )\n}\n\n// ─── Key Wrapping ──────────────────────────────────────────────────────\n\n/** Wrap (encrypt) a DEK with a KEK using AES-KW. Returns base64 string. */\nexport async function wrapKey(dek: CryptoKey, kek: CryptoKey): Promise<string> {\n const wrapped = await subtle.wrapKey('raw', dek, kek, 'AES-KW')\n return bufferToBase64(wrapped)\n}\n\n/** Unwrap (decrypt) a DEK from base64 string using a KEK. */\nexport async function unwrapKey(\n wrappedBase64: string,\n kek: CryptoKey,\n): Promise<CryptoKey> {\n try {\n return await subtle.unwrapKey(\n 'raw',\n base64ToBuffer(wrappedBase64) as BufferSource,\n kek,\n 'AES-KW',\n { name: 'AES-GCM', length: KEY_BITS },\n true,\n ['encrypt', 'decrypt'],\n )\n } catch {\n throw new InvalidKeyError()\n }\n}\n\n// ─── Encrypt / Decrypt ─────────────────────────────────────────────────\n\nexport interface EncryptResult {\n iv: string // base64\n data: string // base64\n}\n\n/** Encrypt plaintext JSON string with AES-256-GCM. Fresh IV per call. 
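A round-trip\n * sketch using this module's own exports:\n *\n * ```\n * const dek = await generateDEK()\n * const { iv, data } = await encrypt('{\"hello\":\"world\"}', dek)\n * await decrypt(iv, data, dek) // '{\"hello\":\"world\"}'\n * ```\n 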
*/\nexport async function encrypt(\n plaintext: string,\n dek: CryptoKey,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const encoded = new TextEncoder().encode(plaintext)\n\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n encoded,\n )\n\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/** Decrypt AES-256-GCM ciphertext. Throws on wrong key or tampered data. */\nexport async function decrypt(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<string> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n\n try {\n const plaintext = await subtle.decrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n ciphertext as BufferSource,\n )\n return new TextDecoder().decode(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? err.message : 'Decryption failed',\n )\n }\n}\n\n// ─── Binary Encrypt / Decrypt ────────\n\n/**\n * Encrypt raw bytes with AES-256-GCM using a fresh random IV.\n * Used by the attachment store so binary blobs avoid double base64 encoding\n * (the existing `encrypt()` function calls `TextEncoder` on a string — here\n * we pass the `Uint8Array` directly to `subtle.encrypt`).\n */\nexport async function encryptBytes(\n data: Uint8Array,\n dek: CryptoKey,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n data as unknown as BufferSource,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Decrypt AES-256-GCM ciphertext back to raw bytes.\n * Counterpart to `encryptBytes`. Throws `TamperedError` on auth-tag failure.\n */\nexport async function decryptBytes(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<Uint8Array> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n try {\n const plaintext = await subtle.decrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n ciphertext as BufferSource,\n )\n return new Uint8Array(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? err.message : 'Decryption failed',\n )\n }\n}\n\n/**\n * SHA-256 hex digest of raw bytes. Used to derive content-addressed\n * eTags for blob deduplication. 
Computed on plaintext bytes\n * before compression and encryption so the eTag identifies content, not\n * ciphertext, and survives re-encryption (key rotation, re-upload).\n */\nexport async function sha256Hex(data: Uint8Array): Promise<string> {\n const hash = await subtle.digest('SHA-256', data as unknown as BufferSource)\n return Array.from(new Uint8Array(hash))\n .map((b) => b.toString(16).padStart(2, '0'))\n .join('')\n}\n\n// ─── HMAC-SHA-256 ─────────────────────────────\n\n/**\n * Compute HMAC-SHA-256(key, data) and return hex string.\n *\n * Used to derive content-addressed eTags that are opaque to the store:\n * ```\n * eTag = hmacSha256Hex(blobDEK, plaintext)\n * ```\n *\n * Unlike a plain SHA-256, the HMAC is keyed by the vault-shared `_blob` DEK,\n * so an attacker with store access cannot pre-compute eTags for known files.\n * Deduplication still works within a vault (same key + same content = same eTag).\n */\nexport async function hmacSha256Hex(key: CryptoKey, data: Uint8Array): Promise<string> {\n // Export AES-GCM DEK raw bytes → import as HMAC key\n const rawKey = await subtle.exportKey('raw', key)\n const hmacKey = await subtle.importKey(\n 'raw',\n rawKey,\n { name: 'HMAC', hash: 'SHA-256' },\n false,\n ['sign'],\n )\n const sig = await subtle.sign('HMAC', hmacKey, data as unknown as BufferSource)\n return Array.from(new Uint8Array(sig))\n .map((b) => b.toString(16).padStart(2, '0'))\n .join('')\n}\n\n// ─── AAD-aware Binary Encrypt / Decrypt ──\n\n/**\n * Encrypt raw bytes with AES-256-GCM using Additional Authenticated Data.\n *\n * The AAD binds each chunk to its parent blob and position, preventing\n * chunk reorder, substitution, and truncation attacks:\n * ```\n * AAD = UTF-8(\"{eTag}:{chunkIndex}:{chunkCount}\")\n * ```\n *\n * The AAD is NOT stored — the reader reconstructs it from `BlobObject`\n * metadata and passes it to `decryptBytesWithAAD`.\n */\nexport async function encryptBytesWithAAD(\n data: Uint8Array,\n dek: CryptoKey,\n aad: Uint8Array,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const ciphertext = await subtle.encrypt(\n {\n name: 'AES-GCM',\n iv: iv as BufferSource,\n additionalData: aad as BufferSource,\n },\n dek,\n data as unknown as BufferSource,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Decrypt AES-256-GCM ciphertext with AAD verification.\n *\n * If the AAD does not match the one used at encryption time (e.g. because\n * a chunk was reordered or substituted from another blob), the GCM auth\n * tag fails and this throws `TamperedError`.\n */\nexport async function decryptBytesWithAAD(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n aad: Uint8Array,\n): Promise<Uint8Array> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n try {\n const plaintext = await subtle.decrypt(\n {\n name: 'AES-GCM',\n iv: iv as BufferSource,\n additionalData: aad as BufferSource,\n },\n dek,\n ciphertext as BufferSource,\n )\n return new Uint8Array(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? 
err.message : 'Decryption failed',\n )\n }\n}\n\n// ─── Presence Key Derivation ──────────────────────────────\n\n/**\n * Derive an AES-256-GCM presence key from a collection DEK using HKDF-SHA256.\n *\n * The presence key is domain-separated from the data DEK by the fixed salt\n * `'noydb-presence'` and the `info` = collection name. This means:\n * - The adapter never sees the presence key.\n * - Presence payloads rotate automatically when the collection DEK is rotated.\n * - Revoked users cannot derive the new presence key after a DEK rotation.\n *\n * @param dek The collection's AES-256-GCM DEK (extractable).\n * @param collectionName Used as the HKDF `info` parameter for domain separation.\n * @returns A non-extractable AES-256-GCM key suitable for presence payload encryption.\n */\nexport async function derivePresenceKey(dek: CryptoKey, collectionName: string): Promise<CryptoKey> {\n // Step 1: export DEK raw bytes\n const rawDek = await subtle.exportKey('raw', dek)\n\n // Step 2: import as HKDF key material\n const hkdfKey = await subtle.importKey(\n 'raw',\n rawDek,\n 'HKDF',\n false,\n ['deriveBits'],\n )\n\n // Step 3: derive 256 bits with salt='noydb-presence' and info=collectionName\n const salt = new TextEncoder().encode('noydb-presence')\n const info = new TextEncoder().encode(collectionName)\n const bits = await subtle.deriveBits(\n { name: 'HKDF', hash: 'SHA-256', salt, info },\n hkdfKey,\n KEY_BITS,\n )\n\n // Step 4: import derived bits as AES-GCM key\n return subtle.importKey(\n 'raw',\n bits,\n { name: 'AES-GCM', length: KEY_BITS },\n false,\n ['encrypt', 'decrypt'],\n )\n}\n\n// ─── Deterministic Encryption ────────────────────────────\n\n/**\n * Derive a deterministic 12-byte IV from `{ DEK, context, plaintext }`\n * via HKDF-SHA256. Given the same three inputs, the IV is identical, so\n * `encryptDeterministic` produces the same ciphertext on every call —\n * which is precisely what enables blind equality search on encrypted\n * fields.\n *\n * **The side channel this opens.** Two records whose field value is the\n * same produce the same ciphertext. An observer with store access can\n * therefore tell which records share a value — not *what* the value is,\n * but the equivalence class. 
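For example,\n * using this module's own `encryptDeterministic` (the `users/email`\n * context string just follows the `'<collection>/<field>'` convention\n * documented below):\n *\n * ```\n * const a = await encryptDeterministic('alice@example.com', dek, 'users/email')\n * const b = await encryptDeterministic('alice@example.com', dek, 'users/email')\n * a.data === b.data // true; equality is visible without decrypting\n * ```\n *\n * 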
This is the well-known trade-off of\n * deterministic encryption and is why the feature is strictly opt-in\n * per field, guarded by `acknowledgeDeterministicRisk: true` at\n * collection creation.\n *\n * The context string MUST include the collection name and field name,\n * so:\n * - The same plaintext in two different fields encrypts differently\n * (no cross-field equality leak).\n * - The same plaintext in two different collections (different DEKs)\n * encrypts differently by virtue of the key, even before HKDF\n * domain separation kicks in.\n */\nasync function deriveDeterministicIV(\n dek: CryptoKey,\n context: string,\n plaintext: string,\n): Promise<Uint8Array> {\n const rawDek = await subtle.exportKey('raw', dek)\n const hkdfKey = await subtle.importKey('raw', rawDek, 'HKDF', false, ['deriveBits'])\n const salt = new TextEncoder().encode('noydb-deterministic-v1')\n const info = new TextEncoder().encode(`${context}\\x00${plaintext}`)\n const bits = await subtle.deriveBits(\n { name: 'HKDF', hash: 'SHA-256', salt, info },\n hkdfKey,\n IV_BYTES * 8,\n )\n return new Uint8Array(bits)\n}\n\n/**\n * Encrypt a plaintext string with AES-256-GCM and a deterministic,\n * HKDF-derived IV.\n *\n * The same `{ dek, context, plaintext }` triple always produces the\n * same `{ iv, data }` — call this twice and you can string-compare the\n * ciphertexts to check equality of the inputs without decrypting them.\n *\n * @param context Domain-separation string — by convention\n * `'<collection>/<field>'`. Different contexts encrypt\n * the same plaintext to different ciphertexts, so\n * `email` in collection `users` does not collide with\n * `email` in collection `customers`.\n */\nexport async function encryptDeterministic(\n plaintext: string,\n dek: CryptoKey,\n context: string,\n): Promise<EncryptResult> {\n const iv = await deriveDeterministicIV(dek, context, plaintext)\n const encoded = new TextEncoder().encode(plaintext)\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n encoded,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Counterpart to {@link encryptDeterministic}. The IV is stored\n * alongside the ciphertext (exactly like the randomized path), so\n * decrypt uses the stored IV and verifies the GCM auth tag — a tampered\n * ciphertext throws `TamperedError` just like randomized AES-GCM.\n */\nexport async function decryptDeterministic(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<string> {\n return decrypt(ivBase64, dataBase64, dek)\n}\n\n// ─── Random Generation ─────────────────────────────────────────────────\n\n/** Generate a random 12-byte IV for AES-GCM. */\nexport function generateIV(): Uint8Array {\n return globalThis.crypto.getRandomValues(new Uint8Array(IV_BYTES))\n}\n\n/** Generate a random 32-byte salt for PBKDF2. */\nexport function generateSalt(): Uint8Array {\n return globalThis.crypto.getRandomValues(new Uint8Array(SALT_BYTES))\n}\n\n// ─── Base64 Helpers ────────────────────────────────────────────────────\n\nexport function bufferToBase64(buffer: ArrayBuffer | Uint8Array): string {\n const bytes = buffer instanceof Uint8Array ? 
buffer : new Uint8Array(buffer)\n let binary = ''\n for (let i = 0; i < bytes.length; i++) {\n binary += String.fromCharCode(bytes[i]!)\n }\n return btoa(binary)\n}\n\nexport function base64ToBuffer(base64: string): Uint8Array<ArrayBuffer> {\n const binary = atob(base64)\n const bytes = new Uint8Array(binary.length)\n for (let i = 0; i < binary.length; i++) {\n bytes[i] = binary.charCodeAt(i)\n }\n return bytes\n}\n","/**\n * Minimal ULID generator — zero dependencies, Web Crypto API only.\n *\n * Used by the bundle writer to generate stable opaque\n * handles for `.noydb` containers.\n *\n * **What's a ULID?** A 128-bit identifier encoded as 26 Crockford\n * base32 characters. Layout:\n *\n * ```\n * 01HYABCDEFGHJKMNPQRSTVWXYZ\n * |--------||---------------|\n * 48-bit 80-bit\n * timestamp randomness\n * ```\n *\n * The first 10 chars encode a millisecond Unix timestamp (so ULIDs\n * sort lexicographically by creation time), and the remaining 16\n * chars are random. Crockford base32 omits I/L/O/U to avoid\n * ambiguity in handwriting and URLs.\n *\n * **Why hand-roll instead of pulling in `ulid`?** The package adds\n * a dep, the implementation is ~30 lines, and the bundle module\n * is the only consumer. Adding `ulid` would also drag in its own\n * crypto polyfill that we don't need on Node 18+ or modern\n * browsers.\n *\n * **Privacy consideration:** the timestamp prefix is observable in\n * the bundle header. This is a deliberate trade-off:\n * - Pro: lexicographic sortability lets bundle adapters list\n * newest-first without an extra index.\n * - Con: a casual observer can read the bundle's creation time\n * from the handle. They cannot read it from any OTHER field\n * (the header explicitly forbids `_exported_at`), and a\n * creation timestamp is the same kind of metadata that\n * filesystem mtime would already expose for a downloaded\n * bundle. The leak is therefore equivalent to what's already\n * visible from the file's mtime — not a new exposure.\n *\n * If a future use case needs timestamp-free handles, a v2 of the\n * format could specify \"use the random portion only\" without a\n * format break — `validateBundleHeader` only checks the regex\n * shape, not the encoded timestamp.\n */\n\n/**\n * Crockford base32 alphabet — omits I, L, O, U to avoid handwriting\n * and URL-encoding ambiguity. 32 characters covering 5 bits each.\n */\nconst CROCKFORD_ALPHABET = '0123456789ABCDEFGHJKMNPQRSTVWXYZ'\n\n/**\n * Encode a non-negative integer as a fixed-width Crockford base32\n * string. The width is fixed (not the natural log32 length) so\n * leading zeros are preserved — that's required for the timestamp\n * prefix to remain lexicographically sortable.\n *\n * Used twice: once for the 48-bit timestamp portion (10 chars) and\n * once for each 40-bit half of the randomness (8 chars × 2).\n */\nfunction encodeBase32(value: number, length: number): string {\n let out = ''\n let v = value\n for (let i = 0; i < length; i++) {\n out = CROCKFORD_ALPHABET[v % 32]! + out\n v = Math.floor(v / 32)\n }\n return out\n}\n\n/**\n * Generate a fresh ULID. Uses `crypto.getRandomValues` for the\n * randomness portion — same Web Crypto API the rest of the\n * codebase uses for IVs and salt.\n *\n * Returns a 26-character string. Calling twice in the same\n * millisecond produces two distinct ULIDs (the random portion\n * differs); ULIDs from the same millisecond are NOT guaranteed\n * to be monotonically ordered relative to each other, only\n * relative to ULIDs from a different millisecond. 
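For\n * example (illustrative, not a guaranteed ordering):\n *\n * ```\n * const a = generateULID()\n * const b = generateULID()\n * // Minted in the same millisecond: identical 10-char timestamp\n * // prefix, independent random tails.\n * a.slice(0, 10) === b.slice(0, 10) // true when same-ms\n * ```\n *\n * 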
The bundle\n * format never relies on intra-millisecond ordering.\n */\nexport function generateULID(): string {\n const now = Date.now()\n\n // 48-bit timestamp → 10 Crockford base32 characters.\n // JavaScript's max safe integer is 2^53 - 1; Date.now() is well\n // within that range until the year ~285,000 AD. Splitting into\n // high and low 24-bit halves keeps every intermediate value\n // inside the safe-integer range and avoids any ambiguity in the\n // base32 encoder above.\n const timestampHigh = Math.floor(now / 0x1000000) // top 24 bits\n const timestampLow = now & 0xffffff // bottom 24 bits\n const tsPart =\n encodeBase32(timestampHigh, 5) + encodeBase32(timestampLow, 5)\n\n // 80-bit randomness → 16 Crockford base32 characters. Split into\n // two 40-bit halves so each fits in JavaScript's safe-integer\n // range (53 bits) and the base32 encoder doesn't have to deal\n // with bigints.\n const randBytes = new Uint8Array(10)\n crypto.getRandomValues(randBytes)\n\n // First 5 bytes (40 bits) → 8 Crockford base32 characters.\n // Reconstruct the 40-bit integer from bytes in big-endian order.\n // Multiplication by 2^32 (instead of bit-shift) avoids JavaScript's\n // 32-bit integer cast on the high byte.\n const rand1 =\n randBytes[0]! * 2 ** 32 +\n (randBytes[1]! << 24 >>> 0) +\n (randBytes[2]! << 16) +\n (randBytes[3]! << 8) +\n randBytes[4]!\n // Same for the second 5 bytes.\n const rand2 =\n randBytes[5]! * 2 ** 32 +\n (randBytes[6]! << 24 >>> 0) +\n (randBytes[7]! << 16) +\n (randBytes[8]! << 8) +\n randBytes[9]!\n const randPart = encodeBase32(rand1, 8) + encodeBase32(rand2, 8)\n\n return tsPart + randPart\n}\n\n/**\n * Validate that a string is a syntactically well-formed ULID. Used\n * by the bundle header validator. Does NOT verify that the\n * timestamp portion decodes to a sensible date — the format only\n * cares about the encoding shape.\n */\nexport function isULID(value: string): boolean {\n return /^[0-9A-HJKMNP-TV-Z]{26}$/.test(value)\n}\n","/**\n * Ledger entry shape + canonical JSON + sha256 helpers.\n *\n * This file holds the PURE primitives used by the hash-chained ledger:\n * the entry type, the deterministic (sort-stable) JSON encoder, and\n * the sha256 hasher that produces `prevHash` and `ledger.head()`.\n *\n * Everything here is validator-free and side-effect free — the only\n * runtime dep is Web Crypto's `subtle.digest` for the sha256 call,\n * which we already use for every other hashing operation in the core.\n *\n * The hash chain property works like this:\n *\n * hash(entry[i]) = sha256(canonicalJSON(entry[i]))\n * entry[i+1].prevHash = hash(entry[i])\n *\n * Any modification to `entry[i]` (field values, field order, whitespace)\n * produces a different `hash(entry[i])`, which means `entry[i+1]`'s\n * stored `prevHash` no longer matches the recomputed hash, which means\n * `verify()` returns `{ ok: false, divergedAt: i + 1 }`. The chain is\n * append-only and tamper-evident without external anchoring.\n */\n\n/**\n * A single ledger entry in its plaintext form — what gets serialized,\n * hashed, and then encrypted with the ledger DEK before being written\n * to the `_ledger/` adapter collection.\n *\n * ## Why hash the ciphertext, not the plaintext?\n *\n * `payloadHash` is the sha256 of the record's ENCRYPTED envelope bytes,\n * not its plaintext. This matters:\n *\n * 1. **Zero-knowledge preserved.** A user (or a third party) can\n * verify the ledger against the stored envelopes without any\n * decryption keys. 
The adapter layer already holds only\n * ciphertext, so hashing the ciphertext keeps the ledger at the\n * same privacy level as the adapter.\n *\n * 2. **Determinism.** Plaintext → ciphertext is randomized by the\n * fresh per-write IV, so `hash(plaintext)` would need extra\n * normalization. `hash(ciphertext)` is already deterministic and\n * unique per write.\n *\n * 3. **Detection property.** If an attacker modifies even one byte of\n * the stored ciphertext (trying to flip a record), the hash\n * changes, the ledger's recorded `payloadHash` no longer matches,\n * and a data-integrity check fails. We don't do that check in\n * `verify()` today, but the\n * hook is there for a future `verifyIntegrity()` follow-up.\n *\n * The fields `op`, `collection`, `id`, `version`, `ts`, and `actor` are\n * plaintext METADATA about the operation — NOT the record itself. The\n * entry is still encrypted at rest via the ledger DEK, but adapters\n * could theoretically infer operation patterns from the sizes and\n * timestamps. This is an accepted trade-off for the tamper-evidence\n * property; full ORAM-level privacy is out of scope for noy-db.\n */\nexport interface LedgerEntry {\n /**\n * Zero-based sequential position of this entry in the chain. The\n * canonical adapter key is this number zero-padded to 10 digits\n * (`\"0000000001\"`) so lexicographic ordering matches numeric order.\n */\n readonly index: number\n\n /**\n * Hex-encoded sha256 of the canonical JSON of the PREVIOUS entry.\n * The genesis entry (index 0) has `prevHash === ''` — the first\n * entry in a fresh vault has nothing to point back to.\n */\n readonly prevHash: string\n\n /**\n * Which kind of mutation this entry records. For now the ledger\n * only supports data operations (`put`, `delete`). Access-control operations\n * (`grant`, `revoke`, `rotate`) will be added in a follow-up once\n * the keyring write path is instrumented — that's tracked in the\n * epic issue.\n */\n readonly op: 'put' | 'delete'\n\n /** The collection the mutation targeted. */\n readonly collection: string\n\n /** The record id the mutation targeted. */\n readonly id: string\n\n /**\n * The record version AFTER this mutation. For `put` this is the\n * newly assigned version; for `delete` this is the version that\n * was deleted (the last version visible to reads).\n */\n readonly version: number\n\n /** ISO timestamp of the mutation. */\n readonly ts: string\n\n /** User id of the actor who performed the mutation. */\n readonly actor: string\n\n /**\n * Hex-encoded sha256 of the encrypted envelope's `_data` field.\n * For `put`, this is the hash of the new ciphertext. For `delete`,\n * it's the hash of the last visible ciphertext at deletion time,\n * or the empty string if nothing was there to delete. Hashing the\n * ciphertext (not the plaintext) preserves zero-knowledge — see\n * the file docstring.\n */\n readonly payloadHash: string\n\n /**\n * Optional hex-encoded sha256 of the encrypted JSON Patch delta\n * blob stored alongside this entry in `_ledger_deltas/`. Present\n * only for `put` operations that had a previous version — the\n * genesis put of a new record, and every `delete`, leave this\n * field undefined.\n *\n * The delta payload itself lives in a sibling internal collection\n * (`_ledger_deltas/<paddedIndex>`) and is encrypted with the\n * ledger DEK. 
Callers use `ledger.loadDelta(index)` to decrypt and\n * deserialize it when reconstructing a historical version.\n *\n * Why optional instead of always-present: the first put of a\n * record has no previous version to diff against, so storing an\n * empty patch would be noise. For deletes there's no \"next\" state\n * to describe with a delta. Both cases set this field to undefined.\n *\n * Note: the canonical-JSON hasher treats `undefined` as invalid\n * (it's one of the guard rails), so on the wire this field is\n * either `{ deltaHash: '<hex>' }` or absent from the JSON\n * entirely — never `{ deltaHash: undefined }`.\n */\n readonly deltaHash?: string\n}\n\n/**\n * Canonical (sort-stable) JSON encoder.\n *\n * This function is the load-bearing primitive of the hash chain:\n * `sha256(canonicalJSON(entry))` must produce the same hex string\n * every time, on every machine, for the same logical entry — otherwise\n * `verify()` would return `{ ok: false }` on cross-platform reads.\n *\n * JavaScript's `JSON.stringify` is almost canonical, but NOT quite:\n * it preserves the insertion order of object keys, which means\n * `{a:1,b:2}` and `{b:2,a:1}` serialize differently. We fix this by\n * recursively walking objects and sorting their keys before\n * concatenation.\n *\n * Arrays keep their original order (reordering them would change\n * semantics). Numbers, strings, booleans, and `null` use the default\n * JSON encoding. `undefined` and functions are rejected — ledger\n * entries are plain data, and silently dropping `undefined` would\n * break the \"same input → same hash\" property if a caller forgot to\n * omit a field.\n *\n * Performance: one pass per nesting level; O(n log n) for key sorting\n * at each object. Entries are small (< 1 KB) so this is negligible\n * compared to the sha256 call.\n */\nexport function canonicalJson(value: unknown): string {\n if (value === null) return 'null'\n if (typeof value === 'boolean') return value ? 'true' : 'false'\n if (typeof value === 'number') {\n if (!Number.isFinite(value)) {\n throw new Error(\n `canonicalJson: refusing to encode non-finite number ${String(value)}`,\n )\n }\n return JSON.stringify(value)\n }\n if (typeof value === 'string') return JSON.stringify(value)\n if (typeof value === 'bigint') {\n throw new Error('canonicalJson: BigInt is not JSON-serializable')\n }\n if (typeof value === 'undefined' || typeof value === 'function') {\n throw new Error(\n `canonicalJson: refusing to encode ${typeof value} — include all fields explicitly`,\n )\n }\n if (Array.isArray(value)) {\n return '[' + value.map((v) => canonicalJson(v)).join(',') + ']'\n }\n if (typeof value === 'object') {\n const obj = value as Record<string, unknown>\n const keys = Object.keys(obj).sort()\n const parts: string[] = []\n for (const key of keys) {\n parts.push(JSON.stringify(key) + ':' + canonicalJson(obj[key]))\n }\n return '{' + parts.join(',') + '}'\n }\n throw new Error(`canonicalJson: unexpected value type: ${typeof value}`)\n}\n\n/**\n * Compute a hex-encoded sha256 of a string via Web Crypto's subtle API.\n *\n * We use hex (not base64) for hashes because hex is case-insensitive,\n * fixed-length (64 chars), and easier to compare visually in debug\n * output. 
Base64 would save a few bytes in storage but every encrypted\n * ledger entry is already much larger than the hash itself.\n */\nexport async function sha256Hex(input: string): Promise<string> {\n const bytes = new TextEncoder().encode(input)\n const digest = await globalThis.crypto.subtle.digest('SHA-256', bytes)\n return bytesToHex(new Uint8Array(digest))\n}\n\n/**\n * Compute the canonical hash of a ledger entry. Short wrapper around\n * `canonicalJson` + `sha256Hex`; callers use this instead of composing\n * the two functions every time, so any future change to the hashing\n * pipeline (e.g., adding a domain-separation prefix) lives in one place.\n */\nexport async function hashEntry(entry: LedgerEntry): Promise<string> {\n return sha256Hex(canonicalJson(entry))\n}\n\n/** Convert a Uint8Array to a lowercase hex string. */\nfunction bytesToHex(bytes: Uint8Array): string {\n const hex = new Array<string>(bytes.length)\n for (let i = 0; i < bytes.length; i++) {\n // Nullish fallback: indexing a Uint8Array within bounds always\n // returns a number, but the compiler's noUncheckedIndexedAccess\n // flag widens it to `number | undefined`. The `?? 0` satisfies\n // the compiler and never fires by construction.\n hex[i] = (bytes[i] ?? 0).toString(16).padStart(2, '0')\n }\n return hex.join('')\n}\n\n/**\n * Pad an index to the canonical 10-digit form used as the adapter key.\n * Ten digits is enough for ~10 billion ledger entries per vault\n * — far beyond any realistic use case, but cheap enough that the extra\n * digits don't hurt storage.\n */\nexport function paddedIndex(index: number): string {\n return String(index).padStart(10, '0')\n}\n\n/** Parse a padded adapter key back into a number. Returns NaN on malformed input. */\nexport function parseIndex(key: string): number {\n return Number.parseInt(key, 10)\n}\n","/**\n * RFC 6902 JSON Patch — compute + apply.\n *\n * This module is the \"delta history\" primitive: instead of\n * snapshotting the full record on every put (the legacy `_history`\n * snapshot behavior), `Collection.put` computes a JSON Patch between\n * the previous version and the new version and stores only the patch\n * in the ledger. To\n * reconstruct version N, we walk from the genesis snapshot forward\n * applying patches. Storage scales with **edit size**, not record\n * size — a 10 KB record edited 1000 times costs ~10 KB of deltas\n * instead of ~10 MB of snapshots.\n *\n * ## Why hand-roll instead of using a library?\n *\n * RFC 6902 has good libraries (`fast-json-patch`, `rfc6902`) but every\n * single one of them adds a runtime dependency to `@noy-db/core`. The\n * \"zero runtime dependencies\" promise is one of the core's load-bearing\n * features, and the patch surface we actually need is small enough\n * (~150 LoC) that vendoring is the right call.\n *\n * What we implement:\n * - `add` — insert a value at a path\n * - `remove` — delete the value at a path\n * - `replace` — overwrite the value at a path\n *\n * What we deliberately skip (out of scope for the ledger use):\n * - `move` and `copy` — optimizations; the diff algorithm doesn't\n * emit them, so the apply path doesn't need them\n * - `test` — used for transactional patches; we already have\n * optimistic concurrency via `_v` at the envelope layer\n * - Sophisticated array diffing (LCS, edit distance) — we treat\n * arrays as atomic values and emit a single `replace` op when\n * they differ. 
The accounting domain has small arrays where this\n * is fine; if we ever need patch-level array diffing we can add\n * it without changing the storage format.\n *\n * ## Path encoding (RFC 6902 §3)\n *\n * Paths look like `/foo/bar/0`. Each path segment is either an object\n * key or a numeric array index. Two characters need escaping inside\n * keys: `~` becomes `~0` and `/` becomes `~1`. We implement both.\n *\n * Empty path (`\"\"`) refers to the root document. Only `replace` makes\n * sense at the root, and our diff function emits it as a top-level\n * `replace` when `prev` and `next` differ in shape (object vs array,\n * primitive vs object, etc.).\n */\n\n/** A single JSON Patch operation. Subset of RFC 6902 — see file docstring. */\nexport type JsonPatchOp =\n | { readonly op: 'add'; readonly path: string; readonly value: unknown }\n | { readonly op: 'remove'; readonly path: string }\n | { readonly op: 'replace'; readonly path: string; readonly value: unknown }\n\n/** A complete JSON Patch document — an array of operations. */\nexport type JsonPatch = readonly JsonPatchOp[]\n\n// ─── Compute (diff) ──────────────────────────────────────────────────\n\n/**\n * Compute a JSON Patch that, when applied to `prev`, produces `next`.\n *\n * The algorithm is a straightforward recursive object walk:\n *\n * 1. If both inputs are plain objects (and not arrays/null):\n * - For each key in `prev`, recurse if `next` has it, else emit `remove`\n * - For each key in `next` not in `prev`, emit `add`\n * 2. If both inputs are arrays AND structurally equal, no-op.\n * Otherwise emit a single `replace` for the whole array.\n * 3. If both inputs are deeply equal primitives, no-op.\n * 4. Otherwise emit a `replace` at the current path.\n *\n * We do not minimize patches across move-like rearrangements — every\n * generated patch is straightforward enough to apply by hand if you\n * had to debug it.\n */\nexport function computePatch(prev: unknown, next: unknown): JsonPatch {\n const ops: JsonPatchOp[] = []\n diff(prev, next, '', ops)\n return ops\n}\n\nfunction diff(\n prev: unknown,\n next: unknown,\n path: string,\n out: JsonPatchOp[],\n): void {\n // Both null / both undefined → no-op (we don't differentiate them\n // in JSON terms; canonicalJson would reject undefined anyway).\n if (prev === next) return\n\n // One side null, the other not → straight replace.\n if (prev === null || next === null) {\n out.push({ op: 'replace', path, value: next })\n return\n }\n\n const prevIsArray = Array.isArray(prev)\n const nextIsArray = Array.isArray(next)\n const prevIsObject = typeof prev === 'object' && !prevIsArray\n const nextIsObject = typeof next === 'object' && !nextIsArray\n\n // Type changed (e.g., object → primitive, array → object). Replace.\n if (prevIsArray !== nextIsArray || prevIsObject !== nextIsObject) {\n out.push({ op: 'replace', path, value: next })\n return\n }\n\n // Both arrays. We don't do clever LCS-based diffing — emit a single\n // replace for the whole array if they differ. See file docstring for\n // the rationale.\n if (prevIsArray && nextIsArray) {\n if (!arrayDeepEqual(prev as unknown[], next as unknown[])) {\n out.push({ op: 'replace', path, value: next })\n }\n return\n }\n\n // Both plain objects. 
Recurse key by key.\n if (prevIsObject && nextIsObject) {\n const prevObj = prev as Record<string, unknown>\n const nextObj = next as Record<string, unknown>\n const prevKeys = Object.keys(prevObj)\n const nextKeys = Object.keys(nextObj)\n\n // Handle removes and overlapping recursions in one pass over prev.\n for (const key of prevKeys) {\n const childPath = path + '/' + escapePathSegment(key)\n if (!(key in nextObj)) {\n out.push({ op: 'remove', path: childPath })\n } else {\n diff(prevObj[key], nextObj[key], childPath, out)\n }\n }\n // Handle adds.\n for (const key of nextKeys) {\n if (!(key in prevObj)) {\n out.push({\n op: 'add',\n path: path + '/' + escapePathSegment(key),\n value: nextObj[key],\n })\n }\n }\n return\n }\n\n // Two primitives that aren't strictly equal — replace.\n out.push({ op: 'replace', path, value: next })\n}\n\nfunction arrayDeepEqual(a: unknown[], b: unknown[]): boolean {\n if (a.length !== b.length) return false\n for (let i = 0; i < a.length; i++) {\n if (!deepEqual(a[i], b[i])) return false\n }\n return true\n}\n\nfunction deepEqual(a: unknown, b: unknown): boolean {\n if (a === b) return true\n if (a === null || b === null) return false\n if (typeof a !== typeof b) return false\n if (typeof a !== 'object') return false\n const aArray = Array.isArray(a)\n const bArray = Array.isArray(b)\n if (aArray !== bArray) return false\n if (aArray && bArray) return arrayDeepEqual(a, b as unknown[])\n const aObj = a as Record<string, unknown>\n const bObj = b as Record<string, unknown>\n const aKeys = Object.keys(aObj)\n const bKeys = Object.keys(bObj)\n if (aKeys.length !== bKeys.length) return false\n for (const key of aKeys) {\n if (!(key in bObj)) return false\n if (!deepEqual(aObj[key], bObj[key])) return false\n }\n return true\n}\n\n// ─── Apply ──────────────────────────────────────────────────────────\n\n/**\n * Apply a JSON Patch to a base document and return the result.\n *\n * The base document is **not mutated** — every op clones the parent\n * container before writing to it, so the caller's reference to `base`\n * stays untouched. This costs an extra allocation per op but makes\n * the apply pipeline reorderable and safe to interrupt.\n *\n * Throws on:\n * - Removing a path that doesn't exist\n * - Adding to a path whose parent doesn't exist\n * - A path component that doesn't match the document shape (e.g.,\n * trying to step into a primitive)\n *\n * Throwing is the right behavior for the ledger use case: a failed\n * apply means the chain is corrupted, which should be loud rather\n * than silently producing a wrong reconstruction.\n */\nexport function applyPatch<T = unknown>(base: T, patch: JsonPatch): T {\n let result: unknown = clone(base)\n for (const op of patch) {\n result = applyOp(result, op)\n }\n return result as T\n}\n\nfunction applyOp(doc: unknown, op: JsonPatchOp): unknown {\n // Empty path → operation targets the root. 
Only `replace` and `add`\n // make sense at the root, but we handle `remove` for completeness\n // (root removal returns null).\n if (op.path === '') {\n if (op.op === 'remove') return null\n return clone(op.value)\n }\n\n const segments = parsePath(op.path)\n return walkAndApply(doc, segments, op)\n}\n\nfunction walkAndApply(\n doc: unknown,\n segments: string[],\n op: JsonPatchOp,\n): unknown {\n if (segments.length === 0) {\n // Should never happen — empty path is handled in applyOp().\n throw new Error('walkAndApply: empty segments (internal error)')\n }\n\n const [head, ...rest] = segments\n if (head === undefined) throw new Error('walkAndApply: undefined segment')\n\n if (rest.length === 0) {\n return applyAtTerminal(doc, head, op)\n }\n\n // Recurse into the child container, then rebuild the parent with\n // the modified child.\n if (Array.isArray(doc)) {\n const idx = parseArrayIndex(head, doc.length)\n const child = doc[idx]\n const newChild = walkAndApply(child, rest, op)\n const next = doc.slice()\n next[idx] = newChild\n return next\n }\n if (doc !== null && typeof doc === 'object') {\n const obj = doc as Record<string, unknown>\n if (!(head in obj)) {\n throw new Error(`applyPatch: path segment \"${head}\" not found in object`)\n }\n const newChild = walkAndApply(obj[head], rest, op)\n return { ...obj, [head]: newChild }\n }\n throw new Error(\n `applyPatch: cannot step into ${typeof doc} at segment \"${head}\"`,\n )\n}\n\nfunction applyAtTerminal(\n doc: unknown,\n segment: string,\n op: JsonPatchOp,\n): unknown {\n if (Array.isArray(doc)) {\n const idx =\n segment === '-' ? doc.length : parseArrayIndex(segment, doc.length + 1)\n const next = doc.slice()\n if (op.op === 'remove') {\n next.splice(idx, 1)\n return next\n }\n if (op.op === 'add') {\n next.splice(idx, 0, clone(op.value))\n return next\n }\n if (op.op === 'replace') {\n if (idx >= doc.length) {\n throw new Error(\n `applyPatch: replace at out-of-bounds array index ${idx}`,\n )\n }\n next[idx] = clone(op.value)\n return next\n }\n }\n if (doc !== null && typeof doc === 'object') {\n const obj = doc as Record<string, unknown>\n if (op.op === 'remove') {\n if (!(segment in obj)) {\n throw new Error(\n `applyPatch: remove on missing key \"${segment}\"`,\n )\n }\n const next = { ...obj }\n delete next[segment]\n return next\n }\n if (op.op === 'add') {\n // RFC 6902: `add` on an existing key replaces it.\n return { ...obj, [segment]: clone(op.value) }\n }\n if (op.op === 'replace') {\n if (!(segment in obj)) {\n throw new Error(\n `applyPatch: replace on missing key \"${segment}\"`,\n )\n }\n return { ...obj, [segment]: clone(op.value) }\n }\n }\n throw new Error(\n `applyPatch: cannot apply ${op.op} at terminal segment \"${segment}\"`,\n )\n}\n\n// ─── Path encoding (RFC 6902 §3) ─────────────────────────────────────\n\n/**\n * Escape a single path segment per RFC 6902 §3:\n * `~` → `~0`\n * `/` → `~1`\n *\n * Order matters: `~` must be escaped first, otherwise the `~1` we\n * just emitted would be re-escaped to `~01`.\n */\nfunction escapePathSegment(segment: string): string {\n return segment.replace(/~/g, '~0').replace(/\\//g, '~1')\n}\n\nfunction unescapePathSegment(segment: string): string {\n return segment.replace(/~1/g, '/').replace(/~0/g, '~')\n}\n\nfunction parsePath(path: string): string[] {\n if (!path.startsWith('/')) {\n throw new Error(`applyPatch: path must start with '/', got \"${path}\"`)\n }\n return path\n .slice(1)\n .split('/')\n .map(unescapePathSegment)\n}\n\nfunction parseArrayIndex(segment: 
string, max: number): number {\n if (!/^\\d+$/.test(segment)) {\n throw new Error(\n `applyPatch: array index must be a non-negative integer, got \"${segment}\"`,\n )\n }\n const idx = Number.parseInt(segment, 10)\n if (idx < 0 || idx > max) {\n throw new Error(\n `applyPatch: array index ${idx} out of range [0, ${max}]`,\n )\n }\n return idx\n}\n\n// ─── Cheap structural clone ─────────────────────────────────────────\n\n/**\n * Plain-JSON clone via JSON.parse(JSON.stringify(value)).\n *\n * Faster than `structuredClone` for our use because (a) we know our\n * inputs are JSON-compatible (no Dates, Maps, or BigInts — anything\n * else gets rejected by canonicalJson upstream), and (b) `structuredClone`\n * has overhead for handling arbitrary structured data we don't need.\n *\n * For tiny ledger entries (< 1 KB), the JSON round-trip is in the\n * single-digit microsecond range.\n */\nfunction clone<T>(value: T): T {\n if (value === null || value === undefined) return value\n if (typeof value !== 'object') return value\n return JSON.parse(JSON.stringify(value)) as T\n}\n","/**\n * Ledger storage constants — pinned in their own leaf module so\n * always-on core code (vault.ts, dictionary.ts) can import them\n * without dragging the `LedgerStore` class into the bundle.\n *\n * `splitting: true` in tsup is not enough on its own: when a\n * source file exports both pure constants and a heavyweight class,\n * the bundler keeps the entire chunk reachable from any importer.\n * Extracting the constants lets the floor scenario import them\n * without paying for the class.\n *\n * @internal\n */\n\n/** The internal collection name used for ledger entry storage. */\nexport const LEDGER_COLLECTION = '_ledger'\n\n/**\n * The internal collection name used for delta payload storage.\n *\n * Deltas live in a sibling collection (not inside `_ledger`) for two\n * reasons:\n *\n * 1. **Listing efficiency.** `ledger.loadAllEntries()` calls\n * `adapter.list(_ledger)` which would otherwise return every\n * delta key alongside every entry key. Splitting them keeps the\n * list small (one key per ledger entry) and the delta reads\n * keyed by the entry's index.\n *\n * 2. **Prune-friendliness.** A future `pruneHistory()` will delete\n * old deltas while keeping the ledger chain intact (folding old\n * deltas into a base snapshot). Separating the storage makes\n * that deletion a targeted operation on one collection instead\n * of a filter across a mixed list.\n *\n * Both collections share the same ledger DEK — one DEK, two\n * internal collections, same zero-knowledge guarantees.\n */\nexport const LEDGER_DELTAS_COLLECTION = '_ledger_deltas'\n","/**\n * Envelope payload hash — pinned in its own leaf module so consumers\n * (DictionaryHandle, the active history strategy) can import it\n * without dragging in the `LedgerStore` class.\n *\n * see `constants.ts` for the broader rationale.\n *\n * @internal\n */\n\nimport type { EncryptedEnvelope } from '../../types.js'\nimport { sha256Hex } from './entry.js'\n\n/**\n * Compute the `payloadHash` value for an encrypted envelope. Used by\n * `LedgerStore.append` for both put (hash the new envelope) and\n * delete (hash the previous envelope) paths, and by\n * `DictionaryHandle` so its ledger entries match the same contract.\n *\n * Returns the empty string when there is no envelope (delete of a\n * never-existed record). 
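For\n * instance (`envelope` stands for any stored `EncryptedEnvelope`):\n *\n * ```\n * await envelopePayloadHash(null)     // '' (nothing to hash)\n * await envelopePayloadHash(envelope) // sha256 hex of envelope._data\n * ```\n *\n * 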
The empty string is tolerated by the ledger\n * entry's `payloadHash` field as the canonical \"nothing here\" value.\n */\nexport async function envelopePayloadHash(\n envelope: EncryptedEnvelope | null,\n): Promise<string> {\n if (!envelope) return ''\n // `_data` is a base64 string for encrypted envelopes and the raw\n // JSON for plaintext ones. Both are strings, so a single sha256Hex\n // call works for both modes — the hash value differs between\n // encrypted/plaintext compartments because the bytes on disk\n // differ.\n return sha256Hex(envelope._data)\n}\n","/**\n * `LedgerStore` — read/write access to a compartment's hash-chained\n * audit log.\n *\n * The store is a thin wrapper around the adapter's `_ledger/` internal\n * collection. Every append:\n *\n * 1. Loads the current head (or treats an empty ledger as head = -1)\n * 2. Computes `prevHash` = sha256(canonicalJson(head))\n * 3. Builds the new entry with `index = head.index + 1`\n * 4. Encrypts the entry with the compartment's ledger DEK\n * 5. Writes the encrypted envelope to `_ledger/<paddedIndex>`\n *\n * `verify()` walks the chain from genesis forward and returns\n * `{ ok: true, head }` on success or `{ ok: false, divergedAt }` on the\n * first broken link.\n *\n * ## Thread / concurrency model\n *\n * For now we assume a **single writer per vault**. Two\n * concurrent `append()` calls race on the \"read head, write\n * head+1\" cycle and, on stores without atomic CAS, could produce a\n * broken chain. The sync engine is the primary concurrent-writer\n * scenario, and it uses optimistic-concurrency via `expectedVersion`\n * on the adapter. `append()` uses the same guard to claim chain\n * slots (see its docs), but only stores with `casAtomic: true`\n * enforce it. Multi-writer hardening beyond that is a follow-up.\n *\n * Single-writer usage IS safe, including across process restarts:\n * `head()` reads the adapter fresh each call, so a crash between the\n * adapter.put of a data record and the ledger append just means the\n * ledger is missing an entry for that record. `verify()` still\n * succeeds; a future `verifyIntegrity()` helper can cross-check the\n * ledger against the data collections to catch the gap.\n *\n * ## Why hide the ledger from `vault.collection()`?\n *\n * The `_ledger` name starts with `_`, matching the existing prefix\n * convention for internal collections (`_keyring`, `_sync`,\n * `_history`). The Vault's public `collection()` method already\n * returns a handle for any name, but `loadAll()` filters out\n * underscore-prefixed collections so backups and exports don't leak\n * ledger metadata. We keep the ledger accessible ONLY via\n * `vault.ledger()` to enforce the hash-chain invariants — direct\n * puts via `collection('_ledger')` would bypass the `append()` logic.\n */\n\nimport type { NoydbStore, EncryptedEnvelope } from '../../types.js'\nimport { NOYDB_FORMAT_VERSION } from '../../types.js'\nimport { encrypt, decrypt } from '../../crypto.js'\nimport { ConflictError, LedgerContentionError } from '../../errors.js'\nimport {\n canonicalJson,\n hashEntry,\n paddedIndex,\n sha256Hex,\n type LedgerEntry,\n} from './entry.js'\nimport type { JsonPatch } from './patch.js'\nimport { applyPatch } from './patch.js'\nimport { LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION } from './constants.js'\nimport { envelopePayloadHash } from './hash.js'\n\n/**\n * Maximum optimistic-CAS retries on the ledger head. Each failed\n * attempt invalidates the head cache, re-reads, and retries with a\n * fresh next-index. 
After N failures we surface\n * `LedgerContentionError` so the caller can decide whether to retry,\n * queue, or alert.\n */\nconst MAX_APPEND_ATTEMPTS = 8\n\n// Back-compat — re-export the constants + helper so any existing\n// `import { LEDGER_COLLECTION } from '...store.js'` paths keep\n// working. Internal core paths (vault.ts) import from the leaf\n// modules directly to avoid pulling this file's class into the\n// floor bundle.\nexport { LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION, envelopePayloadHash }\n\n/**\n * Input shape for `LedgerStore.append()`. The caller supplies the\n * operation metadata; the store fills in `index` and `prevHash`.\n */\nexport interface AppendInput {\n op: LedgerEntry['op']\n collection: string\n id: string\n version: number\n actor: string\n payloadHash: string\n /**\n * Optional JSON Patch for this put, stored in the reverse\n * orientation that `reconstruct()` consumes: it transforms the\n * new version back into the previous one. Present only for `put`\n * operations that had a previous version; omitted for genesis\n * puts and for deletes. When present, `LedgerStore.append`\n * persists the patch in `_ledger_deltas/<paddedIndex>` and\n * records its sha256 hash as the entry's `deltaHash` field.\n */\n delta?: JsonPatch\n}\n\n/**\n * Result of `LedgerStore.verify()`. On success, `head` is the hash of\n * the last entry — the same value that should be published to any\n * external anchoring service (blockchain, OpenTimestamps, etc.). On\n * failure, `divergedAt` is the 0-based index of the first entry whose\n * recorded `prevHash` does not match the recomputed hash of its\n * predecessor. Entries at `divergedAt` and later are untrustworthy;\n * entries before that index are still valid.\n */\nexport type VerifyResult =\n | { readonly ok: true; readonly head: string; readonly length: number }\n | {\n readonly ok: false\n readonly divergedAt: number\n readonly expected: string\n readonly actual: string\n }\n\n/**\n * A LedgerStore is bound to a single vault. Callers obtain one\n * via `vault.ledger()` — there is no public constructor to keep\n * the hash-chain invariants in one place.\n *\n * The class holds little mutable state beyond its dependencies\n * (adapter, vault name, DEK resolver, actor id): only the head cache\n * documented below. Other reads hit the adapter fresh, so multiple\n * instances against the same vault see each other's writes (at the\n * cost of re-parsing the ledger on every scan; see the cache docs\n * for the cross-instance caveat).\n */\nexport class LedgerStore {\n private readonly adapter: NoydbStore\n private readonly vault: string\n private readonly encrypted: boolean\n private readonly getDEK: (collectionName: string) => Promise<CryptoKey>\n private readonly actor: string\n\n /**\n * In-memory cache of the chain head — the most recently appended\n * entry along with its precomputed hash. Without this, every\n * `append()` would re-load every prior entry to recompute the\n * prevHash, making N puts O(N²) — a 1K-record stress test goes from\n * < 100ms to a multi-second timeout.\n *\n * The cache is populated on first read (`append`, `head`, `verify`)\n * and updated in-place on every successful `append`. Single-writer\n * usage (the documented assumption) keeps it consistent. 
A second\n * LedgerStore instance writing to the same vault would not\n * see the first instance's appends in its cached state — that's the\n * concurrency caveat documented at the class level.\n *\n * Sentinel `undefined` means \"not yet loaded\"; an explicit `null`\n * value means \"loaded and confirmed empty\" — distinguishing these\n * matters because an empty ledger is a valid state (genesis prevHash\n * is the empty string), and we don't want to re-scan the adapter\n * just because the chain is freshly initialized.\n */\n private headCache: { entry: LedgerEntry; hash: string } | null | undefined = undefined\n\n constructor(opts: {\n adapter: NoydbStore\n vault: string\n encrypted: boolean\n getDEK: (collectionName: string) => Promise<CryptoKey>\n actor: string\n }) {\n this.adapter = opts.adapter\n this.vault = opts.vault\n this.encrypted = opts.encrypted\n this.getDEK = opts.getDEK\n this.actor = opts.actor\n }\n\n /**\n * Lazily load (or return cached) the current chain head. The cache\n * sentinel is `undefined` until first access; after the first call,\n * the cache holds either a `{ entry, hash }` for non-empty ledgers\n * or `null` for empty ones.\n */\n private async getCachedHead(): Promise<{ entry: LedgerEntry; hash: string } | null> {\n if (this.headCache !== undefined) return this.headCache\n const entries = await this.loadAllEntries()\n const last = entries[entries.length - 1]\n if (!last) {\n this.headCache = null\n return null\n }\n this.headCache = { entry: last, hash: await hashEntry(last) }\n return this.headCache\n }\n\n /**\n * Append a new entry to the ledger. Returns the full entry that was\n * written (with its assigned index and computed prevHash) so the\n * caller can use the hash for downstream purposes (e.g., embedding\n * in a verifiable backup).\n *\n * This is the **only** way to add entries. Direct adapter writes to\n * `_ledger/` would bypass the chain math and would be caught by the\n * next `verify()` call as a divergence.\n *\n * ## Multi-writer correctness\n *\n * Append is implemented as an optimistic-CAS retry loop. On every\n * attempt:\n *\n * 1. Read fresh head (cache invalidated on retry).\n * 2. Compute `nextIndex = head.index + 1`, `prevHash = hash(head)`.\n * 3. Encrypt delta payload IN MEMORY (no adapter write yet) so we\n * can compute `deltaHash` before claiming the chain slot.\n * 4. Build + encrypt the entry envelope.\n * 5. `adapter.put(_ledger, paddedIndex, envelope, expectedVersion: 0)`\n * — the `expectedVersion: 0` asserts \"this slot must not exist.\"\n * Stores with `casAtomic: true` honor the CAS check; under\n * contention the second writer's put throws `ConflictError`.\n * 6. On `ConflictError`: invalidate the head cache, sleep with\n * bounded backoff + jitter, retry. After `MAX_APPEND_ATTEMPTS`\n * retries throw {@link LedgerContentionError}.\n * 7. On success: write the delta envelope (if any) at the same\n * index. Update the head cache.\n *\n * Entry-first ordering matters: writing the delta first under\n * contention would orphan delta records at indices the writer never\n * actually claimed. The deltaHash is computed off the encrypted\n * envelope's `_data` field, which doesn't require the envelope to\n * be persisted.\n *\n * Stores with `casAtomic: false` (file, s3, r2 by default) silently\n * accept the `expectedVersion: 0` argument and proceed without a\n * CAS check. 
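On CAS-capable stores, exhausting the\n * retry budget surfaces to the caller — a hedged sketch, with the\n * surrounding variables hypothetical:\n *\n * ```\n * try {\n *   await ledger.append({ op: 'put', collection, id, version, actor: '', payloadHash })\n * } catch (err) {\n *   if (err instanceof LedgerContentionError) {\n *     // another writer kept winning the slot race; queue and retry later\n *   }\n * }\n * ```\n *\n * 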
Concurrent appends against those stores remain\n * best-effort — pair them with an advisory lock or with sync\n * single-writer discipline.\n */\n async append(input: AppendInput): Promise<LedgerEntry> {\n let lastConflict: ConflictError | undefined\n for (let attempt = 0; attempt < MAX_APPEND_ATTEMPTS; attempt++) {\n // Force a fresh head read on every retry. The first attempt may\n // hit the cache; subsequent attempts must re-scan the adapter\n // because the prior conflict means our cached state is stale.\n if (attempt > 0) {\n this.headCache = undefined\n }\n try {\n return await this.appendOnce(input)\n } catch (err) {\n if (err instanceof ConflictError) {\n lastConflict = err\n if (attempt < MAX_APPEND_ATTEMPTS - 1) {\n await sleepBackoff(attempt)\n }\n continue\n }\n throw err\n }\n }\n void lastConflict\n throw new LedgerContentionError(MAX_APPEND_ATTEMPTS)\n }\n\n /**\n * One attempt at the append cycle. Throws `ConflictError` when the\n * CAS check on the entry put fails — `append()` catches that and\n * retries. Any other error propagates to the caller.\n */\n private async appendOnce(input: AppendInput): Promise<LedgerEntry> {\n const cached = await this.getCachedHead()\n const lastEntry = cached?.entry\n const prevHash = cached?.hash ?? ''\n const nextIndex = lastEntry ? lastEntry.index + 1 : 0\n\n // Encrypt the delta in memory so we can compute deltaHash WITHOUT\n // claiming the deltas slot yet — entry-put is the chain claim.\n let deltaEnvelope: EncryptedEnvelope | undefined\n let deltaHash: string | undefined\n if (input.delta !== undefined) {\n deltaEnvelope = await this.encryptDelta(input.delta)\n deltaHash = await sha256Hex(deltaEnvelope._data)\n }\n\n // Build the entry. Conditionally include `deltaHash` so\n // canonicalJson (which rejects undefined) never sees it when\n // there's no delta.\n const entryBase = {\n index: nextIndex,\n prevHash,\n op: input.op,\n collection: input.collection,\n id: input.id,\n version: input.version,\n ts: new Date().toISOString(),\n actor: input.actor === '' ? this.actor : input.actor,\n payloadHash: input.payloadHash,\n } as const\n const entry: LedgerEntry =\n deltaHash !== undefined\n ? { ...entryBase, deltaHash }\n : entryBase\n\n const envelope = await this.encryptEntry(entry)\n // expectedVersion: 0 ≡ \"the slot must not yet exist.\" Honored by\n // casAtomic stores; silently passed through by non-CAS stores.\n await this.adapter.put(\n this.vault,\n LEDGER_COLLECTION,\n paddedIndex(entry.index),\n envelope,\n 0,\n )\n\n // Chain slot claimed. Now write the delta record (if any).\n if (deltaEnvelope) {\n await this.adapter.put(\n this.vault,\n LEDGER_DELTAS_COLLECTION,\n paddedIndex(entry.index),\n deltaEnvelope,\n 0,\n )\n }\n\n // Update the head cache so the next append() doesn't re-scan the\n // adapter.\n this.headCache = { entry, hash: await hashEntry(entry) }\n return entry\n }\n\n /**\n * Load a delta payload by its entry index. 
Returns `null` if the\n * entry at that index doesn't reference a delta (genesis puts and\n * deletes leave the slot empty) or if the delta row is missing\n * (possible after a `pruneHistory` fold).\n *\n * The caller is responsible for deciding what to do with a missing\n * delta — `ledger.reconstruct()` uses it as a \"stop walking\n * backward\" signal and falls back to the on-disk current value.\n */\n async loadDelta(index: number): Promise<JsonPatch | null> {\n const envelope = await this.adapter.get(\n this.vault,\n LEDGER_DELTAS_COLLECTION,\n paddedIndex(index),\n )\n if (!envelope) return null\n if (!this.encrypted) {\n return JSON.parse(envelope._data) as JsonPatch\n }\n const dek = await this.getDEK(LEDGER_COLLECTION)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return JSON.parse(json) as JsonPatch\n }\n\n /** Encrypt a JSON Patch into an envelope for storage. Mirrors encryptEntry. */\n private async encryptDelta(patch: JsonPatch): Promise<EncryptedEnvelope> {\n const json = JSON.stringify(patch)\n if (!this.encrypted) {\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: '',\n _data: json,\n _by: this.actor,\n }\n }\n const dek = await this.getDEK(LEDGER_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: iv,\n _data: data,\n _by: this.actor,\n }\n }\n\n /**\n * Read all entries in ascending-index order. Used internally by\n * `append()`, `head()`, `verify()`, and `entries()`. Decryption is\n * serial because the entries are tiny and the overhead of a Promise\n * pool would dominate at realistic chain lengths (< 100K entries).\n */\n async loadAllEntries(): Promise<LedgerEntry[]> {\n const keys = await this.adapter.list(this.vault, LEDGER_COLLECTION)\n // Sort lexicographically, which matches numeric order because\n // keys are zero-padded to 10 digits.\n keys.sort()\n const entries: LedgerEntry[] = []\n for (const key of keys) {\n const envelope = await this.adapter.get(\n this.vault,\n LEDGER_COLLECTION,\n key,\n )\n if (!envelope) continue\n entries.push(await this.decryptEntry(envelope))\n }\n return entries\n }\n\n /**\n * Return the current head of the ledger: the last entry, its hash,\n * and the total chain length. `null` on an empty ledger so callers\n * can distinguish \"no history yet\" from \"empty history\".\n */\n async head(): Promise<\n | { readonly entry: LedgerEntry; readonly hash: string; readonly length: number }\n | null\n > {\n const cached = await this.getCachedHead()\n if (!cached) return null\n // `length` is `entry.index + 1` because indices are zero-based and\n // contiguous. We don't need to re-scan the adapter to compute it.\n return {\n entry: cached.entry,\n hash: cached.hash,\n length: cached.entry.index + 1,\n }\n }\n\n /**\n * Return entries in the requested half-open range `[from, to)`.\n * Defaults: `from = 0`, `to = length`. The indices are clipped to\n * the valid range; no error is thrown for out-of-range queries.\n */\n async entries(opts: { from?: number; to?: number } = {}): Promise<LedgerEntry[]> {\n const all = await this.loadAllEntries()\n const from = Math.max(0, opts.from ?? 0)\n const to = Math.min(all.length, opts.to ?? 
all.length)\n return all.slice(from, to)\n }\n\n /**\n * Reconstruct a record's state at a given historical version by\n * walking the ledger's delta chain backward from the current state.\n *\n * ## Algorithm\n *\n * Ledger deltas are stored in **reverse** form — each entry's\n * patch describes how to undo that put, transforming the new\n * record back into the previous one. `reconstruct` exploits this\n * by:\n *\n * 1. Finding every ledger entry for `(collection, id)` in the\n * chain, sorted by index ascending.\n * 2. Starting from `current` (the present value of the record,\n * as held by the caller — typically fetched via\n * `Collection.get()`).\n * 3. Walking entries in **descending** index order and applying\n * each entry's reverse patch, stopping when we reach the\n * entry whose version equals `atVersion`.\n *\n * The result is the record as it existed immediately AFTER the\n * put at `atVersion`. To get the state at the genesis put\n * (version 1), the walk runs all the way back through every put\n * after the first.\n *\n * ## Caveats\n *\n * - **Delete entries** break the walk: once we see a delete, the\n * record didn't exist before that point, so there's nothing to\n * reconstruct. We return `null` in that case.\n * - **Missing deltas** (e.g., after `pruneHistory` folds old\n * entries into a base snapshot) also stop the walk. `pruneHistory`\n * has not shipped yet, so today this only happens if an entry\n * was deleted out-of-band.\n * - The caller MUST pass the correct current value. Passing a\n * mutated object would corrupt the reconstruction — the patch\n * chain is only valid against the exact state that was in\n * effect when the most recent put happened.\n *\n * For now, `reconstruct` is the only way to read a historical\n * version via deltas. The legacy `_history` collection still\n * holds full snapshots and `Collection.getVersion()` still reads\n * from there — the two paths coexist until `pruneHistory` lands in\n * a follow-up and delta-based reads become the default.\n */\n async reconstruct<T>(\n collection: string,\n id: string,\n current: T,\n atVersion: number,\n ): Promise<T | null> {\n const all = await this.loadAllEntries()\n // Filter to entries for this (collection, id), in ascending index.\n const matching = all.filter(\n (e) => e.collection === collection && e.id === id,\n )\n if (matching.length === 0) {\n // No ledger history at all; the current state IS version 1\n // (or there's nothing), so the only valid atVersion is the\n // current record's version. We can't verify that here, so\n // we conservatively return null.\n return null\n }\n\n // Walk entries in descending index order, applying each reverse\n // delta until we reach the target version.\n let state: T | null = current\n for (let i = matching.length - 1; i >= 0; i--) {\n const entry = matching[i]\n if (!entry) continue\n\n // Match check FIRST — before applying this entry's reverse\n // patch. `state` at this point is the record state immediately\n // after this entry's put (or before this entry's delete), so\n // if the caller asked for this exact version, we're done.\n if (entry.version === atVersion && entry.op !== 'delete') {\n return state\n }\n\n if (entry.op === 'delete') {\n // A delete erases the live state. If the caller asks for a\n // version older than the delete we should continue walking\n // (state becomes null and the next put resets it). But we\n // can't reconstruct that pre-delete state from the current\n // in-memory `state` — the delete has no reverse patch. 
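A reverse patch for a delete\n // would have to carry the entire pre-delete snapshot, and the\n // ledger stores reverse patches for puts only (deletes leave the\n // deltas slot empty; see loadDelta). 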
So\n // anything past this point is unreachable; return null.\n return null\n }\n\n if (entry.deltaHash === undefined) {\n // Genesis put — the earliest state for this lifecycle. We\n // can't walk further back. If the caller asked for exactly\n // this version, return the current state (we already failed\n // the match check above because a fresh genesis after a\n // delete can have version === atVersion). Otherwise the\n // target is unreachable from here.\n if (entry.version === atVersion) return state\n return null\n }\n\n const patch = await this.loadDelta(entry.index)\n if (!patch) {\n // Delta row is missing (probably pruned). Stop walking.\n return null\n }\n\n if (state === null) {\n // We're trying to walk back across a delete range and there's\n // nothing to apply a reverse patch to. Bail.\n return null\n }\n\n state = applyPatch(state, patch)\n }\n\n // Ran off the end of the walk without matching. The target\n // version doesn't exist in this record's chain.\n return null\n }\n\n /**\n * Walk the chain from genesis forward and verify every link.\n *\n * Returns `{ ok: true, head, length }` if every entry's `prevHash`\n * matches the recomputed hash of its predecessor (and the genesis\n * entry's `prevHash` is the empty string).\n *\n * Returns `{ ok: false, divergedAt, expected, actual }` on the first\n * mismatch. `divergedAt` is the 0-based index of the BROKEN entry\n * — entries before that index still verify cleanly; entries at and\n * after `divergedAt` are untrustworthy.\n *\n * This method detects:\n * - Mutated entry content (fields changed)\n * - Reordered entries (if any adjacent pair swaps, the prevHash\n * of the second no longer matches)\n * - Inserted entries (the inserted entry's prevHash likely fails,\n * and the following entry's prevHash definitely fails)\n * - Deleted entries (the entry after the deletion sees a wrong\n * prevHash)\n *\n * It does NOT detect:\n * - Tampering with the DATA collections that bypassed the ledger\n * entirely (e.g., an attacker who modifies records without\n * appending matching ledger entries — this is why we also\n * plan a `verifyIntegrity()` helper in a follow-up)\n * - Truncation of the chain at the tail (dropping the last N\n * entries leaves a shorter but still consistent chain). External\n * anchoring of `head.hash` to a trusted service is the defense\n * against this.\n */\n async verify(): Promise<VerifyResult> {\n const entries = await this.loadAllEntries()\n let expectedPrevHash = ''\n for (let i = 0; i < entries.length; i++) {\n const entry = entries[i]\n if (!entry) continue\n if (entry.prevHash !== expectedPrevHash) {\n return {\n ok: false,\n divergedAt: i,\n expected: expectedPrevHash,\n actual: entry.prevHash,\n }\n }\n if (entry.index !== i) {\n // An entry whose stored index doesn't match its position in\n // the sorted list means someone rewrote the adapter keys.\n // Treat as divergence.\n return {\n ok: false,\n divergedAt: i,\n expected: `index=${i}`,\n actual: `index=${entry.index}`,\n }\n }\n expectedPrevHash = await hashEntry(entry)\n }\n return {\n ok: true,\n head: expectedPrevHash,\n length: entries.length,\n }\n }\n\n // ─── Encryption plumbing ─────────────────────────────────────────\n\n /**\n * Serialize + encrypt a ledger entry into an EncryptedEnvelope. 
The\n * envelope's `_v` field is set to `entry.index + 1` so the usual\n * optimistic-concurrency machinery has a reasonable version number\n * to compare against (the ledger is append-only, so concurrent\n * writes should always bump the index).\n */\n private async encryptEntry(entry: LedgerEntry): Promise<EncryptedEnvelope> {\n const json = canonicalJson(entry)\n if (!this.encrypted) {\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: entry.index + 1,\n _ts: entry.ts,\n _iv: '',\n _data: json,\n _by: entry.actor,\n }\n }\n const dek = await this.getDEK(LEDGER_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: entry.index + 1,\n _ts: entry.ts,\n _iv: iv,\n _data: data,\n _by: entry.actor,\n }\n }\n\n /** Decrypt an envelope into a LedgerEntry. Throws on bad key / tamper. */\n private async decryptEntry(envelope: EncryptedEnvelope): Promise<LedgerEntry> {\n if (!this.encrypted) {\n return JSON.parse(envelope._data) as LedgerEntry\n }\n const dek = await this.getDEK(LEDGER_COLLECTION)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return JSON.parse(json) as LedgerEntry\n }\n}\n\n// `envelopePayloadHash` was moved to `./hash.ts` so it can be\n// imported by core code without dragging this file's `LedgerStore`\n// class into the floor bundle. The re-export at the top of this\n// file keeps the original `import { envelopePayloadHash } from '.../store.js'`\n// path working.\n\n/**\n * Exponential backoff with jitter for the append CAS retry loop.\n * Attempt 0 → ~5–10 ms, attempt 7 → ~640–1280 ms. Jitter avoids the\n * thundering-herd problem when multiple writers collide repeatedly.\n */\nfunction sleepBackoff(attempt: number): Promise<void> {\n const base = 5 * Math.pow(2, attempt)\n const jitter = Math.random() * base\n return new Promise((resolve) => setTimeout(resolve, base + jitter))\n}\n","/**\n * Public surface of the ledger module.\n *\n * Consumers import these symbols via `@noy-db/hub`:\n *\n * ```ts\n * import { LedgerStore, canonicalJson, hashEntry } from '@noy-db/hub'\n * import type { LedgerEntry, VerifyResult } from '@noy-db/hub'\n * ```\n *\n * The LedgerStore class itself is exported so test code and advanced\n * users can construct one directly, but the recommended entry point is\n * `vault.ledger()` which takes care of wiring the DEK resolver\n * and actor id from the active keyring.\n */\n\nexport {\n LedgerStore,\n LEDGER_COLLECTION,\n LEDGER_DELTAS_COLLECTION,\n envelopePayloadHash,\n type AppendInput,\n type VerifyResult,\n} from './store.js'\n\nexport {\n canonicalJson,\n sha256Hex,\n hashEntry,\n paddedIndex,\n parseIndex,\n type LedgerEntry,\n} from './entry.js'\n\n// JSON Patch compute + apply\nexport { computePatch, applyPatch } from './patch.js'\nexport type { JsonPatch, JsonPatchOp } from './patch.js'\n","/**\n * Hierarchical access — tier-aware keyring helpers.\n *\n * The keyring's existing `deks: Map<string, CryptoKey>` is keyed by\n * collection name. Tiered access extends the key space:\n *\n * `'invoices'` — tier-0 DEK (unchanged from v0.x)\n * `'invoices#1'` — tier-1 DEK\n * `'invoices#2'` — tier-2 DEK\n *\n * Tier 0 keeps the bare collection name so any keyring written\n * before tiers existed loads without migration. 
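For example:\n *\n * ```ts\n * dekKey('invoices', 0) // 'invoices' (legacy-compatible)\n * dekKey('invoices', 2) // 'invoices#2'\n * ```\n *\n * 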
Tiers ≥ 1 use `#N`\n * suffixes that\n * would be invalid as user-supplied collection names (see\n * `ReservedCollectionNameError` — `#` is reserved).\n *\n * @module\n */\n\nimport type { UnlockedKeyring } from './keyring.js'\nimport { TierNotGrantedError } from '../errors.js'\n\n/** Canonical DEK key for a given collection + tier. Tier 0 → bare name. */\nexport function dekKey(collection: string, tier: number): string {\n if (tier <= 0) return collection\n return `${collection}#${tier}`\n}\n\n/**\n * Returns the user's effective clearance for a given collection: the\n * maximum tier for which their keyring holds a DEK. Falls back to 0\n * when the user has only the tier-0 DEK (or none — the getDEK caller\n * will raise separately).\n */\nexport function effectiveClearance(keyring: UnlockedKeyring, collection: string): number {\n let max = 0\n const prefix = `${collection}#`\n for (const key of keyring.deks.keys()) {\n if (!key.startsWith(prefix)) continue\n const suffix = key.slice(prefix.length)\n const n = Number.parseInt(suffix, 10)\n if (Number.isFinite(n) && n > max) max = n\n }\n return max\n}\n\n/**\n * Assert the caller is cleared for the requested tier. Owners and\n * admins always pass (they can mint any new tier DEK on demand);\n * other roles must already hold the tier DEK — via a prior grant or\n * an active delegation — otherwise this throws `TierNotGrantedError`.\n *\n * This gate runs BEFORE `getDEK()` on the mutation path so a\n * non-cleared operator never has the opportunity to silently\n * auto-create a tier DEK they shouldn't have.\n */\nexport function assertTierAccess(\n keyring: UnlockedKeyring,\n collection: string,\n tier: number,\n): void {\n if (tier <= 0) return\n if (keyring.role === 'owner' || keyring.role === 'admin') return\n if (!keyring.deks.has(dekKey(collection, tier))) {\n throw new TierNotGrantedError(collection, tier)\n }\n}\n","/**\n * Time-boxed cross-tier delegation tokens.\n *\n * A higher-tier user can issue a delegation that grants another user\n * temporary access to records at a specified tier. The delegation is\n * persisted as an encrypted envelope in the reserved `_delegations`\n * collection. The target user's runtime scans this collection on every\n * open and, while `until` is still in the future, merges the\n * unwrapped tier DEKs into their in-memory DEK map.\n *\n * ## Token shape\n *\n * ```\n * {\n * id, // ULID, also the _delegations record id\n * toUser, // grantee user id\n * fromUser, // grantor user id (owner/admin/higher-tier principal)\n * tier, // tier being delegated\n * collection, // collection name OR null for \"every collection\"\n * record, // optional specific record id\n * until, // ISO timestamp — token expires at this instant\n * wrappedDek, // base64 AES-KW-wrapped tier DEK, wrapped under target KEK\n * createdAt, // ISO timestamp\n * }\n * ```\n *\n * The ciphertext is stored as a normal noy-db envelope — the\n * `_delegations` collection has its own DEK shared across all vault\n * users, so an operator can enumerate active delegations for audit\n * without being able to *use* them (the `wrappedDek` inside is still\n * keyed to the target user's KEK).\n *\n * ## Revocation\n *\n * Delete the `_delegations/<id>` envelope. 
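The `revokeDelegation`\n * helper below does exactly that (identifiers illustrative):\n *\n * ```ts\n * await revokeDelegation(store, 'acme', token.id)\n * ```\n *\n * 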
The target user's runtime\n * reloads the delegation list at each open and at periodic intervals\n * (tracked by the caller — this module is pure logic).\n *\n * @module\n */\n\nimport type { NoydbStore, EncryptedEnvelope } from '../types.js'\nimport type { UnlockedKeyring } from './keyring.js'\nimport { encrypt, decrypt, wrapKey, unwrapKey } from '../crypto.js'\nimport { dekKey } from './tiers.js'\nimport { DelegationTargetMissingError } from '../errors.js'\nimport { generateULID } from '../bundle/ulid.js'\n\nexport const DELEGATIONS_COLLECTION = '_delegations'\n\n/**\n * Durable payload of a delegation token. Encrypted under the vault's\n * `_delegations` DEK; the `wrappedDek` inside is additionally wrapped\n * under the target user's KEK.\n */\nexport interface DelegationToken {\n readonly id: string\n readonly toUser: string\n readonly fromUser: string\n readonly tier: number\n /** Collection name or `null` for all collections. */\n readonly collection: string | null\n /** Optional specific record id scope. */\n readonly record?: string\n readonly until: string\n readonly wrappedDek: string\n readonly createdAt: string\n}\n\nexport interface IssueDelegationOptions {\n readonly toUser: string\n readonly tier: number\n readonly collection?: string\n readonly record?: string\n readonly until: Date | string\n}\n\n/**\n * Build and persist a delegation token. The caller must hold a tier-N\n * DEK and must have already located the target user's keyring file\n * (so the `wrappedDek` can be re-wrapped against their KEK).\n */\nexport async function issueDelegation(\n store: NoydbStore,\n vault: string,\n grantor: UnlockedKeyring,\n targetKek: CryptoKey | null,\n delegationsDek: CryptoKey,\n opts: IssueDelegationOptions,\n): Promise<DelegationToken> {\n if (!targetKek) {\n throw new DelegationTargetMissingError(opts.toUser)\n }\n const tier = opts.tier\n const collectionName = opts.collection ?? null\n const dekLookupCollection = collectionName ?? ''\n // Tier DEK to delegate — fetched from the grantor's own keyring.\n const sourceDek = collectionName\n ? grantor.deks.get(dekKey(collectionName, tier))\n : undefined\n if (!sourceDek) {\n throw new DelegationTargetMissingError(\n `grantor cannot find tier ${tier} DEK for ${dekLookupCollection || '(any)'}`,\n )\n }\n const wrappedDek = await wrapKey(sourceDek, targetKek)\n\n const until = typeof opts.until === 'string' ? opts.until : opts.until.toISOString()\n const token: DelegationToken = {\n id: generateULID(),\n toUser: opts.toUser,\n fromUser: grantor.userId,\n tier,\n collection: collectionName,\n ...(opts.record && { record: opts.record }),\n until,\n wrappedDek,\n createdAt: new Date().toISOString(),\n }\n\n const plaintext = JSON.stringify(token)\n const { iv, data } = await encrypt(plaintext, delegationsDek)\n const envelope: EncryptedEnvelope = {\n _noydb: 1,\n _v: 1,\n _ts: token.createdAt,\n _iv: iv,\n _data: data,\n _by: grantor.userId,\n }\n await store.put(vault, DELEGATIONS_COLLECTION, token.id, envelope)\n return token\n}\n\n/**\n * Enumerate every live (non-expired) delegation addressed to `toUser`\n * and merge the unwrapped tier DEKs into their keyring. 
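A hedged\n * usage sketch (identifiers illustrative):\n *\n * ```ts\n * const live = await loadActiveDelegations(store, 'acme', user, delegationsDek)\n * // user.deks now also holds e.g. 'invoices#2' for each live grant\n * ```\n *\n * 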
Returns the\n * list of merged delegations so the caller can register per-access\n * audit context.\n */\nexport async function loadActiveDelegations(\n store: NoydbStore,\n vault: string,\n user: UnlockedKeyring,\n delegationsDek: CryptoKey,\n now: Date = new Date(),\n): Promise<DelegationToken[]> {\n const ids = await store.list(vault, DELEGATIONS_COLLECTION)\n const merged: DelegationToken[] = []\n const nowIso = now.toISOString()\n for (const id of ids) {\n const env = await store.get(vault, DELEGATIONS_COLLECTION, id)\n if (!env) continue\n let token: DelegationToken\n try {\n const plaintext = await decrypt(env._iv, env._data, delegationsDek)\n token = JSON.parse(plaintext) as DelegationToken\n } catch {\n continue\n }\n if (token.toUser !== user.userId) continue\n if (token.until <= nowIso) continue\n\n let dek: CryptoKey\n try {\n dek = await unwrapKey(token.wrappedDek, user.kek)\n } catch {\n continue\n }\n const k = token.collection\n ? dekKey(token.collection, token.tier)\n : `__any#${token.tier}`\n user.deks.set(k, dek)\n merged.push(token)\n }\n return merged\n}\n\n/**\n * Revoke a delegation by id — the caller resolves the envelope and\n * issues a `delete`. Provided as a stable helper so the naming is\n * symmetric to `issueDelegation`.\n */\nexport async function revokeDelegation(\n store: NoydbStore,\n vault: string,\n id: string,\n): Promise<void> {\n await store.delete(vault, DELEGATIONS_COLLECTION, id)\n}\n","/**\n * **@noy-db/hub** — zero-knowledge, offline-first, encrypted document store.\n *\n * ## What it is\n *\n * A TypeScript library that encrypts every record with AES-256-GCM before it\n * reaches any storage backend. The store (file, DynamoDB, S3, IndexedDB, …)\n * only ever sees ciphertext — it has no way to read or tamper with your data\n * without the user's passphrase.\n *\n * ## Architecture in one diagram\n *\n * ```\n * Passphrase\n * └─► PBKDF2-SHA256 (600K iterations) → KEK [memory only]\n * └─► AES-KW unwrap → DEK per collection [memory only]\n * └─► AES-256-GCM encrypt/decrypt\n * └─► NoydbStore [sees only ciphertext envelopes]\n * ```\n *\n * ## Getting started\n *\n * ```ts\n * import { createNoydb } from '@noy-db/hub'\n * import { jsonFile } from '@noy-db/to-file'\n *\n * const db = await createNoydb({ store: jsonFile({ dir: './data' }) })\n * const acme = await db.openVault('acme', { passphrase: 'hunter2' })\n * const invoices = acme.collection<Invoice>('invoices')\n *\n * await invoices.put('inv-001', { amount: 1200, client: 'Acme Corp' })\n * const all = await invoices.query().toArray()\n * ```\n *\n * ## Key concepts\n *\n * | Concept | Type | Description |\n * |---------|------|-------------|\n * | Instance | {@link Noydb} | Top-level handle from {@link createNoydb} |\n * | Vault | {@link Vault} | Tenant namespace; has its own keyrings |\n * | Collection | {@link Collection} | Typed record set; has its own DEK |\n * | Store | {@link NoydbStore} | 6-method backend interface |\n * | Envelope | {@link EncryptedEnvelope} | What the store actually persists |\n *\n * ## Security invariants\n *\n * - **Zero crypto dependencies.** All cryptography uses `crypto.subtle` (Web\n * Crypto API). 
No npm crypto packages.\n * - **KEK never persisted.** The key-encryption key lives only in memory for\n * the duration of an open session.\n * - **Fresh IV per write.** Every `put()` generates a new random 12-byte IV.\n * - **Stores see only ciphertext.** Encryption happens in core before any\n * store method is called.\n *\n * ## Related packages\n *\n * | Package | Purpose |\n * |---------|---------|\n * | `@noy-db/to-file` | JSON file store (USB / local disk) |\n * | `@noy-db/to-aws-dynamo` | DynamoDB single-table store |\n * | `@noy-db/to-aws-s3` | S3 object store |\n * | `@noy-db/to-browser-idb` | IndexedDB store (atomic CAS) |\n * | `@noy-db/to-browser-local` | localStorage store |\n * | `@noy-db/to-memory` | In-memory store (testing) |\n * | `@noy-db/in-vue` | Vue 3 composables |\n * | `@noy-db/in-pinia` | Pinia store integration |\n * | `@noy-db/in-nuxt` | Nuxt 4 module |\n * | `@noy-db/on-webauthn` | Hardware-key / passkey unlock |\n * | `@noy-db/on-oidc` | OIDC / federated login unlock |\n *\n * @packageDocumentation\n */\n\n// Environment check — throws if Node <18 or crypto.subtle missing\nimport './env-check.js'\n\n// Types\nexport type {\n Role,\n Permission,\n Permissions,\n EncryptedEnvelope,\n VaultSnapshot,\n NoydbStore,\n ListPageResult,\n KeyringFile,\n VaultBackup,\n DirtyEntry,\n SyncMetadata,\n Conflict,\n ConflictStrategy,\n ConflictPolicy,\n CollectionConflictResolver,\n PushOptions,\n PullOptions,\n PushResult,\n PullResult,\n SyncTransactionResult,\n SyncStatus,\n ChangeEvent,\n NoydbEventMap,\n GrantOptions,\n RevokeOptions,\n UserInfo,\n NoydbOptions,\n HistoryConfig,\n HistoryOptions,\n HistoryEntry,\n PruneOptions,\n PutManyItemOptions,\n PutManyOptions,\n PutManyResult,\n DeleteManyResult,\n ExportStreamOptions,\n ExportChunk,\n AccessibleVault,\n ListAccessibleVaultsOptions,\n QueryAcrossOptions,\n QueryAcrossResult,\n SessionPolicy,\n ReAuthOperation,\n PlaintextTranslatorContext,\n PlaintextTranslatorFn,\n TranslatorAuditEntry,\n ExportCapability,\n ExportFormat,\n ImportCapability,\n} from './types.js'\n\nexport {\n NOYDB_FORMAT_VERSION,\n NOYDB_KEYRING_VERSION,\n NOYDB_BACKUP_VERSION,\n NOYDB_SYNC_VERSION,\n createStore,\n} from './types.js'\n\nexport type {\n StoreAuthKind,\n StoreAuth,\n StoreCapabilities,\n} from './types.js'\n\n// Blob store\nexport type {\n NoydbBundleStore,\n BlobObject,\n SlotRecord,\n SlotInfo,\n VersionRecord,\n BlobPutOptions,\n BlobResponseOptions,\n} from './types.js'\nexport { BlobSet } from './blobs/blob-set.js'\nexport {\n BLOB_COLLECTION,\n BLOB_INDEX_COLLECTION,\n BLOB_CHUNKS_COLLECTION,\n BLOB_SLOTS_PREFIX,\n BLOB_VERSIONS_PREFIX,\n DEFAULT_CHUNK_SIZE,\n} from './blobs/blob-set.js'\nexport { detectMimeType, detectMagic, isPreCompressed } from './blobs/mime-magic.js'\nexport { wrapBundleStore, createBundleStore } from './store/bundle-store.js'\nexport type { WrappedBundleNoydbStore, WrapBundleStoreOptions } from './store/bundle-store.js'\n\n// Sync policy\nexport type { SyncPolicy, PushPolicy, PullPolicy, PushMode, PullMode, SyncSchedulerStatus } from './store/sync-policy.js'\nexport { SyncScheduler, INDEXED_STORE_POLICY, BUNDLE_STORE_POLICY } from './store/sync-policy.js'\n\n// Sync target\nexport type { SyncTarget, SyncTargetRole } from './types.js'\n\n// Store routing\nexport { routeStore } from './store/route-store.js'\nexport type {\n RouteStoreOptions, RoutedNoydbStore, BlobStoreRoute, AgeRoute,\n BlobLifecyclePolicy, OverrideTarget, OverrideOptions, SuspendOptions, RouteStatus,\n} from 
'./store/route-store.js'\n\n// Store middleware\nexport { wrapStore, withRetry, withLogging, withMetrics, withCircuitBreaker, withCache, withHealthCheck } from './store/store-middleware.js'\nexport type {\n StoreMiddleware, RetryOptions, LoggingOptions, LogLevel,\n MetricsOptions, StoreOperation, CircuitBreakerOptions, StoreCacheOptions, HealthCheckOptions,\n} from './store/store-middleware.js'\n\n// Errors\nexport {\n NoydbError,\n DecryptionError,\n TamperedError,\n InvalidKeyError,\n NoAccessError,\n ReadOnlyError,\n PermissionDeniedError,\n PrivilegeEscalationError,\n StoreCapabilityError,\n ConflictError,\n NetworkError,\n NotFoundError,\n ValidationError,\n SchemaValidationError,\n GroupCardinalityError,\n BackupLedgerError,\n BackupCorruptedError,\n JoinTooLargeError,\n DanglingReferenceError,\n FilenameSanitizationError,\n PathEscapeError,\n ElevationExpiredError,\n AlreadyElevatedError,\n LedgerContentionError,\n BundleIntegrityError,\n BundleVersionConflictError,\n SessionExpiredError,\n SessionNotFoundError,\n SessionPolicyError,\n ExportCapabilityError,\n ImportCapabilityError,\n KeyringExpiredError,\n ReadOnlyAtInstantError,\n ReadOnlyFrameError,\n} from './errors.js'\n\n// Bundle format — `.noydb` container\nexport {\n writeNoydbBundle,\n readNoydbBundle,\n readNoydbBundleHeader,\n resetBrotliSupportCache,\n} from './bundle/bundle.js'\nexport type {\n NoydbBundleHeader,\n CompressionAlgo,\n} from './bundle/format.js'\nexport type {\n WriteNoydbBundleOptions,\n NoydbBundleReadResult,\n} from './bundle/bundle.js'\nexport {\n NOYDB_BUNDLE_MAGIC,\n NOYDB_BUNDLE_PREFIX_BYTES,\n NOYDB_BUNDLE_FORMAT_VERSION,\n hasNoydbBundleMagic,\n} from './bundle/format.js'\nexport { generateULID, isULID } from './bundle/ulid.js'\n\n// Schema validation — Standard Schema v1 integration\nexport type {\n StandardSchemaV1,\n StandardSchemaV1SyncResult,\n StandardSchemaV1Issue,\n InferOutput,\n} from './schema.js'\nexport { validateSchemaInput, validateSchemaOutput } from './schema.js'\n\n// Time-machine queries — vault.at(ts) method lives on\n// Vault; these classes are the return types.\nexport { VaultInstant, CollectionInstant } from './history/time-machine.js'\nexport type { VaultEngine } from './history/time-machine.js'\n\n// Shadow vaults — vault.frame() method lives on Vault;\n// these classes are the return types.\nexport { VaultFrame, CollectionFrame } from './shadow/vault-frame.js'\n\n// Consent boundaries — vault.withConsent() / .consentAudit()\n// live on Vault; these are the types + constants.\nexport { CONSENT_AUDIT_COLLECTION } from './consent/consent.js'\nexport type {\n ConsentContext,\n ConsentOp,\n ConsentAuditEntry,\n ConsentAuditFilter,\n} from './consent/consent.js'\n\n// Hash-chained ledger\nexport {\n LedgerStore,\n LEDGER_COLLECTION,\n LEDGER_DELTAS_COLLECTION,\n envelopePayloadHash,\n canonicalJson,\n sha256Hex,\n hashEntry,\n paddedIndex,\n parseIndex,\n computePatch,\n applyPatch,\n} from './history/ledger/index.js'\nexport type {\n LedgerEntry,\n AppendInput,\n VerifyResult,\n JsonPatch,\n JsonPatchOp,\n} from './history/ledger/index.js'\n\n// Foreign-key references via ref()\nexport {\n ref,\n RefRegistry,\n RefIntegrityError,\n RefScopeError,\n} from './refs.js'\nexport type {\n RefMode,\n RefDescriptor,\n RefViolation,\n} from './refs.js'\n\n// Keyring types\nexport type { UnlockedKeyring } from './team/keyring.js'\n\n// Export-capability helpers\nexport { hasExportCapability, evaluateExportCapability } from './team/keyring.js'\nexport { hasImportCapability, 
evaluateImportCapability } from './team/keyring.js'\n\n// Bundle recipients\nexport type { BundleRecipient } from './team/keyring.js'\nexport { buildRecipientKeyringFile } from './team/keyring.js'\n\n// Core classes\nexport { Noydb, createNoydb } from './noydb.js'\nexport { Vault, ElevatedHandle, ELEVATION_AUDIT_COLLECTION } from './vault.js'\nexport { Collection } from './collection.js'\nexport type { CacheOptions, CacheStats, CollectionChangeEvent } from './collection.js'\n\n// CRDT mode\nexport type { CrdtMode, CrdtState, LwwMapState, RgaState, YjsState } from './crdt/crdt.js'\nexport { resolveCrdtSnapshot, mergeCrdtStates } from './crdt/crdt.js'\n\n// Presence\nexport { PresenceHandle } from './team/presence.js'\nexport type { PresencePeer } from './types.js'\nexport { derivePresenceKey } from './crypto.js'\nexport { SyncEngine } from './team/sync.js'\nexport { SyncTransaction } from './team/sync-transaction.js'\n\n// Multi-record transactions\nexport { TxContext, TxVault, TxCollection, runTransaction } from './tx/transaction.js'\nexport type { TxOp } from './types.js'\n\n// Accounting periods\nexport { PERIODS_COLLECTION } from './periods/index.js'\nexport type {\n PeriodRecord,\n ClosePeriodOptions,\n OpenPeriodOptions,\n} from './periods/index.js'\nexport { PeriodClosedError } from './errors.js'\n\n// Cache module — LRU + byte budget parsing\nexport { Lru, parseBytes, estimateRecordBytes } from './cache/index.js'\nexport type { LruOptions, LruStats } from './cache/index.js'\n\n// Biometric — removed as redundant with @noy-db/on-webauthn\n// (which supports PRF + rawId fallback + BE-flag guard). Legacy consumers\n// migrate to `import { enrollWebAuthn, unlockWebAuthn } from '@noy-db/on-webauthn'`.\n\n// i18n — dictKey + DictionaryHandle\nexport {\n dictKey,\n isDictKeyDescriptor,\n isDictCollectionName,\n dictCollectionName,\n DictionaryHandle,\n DICT_COLLECTION_PREFIX,\n} from './i18n/dictionary.js'\nexport type {\n DictKeyDescriptor,\n DictEntry,\n DictionaryOptions,\n} from './i18n/dictionary.js'\n\n// i18n — i18nText\nexport {\n i18nText,\n isI18nTextDescriptor,\n validateI18nTextValue,\n resolveI18nText,\n applyI18nLocale,\n} from './i18n/core.js'\nexport type { I18nTextOptions, I18nTextDescriptor } from './i18n/core.js'\n\n// i18n errors\nexport {\n ReservedCollectionNameError,\n DictKeyMissingError,\n DictKeyInUseError,\n MissingTranslationError,\n LocaleNotSpecifiedError,\n TranslatorNotConfiguredError,\n} from './errors.js'\n\n// Locale read options + translator audit log\nexport type { LocaleReadOptions } from './types.js'\n\n// _sync_credentials reserved collection\nexport {\n putCredential,\n getCredential,\n deleteCredential,\n listCredentials,\n credentialStatus,\n SYNC_CREDENTIALS_COLLECTION,\n} from './team/sync-credentials.js'\nexport type { SyncCredential } from './team/sync-credentials.js'\n\n// Magic-link unlock — extracted to @noy-db/on-magic-link.\n// Consumers should: `import { ... 
} from '@noy-db/on-magic-link'`.\n\n// Session policies —\nexport { PolicyEnforcer, createEnforcer, validateSessionPolicy } from './session/session-policy.js'\n\n// Session tokens —\nexport {\n createSession,\n resolveSession,\n revokeSession,\n revokeAllSessions,\n isSessionAlive,\n activeSessionCount,\n} from './session/session.js'\nexport type {\n SessionToken,\n CreateSessionResult,\n CreateSessionOptions,\n} from './session/session.js'\n\n// Dev-mode persistent unlock —\nexport {\n enableDevUnlock,\n loadDevUnlock,\n clearDevUnlock,\n isDevUnlockActive,\n} from './session/dev-unlock.js'\nexport type { DevUnlockOptions } from './session/dev-unlock.js'\n\n// Crypto utilities (buffer encoding helpers + binary encrypt/hash)\nexport { bufferToBase64, base64ToBuffer, encryptBytes, decryptBytes } from './crypto.js'\nexport { encryptDeterministic, decryptDeterministic } from './crypto.js'\n\n// hierarchical access\nexport type { GhostRecord, TierMode, CrossTierAccessEvent } from './types.js'\nexport { TierNotGrantedError, TierDemoteDeniedError, DelegationTargetMissingError } from './errors.js'\n\n// lazy-mode index errors\nexport { IndexRequiredError, IndexWriteFailureError } from './errors.js'\nexport { dekKey, effectiveClearance, assertTierAccess } from './team/tiers.js'\nexport type { DelegationToken, IssueDelegationOptions } from './team/delegation.js'\nexport { DELEGATIONS_COLLECTION, issueDelegation, loadActiveDelegations, revokeDelegation } from './team/delegation.js'\n\n// magic-link-bridged cross-user KEK delegation\nexport type {\n MagicLinkGrantPayload,\n MagicLinkGrantRecord,\n IssueMagicLinkGrantOptions,\n} from './team/magic-link-grant.js'\nexport {\n MAGIC_LINK_GRANTS_COLLECTION,\n MAGIC_LINK_CONTENT_INFO_PREFIX,\n MAGIC_LINK_KEK_INFO_PREFIX,\n deriveMagicLinkContentKey,\n writeMagicLinkGrant,\n readMagicLinkGrantRecord,\n listMagicLinkGrants,\n unwrapMagicLinkGrant,\n revokeMagicLinkGrant,\n magicLinkGrantRecordId,\n isMagicLinkGrantExpired,\n} from './team/magic-link-grant.js'\n\n// Diff\nexport { diff, formatDiff } from './history/diff.js'\nexport type { DiffEntry, ChangeType } from './history/diff.js'\n\n// Vault-level diff\nexport { diffVault } from './vault-diff.js'\nexport type {\n VaultDiff,\n VaultDiffEntry,\n VaultDiffModifiedEntry,\n DiffOptions,\n DiffCandidate,\n} from './vault-diff.js'\n\n// Validation\nexport { validatePassphrase, estimateEntropy } from './validation.js'\n\n// Query DSL\nexport {\n Query,\n executePlan,\n evaluateClause,\n evaluateFieldClause,\n readPath,\n CollectionIndexes,\n applyJoins,\n DEFAULT_JOIN_MAX_ROWS,\n resetJoinWarnings,\n buildLiveQuery,\n count,\n sum,\n avg,\n min,\n max,\n Aggregation,\n reduceRecords,\n GroupedQuery,\n GroupedAggregation,\n groupAndReduce,\n GROUPBY_WARN_CARDINALITY,\n GROUPBY_MAX_CARDINALITY,\n ScanBuilder,\n} from './query/index.js'\nexport type {\n QueryPlan,\n QuerySource,\n OrderBy,\n Operator,\n Clause,\n FieldClause,\n FilterClause,\n GroupClause,\n IndexDef,\n HashIndex,\n JoinLeg,\n JoinContext,\n JoinableSource,\n JoinStrategy,\n LiveQuery,\n LiveUpstream,\n Reducer,\n ReducerOptions,\n AggregateSpec,\n AggregateResult,\n AggregationUpstream,\n LiveAggregation,\n GroupedRow,\n ScanPageProvider,\n} from './query/index.js'\n","import type {\n NoydbStore,\n EncryptedEnvelope,\n BlobObject,\n SlotRecord,\n SlotInfo,\n VersionRecord,\n BlobPutOptions,\n BlobResponseOptions,\n} from '../types.js'\nimport { NOYDB_FORMAT_VERSION } from '../types.js'\nimport {\n encrypt,\n decrypt,\n hmacSha256Hex,\n 
encryptBytesWithAAD,\n decryptBytesWithAAD,\n bufferToBase64,\n base64ToBuffer,\n} from '../crypto.js'\nimport { ConflictError, NotFoundError } from '../errors.js'\nimport { detectMagic, isPreCompressed } from './mime-magic.js'\n\n// ─── Internal collection names ─────────────────────────────────────────\n\n/**\n * DEK slot name for vault-shared blob data. Calling `getDEK('_blob')`\n * auto-creates a blob DEK the first time — same lazy-creation mechanism\n * used for any user-defined collection.\n */\nexport const BLOB_COLLECTION = '_blob'\n\n/** Stores `BlobObject` metadata envelopes, keyed by eTag. */\nexport const BLOB_INDEX_COLLECTION = '_blob_index'\n\n/**\n * Stores encrypted chunk envelopes, keyed by `{eTag}/{chunkIndex}`.\n * NOT loaded into the in-memory query layer. Fetched on demand by\n * `BlobSet.get()` / `BlobSet.response()`.\n */\nexport const BLOB_CHUNKS_COLLECTION = '_blob_chunks'\n\n/** Prefix for per-collection slot metadata collections. */\nexport const BLOB_SLOTS_PREFIX = '_blob_slots_'\n\n/** Prefix for per-collection version records. */\nexport const BLOB_VERSIONS_PREFIX = '_blob_versions_'\n\n/**\n * Default chunk size: 256 KB raw bytes.\n * After AES-GCM (same size) + base64 (~33% inflation) → ~342 KB per\n * envelope, safely within DynamoDB's 400 KB item limit.\n */\nexport const DEFAULT_CHUNK_SIZE = 256 * 1024\n\n/** Maximum CAS retry attempts for refCount and slot metadata updates. */\nconst MAX_CAS_RETRIES = 5\n\n// ─── Compression helpers ───────────────────────────────────────────────\n\nasync function compressBytes(\n data: Uint8Array,\n): Promise<{ bytes: Uint8Array; algorithm: 'gzip' | 'none' }> {\n if (typeof CompressionStream === 'undefined') {\n return { bytes: data, algorithm: 'none' }\n }\n const cs = new CompressionStream('gzip')\n const writer = cs.writable.getWriter()\n await writer.write(data as Uint8Array<ArrayBuffer>)\n await writer.close()\n const buf = await new Response(cs.readable).arrayBuffer()\n return { bytes: new Uint8Array(buf), algorithm: 'gzip' }\n}\n\nasync function decompressBytes(data: Uint8Array): Promise<Uint8Array> {\n if (typeof DecompressionStream === 'undefined') {\n throw new Error(\n '[noy-db] DecompressionStream not available — cannot decompress blob chunk',\n )\n }\n const ds = new DecompressionStream('gzip')\n const writer = ds.writable.getWriter()\n await writer.write(data as Uint8Array<ArrayBuffer>)\n await writer.close()\n const buf = await new Response(ds.readable).arrayBuffer()\n return new Uint8Array(buf)\n}\n\nfunction concatChunks(chunks: Uint8Array[]): Uint8Array {\n const total = chunks.reduce((s, c) => s + c.byteLength, 0)\n const out = new Uint8Array(total)\n let offset = 0\n for (const c of chunks) {\n out.set(c, offset)\n offset += c.byteLength\n }\n return out\n}\n\n/** Build the AAD binding for chunk integrity: \"{eTag}:{chunkIndex}:{chunkCount}\" */\nfunction chunkAAD(eTag: string, chunkIndex: number, chunkCount: number): Uint8Array {\n return new TextEncoder().encode(`${eTag}:${chunkIndex}:${chunkCount}`)\n}\n\n// ─── BlobSet ──────────────────────────────────────────────────────────\n\n/**\n * Handle for reading, writing, versioning, and deleting binary blobs\n * on a specific record.\n *\n * Obtained via `collection.blob(id)`. 
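A hedged sketch (record and\n * slot names illustrative):\n *\n * ```ts\n * const blobs = invoices.blob('inv-001')\n * await blobs.put('scan.pdf', bytes) // Uint8Array in\n * const back = await blobs.get('scan.pdf') // Uint8Array | null out\n * ```\n *\n * 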
No I/O is performed until you\n * call a method.\n *\n * ## Storage layout\n *\n * ```\n * _blob_index/{eTag} BlobObject metadata (vault-shared DEK)\n * _blob_chunks/{eTag}/{chunkIndex} Encrypted chunk data (vault-shared DEK + AAD)\n * _blob_slots_{collection}/{recordId} Slot map (parent collection DEK)\n * _blob_versions_{collection}/{recordId}/{slot}/{label} Published versions (parent collection DEK)\n * ```\n *\n * ## Deduplication\n *\n * `put()` computes `eTag = HMAC-SHA-256(blobDEK, plaintext)` — keyed so the\n * store cannot predict eTags for known content. If another record already\n * uploaded the same bytes, the chunks are reused and `refCount` is incremented.\n *\n * ## Chunk integrity\n *\n * Each chunk is encrypted with AES-256-GCM using AAD = `{eTag}:{index}:{count}`,\n * preventing chunk reorder, substitution, and truncation attacks.\n */\nexport class BlobSet {\n private readonly store: NoydbStore\n private readonly vault: string\n private readonly collection: string\n private readonly recordId: string\n private readonly getDEK: (name: string) => Promise<CryptoKey>\n private readonly encrypted: boolean\n private readonly userId: string | undefined\n private readonly maxBlobBytes: number | undefined\n\n constructor(opts: {\n store: NoydbStore\n vault: string\n collection: string\n recordId: string\n getDEK: (name: string) => Promise<CryptoKey>\n encrypted: boolean\n userId?: string\n maxBlobBytes?: number\n }) {\n this.store = opts.store\n this.vault = opts.vault\n this.collection = opts.collection\n this.recordId = opts.recordId\n this.getDEK = opts.getDEK\n this.encrypted = opts.encrypted\n this.userId = opts.userId\n this.maxBlobBytes = opts.maxBlobBytes\n }\n\n /** The internal collection that holds slot metadata for this collection's blobs. */\n private get slotsCollection(): string {\n return `${BLOB_SLOTS_PREFIX}${this.collection}`\n }\n\n /** The internal collection that holds published versions for this collection's blobs. */\n private get versionsCollection(): string {\n return `${BLOB_VERSIONS_PREFIX}${this.collection}`\n }\n\n // ─── Slot Metadata I/O (CAS-protected) ─────────────────────────────\n\n private async loadSlots(): Promise<{\n slots: Record<string, SlotRecord>\n version: number\n }> {\n const envelope = await this.store.get(this.vault, this.slotsCollection, this.recordId)\n if (!envelope) return { slots: {}, version: 0 }\n\n if (!this.encrypted) {\n return {\n slots: JSON.parse(envelope._data) as Record<string, SlotRecord>,\n version: envelope._v,\n }\n }\n\n const dek = await this.getDEK(this.collection)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return {\n slots: JSON.parse(json) as Record<string, SlotRecord>,\n version: envelope._v,\n }\n }\n\n private async saveSlots(\n slots: Record<string, SlotRecord>,\n currentVersion: number,\n ): Promise<void> {\n const json = JSON.stringify(slots)\n const now = new Date().toISOString()\n let envelope: EncryptedEnvelope\n\n if (this.encrypted) {\n const dek = await this.getDEK(this.collection)\n const { iv, data } = await encrypt(json, dek)\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: currentVersion + 1,\n _ts: now,\n _iv: iv,\n _data: data,\n }\n } else {\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: currentVersion + 1,\n _ts: now,\n _iv: '',\n _data: json,\n }\n }\n\n await this.store.put(\n this.vault,\n this.slotsCollection,\n this.recordId,\n envelope,\n currentVersion > 0 ? currentVersion : undefined,\n )\n }\n\n /**\n * CAS retry loop for slot metadata updates. 
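Returning `null` from the\n * `mutate` callback signals a no-op and skips the write entirely.\n * 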
Re-reads slots on conflict\n * and re-applies the mutation function.\n */\n private async casUpdateSlots(\n mutate: (slots: Record<string, SlotRecord>) => Record<string, SlotRecord> | null,\n ): Promise<void> {\n for (let attempt = 0; attempt < MAX_CAS_RETRIES; attempt++) {\n const { slots, version } = await this.loadSlots()\n const updated = mutate(slots)\n if (updated === null) return // no-op\n try {\n await this.saveSlots(updated, version)\n return\n } catch (err) {\n if (err instanceof ConflictError && attempt < MAX_CAS_RETRIES - 1) continue\n throw err\n }\n }\n }\n\n // ─── Blob Index I/O (versioned for CAS refCount) ──────────────────\n\n private async loadBlobObject(eTag: string): Promise<{ blob: BlobObject; version: number } | null> {\n const envelope = await this.store.get(this.vault, BLOB_INDEX_COLLECTION, eTag)\n if (!envelope) return null\n\n if (!this.encrypted) {\n return { blob: JSON.parse(envelope._data) as BlobObject, version: envelope._v }\n }\n\n const dek = await this.getDEK(BLOB_COLLECTION)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return { blob: JSON.parse(json) as BlobObject, version: envelope._v }\n }\n\n private async writeBlobObject(blob: BlobObject, expectedVersion?: number): Promise<void> {\n const json = JSON.stringify(blob)\n const now = new Date().toISOString()\n const newVersion = (expectedVersion ?? 0) + 1\n let envelope: EncryptedEnvelope\n\n if (this.encrypted) {\n const dek = await this.getDEK(BLOB_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: newVersion, _ts: now, _iv: iv, _data: data }\n } else {\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: newVersion, _ts: now, _iv: '', _data: json }\n }\n\n await this.store.put(\n this.vault,\n BLOB_INDEX_COLLECTION,\n blob.eTag,\n envelope,\n expectedVersion,\n )\n }\n\n /**\n * CAS retry loop for refCount changes on a BlobObject.\n */\n private async casUpdateRefCount(eTag: string, delta: number): Promise<void> {\n for (let attempt = 0; attempt < MAX_CAS_RETRIES; attempt++) {\n const result = await this.loadBlobObject(eTag)\n if (!result) throw new NotFoundError(`BlobObject ${eTag} not found`)\n const { blob, version } = result\n const updated: BlobObject = { ...blob, refCount: blob.refCount + delta }\n try {\n await this.writeBlobObject(updated, version)\n return\n } catch (err) {\n if (err instanceof ConflictError && attempt < MAX_CAS_RETRIES - 1) continue\n throw err\n }\n }\n }\n\n // ─── Chunk I/O (with AAD binding) ─────────────────────────────────\n\n private async writeChunk(\n eTag: string,\n index: number,\n chunkCount: number,\n chunk: Uint8Array,\n dek: CryptoKey | null,\n ): Promise<void> {\n const id = `${eTag}_${index}`\n const now = new Date().toISOString()\n let envelope: EncryptedEnvelope\n\n if (dek) {\n const aad = chunkAAD(eTag, index, chunkCount)\n const { iv, data } = await encryptBytesWithAAD(chunk, dek, aad)\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: now, _iv: iv, _data: data }\n } else {\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: now,\n _iv: '',\n _data: bufferToBase64(chunk),\n }\n }\n\n await this.store.put(this.vault, BLOB_CHUNKS_COLLECTION, id, envelope)\n }\n\n private async readChunk(\n eTag: string,\n index: number,\n chunkCount: number,\n dek: CryptoKey | null,\n ): Promise<Uint8Array | null> {\n const envelope = await this.store.get(this.vault, BLOB_CHUNKS_COLLECTION, `${eTag}_${index}`)\n if (!envelope) return null\n\n if (dek) {\n const aad = 
chunkAAD(eTag, index, chunkCount)\n return await decryptBytesWithAAD(envelope._iv, envelope._data, dek, aad)\n }\n\n return base64ToBuffer(envelope._data)\n }\n\n // ─── Version record I/O ───────────────────────────────────────────\n\n private versionKey(slotName: string, label: string): string {\n return `${this.recordId}::${slotName}::${label}`\n }\n\n private async loadVersionRecord(slotName: string, label: string): Promise<VersionRecord | null> {\n const key = this.versionKey(slotName, label)\n const envelope = await this.store.get(this.vault, this.versionsCollection, key)\n if (!envelope) return null\n\n if (!this.encrypted) {\n return JSON.parse(envelope._data) as VersionRecord\n }\n\n const dek = await this.getDEK(this.collection)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return JSON.parse(json) as VersionRecord\n }\n\n private async writeVersionRecord(slotName: string, record: VersionRecord): Promise<void> {\n const key = this.versionKey(slotName, record.label)\n const json = JSON.stringify(record)\n const now = new Date().toISOString()\n let envelope: EncryptedEnvelope\n\n if (this.encrypted) {\n const dek = await this.getDEK(this.collection)\n const { iv, data } = await encrypt(json, dek)\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: now, _iv: iv, _data: data }\n } else {\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: now, _iv: '', _data: json }\n }\n\n await this.store.put(this.vault, this.versionsCollection, key, envelope)\n }\n\n private async deleteVersionRecord(slotName: string, label: string): Promise<void> {\n const key = this.versionKey(slotName, label)\n await this.store.delete(this.vault, this.versionsCollection, key)\n }\n\n // ─── Effective chunk size ─────────────────────────────────────────\n\n private effectiveChunkSize(opts?: BlobPutOptions): number {\n if (opts?.chunkSize) return opts.chunkSize\n if (this.maxBlobBytes) return this.maxBlobBytes\n return DEFAULT_CHUNK_SIZE\n }\n\n // ─── Fetch all chunks for a blob ──────────────────────────────────\n\n private async fetchAllChunks(blob: BlobObject): Promise<Uint8Array> {\n const blobDEK = this.encrypted ? await this.getDEK(BLOB_COLLECTION) : null\n const chunks: Uint8Array[] = []\n\n for (let i = 0; i < blob.chunkCount; i++) {\n const chunk = await this.readChunk(blob.eTag, i, blob.chunkCount, blobDEK)\n if (!chunk) {\n throw new NotFoundError(\n `Blob chunk ${i}/${blob.chunkCount} missing for eTag \"${blob.eTag}\" on record \"${this.recordId}\"`,\n )\n }\n chunks.push(chunk)\n }\n\n const assembled = concatChunks(chunks)\n return blob.compression === 'gzip' ? await decompressBytes(assembled) : assembled\n }\n\n // ─── Public API: Slot management ──────────────────────────────────\n\n /**\n * Upload bytes and attach them to this record under `slotName`.\n *\n * 1. Computes `eTag = HMAC-SHA-256(blobDEK, plaintext)` for keyed content-addressing.\n * 2. Auto-detects MIME type from magic bytes if not provided.\n * 3. If a blob with this eTag already exists, skips chunk upload (deduplication)\n * and CAS-increments refCount.\n * 4. Otherwise: compresses → splits into chunks → encrypts each chunk with\n * AAD binding → writes `_blob_chunks` → writes `BlobObject` to `_blob_index`.\n * 5. 
CAS-updates the slot metadata in `_blob_slots_{collection}`.\n * If overwriting an existing slot, decrements the old eTag's refCount.\n */\n async put(slotName: string, data: Uint8Array, opts?: BlobPutOptions): Promise<void> {\n // Step 1 — keyed content-hash (plaintext, before compression)\n const blobDEK = this.encrypted ? await this.getDEK(BLOB_COLLECTION) : null\n const eTag = blobDEK\n ? await hmacSha256Hex(blobDEK, data)\n : await plainSha256Hex(data)\n\n // Step 2 — MIME detection\n let mimeType = opts?.mimeType\n if (!mimeType) {\n const detected = detectMagic(data.subarray(0, 16))\n if (detected) mimeType = detected.mime\n }\n\n // Determine compression: explicit opt > auto-detect > default true\n let shouldCompress: boolean\n if (opts?.compress !== undefined) {\n shouldCompress = opts.compress\n } else if (mimeType && isPreCompressed(mimeType)) {\n shouldCompress = false\n } else {\n shouldCompress = true\n }\n\n // Step 3 — deduplication check\n const existingBlob = await this.loadBlobObject(eTag)\n\n if (existingBlob) {\n // eTag already exists — just increment refCount (CAS retry)\n await this.casUpdateRefCount(eTag, +1)\n } else {\n // Step 4 — compress\n const { bytes: compressed, algorithm } = shouldCompress\n ? await compressBytes(data)\n : { bytes: data, algorithm: 'none' as const }\n\n const chunkSize = this.effectiveChunkSize(opts)\n const chunkCount = Math.max(1, Math.ceil(compressed.byteLength / chunkSize))\n\n // Step 5 — write chunks FIRST with AAD binding (safe failure order)\n for (let i = 0; i < chunkCount; i++) {\n const start = i * chunkSize\n await this.writeChunk(\n eTag, i, chunkCount,\n compressed.subarray(start, start + chunkSize),\n blobDEK,\n )\n }\n\n // Step 6 — write blob index entry after all chunks succeed\n await this.writeBlobObject({\n eTag,\n size: data.byteLength,\n compressedSize: compressed.byteLength,\n compression: algorithm,\n chunkSize,\n chunkCount,\n ...(mimeType !== undefined ? { mimeType } : {}),\n createdAt: new Date().toISOString(),\n refCount: 1,\n })\n }\n\n // Step 7 — CAS-update slot metadata\n const uploaderUserId = opts?.uploadedBy ?? this.userId\n await this.casUpdateSlots((slots) => {\n const oldETag = slots[slotName]?.eTag\n slots[slotName] = {\n eTag,\n filename: slotName,\n size: data.byteLength,\n ...(mimeType !== undefined ? { mimeType } : {}),\n uploadedAt: new Date().toISOString(),\n ...(uploaderUserId !== undefined ? 
{ uploadedBy: uploaderUserId } : {}),\n }\n // Schedule old eTag refCount decrement (non-blocking best-effort)\n if (oldETag && oldETag !== eTag) {\n this._deferredRefDecrement = oldETag\n }\n return slots\n })\n\n // Decrement old eTag refCount outside the CAS loop\n if (this._deferredRefDecrement) {\n const oldETag = this._deferredRefDecrement\n this._deferredRefDecrement = undefined\n await this.casUpdateRefCount(oldETag, -1).catch(() => {\n // Best-effort — blobGC will reconcile\n })\n }\n }\n\n private _deferredRefDecrement: string | undefined\n\n /**\n * Fetch all bytes for the named slot.\n * Returns `null` if the slot does not exist.\n * Throws `NotFoundError` if the index entry exists but a chunk is missing.\n */\n async get(slotName: string): Promise<Uint8Array | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n return this.fetchAllChunks(result.blob)\n }\n\n /**\n * List all slot entries for this record.\n * Returns metadata only — no chunk data is loaded.\n */\n async list(): Promise<SlotInfo[]> {\n const { slots } = await this.loadSlots()\n return Object.entries(slots).map(([name, slot]) => ({ name, ...slot }))\n }\n\n /**\n * Delete the named slot from this record.\n * Decrements refCount on the blob. Chunks are GC'd by `vault.blobGC()`.\n */\n async delete(slotName: string): Promise<void> {\n let eTagToDecrement: string | undefined\n\n await this.casUpdateSlots((slots) => {\n if (!(slotName in slots)) return null\n eTagToDecrement = slots[slotName]!.eTag\n delete slots[slotName]\n return slots\n })\n\n if (eTagToDecrement) {\n await this.casUpdateRefCount(eTagToDecrement, -1).catch(() => {\n // Best-effort — blobGC will reconcile\n })\n }\n }\n\n /**\n * Return a native `Response` whose body streams the decrypted,\n * decompressed blob bytes with full HTTP metadata headers.\n *\n * Note: implementation is buffered — all chunks are loaded into\n * memory before being enqueued. True streaming is deferred to a\n * follow-up.\n *\n * Returns `null` if the slot does not exist.\n */\n async response(slotName: string, opts?: BlobResponseOptions): Promise<Response | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n return this.buildResponse(slot, result.blob, opts)\n }\n\n /**\n * Decrypt the slot and wrap the bytes in a browser ObjectURL ready\n * to feed into `<img src>`, `<a href>`, etc. The caller MUST call\n * `revoke()` when the URL is no longer needed — otherwise the URL\n * (and the underlying decrypted Blob) are pinned for the lifetime\n * of the document, which leaks memory in long-lived pages.\n *\n * Returns `null` when the slot does not exist.\n *\n * Throws when `URL.createObjectURL` is unavailable in the host\n * environment (Node without DOM, restricted workers). Framework\n * adapters — `useBlobURL` in `@noy-db/in-vue`, etc. — guard against\n * this for SSR contexts and return `null` instead of propagating.\n */\n async objectURL(\n slotName: string,\n opts?: { mimeType?: string },\n ): Promise<{ url: string; revoke: () => void } | null> {\n if (typeof URL === 'undefined' || typeof URL.createObjectURL !== 'function') {\n throw new Error(\n 'BlobSet.objectURL: URL.createObjectURL is unavailable in this environment. 
' +\n 'Call this from the browser, or use BlobSet.get() and create the URL yourself.',\n )\n }\n const bytes = await this.get(slotName)\n if (!bytes) return null\n\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n const type = opts?.mimeType ?? slot?.mimeType ?? 'application/octet-stream'\n\n // Pinning the underlying ArrayBuffer in a Blob is what backs the\n // ObjectURL — once we createObjectURL the URL holds a strong ref\n // to the Blob, so the local `blob` variable can fall out of scope.\n // Copy through a fresh ArrayBuffer so TS narrows away the\n // SharedArrayBuffer branch of `ArrayBufferLike` (Uint8Array is\n // generic over the backing buffer type since TS 5.7).\n const buffer = bytes.buffer.slice(bytes.byteOffset, bytes.byteOffset + bytes.byteLength) as ArrayBuffer\n const blob = new Blob([buffer], { type })\n const url = URL.createObjectURL(blob)\n let revoked = false\n const revoke = (): void => {\n if (revoked) return\n revoked = true\n URL.revokeObjectURL(url)\n }\n return { url, revoke }\n }\n\n // ─── Public API: Published versions (UC-3 amendment versioning) ───\n\n /**\n * Publish the current slot content as a named version snapshot.\n *\n * The published version holds an independent refCount reference to\n * the blob. Even if the slot is later overwritten or deleted, the\n * published version keeps the blob data alive.\n *\n * Publishing with an existing label overwrites it — if the eTags differ,\n * refCounts are adjusted accordingly.\n */\n async publish(slotName: string, label: string): Promise<void> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) throw new NotFoundError(`Slot \"${slotName}\" not found on record \"${this.recordId}\"`)\n\n // Check for existing version with this label\n const existing = await this.loadVersionRecord(slotName, label)\n if (existing && existing.eTag === slot.eTag) return // no-op: same blob\n\n // Write the version record\n const record: VersionRecord = {\n label,\n eTag: slot.eTag,\n publishedAt: new Date().toISOString(),\n ...(this.userId !== undefined ? 
{ publishedBy: this.userId } : {}),\n }\n await this.writeVersionRecord(slotName, record)\n\n // Increment refCount for the new version's eTag\n await this.casUpdateRefCount(slot.eTag, +1)\n\n // If overwriting an existing version with a different eTag, decrement the old one\n if (existing && existing.eTag !== slot.eTag) {\n await this.casUpdateRefCount(existing.eTag, -1).catch(() => {})\n }\n }\n\n /**\n * Fetch bytes for a published version.\n * Returns `null` if the version does not exist.\n */\n async getVersion(slotName: string, label: string): Promise<Uint8Array | null> {\n const record = await this.loadVersionRecord(slotName, label)\n if (!record) return null\n\n const result = await this.loadBlobObject(record.eTag)\n if (!result) return null\n\n return this.fetchAllChunks(result.blob)\n }\n\n /**\n * List all published versions for a slot.\n */\n async listVersions(slotName: string): Promise<VersionRecord[]> {\n const prefix = `${this.recordId}::${slotName}::`\n const allKeys = await this.store.list(this.vault, this.versionsCollection)\n const matchingKeys = allKeys.filter((k) => k.startsWith(prefix))\n\n const versions: VersionRecord[] = []\n for (const key of matchingKeys) {\n const envelope = await this.store.get(this.vault, this.versionsCollection, key)\n if (!envelope) continue\n\n if (!this.encrypted) {\n versions.push(JSON.parse(envelope._data) as VersionRecord)\n } else {\n const dek = await this.getDEK(this.collection)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n versions.push(JSON.parse(json) as VersionRecord)\n }\n }\n\n return versions\n }\n\n /**\n * Delete a published version. Decrements refCount on its blob.\n */\n async deleteVersion(slotName: string, label: string): Promise<void> {\n const record = await this.loadVersionRecord(slotName, label)\n if (!record) return\n\n await this.deleteVersionRecord(slotName, label)\n await this.casUpdateRefCount(record.eTag, -1).catch(() => {})\n }\n\n /**\n * Return a `Response` for a published version — same as `response()`\n * but reads from the version record's eTag instead of the current slot.\n */\n async responseVersion(\n slotName: string,\n label: string,\n opts?: BlobResponseOptions,\n ): Promise<Response | null> {\n const record = await this.loadVersionRecord(slotName, label)\n if (!record) return null\n\n const result = await this.loadBlobObject(record.eTag)\n if (!result) return null\n\n // Build a synthetic SlotRecord from the version + blob data\n const slotLike: SlotRecord = {\n eTag: record.eTag,\n filename: opts?.filename ?? `${slotName}-${label}`,\n size: result.blob.size,\n ...(result.blob.mimeType !== undefined ? { mimeType: result.blob.mimeType } : {}),\n uploadedAt: record.publishedAt,\n ...(record.publishedBy !== undefined ? { uploadedBy: record.publishedBy } : {}),\n }\n\n return this.buildResponse(slotLike, result.blob, opts)\n }\n\n // ─── Diagnostics ──────────────────────────────────────────────────\n\n /**\n * Return the `BlobObject` metadata for the named slot.\n * Returns `null` if the slot or blob does not exist.\n */\n async blobInfo(slotName: string): Promise<BlobObject | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n const result = await this.loadBlobObject(slot.eTag)\n return result?.blob ?? null\n }\n\n // ─── Presigned URL (E5) ────────────────────────────────────────────\n\n /**\n * Generate a presigned URL for direct client download of the blob's\n * ciphertext. 
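A hedged call sketch (slot name illustrative):\n *\n * \`\`\`ts\n * // 15-minute URL for the ciphertext of a single-chunk blob\n * const url = await blobs.presignedUrl('invoice.pdf', 15 * 60)\n * \`\`\`\n *\n * 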
Only works when the blob store supports `presignUrl`.\n *\n * **Important:** The URL returns encrypted data. The caller must\n * decrypt client-side using `decryptResponse()` or a service worker.\n *\n * Returns `null` if the slot doesn't exist or the store doesn't support presigning.\n */\n async presignedUrl(slotName: string, expiresInSeconds = 3600): Promise<string | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n // Only works for single-chunk blobs where the store supports presigning\n if (result.blob.chunkCount !== 1) return null\n if (!this.store.presignUrl) return null\n\n const chunkId = `${slot.eTag}_0`\n return this.store.presignUrl(this.vault, '_blob_chunks', chunkId, expiresInSeconds)\n }\n\n /**\n * Decrypt a ciphertext Response (e.g. from a presigned URL fetch)\n * back into a plaintext Response with correct headers.\n *\n * Usage with service worker or client-side fetch:\n * ```ts\n * const url = await blobs.presignedUrl('invoice.pdf')\n * const cipherResponse = await fetch(url)\n * const plainResponse = await blobs.decryptResponse('invoice.pdf', cipherResponse)\n * ```\n */\n async decryptResponse(slotName: string, cipherResponse: Response): Promise<Response | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n // Parse the envelope from the ciphertext response\n const text = await cipherResponse.text()\n const envelope = JSON.parse(text) as { _iv: string; _data: string }\n\n const blobDEK = this.encrypted ? await this.getDEK('_blob') : null\n if (!blobDEK) {\n return this.buildResponse(slot, result.blob, { inline: true })\n }\n\n // Decrypt the single chunk\n const aad = chunkAAD(slot.eTag, 0, result.blob.chunkCount)\n const { decryptBytesWithAAD: decryptAAD } = await import('../crypto.js')\n const decrypted = await decryptAAD(envelope._iv, envelope._data, blobDEK, aad)\n const plaintext = result.blob.compression === 'gzip'\n ? await decompressBytes(decrypted)\n : decrypted\n\n const body = new ReadableStream<Uint8Array>({\n start(controller) {\n controller.enqueue(plaintext)\n controller.close()\n },\n })\n\n const filename = slot.filename\n return new Response(body, {\n headers: {\n 'Content-Type': slot.mimeType ?? 'application/octet-stream',\n 'Content-Length': String(slot.size),\n 'ETag': `\"${slot.eTag}\"`,\n 'Content-Disposition': `inline; filename=\"${filename}\"`,\n 'Last-Modified': new Date(slot.uploadedAt).toUTCString(),\n },\n })\n }\n\n // ─── Internal: build Response from slot + blob ────────────────────\n\n private async buildResponse(\n slot: SlotRecord,\n blob: BlobObject,\n opts?: BlobResponseOptions,\n ): Promise<Response> {\n const fetchAllChunks = this.fetchAllChunks.bind(this)\n\n // buffered — all chunks loaded into memory then enqueued.\n const body = new ReadableStream<Uint8Array>({\n async start(controller) {\n try {\n const output = await fetchAllChunks(blob)\n controller.enqueue(output)\n controller.close()\n } catch (err) {\n controller.error(err)\n }\n },\n })\n\n const filename = opts?.filename ?? slot.filename\n const disposition = opts?.inline\n ? `inline; filename=\"${filename}\"`\n : `attachment; filename=\"${filename}\"`\n\n return new Response(body, {\n headers: {\n 'Content-Type': slot.mimeType ?? 
'application/octet-stream',\n 'Content-Length': String(slot.size),\n 'ETag': `\"${slot.eTag}\"`,\n 'Content-Disposition': disposition,\n 'Last-Modified': new Date(slot.uploadedAt).toUTCString(),\n },\n })\n }\n}\n\n// ─── Fallback for unencrypted mode ──────────────────────────────────────\n\nimport { sha256Hex } from '../crypto.js'\n\nasync function plainSha256Hex(data: Uint8Array): Promise<string> {\n return sha256Hex(data)\n}\n","/**\n * Lightweight MIME type detection from magic bytes (file signatures).\n *\n * Designed for the blob store's auto-detection feature. Operates on the first 16 bytes of\n * plaintext — no filesystem access, no filename guessing.\n *\n * ## Detection strategies\n *\n * 1. **Prefix match** — magic bytes at offset 0 (most formats).\n * 2. **Offset match** — magic bytes at a fixed offset > 0 (ISOBMFF: offset 4).\n * 3. **Compound match** — two separate byte sequences at different offsets\n * (RIFF-based: bytes 0-3 + bytes 8-11).\n *\n * ## Formats excluded (require offset > 16 bytes)\n *\n * - TAR (`ustar` at offset 257)\n * - ISO 9660 (`CD001` at offset 32769)\n *\n * @module\n */\n\n// ─── Types ───────────────────────────────────────────────────────────────\n\ninterface MagicRule {\n /** IANA MIME type (or widely-used x- type). */\n readonly mime: string\n /** Human-readable format name for diagnostics. */\n readonly format: string\n /** Magic bytes to match, as a Uint8Array. */\n readonly bytes: Uint8Array\n /** Byte offset where the magic starts. Default 0. */\n readonly offset?: number\n /**\n * For compound checks (RIFF, FORM): a second byte sequence that must\n * also match at `secondaryOffset`.\n */\n readonly secondaryBytes?: Uint8Array\n /** Offset of the secondary match. */\n readonly secondaryOffset?: number\n /** If true, the format is already compressed — skip gzip in blob.put(). */\n readonly preCompressed?: true\n}\n\n// ─── Helpers ─────────────────────────────────────────────────────────────\n\n/** Convert a hex string like `'FF D8 FF'` to Uint8Array. */\nfunction hex(s: string): Uint8Array {\n return new Uint8Array(s.split(' ').map((b) => parseInt(b, 16)))\n}\n\n// ─── Magic rules ─────────────────────────────────────────────────────────\n//\n// Ordered by detection priority: more specific (longer) signatures first\n// within the same offset group, so that e.g. 
RAR v5 (8 bytes) is tested\n// before RAR v4 (7 bytes).\n//\n// Sources verified against:\n// - Gary Kessler's File Signatures Table\n// - Wikipedia \"List of file signatures\"\n// - IANA MIME type registry\n// - Individual format specifications (PNG RFC 2083, PDF ISO 32000, etc.)\n//\n// Each entry includes the original CSV row number for traceability.\n\nconst MAGIC_RULES: readonly MagicRule[] = [\n // ── Images ───────────────────────────────────────────────────────────\n\n // #2 PNG — full 8-byte signature (RFC 2083)\n { mime: 'image/png', format: 'PNG', bytes: hex('89 50 4E 47 0D 0A 1A 0A'), preCompressed: true },\n\n // #1 JPEG — FF D8 FF (third byte is start of APP marker, always FF)\n { mime: 'image/jpeg', format: 'JPEG', bytes: hex('FF D8 FF'), preCompressed: true },\n\n // #7 WebP — RIFF compound: bytes 0-3 = RIFF, bytes 8-11 = WEBP\n {\n mime: 'image/webp',\n format: 'WebP',\n bytes: hex('52 49 46 46'),\n secondaryBytes: hex('57 45 42 50'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // #5 TIFF (little-endian) — II + version 42\n { mime: 'image/tiff', format: 'TIFF', bytes: hex('49 49 2A 00') },\n\n // #6 TIFF (big-endian) — MM + version 42\n { mime: 'image/tiff', format: 'TIFF', bytes: hex('4D 4D 00 2A') },\n\n // #3 GIF — GIF8 (covers GIF87a and GIF89a)\n { mime: 'image/gif', format: 'GIF', bytes: hex('47 49 46 38'), preCompressed: true },\n\n // #4 BMP — BM\n { mime: 'image/bmp', format: 'BMP', bytes: hex('42 4D') },\n\n // PSD — 8BPS\n { mime: 'image/vnd.adobe.photoshop', format: 'PSD', bytes: hex('38 42 50 53') },\n\n // #8 ICO — 00 00 01 00 (note: 00 00 02 00 is CUR cursor format)\n { mime: 'image/x-icon', format: 'ICO', bytes: hex('00 00 01 00') },\n\n // #9 HEIC — ISOBMFF: ftyp at offset 4, brand \"heic\" at offset 8\n {\n mime: 'image/heic',\n format: 'HEIC',\n bytes: hex('66 74 79 70'),\n offset: 4,\n secondaryBytes: hex('68 65 69 63'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // ── Documents ────────────────────────────────────────────────────────\n\n // PDF — %PDF\n { mime: 'application/pdf', format: 'PDF', bytes: hex('25 50 44 46') },\n\n // RTF — {\\rtf\n { mime: 'application/rtf', format: 'RTF', bytes: hex('7B 5C 72 74 66') },\n\n // ── Archives & compression ───────────────────────────────────────────\n\n // RAR v5 — 8-byte signature (test before RAR v4)\n { mime: 'application/vnd.rar', format: 'RAR v5', bytes: hex('52 61 72 21 1A 07 01 00'), preCompressed: true },\n\n // RAR v4 — 7-byte signature\n { mime: 'application/vnd.rar', format: 'RAR v4', bytes: hex('52 61 72 21 1A 07 00'), preCompressed: true },\n\n // 7-Zip — 6-byte signature\n { mime: 'application/x-7z-compressed', format: '7Z', bytes: hex('37 7A BC AF 27 1C'), preCompressed: true },\n\n // XZ — 6-byte stream header\n { mime: 'application/x-xz', format: 'XZ', bytes: hex('FD 37 7A 58 5A 00'), preCompressed: true },\n\n // ZIP — PK\\x03\\x04 (local file header)\n { mime: 'application/zip', format: 'ZIP', bytes: hex('50 4B 03 04'), preCompressed: true },\n\n // GZIP — 1F 8B\n { mime: 'application/gzip', format: 'GZIP', bytes: hex('1F 8B'), preCompressed: true },\n\n // BZIP2 — BZh\n { mime: 'application/x-bzip2', format: 'BZIP2', bytes: hex('42 5A 68'), preCompressed: true },\n\n // LZIP — LZIP\n { mime: 'application/x-lzip', format: 'LZIP', bytes: hex('4C 5A 49 50'), preCompressed: true },\n\n // ── Audio ────────────────────────────────────────────────────────────\n\n // WAV — RIFF compound: bytes 0-3 = RIFF, bytes 8-11 = WAVE\n {\n mime: 'audio/wav',\n format: 'WAV',\n bytes: 
hex('52 49 46 46'),\n secondaryBytes: hex('57 41 56 45'),\n secondaryOffset: 8,\n },\n\n // AIFF — FORM compound: bytes 0-3 = FORM, bytes 8-11 = AIFF\n {\n mime: 'audio/aiff',\n format: 'AIFF',\n bytes: hex('46 4F 52 4D'),\n secondaryBytes: hex('41 49 46 46'),\n secondaryOffset: 8,\n },\n\n // FLAC — fLaC\n { mime: 'audio/flac', format: 'FLAC', bytes: hex('66 4C 61 43') },\n\n // OGG — OggS (container — may hold Vorbis, Opus, Theora, etc.)\n { mime: 'application/ogg', format: 'OGG', bytes: hex('4F 67 67 53') },\n\n // MIDI — MThd\n { mime: 'audio/midi', format: 'MIDI', bytes: hex('4D 54 68 64') },\n\n // MP3 (ID3-tagged) — ID3\n { mime: 'audio/mpeg', format: 'MP3', bytes: hex('49 44 33'), preCompressed: true },\n\n // ── Video ────────────────────────────────────────────────────────────\n\n // AVI — RIFF compound: bytes 0-3 = RIFF, bytes 8-11 = AVI\x20\n {\n mime: 'video/x-msvideo',\n format: 'AVI',\n bytes: hex('52 49 46 46'),\n secondaryBytes: hex('41 56 49 20'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // WMV/ASF — 8-byte ASF header GUID prefix\n { mime: 'video/x-ms-wmv', format: 'WMV', bytes: hex('30 26 B2 75 8E 66 CF 11'), preCompressed: true },\n\n // MKV/WebM — EBML header (Matroska container)\n { mime: 'video/x-matroska', format: 'MKV', bytes: hex('1A 45 DF A3'), preCompressed: true },\n\n // FLV — FLV\n { mime: 'video/x-flv', format: 'FLV', bytes: hex('46 4C 56'), preCompressed: true },\n\n // MOV — ISOBMFF: ftyp at offset 4, brand \"qt \" at offset 8\n {\n mime: 'video/quicktime',\n format: 'MOV',\n bytes: hex('66 74 79 70'),\n offset: 4,\n secondaryBytes: hex('71 74 20 20'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // MP4 — ISOBMFF: ftyp at offset 4 (brands vary: isom, mp41, mp42, etc.)\n // Tested AFTER MOV and HEIC so their specific brands match first.\n { mime: 'video/mp4', format: 'MP4', bytes: hex('66 74 79 70'), offset: 4, preCompressed: true },\n\n // ── Executables & binaries ───────────────────────────────────────────\n\n // SQLite — \"SQLite f\" (first 8 bytes of the 16-byte \"SQLite format 3\" header)\n { mime: 'application/vnd.sqlite3', format: 'SQLite', bytes: hex('53 51 4C 69 74 65 20 66') },\n\n // WASM — \0asm\n { mime: 'application/wasm', format: 'WASM', bytes: hex('00 61 73 6D') },\n\n // ELF — \x7FELF\n { mime: 'application/x-elf', format: 'ELF', bytes: hex('7F 45 4C 46') },\n\n // PE (EXE/DLL) — MZ\n { mime: 'application/vnd.microsoft.portable-executable', format: 'PE', bytes: hex('4D 5A') },\n\n // Mach-O — all four single-arch variants\n { mime: 'application/x-mach-binary', format: 'Mach-O 64 LE', bytes: hex('CF FA ED FE') },\n { mime: 'application/x-mach-binary', format: 'Mach-O 64 BE', bytes: hex('FE ED FA CF') },\n { mime: 'application/x-mach-binary', format: 'Mach-O 32 LE', bytes: hex('CE FA ED FE') },\n { mime: 'application/x-mach-binary', format: 'Mach-O 32 BE', bytes: hex('FE ED FA CE') },\n\n // Java Class — CA FE BA BE\n // Note: collides with Mach-O Universal Binary. Disambiguated by checking\n // bytes 4-7: Java class version is >= 0x002D (45), while fat binary\n // arch count is a small number (typically 0x00000002).\n // We place Java after Mach-O single-arch entries so the more common\n // Mach-O variants match first. 
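A hedged caller-side\n // disambiguation sketch (names illustrative; \`header\` = first plaintext bytes):\n //\n //   const view = new DataView(header.buffer, header.byteOffset, header.byteLength)\n //   const word = view.getUint32(4)       // bytes 4-7, big-endian\n //   const looksLikeJava = word >= 0x2d   // class-file version is >= 45\n //\n // 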
The CA FE BA BE collision between Java\n // and Mach-O fat binary is resolved by the caller if needed.\n { mime: 'application/java-vm', format: 'Java Class', bytes: hex('CA FE BA BE') },\n\n // DEX — dex\\n (Android Dalvik Executable)\n { mime: 'application/vnd.android.dex', format: 'DEX', bytes: hex('64 65 78 0A') },\n\n // ── Package formats ──────────────────────────────────────────────────\n\n // DEB — !<arch> (ar archive; DEB-specific member follows)\n { mime: 'application/vnd.debian.binary-package', format: 'DEB', bytes: hex('21 3C 61 72 63 68 3E') },\n\n // RPM — ED AB EE DB\n { mime: 'application/x-rpm', format: 'RPM', bytes: hex('ED AB EE DB') },\n\n // CAB — MSCF\n { mime: 'application/vnd.ms-cab-compressed', format: 'CAB', bytes: hex('4D 53 43 46'), preCompressed: true },\n\n // ── Capture & Flash ──────────────────────────────────────────────────\n\n // PCAP (little-endian) — D4 C3 B2 A1\n { mime: 'application/vnd.tcpdump.pcap', format: 'PCAP', bytes: hex('D4 C3 B2 A1') },\n\n // PCAP (big-endian) — A1 B2 C3 D4\n { mime: 'application/vnd.tcpdump.pcap', format: 'PCAP BE', bytes: hex('A1 B2 C3 D4') },\n\n // PCAPNG — Section Header Block\n { mime: 'application/x-pcapng', format: 'PCAPNG', bytes: hex('0A 0D 0D 0A') },\n\n // SWF — all three variants (uncompressed, zlib, LZMA)\n { mime: 'application/x-shockwave-flash', format: 'SWF', bytes: hex('46 57 53') },\n { mime: 'application/x-shockwave-flash', format: 'SWF zlib', bytes: hex('43 57 53'), preCompressed: true },\n { mime: 'application/x-shockwave-flash', format: 'SWF LZMA', bytes: hex('5A 57 53'), preCompressed: true },\n\n // ── Data formats ─────────────────────────────────────────────────────\n\n // Parquet — PAR1 (no registered IANA MIME; using Apache's informal type)\n { mime: 'application/vnd.apache.parquet', format: 'Parquet', bytes: hex('50 41 52 31') },\n\n // Avro Object Container — Obj\\x01\n { mime: 'application/avro', format: 'Avro', bytes: hex('4F 62 6A 01') },\n\n // NES ROM — NES\\x1A (iNES header)\n { mime: 'application/x-nintendo-nes-rom', format: 'NES ROM', bytes: hex('4E 45 53 1A') },\n] as const\n\n// ─── MP3 sync word ───────────────────────────────────────────────────────\n//\n// MP3 files without an ID3 tag start with a frame sync word where the top\n// 11 bits are set: 0xFFE0 mask. The ID3 signature (49 44 33) is handled\n// as a normal rule above. The sync-word check is a fallback tested in\n// `detectMimeType` after all rules.\n\nfunction isMp3SyncWord(byte0: number, byte1: number): boolean {\n return byte0 === 0xff && (byte1 & 0xe0) === 0xe0\n}\n\n// ─── Detection ───────────────────────────────────────────────────────────\n\n/**\n * Detect MIME type from the first bytes of a file.\n *\n * @param header - The first 16 bytes (or more) of the plaintext. Passing\n * fewer than 16 bytes may miss compound and offset-based matches.\n * @returns Detected MIME type, or `'application/octet-stream'` if unknown.\n */\nexport function detectMimeType(header: Uint8Array): string {\n const result = detectMagic(header)\n return result?.mime ?? 
'application/octet-stream'\n}\n\n/**\n * Detect MIME type and whether the format is already compressed.\n *\n * Used by `BlobSet.put()` to decide whether to skip gzip compression.\n *\n * @param header - The first 16 bytes (or more) of the plaintext.\n * @returns `{ mime, preCompressed }` or `null` if no match.\n */\nexport function detectMagic(\n header: Uint8Array,\n): { mime: string; format: string; preCompressed: boolean } | null {\n for (const rule of MAGIC_RULES) {\n if (matchRule(header, rule)) {\n return {\n mime: rule.mime,\n format: rule.format,\n preCompressed: rule.preCompressed ?? false,\n }\n }\n }\n\n // Fallback: MP3 sync word (no ID3 tag)\n if (header.length >= 2 && isMp3SyncWord(header[0]!, header[1]!)) {\n return { mime: 'audio/mpeg', format: 'MP3', preCompressed: true }\n }\n\n return null\n}\n\n/**\n * Check whether a format is already compressed (should skip gzip).\n *\n * @param mimeType - A MIME type string.\n * @returns `true` if the format is known to be pre-compressed.\n */\nexport function isPreCompressed(mimeType: string): boolean {\n return PRE_COMPRESSED_MIMES.has(mimeType)\n}\n\n// ─── Internal matching ───────────────────────────────────────────────────\n\nfunction matchRule(header: Uint8Array, rule: MagicRule): boolean {\n const offset = rule.offset ?? 0\n const end = offset + rule.bytes.length\n\n // Not enough data for the primary match\n if (header.length < end) return false\n\n // Primary byte sequence\n for (let i = 0; i < rule.bytes.length; i++) {\n if (header[offset + i] !== rule.bytes[i]) return false\n }\n\n // Secondary byte sequence (compound check)\n if (rule.secondaryBytes && rule.secondaryOffset !== undefined) {\n const sEnd = rule.secondaryOffset + rule.secondaryBytes.length\n if (header.length < sEnd) return false\n for (let i = 0; i < rule.secondaryBytes.length; i++) {\n if (header[rule.secondaryOffset + i] !== rule.secondaryBytes[i]) return false\n }\n }\n\n return true\n}\n\n// ─── Pre-compressed MIME set ─────────────────────────────────────────────\n//\n// Built from the rules above. Used by `isPreCompressed()` for callers who\n// already know the MIME type (e.g. from a Content-Type header) and want to\n// skip the magic-byte detection step.\n\nconst PRE_COMPRESSED_MIMES = new Set<string>(\n MAGIC_RULES.filter((r) => r.preCompressed).map((r) => r.mime),\n)\n","import type { NoydbStore, NoydbBundleStore, VaultSnapshot, EncryptedEnvelope } from '../types.js'\nimport { ConflictError, BundleVersionConflictError } from '../errors.js'\n\n// ─── Bundle format ─────────────────────────────────────────────────────\n\nconst BUNDLE_STORE_VERSION = 1 as const\n\n/**\n * Wire format written by `wrapBundleStore`. A JSON-serialised object that\n * contains the entire `VaultSnapshot` (all encrypted envelopes) plus a small\n * header for integrity checking. The envelopes inside are already AES-GCM\n * encrypted by core — the bundle bytes themselves are not additionally\n * encrypted, but they are safe to store on untrusted blob hosts because\n * every record inside is already ciphertext.\n *\n * @internal\n */\ninterface BundleStoreData {\n readonly _noydb_bundle_store: typeof BUNDLE_STORE_VERSION\n readonly vault: string\n readonly ts: string\n readonly data: VaultSnapshot\n}\n\n// ─── Options ───────────────────────────────────────────────────────────\n\nexport interface WrapBundleStoreOptions {\n /**\n * When `true` (default), every `put()` and `delete()` flushes the full\n * vault snapshot to the bundle backend. 
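A hedged bulk-load sketch (\`driveBundle\` is an assumed \`NoydbBundleStore\` instance):\n *\n * \`\`\`ts\n * const store = wrapBundleStore(driveBundle, { autoFlush: false })\n * await store.batch('vault-1', async () => {\n *   // many puts/deletes; a single flush runs when the callback ends\n * })\n * \`\`\`\n *\n * 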
Set to `false` for bulk operations\n * and call `store.flush(vaultId)` manually.\n */\n autoFlush?: boolean\n}\n\n// ─── Extended NoydbStore with flush/batch ───────────────────────────────\n\nexport interface WrappedBundleNoydbStore extends NoydbStore {\n /** Manually flush the in-memory snapshot to the bundle backend. */\n flush(vaultId: string): Promise<void>\n /**\n * Run a batch of mutations without flushing until the callback completes.\n * A single flush is performed at the end.\n */\n batch(vaultId: string, fn: () => Promise<void>): Promise<void>\n}\n\n// ─── wrapBundleStore ───────────────────────────────────────────────────\n\nconst MAX_CONFLICT_RETRIES = 3\n\n/**\n * Convert a `NoydbBundleStore` (blob-oriented read/write with OCC) into the\n * standard six-method `NoydbStore` interface expected by `createNoydb({ store })`.\n *\n * Bundle stores operate on the entire vault as a single serialised unit —\n * ideal for backends like Google Drive, WebDAV, or iCloud Drive that work\n * best with whole-file I/O rather than per-record KV operations.\n *\n * ## Optimistic concurrency\n *\n * The wrapper tracks the `version` token from the last `readBundle` and\n * passes it as `expectedVersion` on every flush. On\n * `BundleVersionConflictError`, it re-reads, merges the remote snapshot\n * (last-write-wins per record key), and retries (max 3 attempts).\n *\n * ## Flush modes\n *\n * By default, flushes on every mutation (O(vault size) per write). Options:\n * - `autoFlush: false` + explicit `store.flush(vaultId)` calls\n * - `store.batch(vaultId, async () => { ... })` — defers flush until end\n * - Pair with `syncPolicy: { push: { mode: 'debounce' } }` from \n */\nexport function wrapBundleStore(\n bundle: NoydbBundleStore,\n options?: WrapBundleStoreOptions,\n): WrappedBundleNoydbStore {\n const autoFlush = options?.autoFlush !== false\n\n // Per-vault state\n const snapshots = new Map<string, VaultSnapshot>()\n const versions = new Map<string, string | null>()\n const loaded = new Set<string>()\n\n // Batch mode: when > 0, suppress auto-flush\n let batchDepth = 0\n\n async function load(vault: string): Promise<VaultSnapshot> {\n if (loaded.has(vault)) return snapshots.get(vault)!\n\n const result = await bundle.readBundle(vault)\n if (result) {\n const text = new TextDecoder().decode(result.bytes)\n const format = JSON.parse(text) as BundleStoreData\n snapshots.set(vault, format.data)\n versions.set(vault, result.version)\n } else {\n snapshots.set(vault, {})\n versions.set(vault, null)\n }\n\n loaded.add(vault)\n return snapshots.get(vault)!\n }\n\n async function flush(vault: string): Promise<void> {\n const snapshot = snapshots.get(vault) ?? {}\n const format: BundleStoreData = {\n _noydb_bundle_store: BUNDLE_STORE_VERSION,\n vault,\n ts: new Date().toISOString(),\n data: snapshot,\n }\n const bytes = new TextEncoder().encode(JSON.stringify(format))\n const expectedVersion = versions.get(vault) ?? 
null\n\n // Refresh per-attempt: after a conflict the snapshot is merged and the\n // version token changes, so retries must re-encode rather than re-send\n // the pre-merge bytes.\n let bytesToWrite = bytes\n let versionToExpect = expectedVersion\n\n for (let attempt = 0; attempt < MAX_CONFLICT_RETRIES; attempt++) {\n try {\n const { version: newVersion } = await bundle.writeBundle(vault, bytesToWrite, versionToExpect)\n versions.set(vault, newVersion)\n return\n } catch (err) {\n if (err instanceof BundleVersionConflictError && attempt < MAX_CONFLICT_RETRIES - 1) {\n // Pull remote, merge (last-write-wins by record key), retry\n const remote = await bundle.readBundle(vault)\n if (remote) {\n const remoteText = new TextDecoder().decode(remote.bytes)\n const remoteFormat = JSON.parse(remoteText) as BundleStoreData\n const localSnap = snapshots.get(vault) ?? {}\n const mergedSnap = mergeSnapshots(remoteFormat.data, localSnap)\n snapshots.set(vault, mergedSnap)\n versions.set(vault, remote.version)\n }\n // Re-encode with merged data and a refreshed version token for the retry\n const retryFormat: BundleStoreData = {\n _noydb_bundle_store: BUNDLE_STORE_VERSION,\n vault,\n ts: new Date().toISOString(),\n data: snapshots.get(vault) ?? {},\n }\n bytesToWrite = new TextEncoder().encode(JSON.stringify(retryFormat))\n versionToExpect = versions.get(vault) ?? null\n continue\n }\n throw err\n }\n }\n }\n\n async function maybeFlush(vault: string): Promise<void> {\n if (autoFlush && batchDepth === 0) {\n await flush(vault)\n }\n }\n\n const store: WrappedBundleNoydbStore = {\n name: bundle.name ?? 'bundle',\n\n async flush(vaultId: string): Promise<void> {\n await flush(vaultId)\n },\n\n async batch(vaultId: string, fn: () => Promise<void>): Promise<void> {\n await load(vaultId) // ensure loaded before batch\n batchDepth++\n try {\n await fn()\n } finally {\n batchDepth--\n }\n await flush(vaultId)\n },\n\n async get(vault: string, collection: string, id: string): Promise<EncryptedEnvelope | null> {\n const snap = await load(vault)\n return snap[collection]?.[id] ?? null\n },\n\n async put(\n vault: string,\n collection: string,\n id: string,\n envelope: EncryptedEnvelope,\n expectedVersion?: number,\n ): Promise<void> {\n const snap = await load(vault)\n\n if (expectedVersion !== undefined) {\n const current = snap[collection]?.[id]\n const currentVersion = current?._v ?? 0\n if (currentVersion !== expectedVersion) {\n throw new ConflictError(\n currentVersion,\n \`Expected version ${expectedVersion} but found ${currentVersion} on ${collection}/${id}\`,\n )\n }\n }\n\n snap[collection] ??= {}\n snap[collection][id] = envelope\n await maybeFlush(vault)\n },\n\n async delete(vault: string, collection: string, id: string): Promise<void> {\n const snap = await load(vault)\n if (snap[collection]) {\n delete snap[collection][id]\n await maybeFlush(vault)\n }\n },\n\n async list(vault: string, collection: string): Promise<string[]> {\n const snap = await load(vault)\n return Object.keys(snap[collection] ?? 
{})\n },\n\n async loadAll(vault: string): Promise<VaultSnapshot> {\n return await load(vault)\n },\n\n async saveAll(vault: string, data: VaultSnapshot): Promise<void> {\n snapshots.set(vault, data)\n loaded.add(vault)\n await flush(vault)\n },\n }\n\n return store\n}\n\n// ─── Snapshot merge (last-write-wins per record) ────────────────────────\n\nfunction mergeSnapshots(remote: VaultSnapshot, local: VaultSnapshot): VaultSnapshot {\n const merged: VaultSnapshot = {}\n\n // Start with all remote collections\n for (const [coll, records] of Object.entries(remote)) {\n merged[coll] = { ...records }\n }\n\n // Overlay local collections — LWW by _ts per record\n for (const [coll, records] of Object.entries(local)) {\n if (!merged[coll]) {\n merged[coll] = { ...records }\n continue\n }\n for (const [id, envelope] of Object.entries(records)) {\n const existing = merged[coll][id]\n if (!existing || envelope._ts >= existing._ts) {\n merged[coll][id] = envelope\n }\n }\n }\n\n return merged\n}\n\n// ─── Factory helper ─────────────────────────────────────────────────────\n\n/**\n * Type-safe factory helper for `NoydbBundleStore` implementations,\n * analogous to `createStore` for KV stores.\n */\nexport function createBundleStore<TOptions>(\n factory: (options: TOptions) => NoydbBundleStore,\n): (options: TOptions) => NoydbBundleStore {\n return factory\n}\n","/**\n * Sync scheduling policy.\n *\n * ## What it controls\n *\n * A {@link SyncPolicy} has two halves:\n * - **push** ({@link PushPolicy}) — when dirty local writes are sent to the remote.\n * - **pull** ({@link PullPolicy}) — when the remote is polled for new data.\n *\n * ## Choosing a policy\n *\n * The right policy depends on the backend's operational characteristics:\n *\n * | Backend type | Recommended policy |\n * |---|---|\n * | Per-record (DynamoDB, S3, IDB) | {@link INDEXED_STORE_POLICY} — `on-change` push, `manual` pull |\n * | Bundle (Drive, WebDAV, Git) | {@link BUNDLE_STORE_POLICY} — `debounce` push, `interval` pull |\n *\n * Consumers can override via `createNoydb({ syncPolicy: { ... } })`:\n *\n * ```ts\n * const db = await createNoydb({\n * store: jsonFile({ dir: './data' }),\n * syncPolicy: {\n * push: { mode: 'debounce', debounceMs: 5_000 },\n * pull: { mode: 'on-focus' },\n * },\n * })\n * ```\n *\n * ## Scheduler lifecycle\n *\n * {@link SyncScheduler} owns all timers, debounce logic, and browser lifecycle\n * hooks (`visibilitychange`, `pagehide`, `beforeExit`). Call `scheduler.start()`\n * after opening a vault and `scheduler.stop()` when closing it. The scheduler\n * delegates actual push/pull work to {@link SyncSchedulerCallbacks} provided\n * by the {@link SyncEngine}.\n *\n * @module\n */\n\n// ─── Policy types ───────────────────────────────────────────────────────\n\n/**\n * When push operations are triggered automatically.\n *\n * - `'manual'` — only on explicit `sync.push()` calls.\n * - `'on-change'` — immediately after every local write (respecting `minIntervalMs`).\n * - `'debounce'` — after `debounceMs` of inactivity following a write.\n * - `'interval'` — on a fixed timer regardless of writes.\n */\nexport type PushMode = 'manual' | 'on-change' | 'debounce' | 'interval'\n\n/**\n * When pull operations are triggered automatically.\n *\n * - `'manual'` — only on explicit `sync.pull()` calls.\n * - `'interval'` — on a fixed `intervalMs` timer.\n * - `'on-focus'` — when the browser tab regains visibility.\n */\nexport type PullMode = 'manual' | 'interval' | 'on-focus'\n\n/**\n * Push half of a sync policy. 
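A hedged literal example (values illustrative):\n *\n * \`\`\`ts\n * const push: PushPolicy = {\n *   mode: 'debounce',\n *   debounceMs: 10_000,\n *   minIntervalMs: 60_000,\n * }\n * \`\`\`\n *\n * 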
Controls the trigger mode and timing guards\n * for outbound sync operations.\n */\nexport interface PushPolicy {\n /** Push trigger mode. */\n readonly mode: PushMode\n /** Debounce delay in ms. Only used when `mode: 'debounce'`. Default: 30_000. */\n readonly debounceMs?: number\n /** Interval in ms between automatic pushes. Used by `'interval'` and as floor for `'debounce'`. */\n readonly intervalMs?: number\n /**\n * Hard floor between pushes regardless of mode. Prevents burst writes\n * from hammering the remote. Default: 0 (no floor).\n */\n readonly minIntervalMs?: number\n /**\n * Force a push on page unload (`pagehide` / `visibilitychange → hidden`)\n * in browsers, `beforeExit` in Node. Default: true for non-manual modes.\n */\n readonly onUnload?: boolean\n}\n\n/**\n * Pull half of a sync policy. Controls when and how often inbound sync\n * operations are triggered.\n */\nexport interface PullPolicy {\n /** Pull trigger mode. */\n readonly mode: PullMode\n /** Interval in ms between automatic pulls. Used by `'interval'` mode. Default: 60_000. */\n readonly intervalMs?: number\n}\n\n/**\n * Combined push + pull sync scheduling policy for a vault.\n *\n * Pass via `createNoydb({ syncPolicy })` to override the default policy\n * derived from the active store type. Pre-built defaults are available\n * as `INDEXED_STORE_POLICY` and `BUNDLE_STORE_POLICY`.\n */\nexport interface SyncPolicy {\n readonly push: PushPolicy\n readonly pull: PullPolicy\n}\n\n// ─── Default policies by store category ─────────────────────────────────\n\n/** Default for per-record stores (DynamoDB, S3, file, IDB). */\nexport const INDEXED_STORE_POLICY: SyncPolicy = {\n push: { mode: 'on-change', minIntervalMs: 0, onUnload: true },\n pull: { mode: 'manual' },\n}\n\n/** Default for bundle stores (Drive, WebDAV, Git). */\nexport const BUNDLE_STORE_POLICY: SyncPolicy = {\n push: { mode: 'debounce', debounceMs: 30_000, minIntervalMs: 120_000, onUnload: true },\n pull: { mode: 'interval', intervalMs: 60_000 },\n}\n\n// ─── Sync scheduler ─────────────────────────────────────────────────────\n\n/**\n * Current operational state of the `SyncScheduler`.\n *\n * - `'idle'` — no pending or active sync operations.\n * - `'pending'` — local writes are queued, waiting for debounce/interval to fire.\n * - `'pushing'` — push in progress.\n * - `'pulling'` — pull in progress.\n * - `'error'` — last sync operation failed; `lastError` holds the cause.\n */\nexport type SyncSchedulerState = 'idle' | 'pending' | 'pushing' | 'pulling' | 'error'\n\n/**\n * Snapshot of the sync scheduler's state, returned by `SyncScheduler.status`.\n * Safe to expose in a reactive UI status indicator.\n */\nexport interface SyncSchedulerStatus {\n readonly state: SyncSchedulerState\n readonly lastPushAt: string | null\n readonly lastPullAt: string | null\n readonly lastError: Error | null\n readonly pendingWrites: number\n}\n\n/**\n * Callbacks injected into `SyncScheduler` by the SyncEngine.\n *\n * The scheduler owns timers and lifecycle hooks; it delegates actual push/pull\n * work to these callbacks to stay decoupled from the sync implementation.\n */\nexport interface SyncSchedulerCallbacks {\n push(): Promise<void>\n pull(): Promise<void>\n getDirtyCount(): number\n}\n\n/**\n * Manages sync timing according to a `SyncPolicy`.\n *\n * The scheduler owns all timers and lifecycle hooks. 
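A hedged wiring sketch (callbacks stubbed):\n *\n * \`\`\`ts\n * const scheduler = new SyncScheduler(BUNDLE_STORE_POLICY, {\n *   push: async () => {},     // send dirty records (stub)\n *   pull: async () => {},     // fetch remote changes (stub)\n *   getDirtyCount: () => 0,\n * })\n * scheduler.start()\n * scheduler.notifyChange()    // after each local write\n * scheduler.stop()            // when closing the vault\n * \`\`\`\n *\n * 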
It delegates actual\n * push/pull work to callbacks provided by the SyncEngine.\n */\nexport class SyncScheduler {\n private readonly policy: SyncPolicy\n private readonly callbacks: SyncSchedulerCallbacks\n\n private _state: SyncSchedulerState = 'idle'\n private _lastPushAt: string | null = null\n private _lastPullAt: string | null = null\n private _lastError: Error | null = null\n private _lastPushTime = 0 // monotonic ms for minIntervalMs enforcement\n\n // Timers\n private debounceTimer: ReturnType<typeof setTimeout> | null = null\n private pushIntervalTimer: ReturnType<typeof setInterval> | null = null\n private pullIntervalTimer: ReturnType<typeof setInterval> | null = null\n\n // Bound handlers for cleanup\n private readonly boundOnVisibilityChange: (() => void) | null = null\n private readonly boundOnBeforeExit: (() => void) | null = null\n private readonly boundOnPageHide: (() => void) | null = null\n\n private started = false\n\n constructor(policy: SyncPolicy, callbacks: SyncSchedulerCallbacks) {\n this.policy = policy\n this.callbacks = callbacks\n\n // Pre-bind handlers\n if (this.shouldRegisterUnload()) {\n this.boundOnVisibilityChange = this.handleVisibilityChange.bind(this)\n this.boundOnPageHide = this.handlePageHide.bind(this)\n this.boundOnBeforeExit = this.handleBeforeExit.bind(this)\n }\n }\n\n /** Current scheduler status snapshot. */\n get status(): SyncSchedulerStatus {\n return {\n state: this._state,\n lastPushAt: this._lastPushAt,\n lastPullAt: this._lastPullAt,\n lastError: this._lastError,\n pendingWrites: this.callbacks.getDirtyCount(),\n }\n }\n\n /** Start the scheduler — registers timers, event listeners. */\n start(): void {\n if (this.started) return\n this.started = true\n\n // Push: interval mode\n if (this.policy.push.mode === 'interval' && this.policy.push.intervalMs) {\n this.pushIntervalTimer = setInterval(() => {\n void this.executePush()\n }, this.policy.push.intervalMs)\n }\n\n // Pull: interval mode\n if (this.policy.pull.mode === 'interval' && this.policy.pull.intervalMs) {\n this.pullIntervalTimer = setInterval(() => {\n void this.executePull()\n }, this.policy.pull.intervalMs)\n }\n\n // Pull: on-focus mode\n if (this.policy.pull.mode === 'on-focus' && typeof document !== 'undefined') {\n document.addEventListener('visibilitychange', this.handleFocusPull)\n }\n\n // Unload hooks\n if (this.shouldRegisterUnload()) {\n if (typeof document !== 'undefined' && this.boundOnVisibilityChange) {\n document.addEventListener('visibilitychange', this.boundOnVisibilityChange)\n }\n if (typeof globalThis.addEventListener === 'function' && this.boundOnPageHide) {\n globalThis.addEventListener('pagehide', this.boundOnPageHide)\n }\n if (typeof process !== 'undefined' && this.boundOnBeforeExit) {\n process.on('beforeExit', this.boundOnBeforeExit)\n }\n }\n }\n\n /** Stop the scheduler — clears timers, removes event listeners. 
*/\n stop(): void {\n if (!this.started) return\n this.started = false\n\n if (this.debounceTimer) {\n clearTimeout(this.debounceTimer)\n this.debounceTimer = null\n }\n if (this.pushIntervalTimer) {\n clearInterval(this.pushIntervalTimer)\n this.pushIntervalTimer = null\n }\n if (this.pullIntervalTimer) {\n clearInterval(this.pullIntervalTimer)\n this.pullIntervalTimer = null\n }\n\n // Focus pull\n if (this.policy.pull.mode === 'on-focus' && typeof document !== 'undefined') {\n document.removeEventListener('visibilitychange', this.handleFocusPull)\n }\n\n // Unload hooks\n if (typeof document !== 'undefined' && this.boundOnVisibilityChange) {\n document.removeEventListener('visibilitychange', this.boundOnVisibilityChange)\n }\n if (typeof globalThis.removeEventListener === 'function' && this.boundOnPageHide) {\n globalThis.removeEventListener('pagehide', this.boundOnPageHide)\n }\n if (typeof process !== 'undefined' && this.boundOnBeforeExit) {\n process.removeListener('beforeExit', this.boundOnBeforeExit)\n }\n }\n\n /**\n * Notify the scheduler that a local write occurred.\n * For `on-change` mode: triggers immediate push (respecting minIntervalMs).\n * For `debounce` mode: resets the debounce timer.\n * For `manual` / `interval`: no-op.\n */\n notifyChange(): void {\n if (!this.started) return\n\n if (this.policy.push.mode === 'on-change') {\n void this.executePush()\n } else if (this.policy.push.mode === 'debounce') {\n this.resetDebounce()\n }\n }\n\n /** Force an immediate push, bypassing the scheduler. */\n async forcePush(): Promise<void> {\n await this.executePush()\n }\n\n /** Force an immediate pull, bypassing the scheduler. */\n async forcePull(): Promise<void> {\n await this.executePull()\n }\n\n // ─── Internal ─────────────────────────────────────────────────────\n\n private async executePush(): Promise<void> {\n if (this._state === 'pushing') return // already in progress\n\n // minIntervalMs enforcement\n const minInterval = this.policy.push.minIntervalMs ?? 0\n if (minInterval > 0) {\n const elapsed = Date.now() - this._lastPushTime\n if (elapsed < minInterval) {\n // Schedule for later if debounce mode\n if (this.policy.push.mode === 'debounce') {\n this.scheduleDebounce(minInterval - elapsed)\n }\n return\n }\n }\n\n // Nothing to push\n if (this.callbacks.getDirtyCount() === 0) {\n this._state = 'idle'\n return\n }\n\n this._state = 'pushing'\n try {\n await this.callbacks.push()\n this._lastPushAt = new Date().toISOString()\n this._lastPushTime = Date.now()\n this._lastError = null\n this._state = this.callbacks.getDirtyCount() > 0 ? 'pending' : 'idle'\n } catch (err) {\n this._lastError = err instanceof Error ? err : new Error(String(err))\n this._state = 'error'\n }\n }\n\n private async executePull(): Promise<void> {\n if (this._state === 'pulling') return\n\n const previousState = this._state\n this._state = 'pulling'\n try {\n await this.callbacks.pull()\n this._lastPullAt = new Date().toISOString()\n this._lastError = null\n this._state = previousState === 'pending' ? 'pending' : 'idle'\n } catch (err) {\n this._lastError = err instanceof Error ? err : new Error(String(err))\n this._state = 'error'\n }\n }\n\n private resetDebounce(): void {\n if (this.debounceTimer) clearTimeout(this.debounceTimer)\n const ms = this.policy.push.debounceMs ?? 
30_000\n this._state = 'pending'\n this.scheduleDebounce(ms)\n }\n\n private scheduleDebounce(ms: number): void {\n if (this.debounceTimer) clearTimeout(this.debounceTimer)\n this.debounceTimer = setTimeout(() => {\n this.debounceTimer = null\n void this.executePush()\n }, ms)\n }\n\n private shouldRegisterUnload(): boolean {\n const onUnload = this.policy.push.onUnload\n if (onUnload !== undefined) return onUnload\n return this.policy.push.mode !== 'manual'\n }\n\n // ─── Event handlers ───────────────────────────────────────────────\n\n private handleVisibilityChange(): void {\n if (typeof document !== 'undefined' && document.visibilityState === 'hidden') {\n this.fireUnloadPush()\n }\n }\n\n private handlePageHide(): void {\n this.fireUnloadPush()\n }\n\n private handleBeforeExit(): void {\n this.fireUnloadPush()\n }\n\n private handleFocusPull = (): void => {\n if (typeof document !== 'undefined' && document.visibilityState === 'visible') {\n void this.executePull()\n }\n }\n\n private fireUnloadPush(): void {\n if (this.callbacks.getDirtyCount() === 0) return\n // Best-effort synchronous-ish push on unload\n void this.callbacks.push().catch(() => {})\n }\n}\n","/**\n * Store router / multiplexer.\n *\n * Dispatches `NoydbStore` operations to different backends based on\n * collection type, record size, record age, collection name, or vault name.\n *\n * ```ts\n * const db = await createNoydb({\n * store: routeStore({\n * default: dynamo({ table: 'myapp' }),\n * blobs: s3Store({ bucket: 'myapp-blobs' }),\n * }),\n * })\n * ```\n *\n * @module\n */\n\nimport type {\n NoydbStore,\n EncryptedEnvelope,\n VaultSnapshot,\n} from '../types.js'\n\n// ─── Internal collection prefixes (duplicated to avoid circular import) ──\n\nconst BLOB_CHUNKS = '_blob_chunks'\nconst BLOB_INDEX = '_blob_index'\nconst BLOB_SLOTS = '_blob_slots_'\nconst BLOB_VERSIONS = '_blob_versions_'\n\n// ─── Options ─────────────────────────────────────────────────────────────\n\n/**\n * Size-tiered blob routing configuration.\n *\n * Routes blob chunks to different stores based on byte size. Small blobs\n * (under `threshold`) stay in the primary or `small` store; large blobs\n * go to `large`. This lets you keep DynamoDB as the default while sending\n * large binary objects to S3.\n */\nexport interface BlobStoreRoute {\n /** Store for small blobs (under threshold). Falls back to `default`. */\n readonly small?: NoydbStore\n /** Store for large blobs (over threshold). */\n readonly large: NoydbStore\n /** Size threshold in bytes. Default: `400 * 1024` (DynamoDB item limit). */\n readonly threshold?: number\n}\n\n/**\n * Blob lifecycle management policies evaluated during `compact()`.\n *\n * Controls orphan cleanup, cold-tier archival, and hard deletion of\n * blobs that are no longer referenced by any record.\n */\nexport interface BlobLifecyclePolicy {\n /** Delete orphan blobs (refCount: 0) after this many days. Default: 7. */\n readonly orphanRetentionDays?: number\n /** Move blobs not accessed in this many days to the cold blob store. */\n readonly archiveAfterDays?: number\n /** Store for archived blobs. Required if archiveAfterDays is set. */\n readonly archiveStore?: NoydbStore\n /** Hard-delete archived blobs after this many days. */\n readonly expireAfterDays?: number\n}\n\n/**\n * Age-based hot/cold tiering configuration.\n *\n * Records whose `_ts` timestamp is older than `coldAfterDays` are migrated\n * to the `cold` store during `compact()`. 
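A hedged config sketch (factories as in the module example; names illustrative):\n *\n * \`\`\`ts\n * const store = routeStore({\n *   default: dynamo({ table: 'myapp' }),\n *   age: { cold: s3Store({ bucket: 'myapp-cold' }), coldAfterDays: 90 },\n * })\n * const migrated = await store.compact('vault-1')\n * \`\`\`\n *\n * 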
Reads transparently fall through\n * to the cold store when the hot store returns null, so callers don't need\n * to know which tier a record lives in.\n */\nexport interface AgeRoute {\n /** Store for records older than the cutoff. */\n readonly cold: NoydbStore\n /** Days after last modification before a record is cold-eligible. */\n readonly coldAfterDays: number\n /**\n * Collections that participate in age tiering.\n * Empty array or omitted = all user collections (excluding `_` prefixed).\n */\n readonly collections?: string[]\n}\n\n/**\n * Options for `routeStore()` — the store multiplexer.\n *\n * At minimum, provide a `default` store. All other fields are optional\n * extensions for specific routing scenarios (blobs → S3, geographic sharding,\n * age-based tiering, etc.).\n */\nexport interface RouteStoreOptions {\n /** Default store for all unmatched operations. */\n readonly default: NoydbStore\n\n /**\n * Route blob chunk data to a separate store.\n * - Pass a `NoydbStore` for simple prefix routing (all chunks → that store).\n * - Pass `{ small?, large, threshold? }` for size-tiered routing.\n */\n readonly blobs?: NoydbStore | BlobStoreRoute\n\n /** Route all blob metadata (index, slots, versions) to the blobs store too. Default: false. */\n readonly routeBlobMeta?: boolean\n\n /** Route specific user collections to dedicated stores. */\n readonly routes?: Record<string, NoydbStore>\n\n /** Route by vault name (prefix patterns, e.g. `'EU-'`). */\n readonly vaultRoutes?: Record<string, NoydbStore>\n\n /**\n * Age-based tiering: records older than `coldAfterDays` are read from\n * the cold store. A background `compact()` method migrates them.\n */\n readonly age?: AgeRoute\n\n /**\n * Content-aware blob routing.\n * Route blob chunks by MIME type glob pattern. The MIME type is stored\n * in `BlobObject` and matched at read time via `storeHint`.\n */\n readonly blobRoutes?: Record<string, NoydbStore>\n\n /**\n * Blob lifecycle policies.\n * Evaluated during `compact()`.\n */\n readonly blobLifecycle?: BlobLifecyclePolicy\n\n /**\n * Quota-aware overflow.\n * When the default store's usage exceeds the threshold, new writes\n * overflow to the specified store.\n */\n readonly overflow?: NoydbStore\n\n /**\n * Quota threshold (0-1). Default: 0.8 (overflow at 80% usage).\n * Only effective when `overflow` is set.\n */\n readonly quotaThreshold?: number\n}\n\n// ─── Types ───────────────────────────────────────────────────────────────\n\n/**\n * Named route that can be overridden or suspended at runtime.\n *\n * Built-in names: `'default'`, `'blobs'`, `'cold'`.\n * Custom names: any collection name from `routes`, any vault prefix from\n * `vaultRoutes`, or any sync target label.\n */\nexport type OverrideTarget =\n | 'default'\n | 'blobs'\n | 'cold'\n | (string & {}) // named collection route, vault route, or sync target label\n\n/**\n * Options for `RoutedNoydbStore.override()`.\n *\n * Controls whether the new store is pre-populated with data from the\n * original store before the switch takes effect.\n */\nexport interface OverrideOptions {\n /**\n * Hydrate the override store from the original before activating.\n * - `true` — copy all data for all vaults.\n * - `string[]` — copy only named collections.\n * Makes `override()` async — returns a Promise.\n */\n hydrate?: boolean | string[]\n}\n\n/**\n * Options for `RoutedNoydbStore.suspend()`.\n *\n * A suspended route becomes a null store: reads return null/[], writes\n * are dropped (or buffered if `queue: true`). 
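A hedged sketch:\n *\n * \`\`\`ts\n * store.suspend('blobs', { queue: true, maxQueueSize: 5_000 })\n * // ...maintenance window...\n * const replayed = await store.resume('blobs') // replays queued writes\n * \`\`\`\n *\n * 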
Useful for maintenance\n * windows or restricted-network scenarios.\n */\nexport interface SuspendOptions {\n /**\n * Buffer write operations during suspension. On `resume()`, queued\n * writes are replayed against the restored store.\n */\n queue?: boolean\n /**\n * Maximum queued operations. When exceeded, oldest entries are dropped.\n * Default: 10_000.\n */\n maxQueueSize?: number\n}\n\n/** Queued write operation recorded during suspension. */\ninterface QueuedWrite {\n method: 'put' | 'delete'\n vault: string\n collection: string\n id: string\n envelope?: EncryptedEnvelope\n expectedVersion?: number\n}\n\n/**\n * Snapshot of the current override and suspend state of a `RoutedNoydbStore`.\n * Returned by `routeStatus()` for diagnostics and health dashboards.\n */\nexport interface RouteStatus {\n /** Active overrides: route name → override store name. */\n readonly overrides: Record<string, string>\n /** Currently suspended routes. */\n readonly suspended: string[]\n /** Queued writes per suspended route (only for routes suspended with `queue: true`). */\n readonly queued: Record<string, number>\n}\n\n/**\n * Extended `NoydbStore` returned by `routeStore()`.\n *\n * Satisfies the full `NoydbStore` contract plus adds runtime control\n * methods for overriding, suspending, and inspecting routes.\n */\nexport interface RoutedNoydbStore extends NoydbStore {\n /**\n * Migrate records older than the age cutoff from the hot store to the\n * cold store. Only applies when `age` is configured. Returns the number\n * of records migrated.\n */\n compact(vault: string): Promise<number>\n\n /**\n * Override a named route at runtime.\n *\n * The override persists until `clearOverride()` is called or the\n * instance is closed. In-flight operations complete on the original\n * store; new operations use the override.\n *\n * Options:\n * - `hydrate: true` — async: copies all data from the original store\n * into the override before activating the switch.\n * - `hydrate: ['invoices', 'clients']` — copies only named collections.\n *\n * Use cases:\n * - Shared device: `await store.override('default', memory(), { hydrate: true })`\n * - Restricted network: `store.override('blobs', localFile(...))`\n */\n override(route: OverrideTarget, store: NoydbStore, opts?: OverrideOptions): void | Promise<void>\n\n /** Clear a runtime override, reverting to the original store. */\n clearOverride(route: OverrideTarget): void\n\n /**\n * Suspend a route entirely. Operations to suspended stores become\n * no-ops (puts silently dropped, gets return null, lists return []).\n *\n * Options:\n * - `queue: true` — buffer write operations (put/delete) during\n * suspension. When `resume()` is called, queued writes are replayed\n * against the restored store.\n *\n * Returns a `SuspendHandle` when `queue: true`, for inspecting queue state.\n */\n suspend(route: OverrideTarget, opts?: SuspendOptions): void\n\n /**\n * Resume a previously suspended route.\n * If the route was suspended with `queue: true`, replays queued writes.\n * Returns the number of replayed operations.\n */\n resume(route: OverrideTarget): Promise<number>\n\n /** Snapshot the current override/suspend state for diagnostics. 
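An illustrative\n * shape (hypothetical values):\n * \`{ overrides: { default: 'memory' }, suspended: ['blobs'], queued: { blobs: 12 } }\`.\n 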
*/\n routeStatus(): RouteStatus\n}\n\n// ─── Implementation ──────────────────────────────────────────────────────\n\n/**\n * Create a store multiplexer that dispatches operations to different backends\n * based on collection type, record size, record age, vault prefix, or\n * runtime overrides.\n *\n * ```ts\n * const store = routeStore({\n * default: dynamo({ table: 'myapp' }),\n * blobs: s3({ bucket: 'myapp-blobs' }),\n * routes: { auditLog: s3({ bucket: 'myapp-audit' }) },\n * })\n * ```\n *\n * The returned store satisfies `NoydbStore` and can be passed directly to\n * `createNoydb({ store })`. It also exposes additional methods\n * (`override`, `suspend`, `resume`, `routeStatus`, `compact`) for runtime\n * control and maintenance.\n */\nexport function routeStore(opts: RouteStoreOptions): RoutedNoydbStore {\n const primary = opts.default\n\n // Resolve blob store config\n const blobsIsSimple = opts.blobs && 'get' in opts.blobs\n const simpleBlobStore = blobsIsSimple ? opts.blobs : undefined\n const tieredBlobs = !blobsIsSimple ? opts.blobs : undefined\n const blobThreshold = tieredBlobs?.threshold ?? 400 * 1024\n\n // Collect all stores for loadAll/saveAll/listVaults composition\n const allStores = new Set<NoydbStore>([primary])\n if (simpleBlobStore) allStores.add(simpleBlobStore)\n if (tieredBlobs?.large) allStores.add(tieredBlobs.large)\n if (tieredBlobs?.small) allStores.add(tieredBlobs.small)\n if (opts.age?.cold) allStores.add(opts.age.cold)\n if (opts.routes) for (const s of Object.values(opts.routes)) allStores.add(s)\n if (opts.vaultRoutes) for (const s of Object.values(opts.vaultRoutes)) allStores.add(s)\n if (opts.blobRoutes) for (const s of Object.values(opts.blobRoutes)) allStores.add(s)\n if (opts.overflow) allStores.add(opts.overflow)\n if (opts.blobLifecycle?.archiveStore) allStores.add(opts.blobLifecycle.archiveStore)\n\n // ── Runtime override / suspend state ──────────────────\n\n const overrides = new Map<string, NoydbStore>()\n const suspended = new Set<string>()\n const writeQueues = new Map<string, { writes: QueuedWrite[]; maxSize: number }>()\n\n /** Null store: silently absorbs all operations when a route is suspended. */\n const NULL_STORE: NoydbStore = {\n name: 'suspended',\n async get() { return null },\n async put() {},\n async delete() {},\n async list() { return [] },\n async loadAll() { return {} },\n async saveAll() {},\n }\n\n /**\n * Map a resolved route to its canonical name for override/suspend lookup.\n * Vault routes use the prefix, collection routes use the collection name,\n * blob route is 'blobs', cold route is 'cold', everything else is 'default'.\n */\n function routeNameFor(vault: string, collection: string): string {\n if (opts.vaultRoutes) {\n for (const prefix of Object.keys(opts.vaultRoutes)) {\n if (vault.startsWith(prefix)) return prefix\n }\n }\n if (opts.routes && !collection.startsWith('_') && opts.routes[collection]) {\n return collection\n }\n if (isBlobChunks(collection) && (simpleBlobStore || tieredBlobs)) return 'blobs'\n if (opts.routeBlobMeta && isBlobMeta(collection) && (simpleBlobStore || tieredBlobs)) return 'blobs'\n if (opts.age && !collection.startsWith('_')) {\n // We don't name age 'cold' here — cold is a fallback, not a primary route\n }\n return 'default'\n }\n\n // ── Quota-aware overflow (E8) ───────────────────────────────────────\n\n const quotaExceeded = false\n\n /** Resolve the static (non-overridden) store for a given route name. 
*/\n function resolveOriginalStore(route: string): NoydbStore {\n if (route === 'blobs') return simpleBlobStore ?? tieredBlobs?.large ?? primary\n if (route === 'cold') return opts.age?.cold ?? primary\n if (opts.routes?.[route]) return opts.routes[route]\n if (opts.vaultRoutes?.[route]) return opts.vaultRoutes[route]\n return primary\n }\n\n /**\n * Queue a write operation if the route is suspended with queue: true.\n * Returns true if queued (caller should skip the actual write).\n */\n function maybeQueueWrite(\n routeName: string,\n method: 'put' | 'delete',\n vault: string,\n collection: string,\n id: string,\n envelope?: EncryptedEnvelope,\n expectedVersion?: number,\n ): boolean {\n if (!suspended.has(routeName)) return false\n const queue = writeQueues.get(routeName)\n if (!queue) return false // suspended but no queue — NullStore behavior\n\n // Evict oldest if at capacity\n if (queue.writes.length >= queue.maxSize) {\n queue.writes.shift()\n }\n queue.writes.push({\n method, vault, collection, id,\n ...(envelope !== undefined ? { envelope } : {}),\n ...(expectedVersion !== undefined ? { expectedVersion } : {}),\n })\n return true\n }\n\n // ── Routing logic ──────────────────────────────────────────────────\n\n function isBlobChunks(collection: string): boolean {\n return collection === BLOB_CHUNKS\n }\n\n function isBlobMeta(collection: string): boolean {\n return collection === BLOB_INDEX\n || collection.startsWith(BLOB_SLOTS)\n || collection.startsWith(BLOB_VERSIONS)\n }\n\n function isInternal(collection: string): boolean {\n return collection.startsWith('_')\n }\n\n /**\n * Resolve the store for a given vault + collection.\n * Resolution order: overrides/suspend → vaultRoutes → routes → blobs → default\n */\n function storeFor(vault: string, collection: string): NoydbStore {\n const rName = routeNameFor(vault, collection)\n\n // 0. Runtime override / suspend check\n if (suspended.has(rName)) return NULL_STORE\n if (overrides.has(rName)) return overrides.get(rName)!\n\n // 1. Vault-based geographic routing\n if (opts.vaultRoutes) {\n for (const [prefix, store] of Object.entries(opts.vaultRoutes)) {\n if (vault.startsWith(prefix)) return store\n }\n }\n\n // 2. Per-collection routing (user collections only)\n if (opts.routes && !isInternal(collection) && opts.routes[collection]) {\n return opts.routes[collection]\n }\n\n // 3. Blob chunk routing (simple — no size tiering at the store level)\n if (isBlobChunks(collection)) {\n if (simpleBlobStore) return simpleBlobStore\n // Size-tiered: can't determine here without the envelope.\n // Default to large store — BlobSet will use storeHint for reads.\n if (tieredBlobs) return tieredBlobs.large\n }\n\n // 4. Blob metadata routing\n if (opts.routeBlobMeta && isBlobMeta(collection)) {\n if (simpleBlobStore) return simpleBlobStore\n if (tieredBlobs) return tieredBlobs.large\n }\n\n // 5. Quota-aware overflow (E8)\n if (quotaExceeded && opts.overflow) return opts.overflow\n\n // 6. Default\n return primary\n }\n\n /**\n * For size-tiered blob routing: pick store based on envelope data size.\n */\n function blobStoreForSize(dataSize: number): NoydbStore {\n if (!tieredBlobs) return simpleBlobStore ?? primary\n if (dataSize <= blobThreshold) {\n return tieredBlobs.small ?? 
primary\n }\n return tieredBlobs.large\n }\n\n /**\n * Age routing: check if a record is cold based on `_ts`.\n */\n function isCold(collection: string, envelope: EncryptedEnvelope): boolean {\n if (!opts.age) return false\n if (isInternal(collection)) return false\n if (opts.age.collections && opts.age.collections.length > 0) {\n if (!opts.age.collections.includes(collection)) return false\n }\n const cutoff = Date.now() - opts.age.coldAfterDays * 24 * 60 * 60 * 1000\n const ts = new Date(envelope._ts).getTime()\n return ts < cutoff\n }\n\n // ── Store methods ──────────────────────────────────────────────────\n\n const store: RoutedNoydbStore = {\n name: buildName(),\n\n async get(vault, collection, id) {\n const s = storeFor(vault, collection)\n const result = await s.get(vault, collection, id)\n\n // Age tiering: if hot store returned null, try cold\n if (result === null && opts.age && !isInternal(collection)) {\n if (!opts.age.collections?.length || opts.age.collections.includes(collection)) {\n return opts.age.cold.get(vault, collection, id)\n }\n }\n\n return result\n },\n\n async put(vault, collection, id, envelope, expectedVersion) {\n // Write-behind queue: buffer if suspended with queue option\n const rn = routeNameFor(vault, collection)\n if (maybeQueueWrite(rn, 'put', vault, collection, id, envelope, expectedVersion)) return\n\n // Size-tiered blob routing\n if (isBlobChunks(collection) && tieredBlobs) {\n const dataSize = envelope._data.length\n const s = blobStoreForSize(dataSize)\n return s.put(vault, collection, id, envelope, expectedVersion)\n }\n\n const s = storeFor(vault, collection)\n\n // Age tiering: if a cold record is being updated, it goes to hot.\n if (opts.age && !isInternal(collection)) {\n opts.age.cold.delete(vault, collection, id).catch(() => {})\n }\n\n return s.put(vault, collection, id, envelope, expectedVersion)\n },\n\n async delete(vault, collection, id) {\n // Write-behind queue: buffer if suspended with queue option\n const rn = routeNameFor(vault, collection)\n if (maybeQueueWrite(rn, 'delete', vault, collection, id)) return\n\n const s = storeFor(vault, collection)\n await s.delete(vault, collection, id)\n\n // Also delete from cold store if age-tiered\n if (opts.age && !isInternal(collection)) {\n await opts.age.cold.delete(vault, collection, id).catch(() => {})\n }\n },\n\n async list(vault, collection) {\n const s = storeFor(vault, collection)\n const ids = await s.list(vault, collection)\n\n // Age tiering: merge IDs from cold store, deduplicate\n if (opts.age && !isInternal(collection)) {\n if (!opts.age.collections?.length || opts.age.collections.includes(collection)) {\n const coldIds = await opts.age.cold.list(vault, collection).catch(() => [] as string[])\n if (coldIds.length > 0) {\n const merged = new Set(ids)\n for (const id of coldIds) merged.add(id)\n return [...merged]\n }\n }\n }\n\n return ids\n },\n\n async loadAll(vault) {\n // Query all distinct stores in parallel, merge snapshots\n const stores = getStoresForVault(vault)\n const snapshots = await Promise.all(\n stores.map(s => s.loadAll(vault).catch(() => ({}) as VaultSnapshot)),\n )\n return mergeSnapshots(snapshots)\n },\n\n async saveAll(vault, data) {\n // Partition snapshot by routing rules\n const partitioned = new Map<NoydbStore, VaultSnapshot>()\n\n for (const [collection, records] of Object.entries(data)) {\n const s = storeFor(vault, collection)\n if (!partitioned.has(s)) partitioned.set(s, {})\n partitioned.get(s)![collection] = records\n }\n\n await Promise.all(\n 
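// fan out: one saveAll per distinct backend, in parallel\n 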
[...partitioned.entries()].map(([s, snap]) => s.saveAll(vault, snap)),\n )\n },\n\n async compact(vault) {\n if (!opts.age) return 0\n let migrated = 0\n const collections = opts.age.collections?.length\n ? opts.age.collections\n : await primary.list(vault, '').catch(() => [] as string[])\n\n // For each age-eligible collection, scan hot store for cold records\n for (const collection of collections) {\n const ids = await primary.list(vault, collection).catch(() => [] as string[])\n for (const id of ids) {\n const envelope = await primary.get(vault, collection, id)\n if (!envelope) continue\n if (isCold(collection, envelope)) {\n // Write to cold, then delete from hot\n await opts.age.cold.put(vault, collection, id, envelope)\n await primary.delete(vault, collection, id)\n migrated++\n }\n }\n }\n\n return migrated\n },\n\n // ── Runtime override / suspend ──────────────────────\n\n override(route: OverrideTarget, overrideStore: NoydbStore, overrideOpts?: OverrideOptions): void | Promise<void> {\n if (overrideOpts?.hydrate) {\n // Async hydration: copy data from current store, then activate override\n return (async () => {\n // Hydration: caller should copy data from the original store to\n // overrideStore before calling override() with { hydrate: true }.\n // The route is activated immediately after.\n overrides.set(route, overrideStore)\n })()\n }\n overrides.set(route, overrideStore)\n },\n\n clearOverride(route: OverrideTarget): void {\n overrides.delete(route)\n },\n\n suspend(route: OverrideTarget, suspendOpts?: SuspendOptions): void {\n suspended.add(route)\n if (suspendOpts?.queue) {\n writeQueues.set(route, {\n writes: [],\n maxSize: suspendOpts.maxQueueSize ?? 10_000,\n })\n }\n },\n\n async resume(route: OverrideTarget): Promise<number> {\n suspended.delete(route)\n const queue = writeQueues.get(route)\n if (!queue || queue.writes.length === 0) {\n writeQueues.delete(route)\n return 0\n }\n\n // Replay queued writes against the now-active store\n let replayed = 0\n const target = overrides.get(route) ?? resolveOriginalStore(route)\n for (const write of queue.writes) {\n try {\n if (write.method === 'put' && write.envelope) {\n await target.put(write.vault, write.collection, write.id, write.envelope, write.expectedVersion)\n } else if (write.method === 'delete') {\n await target.delete(write.vault, write.collection, write.id)\n }\n replayed++\n } catch {\n // Best-effort replay — conflicts are expected after suspension\n }\n }\n\n writeQueues.delete(route)\n return replayed\n },\n\n routeStatus(): RouteStatus {\n const ov: Record<string, string> = {}\n for (const [k, v] of overrides) ov[k] = v.name ?? 
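/* store did not declare a name */ 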
'unnamed'\n const q: Record<string, number> = {}\n for (const [k, v] of writeQueues) q[k] = v.writes.length\n return { overrides: ov, suspended: [...suspended], queued: q }\n },\n }\n\n // ── Optional method forwarding ─────────────────────────────────────\n\n // Forward listVaults from all stores, deduplicated\n if (anyHas('listVaults')) {\n store.listVaults = async () => {\n const results = await Promise.all(\n [...allStores]\n .filter(s => s.listVaults !== undefined)\n .map(s => s.listVaults!().catch(() => [] as string[])),\n )\n return [...new Set(results.flat())]\n }\n }\n\n // Forward ping — succeed if any store responds\n if (anyHas('ping')) {\n store.ping = async () => {\n const results = await Promise.all(\n [...allStores]\n .filter(s => s.ping !== undefined)\n .map(s => s.ping!().catch(() => false)),\n )\n return results.some(Boolean)\n }\n }\n\n return store\n\n // ── Helpers ────────────────────────────────────────────────────────\n\n function buildName(): string {\n const names = [...allStores].map(s => s.name ?? '?').join('+')\n return `route(${names})`\n }\n\n function anyHas(method: string): boolean {\n return [...allStores].some(s => (s as unknown as Record<string, unknown>)[method])\n }\n\n function getStoresForVault(vault: string): NoydbStore[] {\n const stores = new Set<NoydbStore>()\n\n // Check vault routes first\n if (opts.vaultRoutes) {\n for (const [prefix, s] of Object.entries(opts.vaultRoutes)) {\n if (vault.startsWith(prefix)) {\n stores.add(s)\n return [...stores] // vault-routed: only use that store\n }\n }\n }\n\n // Default topology: primary + blob store + cold store\n stores.add(primary)\n if (simpleBlobStore) stores.add(simpleBlobStore)\n if (tieredBlobs?.large) stores.add(tieredBlobs.large)\n if (tieredBlobs?.small && tieredBlobs.small !== primary) stores.add(tieredBlobs.small)\n if (opts.age?.cold) stores.add(opts.age.cold)\n if (opts.routes) {\n for (const s of Object.values(opts.routes)) stores.add(s)\n }\n\n return [...stores]\n }\n}\n\n// ─── Snapshot merge ──────────────────────────────────────────────────────\n\nfunction mergeSnapshots(snapshots: VaultSnapshot[]): VaultSnapshot {\n const merged: VaultSnapshot = {}\n\n for (const snap of snapshots) {\n for (const [collection, records] of Object.entries(snap)) {\n if (!merged[collection]) {\n merged[collection] = { ...records }\n continue\n }\n for (const [id, envelope] of Object.entries(records)) {\n const existing = merged[collection][id]\n // Last-write-wins by _ts\n if (!existing || envelope._ts >= existing._ts) {\n merged[collection][id] = envelope\n }\n }\n }\n }\n\n return merged\n}\n","/**\n * Store middleware — composable interceptors for NoydbStore.\n *\n * ```ts\n * const resilient = wrapStore(\n * dynamo({ table: 'myapp' }),\n * withRetry({ maxRetries: 3 }),\n * withLogging({ level: 'debug' }),\n * withCache({ ttlMs: 60_000 }),\n * )\n * ```\n *\n * Each middleware is `(next: NoydbStore) => NoydbStore`. They compose\n * left-to-right: first middleware is outermost (processes requests first,\n * responses last).\n *\n * @module\n */\n\nimport type { NoydbStore, EncryptedEnvelope } from '../types.js'\n\n// ─── Core composition ───────────────────────────────────────────────────\n\n/**\n * A store middleware function.\n *\n * Takes the next store in the chain and returns a wrapped store. 
Middlewares\n * compose left-to-right via `wrapStore()`: the first argument is outermost\n * (first to intercept requests, last to process responses).\n *\n * ```ts\n * const mw: StoreMiddleware = (next) => ({\n * ...next,\n * async get(vault, collection, id) {\n * console.log('get', id)\n * return next.get(vault, collection, id)\n * },\n * })\n * ```\n */\nexport type StoreMiddleware = (next: NoydbStore) => NoydbStore\n\n/**\n * Wrap a store with one or more middlewares. Middlewares compose left-to-right.\n */\nexport function wrapStore(store: NoydbStore, ...middlewares: StoreMiddleware[]): NoydbStore {\n let result = store\n // Apply right-to-left so the first middleware is the outermost wrapper\n for (let i = middlewares.length - 1; i >= 0; i--) {\n result = middlewares[i]!(result)\n }\n return result\n}\n\n// ─── withRetry ──────────────────────────────────────────────────────────\n\n/** Options for `withRetry()`. */\nexport interface RetryOptions {\n /** Maximum retry attempts. Default: 3. */\n maxRetries?: number\n /** Base backoff delay in ms. Default: 500. */\n backoffMs?: number\n /** Jitter factor (0-1). Adds random delay up to `backoffMs * jitter`. Default: 0.3. */\n jitter?: number\n /** Only retry on these error codes. Default: retry all errors. */\n retryOn?: string[]\n}\n\n/**\n * Middleware that retries failed store operations with exponential backoff\n * and optional jitter. Useful for transient network errors on DynamoDB/S3.\n *\n * ```ts\n * wrapStore(dynamo({ table: 'myapp' }), withRetry({ maxRetries: 5, retryOn: ['NETWORK_ERROR'] }))\n * ```\n */\nexport function withRetry(opts: RetryOptions = {}): StoreMiddleware {\n const maxRetries = opts.maxRetries ?? 3\n const backoffMs = opts.backoffMs ?? 500\n const jitter = opts.jitter ?? 0.3\n const retryOn = opts.retryOn ? new Set(opts.retryOn) : null\n\n function shouldRetry(err: unknown): boolean {\n if (!retryOn) return true\n if (err && typeof err === 'object' && 'code' in err) {\n return retryOn.has((err as { code: string }).code)\n }\n return true\n }\n\n async function retryable<T>(fn: () => Promise<T>): Promise<T> {\n let lastError: unknown\n for (let attempt = 0; attempt <= maxRetries; attempt++) {\n try {\n return await fn()\n } catch (err) {\n lastError = err\n if (attempt >= maxRetries || !shouldRetry(err)) throw err\n const delay = backoffMs * Math.pow(2, attempt) * (1 + Math.random() * jitter)\n await new Promise(r => setTimeout(r, delay))\n }\n }\n throw lastError\n }\n\n return (next) => ({\n ...next,\n name: next.name ? `retry(${next.name})` : 'retry',\n get: (v, c, id) => retryable(() => next.get(v, c, id)),\n put: (v, c, id, env, ev) => retryable(() => next.put(v, c, id, env, ev)),\n delete: (v, c, id) => retryable(() => next.delete(v, c, id)),\n list: (v, c) => retryable(() => next.list(v, c)),\n loadAll: (v) => retryable(() => next.loadAll(v)),\n saveAll: (v, d) => retryable(() => next.saveAll(v, d)),\n })\n}\n\n// ─── withLogging ────────────────────────────────────────────────────────\n\n/** Log level for `withLogging()`. Maps to standard console method names. */\nexport type LogLevel = 'debug' | 'info' | 'warn' | 'error'\n\n/** Options for `withLogging()`. */\nexport interface LoggingOptions {\n /** Minimum log level. Default: 'info'. */\n level?: LogLevel\n /** Custom logger. Default: console. 
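Any object exposing\n * `debug`/`info`/`warn`/`error` methods fits, so pino- or\n * winston-style loggers drop in directly.\n 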
*/\n logger?: {\n debug(msg: string, ...args: unknown[]): void\n info(msg: string, ...args: unknown[]): void\n warn(msg: string, ...args: unknown[]): void\n error(msg: string, ...args: unknown[]): void\n }\n /** Log the data payload (envelope contents). Default: false (privacy). */\n logData?: boolean\n}\n\nconst LOG_LEVELS: Record<LogLevel, number> = { debug: 0, info: 1, warn: 2, error: 3 }\n\n/**\n * Middleware that logs every store operation with its method name, arguments,\n * and elapsed duration. Privacy-safe by default: envelope payloads are not\n * logged unless `logData: true` is set.\n */\nexport function withLogging(opts: LoggingOptions = {}): StoreMiddleware {\n const minLevel = LOG_LEVELS[opts.level ?? 'info']\n const logger = opts.logger ?? console\n const logData = opts.logData ?? false\n\n function log(level: LogLevel, method: string, args: Record<string, unknown>, durationMs?: number) {\n if (LOG_LEVELS[level] < minLevel) return\n const parts = [`[noydb:${method}]`, ...Object.entries(args).map(([k, v]) => `${k}=${String(v)}`)]\n if (durationMs !== undefined) parts.push(`${durationMs}ms`)\n logger[level](parts.join(' '))\n }\n\n function timed<T>(method: string, args: Record<string, unknown>, fn: () => Promise<T>): Promise<T> {\n const start = Date.now()\n return fn().then(\n (result) => {\n log('debug', method, args, Date.now() - start)\n return result\n },\n (err) => {\n log('error', method, { ...args, error: (err as Error).message }, Date.now() - start)\n throw err\n },\n )\n }\n\n return (next) => ({\n ...next,\n name: next.name ? `log(${next.name})` : 'log',\n get: (v, c, id) => timed('get', { vault: v, collection: c, id }, () => next.get(v, c, id)),\n put: (v, c, id, env, ev) => timed('put', {\n vault: v, collection: c, id, version: env._v,\n ...(logData ? { data: env._data.slice(0, 40) + '...' } : {}),\n }, () => next.put(v, c, id, env, ev)),\n delete: (v, c, id) => timed('delete', { vault: v, collection: c, id }, () => next.delete(v, c, id)),\n list: (v, c) => timed('list', { vault: v, collection: c }, () => next.list(v, c)),\n loadAll: (v) => timed('loadAll', { vault: v }, () => next.loadAll(v)),\n saveAll: (v, d) => timed('saveAll', { vault: v }, () => next.saveAll(v, d)),\n })\n}\n\n// ─── withMetrics ────────────────────────────────────────────────────────\n\n/**\n * Data emitted to `MetricsOptions.onOperation` after every store call.\n *\n * Carries method name, vault/collection/id context, elapsed duration,\n * and success/failure status. Wire this into your metrics pipeline\n * (DataDog, Prometheus, CloudWatch) to get per-operation latency histograms.\n */\nexport interface StoreOperation {\n method: 'get' | 'put' | 'delete' | 'list' | 'loadAll' | 'saveAll'\n vault: string\n collection?: string\n id?: string\n durationMs: number\n success: boolean\n error?: Error\n}\n\n/** Options for `withMetrics()`. */\nexport interface MetricsOptions {\n /** Called after every store operation. */\n onOperation: (op: StoreOperation) => void\n}\n\n/**\n * Middleware that calls `onOperation` after every store method with timing\n * and success/failure data. 
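A sketch (the\n * `latencyMs` histogram sink is illustrative):\n *\n * ```ts\n * withMetrics({\n * onOperation: (op) =>\n * latencyMs.observe({ method: op.method, ok: String(op.success) }, op.durationMs),\n * })\n * ```\n *\n * 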
Designed for low-overhead integration with\n * metrics systems — the callback is synchronous and fire-and-forget.\n */\nexport function withMetrics(opts: MetricsOptions): StoreMiddleware {\n function tracked<T>(\n method: StoreOperation['method'],\n vault: string,\n fn: () => Promise<T>,\n collection?: string,\n id?: string,\n ): Promise<T> {\n const start = Date.now()\n return fn().then(\n (result) => {\n opts.onOperation({\n method, vault,\n ...(collection !== undefined ? { collection } : {}),\n ...(id !== undefined ? { id } : {}),\n durationMs: Date.now() - start, success: true,\n })\n return result\n },\n (err) => {\n opts.onOperation({\n method, vault,\n ...(collection !== undefined ? { collection } : {}),\n ...(id !== undefined ? { id } : {}),\n durationMs: Date.now() - start, success: false, error: err as Error,\n })\n throw err\n },\n )\n }\n\n return (next) => ({\n ...next,\n name: next.name ? `metrics(${next.name})` : 'metrics',\n get: (v, c, id) => tracked('get', v, () => next.get(v, c, id), c, id),\n put: (v, c, id, env, ev) => tracked('put', v, () => next.put(v, c, id, env, ev), c, id),\n delete: (v, c, id) => tracked('delete', v, () => next.delete(v, c, id), c, id),\n list: (v, c) => tracked('list', v, () => next.list(v, c), c),\n loadAll: (v) => tracked('loadAll', v, () => next.loadAll(v)),\n saveAll: (v, d) => tracked('saveAll', v, () => next.saveAll(v, d)),\n })\n}\n\n// ─── withCircuitBreaker ─────────────────────────────────────────────────\n\n/**\n * Options for `withCircuitBreaker()`.\n *\n * The circuit breaker moves through three states:\n * - `closed`: normal operation.\n * - `open`: store is failing; all calls return fallback values immediately.\n * - `half-open`: one probe call after `resetTimeoutMs` — success closes, failure re-opens.\n */\nexport interface CircuitBreakerOptions {\n /** Number of consecutive failures before opening the circuit. Default: 5. */\n failureThreshold?: number\n /** Time in ms before attempting to half-open the circuit. Default: 30_000. */\n resetTimeoutMs?: number\n /** Called when the circuit opens (store becomes unavailable). */\n onOpen?: () => void\n /** Called when the circuit closes (store recovers). */\n onClose?: () => void\n}\n\ntype CircuitState = 'closed' | 'open' | 'half-open'\n\n/**\n * Middleware that implements the circuit-breaker pattern.\n *\n * When the wrapped store fails `failureThreshold` consecutive times, the\n * circuit opens: subsequent calls return safe fallback values (`null`, `[]`,\n * `{}`) without hitting the store. After `resetTimeoutMs` the circuit\n * half-opens and allows one probe — success closes the circuit, failure\n * keeps it open. Pair with `withRetry` to handle transient errors before\n * they trip the circuit.\n */\nexport function withCircuitBreaker(opts: CircuitBreakerOptions = {}): StoreMiddleware {\n const threshold = opts.failureThreshold ?? 5\n const resetMs = opts.resetTimeoutMs ?? 
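/* 30s cool-down before the half-open probe */ 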
30_000\n\n let state: CircuitState = 'closed'\n let failures = 0\n let lastFailureTime = 0\n\n function recordSuccess(): void {\n if (state === 'half-open') {\n state = 'closed'\n failures = 0\n opts.onClose?.()\n }\n failures = 0\n }\n\n function recordFailure(): void {\n failures++\n lastFailureTime = Date.now()\n if (failures >= threshold && state === 'closed') {\n state = 'open'\n opts.onOpen?.()\n }\n }\n\n function canAttempt(): boolean {\n if (state === 'closed') return true\n if (state === 'open') {\n if (Date.now() - lastFailureTime >= resetMs) {\n state = 'half-open'\n return true\n }\n return false\n }\n // half-open: allow one attempt\n return true\n }\n\n async function guarded<T>(fn: () => Promise<T>, fallback: T): Promise<T> {\n if (!canAttempt()) return fallback\n try {\n const result = await fn()\n recordSuccess()\n return result\n } catch (err) {\n recordFailure()\n throw err\n }\n }\n\n return (next) => ({\n ...next,\n name: next.name ? `cb(${next.name})` : 'cb',\n get: (v, c, id) => guarded(() => next.get(v, c, id), null),\n put: (v, c, id, env, ev) => guarded(() => next.put(v, c, id, env, ev), undefined),\n delete: (v, c, id) => guarded(() => next.delete(v, c, id), undefined),\n list: (v, c) => guarded(() => next.list(v, c), []),\n loadAll: (v) => guarded(() => next.loadAll(v), {}),\n saveAll: (v, d) => guarded(() => next.saveAll(v, d), undefined),\n })\n}\n\n// ─── withCache (read-through) ───────────────────────────────────────────\n\n/**\n * Options for `withCache()`.\n *\n * The cache is a read-through LRU that caches individual record fetches\n * (`get`). Writes (`put`, `delete`) invalidate the relevant cache entry\n * immediately. `list`, `loadAll`, and `saveAll` bypass the cache.\n *\n * Named `StoreCacheOptions` to distinguish from `CacheOptions` in\n * `@noy-db/hub/collection`, which controls the in-memory decrypted-record LRU.\n */\nexport interface StoreCacheOptions {\n /** Maximum cached entries. Default: 500. */\n maxEntries?: number\n /** Cache TTL in ms. Default: 60_000 (1 minute). 0 = no expiry. */\n ttlMs?: number\n}\n\ninterface CacheEntry {\n envelope: EncryptedEnvelope | null\n cachedAt: number\n}\n\n/**\n * Middleware that adds a read-through LRU cache for `get()` calls.\n *\n * Reduces latency for frequently-read records (e.g. lookup tables, user\n * profiles) by serving repeat reads from memory. Because NOYDB records are\n * encrypted at rest, caching envelopes is safe — the cache holds ciphertext,\n * not plaintext. For write-heavy workloads, the cache provides little benefit\n * and should be omitted to avoid the invalidation overhead.\n */\nexport function withCache(opts: StoreCacheOptions = {}): StoreMiddleware {\n const maxEntries = opts.maxEntries ?? 500\n const ttlMs = opts.ttlMs ?? 
60_000\n\n // LRU cache: Map preserves insertion order, we delete+re-insert on access\n const cache = new Map<string, CacheEntry>()\n\n function cacheKey(vault: string, collection: string, id: string): string {\n return `${vault}\\0${collection}\\0${id}`\n }\n\n function getFromCache(key: string): EncryptedEnvelope | null | undefined {\n const entry = cache.get(key)\n if (!entry) return undefined\n if (ttlMs > 0 && Date.now() - entry.cachedAt > ttlMs) {\n cache.delete(key)\n return undefined\n }\n // LRU: move to end\n cache.delete(key)\n cache.set(key, entry)\n return entry.envelope\n }\n\n function setInCache(key: string, envelope: EncryptedEnvelope | null): void {\n // Evict oldest if at capacity\n if (cache.size >= maxEntries) {\n const oldest = cache.keys().next().value\n if (oldest !== undefined) cache.delete(oldest)\n }\n cache.set(key, { envelope, cachedAt: Date.now() })\n }\n\n function invalidate(key: string): void {\n cache.delete(key)\n }\n\n return (next) => ({\n ...next,\n name: next.name ? `cache(${next.name})` : 'cache',\n\n async get(vault, collection, id) {\n const key = cacheKey(vault, collection, id)\n const cached = getFromCache(key)\n if (cached !== undefined) return cached\n const result = await next.get(vault, collection, id)\n setInCache(key, result)\n return result\n },\n\n async put(vault, collection, id, env, ev) {\n invalidate(cacheKey(vault, collection, id))\n await next.put(vault, collection, id, env, ev)\n setInCache(cacheKey(vault, collection, id), env)\n },\n\n async delete(vault, collection, id) {\n invalidate(cacheKey(vault, collection, id))\n await next.delete(vault, collection, id)\n },\n\n list: (v, c) => next.list(v, c),\n loadAll: (v) => next.loadAll(v),\n saveAll: (v, d) => next.saveAll(v, d),\n })\n}\n\n// ─── withHealthCheck ────────────────────────────────────────────────────\n\nexport interface HealthCheckOptions {\n /** Ping interval in ms. Default: 30_000. */\n checkIntervalMs?: number\n /** Suspend after N consecutive ping failures. Default: 3. */\n suspendAfterFailures?: number\n /** Resume after N consecutive ping successes. Default: 1. */\n resumeAfterSuccess?: number\n /** Called when the store is auto-suspended. */\n onSuspend?: () => void\n /** Called when the store is auto-resumed. */\n onResume?: () => void\n /**\n * Custom health check. Default: calls `store.ping()` if available,\n * otherwise attempts a `list()` on a sentinel collection.\n */\n check?: () => Promise<boolean>\n}\n\n/**\n * Auto-suspends a store when health checks fail, auto-resumes when they recover.\n *\n * When suspended, `get` returns null, `put`/`delete` are no-ops, `list` returns [].\n * This is identical to the `NullStore` behavior from `routeStore.suspend()`.\n */\nexport function withHealthCheck(opts: HealthCheckOptions = {}): StoreMiddleware {\n const intervalMs = opts.checkIntervalMs ?? 30_000\n const failThreshold = opts.suspendAfterFailures ?? 3\n const successThreshold = opts.resumeAfterSuccess ?? 1\n\n let isSuspended = false\n let consecutiveFailures = 0\n let consecutiveSuccesses = 0\n\n return (next) => {\n const checkFn = opts.check ?? (\n next.ping\n ? 
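/* prefer the adapter's own ping when it exists */ 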
() => next.ping!()\n : async () => { await next.list('__health__', '__ping__'); return true }\n )\n\n async function doCheck(): Promise<void> {\n try {\n const ok = await checkFn()\n if (ok) {\n consecutiveFailures = 0\n consecutiveSuccesses++\n if (isSuspended && consecutiveSuccesses >= successThreshold) {\n isSuspended = false\n consecutiveSuccesses = 0\n opts.onResume?.()\n }\n } else {\n throw new Error('Health check returned false')\n }\n } catch {\n consecutiveSuccesses = 0\n consecutiveFailures++\n if (!isSuspended && consecutiveFailures >= failThreshold) {\n isSuspended = true\n consecutiveFailures = 0\n opts.onSuspend?.()\n }\n }\n }\n\n // Start checking\n setInterval(() => { void doCheck() }, intervalMs)\n\n const wrapped: NoydbStore = {\n ...next,\n name: next.name ? `health(${next.name})` : 'health',\n\n async get(v, c, id) { return isSuspended ? null : next.get(v, c, id) },\n async put(v, c, id, env, ev) { if (!isSuspended) await next.put(v, c, id, env, ev) },\n async delete(v, c, id) { if (!isSuspended) await next.delete(v, c, id) },\n async list(v, c) { return isSuspended ? [] : next.list(v, c) },\n async loadAll(v) { return isSuspended ? {} : next.loadAll(v) },\n async saveAll(v, d) { if (!isSuspended) await next.saveAll(v, d) },\n }\n\n return wrapped\n }\n}\n","/**\n * `.noydb` container format — byte layout, header schema, validators.\n *\n * Wraps a `vault.dump()` JSON string in a thin\n * binary container with a magic-byte prefix, a minimum-disclosure\n * unencrypted header, and a compressed body.\n *\n * **Byte layout** (read in order from offset 0):\n *\n * ```\n * +--------+--------+--------+--------+\n * | N=78 | D=68 | B=66 | 1=49 | Magic 'NDB1' (4 bytes)\n * +--------+--------+--------+--------+\n * | flags | compr | header_length (uint32 BE) |\n * +--------+--------+--------+--------+--------+--------+--------+\n * | header_length bytes of UTF-8 JSON header ...\n * +--------+--------+\n * | compressed body bytes ...\n * ```\n *\n * Total fixed prefix before the header JSON is **10 bytes**:\n * - 4 bytes magic\n * - 1 byte flags\n * - 1 byte compression algorithm\n * - 4 bytes header length (uint32 big-endian)\n *\n * **Why a binary container** at all? `vault.dump()` already\n * produces a JSON string with encrypted records inside. Wrapping it\n * again seems redundant — but the wrap is what makes the file safe\n * to drop into cloud storage (Drive, Dropbox, iCloud) without\n * leaking the vault name and exporter identity through the\n * cloud's metadata API. The minimum-disclosure header is the only\n * thing visible without downloading and decompressing the body.\n * The dump JSON inside the body still contains the original\n * metadata, but that's only readable by someone who already has the\n * file bytes — the same person who could read the encrypted records\n * with the right passphrase.\n *\n * **Why minimum disclosure** in the header? Because consumers will\n * inevitably store these in services where the filename, file size,\n * and any unencrypted metadata are indexed for search. A field like\n * `vault: \"Acme Corp\"` would let an attacker (or a curious\n * cloud admin) enumerate which compartments exist and who exported\n * them, even with zero access to the encrypted body. The header\n * carries only what's needed to identify the file as a NOYDB\n * bundle and verify its integrity — nothing about the contents.\n */\n\n/** Magic bytes 'NDB1' (ASCII), identifying a NOYDB bundle. 
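Spells\n * N (0x4e), D (0x44), B (0x42), 1 (0x31); `hasNoydbBundleMagic`\n * below performs the corresponding check.\n 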
*/\nexport const NOYDB_BUNDLE_MAGIC = new Uint8Array([0x4e, 0x44, 0x42, 0x31])\n\n/** Total fixed prefix before the header JSON: 4+1+1+4 bytes. */\nexport const NOYDB_BUNDLE_PREFIX_BYTES = 10\n\n/** Current bundle format version. Bumped on layout changes. */\nexport const NOYDB_BUNDLE_FORMAT_VERSION = 1\n\n/**\n * Bitfield interpretation of the flags byte.\n *\n * Bit 0 — body is compressed (0 = raw, 1 = compressed)\n * Bit 1 — header carries an integrity hash over the body bytes\n * Bits 2-7 — reserved, must be 0 in format version 1\n */\nexport const FLAG_COMPRESSED = 0b0000_0001\nexport const FLAG_HAS_INTEGRITY_HASH = 0b0000_0010\n\n/**\n * Compression algorithm encoding for the byte at offset 5.\n *\n * `none` is supported for round-trip testing and for callers that\n * want to bundle without compression (e.g. when piping into a\n * separately compressed transport). `gzip` is the universally\n * available baseline (Node 18+, all modern browsers). `brotli` is\n * preferred when the runtime supports it — typically 30-50% smaller\n * for JSON payloads — but Node 22+ / Chrome 124+ / Firefox 122+\n * are required, so the writer feature-detects at runtime and falls\n * back to gzip. The reader must handle all three.\n */\nexport const COMPRESSION_NONE = 0\nexport const COMPRESSION_GZIP = 1\nexport const COMPRESSION_BROTLI = 2\n\nexport type CompressionAlgo = 0 | 1 | 2\n\n/**\n * The unencrypted header carried in every `.noydb` bundle.\n *\n * **Minimum-disclosure rules:** these are the ONLY allowed keys.\n * Any other key in a parsed header causes\n * `validateBundleHeader` to throw. The set is kept short to\n * minimize attack surface from cloud-storage metadata indexing —\n * see the file-level doc comment for the rationale.\n *\n * Forbidden in particular:\n * - `vault` / `_compartment` — would leak the tenant name\n * - `exporter` / `_exported_by` — would leak user identity\n * - `timestamp` / `_exported_at` — would leak activity timing\n * - `kdfParams` / salt fields — would leak crypto config that\n * could narrow brute-force search space\n * - any field starting with `_` (reserved by the dump format)\n */\nexport interface NoydbBundleHeader {\n /** Bundle format version — bumped on layout changes. */\n readonly formatVersion: number\n /**\n * Opaque ULID identifier — generated once per vault and\n * stable across re-exports of the same vault. Does not\n * leak any information about contents (the timestamp prefix\n * exists only for monotonic sortability, not to record exporter\n * activity — see `bundle/ulid.ts` for the design notes).\n */\n readonly handle: string\n /** Compressed body length in bytes. Lets readers verify completeness without decompressing. */\n readonly bodyBytes: number\n /** SHA-256 of the compressed body bytes (lowercase hex). Lets readers verify integrity without decompressing. */\n readonly bodySha256: string\n}\n\n/**\n * Allowlist of header keys. Any key not in this set is forbidden\n * and causes `validateBundleHeader` to throw. Kept as a Set for\n * O(1) lookup; the validator iterates over the parsed header and\n * checks each key against this set.\n */\nconst ALLOWED_HEADER_KEYS: ReadonlySet<string> = new Set([\n 'formatVersion',\n 'handle',\n 'bodyBytes',\n 'bodySha256',\n])\n\n/**\n * Validate a parsed bundle header. 
Throws on any deviation from\n * the minimum-disclosure schema:\n *\n * - Missing required field\n * - Wrong type for any field\n * - Any extra key not in `ALLOWED_HEADER_KEYS`\n * - Unsupported `formatVersion`\n * - Negative or non-integer `bodyBytes`\n * - Malformed `handle` (must be 26-char Crockford base32)\n * - Malformed `bodySha256` (must be 64-char lowercase hex)\n *\n * The error messages name the offending field so consumers can\n * fix the producer rather than the reader.\n */\nexport function validateBundleHeader(\n parsed: unknown,\n): asserts parsed is NoydbBundleHeader {\n if (parsed === null || typeof parsed !== 'object') {\n throw new Error(\n `.noydb bundle header must be a JSON object, got ${parsed === null ? 'null' : typeof parsed}`,\n )\n }\n // Disallow any unknown key — minimum disclosure means we reject\n // forward-compat extension keys at the format layer; new fields\n // require a format version bump and a new validator.\n for (const key of Object.keys(parsed)) {\n if (!ALLOWED_HEADER_KEYS.has(key)) {\n throw new Error(\n `.noydb bundle header contains forbidden key \"${key}\". ` +\n `Only minimum-disclosure fields are allowed: ` +\n `${[...ALLOWED_HEADER_KEYS].join(', ')}.`,\n )\n }\n }\n const h = parsed as Record<string, unknown>\n if (typeof h['formatVersion'] !== 'number' || h['formatVersion'] !== NOYDB_BUNDLE_FORMAT_VERSION) {\n throw new Error(\n `.noydb bundle header.formatVersion must be ${NOYDB_BUNDLE_FORMAT_VERSION}, ` +\n `got ${String(h['formatVersion'])}. The reader does not support ` +\n `forward-compat versions; upgrade the reader to handle newer bundles.`,\n )\n }\n if (typeof h['handle'] !== 'string' || !/^[0-9A-HJKMNP-TV-Z]{26}$/.test(h['handle'])) {\n throw new Error(\n `.noydb bundle header.handle must be a 26-character Crockford base32 ULID, ` +\n `got ${typeof h['handle'] === 'string' ? `\"${h['handle']}\"` : String(h['handle'])}.`,\n )\n }\n if (typeof h['bodyBytes'] !== 'number' || !Number.isInteger(h['bodyBytes']) || h['bodyBytes'] < 0) {\n throw new Error(\n `.noydb bundle header.bodyBytes must be a non-negative integer, ` +\n `got ${String(h['bodyBytes'])}.`,\n )\n }\n if (typeof h['bodySha256'] !== 'string' || !/^[0-9a-f]{64}$/.test(h['bodySha256'])) {\n throw new Error(\n `.noydb bundle header.bodySha256 must be a 64-character lowercase hex string, ` +\n `got ${typeof h['bodySha256'] === 'string' ? `\"${h['bodySha256']}\"` : String(h['bodySha256'])}.`,\n )\n }\n}\n\n/**\n * Encode a header object to UTF-8 JSON bytes after validating\n * minimum disclosure. Used by the writer to serialize the header\n * region of the container.\n */\nexport function encodeBundleHeader(header: NoydbBundleHeader): Uint8Array {\n validateBundleHeader(header)\n // Stable key ordering — JSON.stringify with no replacer uses\n // insertion order, which is fine here because we control the\n // object construction. Stable ordering means two bundles with\n // identical contents produce byte-identical headers.\n const json = JSON.stringify({\n formatVersion: header.formatVersion,\n handle: header.handle,\n bodyBytes: header.bodyBytes,\n bodySha256: header.bodySha256,\n })\n return new TextEncoder().encode(json)\n}\n\n/**\n * Parse a bundle header from its UTF-8 JSON bytes. 
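Round-trips with\n * `encodeBundleHeader`:\n *\n * ```ts\n * decodeBundleHeader(encodeBundleHeader(header)) // deep-equals header\n * ```\n *\n * 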
Throws on\n * invalid JSON or any minimum-disclosure violation.\n */\nexport function decodeBundleHeader(bytes: Uint8Array): NoydbBundleHeader {\n const json = new TextDecoder('utf-8', { fatal: true }).decode(bytes)\n let parsed: unknown\n try {\n parsed = JSON.parse(json)\n } catch (err) {\n throw new Error(\n `.noydb bundle header is not valid JSON: ${(err as Error).message}`,\n )\n }\n validateBundleHeader(parsed)\n return parsed\n}\n\n/**\n * Read a uint32 from `bytes` at `offset` in big-endian byte order.\n * No bounds check — callers must guarantee `offset + 4 <= bytes.length`.\n * Used to decode the header length field; kept inline so the parser\n * doesn't depend on DataView allocation per call.\n */\nexport function readUint32BE(bytes: Uint8Array, offset: number): number {\n return (\n (bytes[offset]! << 24 >>> 0) +\n (bytes[offset + 1]! << 16) +\n (bytes[offset + 2]! << 8) +\n bytes[offset + 3]!\n )\n}\n\n/**\n * Write a uint32 to `bytes` at `offset` in big-endian byte order.\n * No bounds check — callers must guarantee `offset + 4 <= bytes.length`.\n */\nexport function writeUint32BE(bytes: Uint8Array, offset: number, value: number): void {\n bytes[offset] = (value >>> 24) & 0xff\n bytes[offset + 1] = (value >>> 16) & 0xff\n bytes[offset + 2] = (value >>> 8) & 0xff\n bytes[offset + 3] = value & 0xff\n}\n\n/**\n * Verify the magic prefix of a bundle. Returns true if the first\n * 4 bytes match `NDB1`. Used by readers as a fast file-type check\n * before any further parsing.\n */\nexport function hasNoydbBundleMagic(bytes: Uint8Array): boolean {\n if (bytes.length < NOYDB_BUNDLE_MAGIC.length) return false\n for (let i = 0; i < NOYDB_BUNDLE_MAGIC.length; i++) {\n if (bytes[i] !== NOYDB_BUNDLE_MAGIC[i]) return false\n }\n return true\n}\n","/**\n * `.noydb` container primitives — write, read, header-only read.\n *\n * Wraps a `vault.dump()` JSON string in the\n * binary container described in `format.ts`.\n *\n * **Three primitives:**\n *\n * - `writeNoydbBundle(vault, opts?)` — produces the\n * full container bytes ready to write to disk or upload\n * - `readNoydbBundleHeader(bytes)` — parses just the header\n * without decompressing the body; a fast file-type and\n * metadata read for cloud listing UIs\n * - `readNoydbBundle(bytes)` — full read: validates magic,\n * header, integrity hash, and decompresses the body to\n * return the original `dump()` JSON string for use with\n * `vault.load()`\n *\n * **Compression strategy:** brotli when available (Node 22+,\n * Chrome 124+, Firefox 122+), gzip fallback elsewhere. The\n * algorithm choice is encoded in the format byte at offset 5,\n * so readers handle all three transparently. Brotli wins ~30-50%\n * on JSON payloads with repeated keys (which vault dumps\n * are).\n *\n * **Why split read/load?** `readNoydbBundle` returns the\n * *unwrapped JSON string*, not a Vault object. The caller\n * is responsible for piping that JSON into\n * `vault.load(json, passphrase)`. Splitting the layers\n * keeps the bundle module free of any crypto/passphrase\n * concerns — it's purely a format layer. 
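A full restore is\n * therefore two calls (sketch):\n *\n * ```ts\n * const { dumpJson } = await readNoydbBundle(bytes)\n * await vault.load(dumpJson, passphrase)\n * ```\n *\n * 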
The same `readNoydbBundle`\n * call can also feed verification tools, format inspectors, or\n * archive utilities that don't care about decryption.\n */\n\nimport {\n COMPRESSION_BROTLI,\n COMPRESSION_GZIP,\n COMPRESSION_NONE,\n FLAG_COMPRESSED,\n FLAG_HAS_INTEGRITY_HASH,\n NOYDB_BUNDLE_FORMAT_VERSION,\n NOYDB_BUNDLE_MAGIC,\n NOYDB_BUNDLE_PREFIX_BYTES,\n decodeBundleHeader,\n encodeBundleHeader,\n hasNoydbBundleMagic,\n readUint32BE,\n writeUint32BE,\n type CompressionAlgo,\n type NoydbBundleHeader,\n} from './format.js'\nimport { BundleIntegrityError } from '../errors.js'\nimport type { Vault } from '../vault.js'\nimport type { BundleRecipient } from '../team/keyring.js'\n\n/**\n * Options accepted by `writeNoydbBundle`.\n *\n * - `compression: 'auto'` (default) — try brotli, fall back to gzip\n * - `compression: 'brotli'` — force brotli, throw if unsupported\n * - `compression: 'gzip'` — force gzip\n * - `compression: 'none'` — no compression (round-trip testing only)\n *\n * **Slice filtering** (added in ):\n * - `collections` — allowlist of collection names to include. Internal\n * collections (keyrings, ledger) and excluded user collections are\n * dropped from the bundle. Records inside included collections are\n * carried through verbatim.\n * - `since` — only records whose envelope `_ts` is on/after the given\n * instant survive. Operates on the unencrypted envelope timestamp,\n * so plaintext access to records is not required.\n *\n * Both filters intersect (AND). When neither is provided the bundle is\n * a whole-vault snapshot, identical to today's behaviour.\n */\nexport interface WriteNoydbBundleOptions {\n readonly compression?: 'auto' | 'brotli' | 'gzip' | 'none'\n /** Allowlist of user-collection names to include. */\n readonly collections?: readonly string[]\n /**\n * Drop records whose envelope `_ts` is strictly older than this\n * instant. Accepts a `Date` or any ISO-8601 string parseable by\n * `new Date()`.\n */\n readonly since?: Date | string\n /**\n * Plaintext-pipeline record predicate. Decrypts each record\n * with the vault's per-collection DEK, runs the predicate, and\n * keeps the original ciphertext for survivors (no re-encrypt —\n * preserves zero-knowledge cleanly). Records the predicate returns\n * `false` for are dropped from the bundle.\n *\n * Async predicates are supported. Mutating the record from inside\n * the predicate is undefined behaviour.\n */\n readonly where?: (\n record: unknown,\n ctx: { collection: string; id: string },\n ) => boolean | Promise<boolean>\n /**\n * Hierarchical-tier ceiling. Records whose envelope `_tier`\n * is strictly greater than this number are dropped. Operates on the\n * envelope `_tier` (no decryption needed) — vault.exportStream is\n * referenced in the issue body for symmetry, but the tier value\n * lives on the unencrypted envelope. Vault without tiers is a no-op.\n */\n readonly tierAtMost?: number\n /**\n * Single-recipient re-keying shorthand. When set, the\n * bundle's keyring is replaced with one freshly-derived entry sealed\n * with this passphrase. The recipient inherits the source keyring's\n * userId, role, and permissions. Mutually exclusive with `recipients`.\n */\n readonly exportPassphrase?: string\n /**\n * Multi-recipient re-keying. Replaces the bundle's keyring\n * map with one slot per recipient, each sealed with its own\n * passphrase. 
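A sketch (ids, passphrases,\n * and roles are illustrative; the shape follows `BundleRecipient`):\n *\n * ```ts\n * await writeNoydbBundle(vault, {\n * recipients: [\n * { id: 'alice', passphrase: 'correct-horse', role: 'owner' },\n * { id: 'bob', passphrase: 'battery-staple', role: 'reader' },\n * ],\n * })\n * ```\n *\n * 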
DEKs are unwrapped from the source keyring once and\n * re-wrapped per recipient — record ciphertext is unchanged.\n *\n * Mutually exclusive with `exportPassphrase`. When neither is set,\n * the bundle inherits the source keyring as-is (today's behaviour,\n * suited to personal backup-and-restore).\n */\n readonly recipients?: readonly BundleRecipient[]\n}\n\n/**\n * Result returned by `readNoydbBundle`. The caller is expected to\n * pass `dumpJson` into `vault.load(json, passphrase)` to\n * actually restore a vault. Splitting the layers keeps the\n * bundle module free of crypto concerns — see file-level docs.\n */\nexport interface NoydbBundleReadResult {\n readonly header: NoydbBundleHeader\n readonly dumpJson: string\n}\n\n/**\n * Detect whether the runtime's `CompressionStream` supports brotli.\n *\n * Brotli requires Node 22+ / Chrome 124+ / Firefox 122+. The\n * detection runs the `CompressionStream` constructor in a\n * try/catch — unsupported formats throw `TypeError` synchronously,\n * making this a safe one-shot check that we cache for the\n * lifetime of the process.\n */\nlet cachedBrotliSupport: boolean | null = null\nfunction supportsBrotliCompression(): boolean {\n if (cachedBrotliSupport !== null) return cachedBrotliSupport\n try {\n new CompressionStream('br' as CompressionFormat)\n cachedBrotliSupport = true\n } catch {\n cachedBrotliSupport = false\n }\n return cachedBrotliSupport\n}\n\n/** Test-only: reset the brotli detection cache between tests. */\nexport function resetBrotliSupportCache(): void {\n cachedBrotliSupport = null\n}\n\n/**\n * Pick the compression algorithm and the corresponding format byte\n * from a user option. Throws if the user explicitly requests brotli\n * on a runtime that doesn't support it — a silent fallback would\n * make the produced bundle smaller-than-expected and confuse\n * size-bound tests.\n */\nfunction selectCompression(option: WriteNoydbBundleOptions['compression']): {\n format: CompressionAlgo\n streamFormat: CompressionFormat | null\n} {\n const choice = option ?? 'auto'\n if (choice === 'none') return { format: COMPRESSION_NONE, streamFormat: null }\n if (choice === 'gzip') return { format: COMPRESSION_GZIP, streamFormat: 'gzip' }\n if (choice === 'brotli') {\n if (!supportsBrotliCompression()) {\n throw new Error(\n `writeNoydbBundle({ compression: 'brotli' }) is not supported on this ` +\n `runtime. Brotli requires Node 22+, Chrome 124+, or Firefox 122+. ` +\n `Use { compression: 'auto' } to fall back to gzip silently, or ` +\n `{ compression: 'gzip' } to be explicit.`,\n )\n }\n return { format: COMPRESSION_BROTLI, streamFormat: 'br' as CompressionFormat }\n }\n // 'auto' — prefer brotli, fall back to gzip\n if (supportsBrotliCompression()) {\n return { format: COMPRESSION_BROTLI, streamFormat: 'br' as CompressionFormat }\n }\n return { format: COMPRESSION_GZIP, streamFormat: 'gzip' }\n}\n\n/**\n * Pump a Uint8Array through a CompressionStream / DecompressionStream\n * and collect the output. Both APIs are universally available in\n * Node 18+ and modern browsers; the only variance is which\n * formats they support, handled by `selectCompression` above.\n *\n * Implementation: build a single-chunk ReadableStream from the\n * input, pipe through the transform, then drain the resulting\n * ReadableStream into a single concatenated Uint8Array. 
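In effect:\n *\n * ```ts\n * const packed = await pumpThroughStream(bytes, new CompressionStream('gzip'))\n * const unpacked = await pumpThroughStream(packed, new DecompressionStream('gzip'))\n * // unpacked deep-equals bytes\n * ```\n *\n * 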
This is\n * O(N) memory in the input + output sizes, which is fine for the\n * dump-sized payloads (typically <50MB) this format targets.\n */\nasync function pumpThroughStream(\n input: Uint8Array,\n stream: CompressionStream | DecompressionStream,\n): Promise<Uint8Array> {\n const readable = new Blob([input as BlobPart]).stream().pipeThrough(stream)\n const reader = readable.getReader()\n const chunks: Uint8Array[] = []\n let total = 0\n for (;;) {\n const { value, done } = await reader.read()\n if (done) break\n if (value) {\n chunks.push(value as Uint8Array)\n total += value.length\n }\n }\n const out = new Uint8Array(total)\n let offset = 0\n for (const chunk of chunks) {\n out.set(chunk, offset)\n offset += chunk.length\n }\n return out\n}\n\n/**\n * SHA-256 hex digest of `bytes`. Used for the bundle integrity\n * hash carried in the header. Web Crypto API only — no Node\n * crypto module, no third-party hash library.\n *\n * The output format is lowercase hex (64 chars for SHA-256). The\n * format validator pins this — uppercase or mixed-case digests\n * are rejected, so the writer and reader agree on canonicalization.\n */\nasync function sha256Hex(bytes: Uint8Array): Promise<string> {\n // Copy into a fresh ArrayBuffer-backed Uint8Array. The\n // underlying buffer of `bytes` may be SharedArrayBuffer (e.g.\n // from a worker), which `subtle.digest` rejects via TypeScript's\n // BufferSource type. Allocating a fresh ArrayBuffer-backed view\n // sidesteps the type narrowing and is portable across all\n // runtimes — the copy cost is O(N) but bundle bodies are\n // typically <50MB, well below the threshold where the copy\n // matters.\n const copy = new Uint8Array(bytes.length)\n copy.set(bytes)\n const digest = await crypto.subtle.digest('SHA-256', copy)\n const view = new Uint8Array(digest)\n let hex = ''\n for (let i = 0; i < view.length; i++) {\n hex += view[i]!.toString(16).padStart(2, '0')\n }\n return hex\n}\n\n/**\n * Concatenate any number of Uint8Arrays into a single new buffer.\n * Used to assemble the final bundle from its prefix + header +\n * body parts.\n */\nfunction concatBytes(parts: readonly Uint8Array[]): Uint8Array {\n let total = 0\n for (const p of parts) total += p.length\n const out = new Uint8Array(total)\n let offset = 0\n for (const p of parts) {\n out.set(p, offset)\n offset += p.length\n }\n return out\n}\n\n/**\n * Replace the bundle's keyrings with freshly built recipient slots,\n * one per supplied recipient. No-op when neither `exportPassphrase`\n * nor `recipients` is set — the source keyring is inherited as-is.\n *\n * The single-passphrase shorthand creates a one-recipient list whose\n * id, role, and permissions inherit from the source vault — useful\n * for \"back up to a different passphrase\" without changing role\n * semantics. The multi-recipient form wraps each slot independently\n * with its declared role + permissions.\n *\n * @internal\n */\nasync function applyRecipientRewrap(\n vault: Vault,\n dumpJson: string,\n opts: WriteNoydbBundleOptions,\n): Promise<string> {\n if (opts.exportPassphrase === undefined && opts.recipients === undefined) {\n return dumpJson\n }\n\n const recipients: readonly BundleRecipient[] =\n opts.recipients ?? 
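/* expand the single-passphrase shorthand */ 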
[\n {\n id: vault.userId,\n passphrase: opts.exportPassphrase as string,\n role: vault.role,\n },\n ]\n\n const recipientKeyrings = await vault.buildBundleRecipientKeyrings(recipients)\n\n const backup = JSON.parse(dumpJson) as { keyrings: unknown; [k: string]: unknown }\n backup.keyrings = recipientKeyrings\n return JSON.stringify(backup)\n}\n\n/**\n * Apply opt-in slice filters to a vault dump JSON string. Filters that\n * narrow the bundle without crossing the encryption boundary — both\n * operate on metadata (collection name, envelope `_ts`) and never need\n * to decrypt records. When neither filter is set, the dump is returned\n * unchanged so the no-arg path stays a pure passthrough.\n *\n * Internal-collection filtering: when a `collections` allowlist is\n * provided, the bundle still carries `_internal` (ledger entries) and\n * the keyrings — they're necessary for the receiver to verify and\n * unlock the bundle. The allowlist applies to the user-collection\n * map only.\n *\n * @internal\n */\nfunction applySliceFilters(\n dumpJson: string,\n opts: WriteNoydbBundleOptions,\n): string {\n const collectionsFilter = opts.collections\n ? new Set(opts.collections)\n : null\n const sinceMs =\n opts.since !== undefined ? new Date(opts.since).getTime() : null\n if (collectionsFilter === null && sinceMs === null) return dumpJson\n\n // Parse, prune, re-serialize. The dump shape is stable\n // (VaultBackup) so this is a one-off allocation; for vaults beyond\n // the documented 1K–50K target a streaming variant would be a\n // follow-up, but the simple parse path keeps the slice path\n // type-safe and trivially auditable.\n const backup = JSON.parse(dumpJson) as {\n collections?: Record<string, Record<string, { _ts?: string }>>\n [k: string]: unknown\n }\n\n if (backup.collections && typeof backup.collections === 'object') {\n const next: Record<string, Record<string, unknown>> = {}\n for (const [name, records] of Object.entries(backup.collections)) {\n if (collectionsFilter && !collectionsFilter.has(name)) continue\n if (sinceMs === null) {\n next[name] = records\n continue\n }\n const kept: Record<string, unknown> = {}\n for (const [id, env] of Object.entries(records)) {\n const envTs = env._ts ? new Date(env._ts).getTime() : NaN\n if (Number.isFinite(envTs) && envTs >= sinceMs) {\n kept[id] = env\n }\n }\n next[name] = kept\n }\n backup.collections = next as typeof backup.collections\n }\n\n return JSON.stringify(backup)\n}\n\n/**\n * Apply opt-in plaintext-tier filters\n * to a vault dump. Operates BEFORE `applySliceFilters` so the metadata\n * pass sees the trimmed record set.\n *\n * The filter never re-encrypts: surviving records carry their original\n * envelope unchanged. Failing records are dropped from the\n * `collections` map. 
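A sketch (the `status`\n * field is illustrative):\n *\n * ```ts\n * await writeNoydbBundle(vault, {\n * tierAtMost: 1,\n * where: (rec) => (rec as { status?: string }).status !== 'draft',\n * })\n * ```\n *\n * 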
Internal collections (ledger, deltas) and the\n * keyrings map are untouched.\n *\n * @internal\n */\nasync function applyPlaintextFilters(\n vault: Vault,\n dumpJson: string,\n opts: WriteNoydbBundleOptions,\n): Promise<string> {\n if (opts.where === undefined && opts.tierAtMost === undefined) {\n return dumpJson\n }\n\n type Env = { _ts?: string; _tier?: number; _iv: string; _data: string }\n const backup = JSON.parse(dumpJson) as {\n collections?: Record<string, Record<string, Env>>\n [k: string]: unknown\n }\n if (!backup.collections || typeof backup.collections !== 'object') {\n return dumpJson\n }\n\n const tierCeiling = opts.tierAtMost\n const where = opts.where\n\n const next: Record<string, Record<string, Env>> = {}\n for (const [collName, records] of Object.entries(backup.collections)) {\n const kept: Record<string, Env> = {}\n for (const [id, env] of Object.entries(records)) {\n // Tier ceiling — runs FIRST so we don't waste a decrypt on\n // records about to be dropped anyway. Envelope tier defaults to\n // 0 when absent (matches Vault's tier-0 conventions).\n if (tierCeiling !== undefined) {\n const tier = env._tier ?? 0\n if (tier > tierCeiling) continue\n }\n // Plaintext predicate — decrypt, run, keep on truthy. Errors\n // from inside the predicate propagate (callers want to see why\n // their filter blew up rather than getting a silent passthrough).\n if (where !== undefined) {\n const record = await vault._decryptEnvelopeForBundleFilter(\n env as never,\n collName,\n )\n const ok = await where(record, { collection: collName, id })\n if (!ok) continue\n }\n kept[id] = env\n }\n next[collName] = kept\n }\n backup.collections = next\n return JSON.stringify(backup)\n}\n\n/**\n * Write a `.noydb` bundle for the given vault.\n *\n * Pipeline:\n * 1. Resolve or create the compartment's stable bundle handle\n * via `vault.getBundleHandle()` — same handle on\n * every export from the same vault instance, so cloud\n * adapters can use it as a primary key.\n * 2. `vault.dump()` → JSON string with encrypted records\n * inside.\n * 3. UTF-8 encode the dump string.\n * 4. Compress (brotli if available, gzip fallback by default).\n * 5. Compute SHA-256 of the compressed body for integrity.\n * 6. Build the minimum-disclosure header from format version,\n * handle, body length, body sha.\n * 7. Serialize: magic (4) + flags (1) + algo (1) + headerLen (4)\n * + header JSON (N) + compressed body (M).\n *\n * The output is a single `Uint8Array`. Consumers writing to disk\n * pass it to `fs.writeFile`; consumers uploading to cloud storage\n * pass it as the request body. 
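For example:\n *\n * ```ts\n * import { writeFile } from 'node:fs/promises'\n * const bytes = await writeNoydbBundle(vault)\n * await writeFile('backup.noydb', bytes)\n * ```\n *\n * 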
The `@noy-db/file` adapter wraps\n * this with a `saveBundle(path, vault)` helper.\n */\nexport async function writeNoydbBundle(\n vault: Vault,\n opts: WriteNoydbBundleOptions = {},\n): Promise<Uint8Array> {\n if (opts.exportPassphrase !== undefined && opts.recipients !== undefined) {\n throw new Error(\n 'writeNoydbBundle: pass either exportPassphrase or recipients, not both',\n )\n }\n\n const handle = await vault.getBundleHandle()\n const dumpJson = await vault.dump()\n\n // Re-keying: when caller supplied recipients (or the single-recipient\n // shorthand), substitute the bundle's `keyrings` map with freshly\n // built recipient slots before slice filters run.\n const rekeyed = await applyRecipientRewrap(vault, dumpJson, opts)\n // Plaintext-tier filters run BEFORE\n // the metadata-only slice — that way the metadata pass sees the\n // already-trimmed record set and the two filter chains compose\n // cleanly.\n const plainFiltered = await applyPlaintextFilters(vault, rekeyed, opts)\n const filtered = applySliceFilters(plainFiltered, opts)\n const dumpBytes = new TextEncoder().encode(filtered)\n\n const { format, streamFormat } = selectCompression(opts.compression)\n const body = streamFormat === null\n ? dumpBytes\n : await pumpThroughStream(dumpBytes, new CompressionStream(streamFormat))\n\n const bodySha256 = await sha256Hex(body)\n const header: NoydbBundleHeader = {\n formatVersion: NOYDB_BUNDLE_FORMAT_VERSION,\n handle,\n bodyBytes: body.length,\n bodySha256,\n }\n const headerBytes = encodeBundleHeader(header)\n\n // Assemble the fixed prefix in a 10-byte buffer.\n const prefix = new Uint8Array(NOYDB_BUNDLE_PREFIX_BYTES)\n prefix.set(NOYDB_BUNDLE_MAGIC, 0)\n prefix[4] =\n (streamFormat === null ? 0 : FLAG_COMPRESSED) | FLAG_HAS_INTEGRITY_HASH\n prefix[5] = format\n writeUint32BE(prefix, 6, headerBytes.length)\n\n return concatBytes([prefix, headerBytes, body])\n}\n\n/**\n * Internal helper shared by both readers — parses just the prefix\n * + header region of a bundle without touching the body. Returns\n * the parsed header plus the offset where the body starts and the\n * compression algorithm needed to decompress it.\n *\n * Throws on any format violation: missing/invalid magic, truncated\n * prefix, header length larger than the file, or unknown\n * compression algorithm.\n */\nfunction parsePrefixAndHeader(bytes: Uint8Array): {\n header: NoydbBundleHeader\n bodyOffset: number\n algo: CompressionAlgo\n flags: number\n} {\n if (!hasNoydbBundleMagic(bytes)) {\n throw new Error(\n `Not a .noydb bundle: missing 'NDB1' magic prefix. The first 4 bytes ` +\n `are ${[...bytes.slice(0, 4)].map((b) => b.toString(16).padStart(2, '0')).join(' ')}.`,\n )\n }\n if (bytes.length < NOYDB_BUNDLE_PREFIX_BYTES) {\n throw new Error(\n `Truncated .noydb bundle: file is only ${bytes.length} bytes, ` +\n `which is less than the ${NOYDB_BUNDLE_PREFIX_BYTES}-byte fixed prefix.`,\n )\n }\n const flags = bytes[4]!\n const algo = bytes[5]!\n if (algo !== COMPRESSION_NONE && algo !== COMPRESSION_GZIP && algo !== COMPRESSION_BROTLI) {\n throw new Error(\n `.noydb bundle declares unknown compression algorithm ${algo}. 
` +\n `Known values: 0 (none), 1 (gzip), 2 (brotli).`,\n )\n }\n const headerLength = readUint32BE(bytes, 6)\n const bodyOffset = NOYDB_BUNDLE_PREFIX_BYTES + headerLength\n if (bodyOffset > bytes.length) {\n throw new Error(\n `Truncated .noydb bundle: declared header length ${headerLength} ` +\n `would extend past end of file (${bytes.length} bytes).`,\n )\n }\n const headerBytes = bytes.slice(NOYDB_BUNDLE_PREFIX_BYTES, bodyOffset)\n const header = decodeBundleHeader(headerBytes)\n return { header, bodyOffset, algo: algo as CompressionAlgo, flags }\n}\n\n/**\n * Read just the bundle header — no body decompression, no\n * integrity verification. Fast (O(prefix + header bytes)) and\n * intended for cloud-listing UIs that want to show the handle and\n * size before downloading the full body.\n *\n * Returns the same `NoydbBundleHeader` shape as the writer, with\n * minimum-disclosure validation already applied.\n */\nexport function readNoydbBundleHeader(bytes: Uint8Array): NoydbBundleHeader {\n return parsePrefixAndHeader(bytes).header\n}\n\n/**\n * Read a full `.noydb` bundle: validate magic + header, verify\n * integrity hash over the body bytes, decompress, and return the\n * original `vault.dump()` JSON string ready to pass to\n * `vault.load()`.\n *\n * Throws `BundleIntegrityError` if the body's actual SHA-256 does\n * not match the value declared in the header. Distinct from a\n * format error so consumers can pattern-match in catch blocks\n * (corrupted-in-transit vs malformed-by-producer).\n *\n * Note: this function does NOT take a passphrase. The dump JSON\n * inside the body still contains encrypted records — restoring\n * the vault requires `vault.load(dumpJson, passphrase)`\n * after this call. Splitting the layers keeps the bundle module\n * free of crypto concerns and lets the same code feed format\n * inspectors that never decrypt anything.\n */\nexport async function readNoydbBundle(\n bytes: Uint8Array,\n): Promise<NoydbBundleReadResult> {\n const { header, bodyOffset, algo } = parsePrefixAndHeader(bytes)\n const body = bytes.slice(bodyOffset)\n\n // Length check before hash check — a length mismatch is the\n // cheapest tamper signal and produces a more actionable error.\n if (body.length !== header.bodyBytes) {\n throw new BundleIntegrityError(\n `body length ${body.length} does not match header.bodyBytes ` +\n `${header.bodyBytes}. The bundle was truncated or padded ` +\n `between write and read.`,\n )\n }\n\n const actualSha = await sha256Hex(body)\n if (actualSha !== header.bodySha256) {\n throw new BundleIntegrityError(\n `body sha256 ${actualSha} does not match header.bodySha256 ` +\n `${header.bodySha256}. The bundle bytes were modified between ` +\n `write and read — refuse to decompress.`,\n )\n }\n\n let dumpBytes: Uint8Array\n if (algo === COMPRESSION_NONE) {\n dumpBytes = body\n } else {\n const streamFormat: CompressionFormat =\n algo === COMPRESSION_BROTLI ? ('br' as CompressionFormat) : 'gzip'\n try {\n dumpBytes = await pumpThroughStream(body, new DecompressionStream(streamFormat))\n } catch (err) {\n throw new BundleIntegrityError(\n `decompression failed: ${(err as Error).message}. The bundle ` +\n `passed the integrity hash but the body is not valid ` +\n `${streamFormat} data — likely a producer bug.`,\n )\n }\n }\n\n const dumpJson = new TextDecoder('utf-8', { fatal: true }).decode(dumpBytes)\n return { header, dumpJson }\n}\n","/**\n * Standard Schema v1 integration.\n *\n * This file is the entry point for **schema validation**. 
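At its core it wraps the one\n * protocol call every implementation shares (sketch):\n *\n * ```ts\n * const result = await schema['~standard'].validate(input)\n * if (result.issues?.length) console.error(result.issues)\n * ```\n *\n * 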
Any\n * validator that implements the [Standard Schema v1\n * protocol](https://standardschema.dev) — Zod, Valibot, ArkType, Effect\n * Schema, etc. — can be attached to a `Collection` or `defineNoydbStore`\n * and will:\n *\n * 1. Validate the record BEFORE encryption on `put()` — bad data is\n * rejected at the store boundary with a rich issue list.\n * 2. Validate the record AFTER decryption on `get()`/`list()`/`query()`\n * — stored data that has drifted from the current schema throws\n * loudly instead of silently propagating garbage to the UI.\n *\n * ## Why vendor the types?\n *\n * Standard Schema is a protocol, not a library. The spec is <200 lines of\n * TypeScript and has no runtime. There's an official `@standard-schema/spec`\n * types package on npm, but pulling it in would add a dependency edge\n * purely for type definitions. Vendoring the minimal surface keeps\n * `@noy-db/core` at **zero runtime dependencies** and gives us freedom to\n * evolve the helpers without a version-lock on the spec package.\n *\n * If the spec changes in a breaking way (unlikely — it's frozen at v1),\n * we update this file and bump our minor.\n *\n * ## Why not just run `schema.parse(value)` directly?\n *\n * Because then we'd be locked to whichever validator happens to have\n * `.parse`. Standard Schema's `'~standard'.validate` contract is the same\n * across every implementation and includes a structured issues list,\n * which is much more useful than a thrown error for programmatic error\n * handling (e.g., rendering field-level messages in a Vue component).\n */\n\nimport { SchemaValidationError } from './errors.js'\n\n/**\n * The Standard Schema v1 protocol. A schema is any object that exposes a\n * `'~standard'` property with `version: 1` and a `validate` function.\n *\n * The type parameters are:\n * - `Input` — the type accepted by `validate` (what the user passes in)\n * - `Output` — the type produced by `validate` (what we store/return,\n * may differ from Input if the schema transforms or coerces)\n *\n * In most cases `Input === Output`, but validators that transform\n * (Zod's `.transform`, Valibot's `transform`, etc.) can narrow or widen.\n *\n * We intentionally keep the `types` field `readonly` and optional — the\n * spec marks it as optional because it's only used for inference, and\n * not every implementation bothers populating it at runtime.\n */\nexport interface StandardSchemaV1<Input = unknown, Output = Input> {\n readonly '~standard': {\n readonly version: 1\n readonly vendor: string\n readonly validate: (\n value: unknown,\n ) =>\n | StandardSchemaV1SyncResult<Output>\n | Promise<StandardSchemaV1SyncResult<Output>>\n readonly types?:\n | {\n readonly input: Input\n readonly output: Output\n }\n | undefined\n }\n}\n\n/**\n * The result of a single call to `schema['~standard'].validate`. Either\n * `{ value }` on success or `{ issues }` on failure — never both.\n *\n * The spec allows `issues` to be undefined on success (and some\n * validators leave it that way), so consumers should discriminate on\n * `issues?.length` rather than on truthiness of `value`.\n */\nexport type StandardSchemaV1SyncResult<Output> =\n | { readonly value: Output; readonly issues?: undefined }\n | {\n readonly value?: undefined\n readonly issues: readonly StandardSchemaV1Issue[]\n }\n\n/**\n * A single validation issue. 
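For\n * illustration only (message text is validator-specific), a failed field\n * check typically surfaces as\n * `{ message: 'Expected number', path: ['amount'] }`.\n *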
The `message` is always present; the `path`\n * is optional and points at the offending field when the schema tracks\n * it (virtually every validator does for object types).\n *\n * The path is deliberately permissive — both a plain `PropertyKey` and a\n * `{ key }` wrapper are allowed so validators that wrap path segments in\n * objects (Zod does this in some modes) don't need special handling.\n */\nexport interface StandardSchemaV1Issue {\n readonly message: string\n readonly path?:\n | ReadonlyArray<PropertyKey | { readonly key: PropertyKey }>\n | undefined\n}\n\n/**\n * Infer the output type of a Standard Schema. Consumers use this to\n * pull the type out of a schema instance when they want to declare a\n * Collection<T> or defineNoydbStore<T> with `T` derived from the schema.\n *\n * Example:\n * ```ts\n * const InvoiceSchema = z.object({ id: z.string(), amount: z.number() })\n * type Invoice = InferOutput<typeof InvoiceSchema>\n * ```\n */\nexport type InferOutput<T extends StandardSchemaV1> =\n T extends StandardSchemaV1<unknown, infer O> ? O : never\n\n/**\n * Validate an input value against a schema. Throws\n * `SchemaValidationError` if the schema rejects, with the rich issue\n * list attached. Otherwise returns the (possibly transformed) output\n * value.\n *\n * The `context` string is included in the thrown error's message so the\n * caller knows where the failure happened (e.g. `\"put(inv-001)\"`) without\n * every caller having to wrap the throw in a try/catch.\n *\n * This function is ALWAYS async because some validators (notably Effect\n * Schema and Zod's `.refine` with async predicates) can return a\n * Promise. We `await` the result unconditionally to normalize the\n * contract — the extra microtask is free compared to the cost of an\n * encrypt/decrypt round-trip.\n */\nexport async function validateSchemaInput<Output>(\n schema: StandardSchemaV1<unknown, Output>,\n value: unknown,\n context: string,\n): Promise<Output> {\n const result = await schema['~standard'].validate(value)\n if (result.issues !== undefined && result.issues.length > 0) {\n throw new SchemaValidationError(\n `Schema validation failed on ${context}: ${summarizeIssues(result.issues)}`,\n result.issues,\n 'input',\n )\n }\n // Safe: the spec guarantees `value` is present when `issues` is absent.\n return result.value as Output\n}\n\n/**\n * Validate an already-stored value coming OUT of the collection. This\n * is a distinct helper from `validateSchemaInput` because the error\n * semantics differ: an output-validation failure means the data in\n * storage has drifted from the current schema (an unexpected state),\n * whereas an input-validation failure means the user passed bad data\n * (an expected state for a UI that isn't guarding its inputs).\n *\n * We still throw — silently returning bad data would be worse — but\n * the error carries `direction: 'output'` so upstream code (and a\n * potential migrate hook) can distinguish the two cases.\n */\nexport async function validateSchemaOutput<Output>(\n schema: StandardSchemaV1<unknown, Output>,\n value: unknown,\n context: string,\n): Promise<Output> {\n const result = await schema['~standard'].validate(value)\n if (result.issues !== undefined && result.issues.length > 0) {\n throw new SchemaValidationError(\n `Stored data for ${context} does not match the current schema — ` +\n `schema drift? 
${summarizeIssues(result.issues)}`,\n result.issues,\n 'output',\n )\n }\n return result.value as Output\n}\n\n/**\n * Produce a short human-readable summary of an issue list for the\n * thrown error's message. The full issue array is still attached to the\n * error as a property — this is only for the `.message` string that\n * shows up in console.error / stack traces.\n *\n * Format: `field: message; field2: message2` (up to 3 issues, then `…`).\n * Issues without a path are shown as `root: message`.\n */\nfunction summarizeIssues(\n issues: readonly StandardSchemaV1Issue[],\n): string {\n const shown = issues.slice(0, 3).map((issue) => {\n const pathStr = formatPath(issue.path)\n return `${pathStr}: ${issue.message}`\n })\n const suffix = issues.length > 3 ? ` (+${issues.length - 3} more)` : ''\n return shown.join('; ') + suffix\n}\n\nfunction formatPath(\n path: StandardSchemaV1Issue['path'],\n): string {\n if (!path || path.length === 0) return 'root'\n return path\n .map((segment) =>\n typeof segment === 'object' && segment !== null\n ? String(segment.key)\n : String(segment),\n )\n .join('.')\n}\n","import type { NoydbStore, EncryptedEnvelope, HistoryOptions, PruneOptions } from '../types.js'\n\n/**\n * History storage convention:\n * Collection: `_history`\n * ID format: `{collection}:{recordId}:{paddedVersion}`\n * Version is zero-padded to 10 digits for lexicographic sorting.\n */\n\nconst HISTORY_COLLECTION = '_history'\nconst VERSION_PAD = 10\n\nfunction historyId(collection: string, recordId: string, version: number): string {\n return `${collection}:${recordId}:${String(version).padStart(VERSION_PAD, '0')}`\n}\n\n// Unused today, kept for future history-id parsing utilities.\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nfunction parseHistoryId(id: string): { collection: string; recordId: string; version: number } | null {\n const lastColon = id.lastIndexOf(':')\n if (lastColon < 0) return null\n const versionStr = id.slice(lastColon + 1)\n const rest = id.slice(0, lastColon)\n const firstColon = rest.indexOf(':')\n if (firstColon < 0) return null\n return {\n collection: rest.slice(0, firstColon),\n recordId: rest.slice(firstColon + 1),\n version: parseInt(versionStr, 10),\n }\n}\n\nfunction matchesPrefix(id: string, collection: string, recordId?: string): boolean {\n if (recordId) {\n return id.startsWith(`${collection}:${recordId}:`)\n }\n return id.startsWith(`${collection}:`)\n}\n\n/** Save a history entry (a complete encrypted envelope snapshot). */\nexport async function saveHistory(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string,\n envelope: EncryptedEnvelope,\n): Promise<void> {\n const id = historyId(collection, recordId, envelope._v)\n await adapter.put(vault, HISTORY_COLLECTION, id, envelope)\n}\n\n/** Get history entries for a record, sorted newest-first. 
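Time filters compare\n * against each envelope's `_ts`, and `limit` caps the result after\n * filtering, so e.g. `getHistory(adapter, 'acme', 'invoices', 'inv-001',\n * { limit: 5 })` yields at most the five newest snapshots. 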
*/\nexport async function getHistory(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string,\n options?: HistoryOptions,\n): Promise<EncryptedEnvelope[]> {\n const allIds = await adapter.list(vault, HISTORY_COLLECTION)\n const matchingIds = allIds\n .filter(id => matchesPrefix(id, collection, recordId))\n .sort()\n .reverse() // newest first\n\n const entries: EncryptedEnvelope[] = []\n\n for (const id of matchingIds) {\n const envelope = await adapter.get(vault, HISTORY_COLLECTION, id)\n if (!envelope) continue\n\n // Apply time filters\n if (options?.from && envelope._ts < options.from) continue\n if (options?.to && envelope._ts > options.to) continue\n\n entries.push(envelope)\n\n if (options?.limit && entries.length >= options.limit) break\n }\n\n return entries\n}\n\n/** Get a specific version's envelope from history. */\nexport async function getVersionEnvelope(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string,\n version: number,\n): Promise<EncryptedEnvelope | null> {\n const id = historyId(collection, recordId, version)\n return adapter.get(vault, HISTORY_COLLECTION, id)\n}\n\n/** Prune history entries. Returns the number of entries deleted. */\nexport async function pruneHistory(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string | undefined,\n options: PruneOptions,\n): Promise<number> {\n const allIds = await adapter.list(vault, HISTORY_COLLECTION)\n const matchingIds = allIds\n .filter(id => recordId ? matchesPrefix(id, collection, recordId) : matchesPrefix(id, collection))\n .sort()\n\n let toDelete: string[] = []\n\n if (options.keepVersions !== undefined) {\n // Keep only the N most recent, delete the rest\n const keep = options.keepVersions\n if (matchingIds.length > keep) {\n toDelete = matchingIds.slice(0, matchingIds.length - keep)\n }\n }\n\n if (options.beforeDate) {\n // Delete entries older than the specified date\n for (const id of matchingIds) {\n if (toDelete.includes(id)) continue\n const envelope = await adapter.get(vault, HISTORY_COLLECTION, id)\n if (envelope && envelope._ts < options.beforeDate) {\n toDelete.push(id)\n }\n }\n }\n\n // Deduplicate\n const uniqueDeletes = [...new Set(toDelete)]\n\n for (const id of uniqueDeletes) {\n await adapter.delete(vault, HISTORY_COLLECTION, id)\n }\n\n return uniqueDeletes.length\n}\n\n/** Clear all history for a vault, optionally scoped to a collection or record. 
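Returns the number\n * of entries deleted; with no scope arguments, every `_history` entry in\n * the vault is removed. 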
*/\nexport async function clearHistory(\n adapter: NoydbStore,\n vault: string,\n collection?: string,\n recordId?: string,\n): Promise<number> {\n const allIds = await adapter.list(vault, HISTORY_COLLECTION)\n let toDelete: string[]\n\n if (collection && recordId) {\n toDelete = allIds.filter(id => matchesPrefix(id, collection, recordId))\n } else if (collection) {\n toDelete = allIds.filter(id => matchesPrefix(id, collection))\n } else {\n toDelete = allIds\n }\n\n for (const id of toDelete) {\n await adapter.delete(vault, HISTORY_COLLECTION, id)\n }\n\n return toDelete.length\n}\n","/**\n * Time-machine queries — point-in-time reads reconstructed from the\n * existing history + ledger infrastructure.\n *\n * ## Usage\n *\n * ```ts\n * const vault = await db.openVault('acme', { passphrase })\n * const q1End = vault.at('2026-03-31T23:59:59Z')\n * const invoice = await q1End.collection<Invoice>('invoices').get('inv-001')\n * // → the record as it stood at the close of Q1 2026\n * ```\n *\n * ## How it works\n *\n * Every write path already fans out into two persistence lanes:\n *\n * 1. `saveHistory(...)` persists a **full encrypted envelope snapshot**\n * per version under the `_history` collection (one envelope per\n * version, keyed by `{collection}:{id}:{paddedVersion}`). Each\n * envelope carries its own `_ts` (the write timestamp).\n * 2. `ledger.append(...)` appends a hash-chained audit entry that\n * records the `op` (put / delete), `version`, and `ts`.\n *\n * Reconstruction at a target timestamp T is therefore:\n *\n * - Find the newest history envelope for `(collection, id)` whose\n * `_ts ≤ T` — that's the state the record was in at T.\n * - Check the ledger for any `op: 'delete'` entry for the same\n * `(collection, id)` with `entry.ts` in `(latestEnvelope._ts, T]` —\n * if present, the record was deleted before T, so return `null`.\n * - Decrypt the surviving envelope with the current collection DEK\n * (DEKs are per-collection but stable across versions — the same\n * key encrypts v1 and v15 of a record).\n *\n * No delta replay. The existing `history.ts` module already stores\n * complete snapshots; we just pick the right one.\n *\n * ## Read-only contract\n *\n * Every write method on `CollectionInstant` throws\n * {@link ReadOnlyAtInstantError}. A historical view is a *read*\n * surface — mutating the past would require either a branch/shadow\n * mechanism (tracked under shadow vaults) or a rewrite of\n * history, which breaks the ledger's tamper-evidence guarantee.\n *\n * @module\n */\nimport type { EncryptedEnvelope, NoydbStore } from '../types.js'\nimport type { LedgerStore } from './ledger/store.js'\nimport { getHistory } from './history.js'\nimport { decrypt } from '../crypto.js'\nimport { ReadOnlyAtInstantError } from '../errors.js'\n\n/**\n * Narrow view of a {@link Vault}'s internals that\n * {@link VaultInstant} needs. Passed in by `Vault.at()` rather than\n * constructed here so all crypto + adapter access stays inside the\n * Vault class.\n *\n * Not exported from the public barrel — consumers should get a\n * `VaultInstant` via `vault.at(ts)`, never by constructing one\n * directly.\n */\nexport interface VaultEngine {\n readonly adapter: NoydbStore\n /** Vault name (the compartment). */\n readonly name: string\n /**\n * `true` when the vault was opened with a passphrase (the normal\n * case). 
`false` in plaintext-mode vaults (`encrypt: false`) — in\n * that case `envelope._data` is raw JSON and we skip the DEK lookup.\n */\n readonly encrypted: boolean\n /**\n * Resolves the DEK used to decrypt a given collection's envelopes.\n * Not called when `encrypted` is false.\n */\n getDEK(collection: string): Promise<CryptoKey>\n /**\n * Lazily-initialised ledger. We consult it to detect deletes that\n * happened between the latest history snapshot and the target\n * timestamp. `null` when history is disabled for this vault — in\n * that case time-machine reads fall back to history-only\n * reconstruction (which may miss deletes).\n */\n getLedger(): LedgerStore | null\n}\n\n/**\n * A vault at a fixed instant. Produced by `vault.at(timestamp)`.\n * Carries no session state of its own — every read is a fresh\n * lookup through the vault's adapter.\n *\n * Cheap to construct; safe to throw away. Create one per query.\n */\nexport class VaultInstant {\n constructor(\n private readonly engine: VaultEngine,\n /** Fully-resolved target timestamp (ISO-8601 UTC). */\n public readonly timestamp: string,\n ) {}\n\n /** Get a point-in-time view of a collection. */\n collection<T = unknown>(name: string): CollectionInstant<T> {\n return new CollectionInstant<T>(this.engine, this.timestamp, name)\n }\n}\n\n/**\n * A read-only collection view anchored to a past instant.\n *\n * Every write method throws {@link ReadOnlyAtInstantError} — see the\n * module docstring for why. The read surface is intentionally smaller\n * than the live {@link Collection}: `get` and `list` cover the\n * \"what did the books look like on date X\" use case without pulling\n * in the full query DSL / joins / aggregates at this stage. Follow-up\n * work tracked under.\n */\nexport class CollectionInstant<T = unknown> {\n constructor(\n private readonly engine: VaultEngine,\n private readonly targetTs: string,\n public readonly name: string,\n ) {}\n\n /**\n * Return the record as it existed at the target timestamp, or\n * `null` if the record had not been created yet or had already been\n * deleted by then.\n */\n async get(id: string): Promise<T | null> {\n const envelope = await this.resolveEnvelope(id)\n if (!envelope) return null\n const plaintext = this.engine.encrypted\n ? await decrypt(envelope._iv, envelope._data, await this.engine.getDEK(this.name))\n : envelope._data\n return JSON.parse(plaintext) as T\n }\n\n /**\n * IDs of records that existed (had at least one `put` and were not\n * subsequently deleted) at the target timestamp.\n *\n * Implemented as a linear scan over history + ledger. 
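(Candidate ids are\n * gathered from both `_history` keys and the live collection;\n * `resolveEnvelope` then drops records that did not yet exist, or were\n * already deleted, at the target instant.)\n *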
Performance\n * is bounded by total history size (not live-vault size), so the\n * memory-first vault-scale cap (1K–50K records × average history\n * depth) still applies.\n */\n async list(): Promise<string[]> {\n const historyIds = await collectHistoryIds(this.engine.adapter, this.engine.name, this.name)\n const liveIds = await this.engine.adapter.list(this.engine.name, this.name)\n const candidateIds = new Set<string>([...historyIds, ...liveIds])\n const alive: string[] = []\n for (const id of candidateIds) {\n const env = await this.resolveEnvelope(id)\n if (env) alive.push(id)\n }\n return alive.sort()\n }\n\n // ── write guards ───────────────────────────────────────────────────\n\n async put(_id: string, _record: T): Promise<never> {\n throw new ReadOnlyAtInstantError('put', this.targetTs)\n }\n async delete(_id: string): Promise<never> {\n throw new ReadOnlyAtInstantError('delete', this.targetTs)\n }\n async update(_id: string, _patch: Partial<T>): Promise<never> {\n throw new ReadOnlyAtInstantError('update', this.targetTs)\n }\n\n // ── internals ─────────────────────────────────────────────────────\n\n /**\n * Return the envelope that represents the record's state at\n * `targetTs`, accounting for deletes. `null` if the record didn't\n * exist at that instant.\n *\n * ## Why we use the ledger as the authoritative timeline\n *\n * The per-version history snapshots saved by `saveHistory()` do\n * carry a `_ts` field, but that timestamp is the moment the\n * snapshot was *captured* (i.e. the instant right before the\n * subsequent overwrite), not the original write time. The ledger,\n * by contrast, records `ts` at the moment of each `put` / `delete`\n * — it's the only source that tracks the real timeline. So:\n *\n * 1. Walk the ledger; find the latest entry for `(collection, id)`\n * with `ts ≤ targetTs`.\n * 2. If that entry is a `delete`, the record was gone at the\n * target instant — return null.\n * 3. Otherwise it's a `put` with a specific `version`. Load the\n * envelope for that version from history, falling back to the\n * live collection for the most recent version.\n *\n * ## Fallback when the ledger is disabled\n *\n * If the vault has history disabled, `getLedger()` returns null and\n * we fall back to comparing envelope `_ts` fields. 
This is\n * approximate: it gets the *last write* right but may misplace\n * intermediate versions in time; adopters needing accurate time-machine\n * reads should leave history enabled.\n */\n private async resolveEnvelope(id: string): Promise<EncryptedEnvelope | null> {\n const ledger = this.engine.getLedger()\n if (ledger) {\n return this.resolveViaLedger(id, ledger)\n }\n return this.resolveViaEnvelopeTs(id)\n }\n\n private async resolveViaLedger(id: string, ledger: LedgerStore): Promise<EncryptedEnvelope | null> {\n const entries = await ledger.entries()\n // Entries are already ordered by index, which is the mutation order.\n let latest: { op: 'put' | 'delete'; version: number } | null = null\n for (const e of entries) {\n if (e.collection !== this.name || e.id !== id) continue\n if (e.ts > this.targetTs) break // entries are time-ordered by index\n latest = { op: e.op, version: e.version }\n }\n if (!latest) return null\n if (latest.op === 'delete') return null\n return this.loadVersion(id, latest.version)\n }\n\n private async resolveViaEnvelopeTs(id: string): Promise<EncryptedEnvelope | null> {\n const history = await getHistory(\n this.engine.adapter, this.engine.name, this.name, id,\n )\n const live = await this.engine.adapter.get(this.engine.name, this.name, id)\n const byVersion = new Map<number, EncryptedEnvelope>()\n for (const e of history) byVersion.set(e._v, e)\n if (live) byVersion.set(live._v, live)\n const sorted = [...byVersion.values()].sort((a, b) =>\n a._ts < b._ts ? 1 : a._ts > b._ts ? -1 : 0,\n )\n return sorted.find((e) => e._ts <= this.targetTs) ?? null\n }\n\n /**\n * Fetch the envelope for a specific version. The live record (most\n * recent put) lives in the main collection; prior versions live in\n * `_history`. We check live first: when the requested version is\n * still the current one, that single lookup answers immediately and\n * we skip `_history`. Older versions (including the last-live\n * version of a since-deleted record) fall through to the direct\n * history lookup below.\n */\n private async loadVersion(id: string, version: number): Promise<EncryptedEnvelope | null> {\n const live = await this.engine.adapter.get(this.engine.name, this.name, id)\n if (live && live._v === version) return live\n\n // Direct lookup by (collection, id, version) — avoids scanning all history.\n const historyId = `${this.name}:${id}:${String(version).padStart(10, '0')}`\n return await this.engine.adapter.get(this.engine.name, '_history', historyId)\n }\n}\n\n/**\n * Scan the `_history` collection once and collect every distinct\n * `recordId` for the given collection. 
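(For example, the key\n * `invoices:inv-001:0000000007` contributes recordId `inv-001`.)\n *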
History keys follow the\n * shape `<collection>:<recordId>:<paddedVersion>`; we split on the\n * last two colons (delimiter-safe because `paddedVersion` is\n * exactly 10 digits).\n */\nasync function collectHistoryIds(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n): Promise<string[]> {\n const all = await adapter.list(vault, '_history')\n const prefix = `${collection}:`\n const seen = new Set<string>()\n for (const key of all) {\n if (!key.startsWith(prefix)) continue\n const lastColon = key.lastIndexOf(':')\n if (lastColon <= prefix.length) continue\n const middle = key.slice(prefix.length, lastColon)\n seen.add(middle)\n }\n return [...seen]\n}\n","/**\n * Shadow vaults — `vault.frame()` returns a read-only view of the\n * CURRENT vault state.\n *\n * Companion to {@link VaultInstant} from `history/time-machine.ts`:\n *\n * | Type | Reads from | Use case |\n * |------|------------|----------|\n * | `VaultInstant` | past snapshots (ledger + history) | \"books on date X\" |\n * | `VaultFrame` | live vault state | screen-share / demo / audit |\n *\n * ```ts\n * const readonly = vault.frame()\n * const invoices = await readonly.collection<Invoice>('invoices').list()\n * await readonly.collection<Invoice>('invoices').put(...)\n * // → throws ReadOnlyFrameError\n * ```\n *\n * ## Contract\n *\n * Every write method on {@link CollectionFrame} throws\n * {@link ReadOnlyFrameError}. Reads delegate to the underlying\n * collection, so validation, locale handling, and caching all work\n * exactly as they do on the live collection.\n *\n * ## Security note: behaviour-enforced, not cryptographically-enforced\n *\n * A VaultFrame rejects writes by contract in the JavaScript layer.\n * It does NOT strip the DEKs from the underlying keyring — the same\n * in-memory keys that decrypt records could, in principle, encrypt\n * new writes via a hand-crafted adapter call. Cryptographic\n * enforcement (keyring variants with the write half of each DEK\n * removed) is hierarchical-access work. Use a VaultFrame to\n * prevent *accidental* writes in a read-scoped flow — do not rely on\n * it as a security boundary against a hostile caller sharing the\n * same process.\n *\n * @module\n */\nimport type { Collection } from '../collection.js'\nimport type { Vault } from '../vault.js'\nimport type { LocaleReadOptions } from '../types.js'\nimport { ReadOnlyFrameError } from '../errors.js'\n\n/**\n * A read-only view of a vault's current state. Produced by\n * `vault.frame()`. Cheap to construct; safe to throw away.\n */\nexport class VaultFrame {\n constructor(private readonly vault: Vault) {}\n\n /**\n * Get a read-only view of one collection. The returned\n * {@link CollectionFrame} delegates all reads to the underlying\n * live collection — cache, locale handling, and validation all\n * work identically to the live collection.\n */\n collection<T = unknown>(name: string): CollectionFrame<T> {\n return new CollectionFrame<T>(this.vault.collection<T>(name), name)\n }\n\n /** List all collection names visible in the underlying vault. */\n async collections(): Promise<string[]> {\n return this.vault.collections()\n }\n}\n\n/**\n * Read-only collection view. All write methods throw\n * {@link ReadOnlyFrameError}; all read methods delegate to the\n * underlying live {@link Collection}.\n */\nexport class CollectionFrame<T = unknown> {\n constructor(\n private readonly inner: Collection<T>,\n /** The underlying collection name. 
Captured at construction so\n * we don't need to peek into the private Collection state. */\n public readonly name: string,\n ) {}\n\n // ── reads (delegated) ──────────────────────────────────────────────\n\n get(id: string, locale?: LocaleReadOptions): Promise<T | null> {\n return this.inner.get(id, locale)\n }\n\n list(locale?: LocaleReadOptions): Promise<T[]> {\n return this.inner.list(locale)\n }\n\n /**\n * Return the chainable query builder. Terminals like `.toArray()`,\n * `.first()`, `.count()`, `.aggregate()` all work; the builder has\n * no write surface of its own, so exposing it directly is safe.\n */\n query(...args: Parameters<Collection<T>['query']>): ReturnType<Collection<T>['query']> {\n return this.inner.query(...args)\n }\n\n /** History reads — allowed (history is read-only by nature). */\n history(...args: Parameters<Collection<T>['history']>): ReturnType<Collection<T>['history']> {\n return this.inner.history(...args)\n }\n\n getVersion(id: string, version: number): Promise<T | null> {\n return this.inner.getVersion(id, version)\n }\n\n // ── write guards ──────────────────────────────────────────────────\n\n async put(_id: string, _record: T): Promise<never> {\n throw new ReadOnlyFrameError('put')\n }\n async delete(_id: string): Promise<never> {\n throw new ReadOnlyFrameError('delete')\n }\n async update(_id: string, _patch: Partial<T>): Promise<never> {\n throw new ReadOnlyFrameError('update')\n }\n async revert(_id: string, _version: number): Promise<never> {\n throw new ReadOnlyFrameError('revert')\n }\n async putMany(_entries: ReadonlyArray<readonly [string, T]>): Promise<never> {\n throw new ReadOnlyFrameError('putMany')\n }\n async deleteMany(_ids: readonly string[]): Promise<never> {\n throw new ReadOnlyFrameError('deleteMany')\n }\n}\n","/**\n * Consent boundaries — per-access audit log.\n *\n * ```ts\n * const audit = await vault.withConsent(\n * { purpose: 'quarterly-review', consentHash: '7f3a...' },\n * async () => {\n * const invoices = await vault.collection<Invoice>('invoices').list()\n * return invoices\n * },\n * )\n *\n * const log = await vault.consentAudit({ since: '2026-01-01T00:00:00Z' })\n * // → entries: { actor, purpose, consentHash, ts, op, collection, id }\n * ```\n *\n * ## Contract\n *\n * Every `get` / `put` / `delete` that happens inside a `withConsent`\n * callback writes one entry to the reserved `_consent_audit`\n * collection. Entries are encrypted with the vault's consent-audit\n * DEK (separate from per-user-collection DEKs so access-log queries\n * don't require unwrapping individual collection keys). Outside a\n * `withConsent` scope, no entries are written — consent is\n * opt-in by design (GDPR Art. 7: *demonstrable*, *specific*\n * consent).\n *\n * ## Why store the hash, not the consent text?\n *\n * The `consentHash` is the sha256 of whatever consent receipt the\n * actor presented (a signed GDPR banner click, a HIPAA authorisation\n * PDF, an API-level `X-Consent-Hash` header). Storing only the hash:\n *\n * 1. Keeps the audit log small and indexable.\n * 2. Preserves zero-knowledge at the adapter — adapters see\n * ciphertext envelopes of `{ actor, purpose, consentHash, ts,\n * op, collection, id }`, never the consent record itself.\n * 3. Lets the regulator verify a presented consent doc matches\n * the logged hash at audit time without the system ever\n * possessing the doc.\n *\n * ## Concurrency\n *\n * The consent context lives on the {@link Vault} instance. 
Two\n * concurrent `withConsent` calls on the same Vault would stomp each\n * other — documented limitation; adopters needing per-flight scope\n * should use separate Vault instances or an AsyncLocalStorage shim.\n *\n * @module\n */\nimport type { EncryptedEnvelope, NoydbStore } from '../types.js'\nimport { encrypt, decrypt } from '../crypto.js'\nimport { generateULID } from '../bundle/ulid.js'\n\n/** Reserved collection for consent-audit entries. */\nexport const CONSENT_AUDIT_COLLECTION = '_consent_audit'\n\n/**\n * The consent scope active for a block of work. Set via\n * `vault.withConsent()`; observed by the collection's access hooks.\n */\nexport interface ConsentContext {\n /**\n * What this access is for. Used by the audit query (`consentAudit\n * ({ purpose })`) and carried in the stored entry. Free-form; the\n * regulator or compliance tooling decides the vocabulary.\n */\n readonly purpose: string\n /**\n * Hex-encoded sha256 of whatever consent artefact the actor\n * presented. Stored as-is in each entry.\n */\n readonly consentHash: string\n}\n\n/** Access operation recorded in an audit entry. */\nexport type ConsentOp = 'get' | 'put' | 'delete'\n\n/** One consent-audit record, as decrypted for the caller. */\nexport interface ConsentAuditEntry {\n /** ULID — stable insertion-order key. */\n readonly id: string\n readonly timestamp: string\n readonly actor: string\n readonly purpose: string\n readonly consentHash: string\n readonly op: ConsentOp\n readonly collection: string\n readonly recordId: string\n}\n\n/** Filter passed to `vault.consentAudit()`. */\nexport interface ConsentAuditFilter {\n /** Only entries at or after this ISO timestamp. */\n readonly since?: string\n /** Only entries at or before this ISO timestamp. */\n readonly until?: string\n /** Match entries targeting this collection. */\n readonly collection?: string\n /** Match entries written by this actor. */\n readonly actor?: string\n /** Match entries with this purpose. */\n readonly purpose?: string\n}\n\n/**\n * Write one audit entry. Called by Vault's onAccess hook when a\n * consent context is active.\n */\nexport async function writeConsentEntry(\n adapter: NoydbStore,\n vault: string,\n encrypted: boolean,\n entry: Omit<ConsentAuditEntry, 'id' | 'timestamp'>,\n getDEK: (collection: string) => Promise<CryptoKey>,\n): Promise<void> {\n const id = generateULID()\n const full: ConsentAuditEntry = {\n id,\n timestamp: new Date().toISOString(),\n ...entry,\n }\n const envelope = await buildEnvelope(full, encrypted, getDEK)\n await adapter.put(vault, CONSENT_AUDIT_COLLECTION, id, envelope)\n}\n\n/** Load + decrypt + filter all entries. 
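Entries come back in\n * insertion order: ids are ULIDs (lexicographically time-ordered) and are\n * sorted before loading. 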
*/\nexport async function loadConsentEntries(\n adapter: NoydbStore,\n vault: string,\n encrypted: boolean,\n getDEK: (collection: string) => Promise<CryptoKey>,\n filter: ConsentAuditFilter = {},\n): Promise<ConsentAuditEntry[]> {\n const ids = await adapter.list(vault, CONSENT_AUDIT_COLLECTION)\n const entries: ConsentAuditEntry[] = []\n\n for (const id of ids.sort()) {\n const envelope = await adapter.get(vault, CONSENT_AUDIT_COLLECTION, id)\n if (!envelope) continue\n const entry = await decryptEntry(envelope, encrypted, getDEK)\n if (!matchesFilter(entry, filter)) continue\n entries.push(entry)\n }\n return entries\n}\n\n// ── internals ──────────────────────────────────────────────────────\n\nasync function buildEnvelope(\n entry: ConsentAuditEntry,\n encrypted: boolean,\n getDEK: (collection: string) => Promise<CryptoKey>,\n): Promise<EncryptedEnvelope> {\n const json = JSON.stringify(entry)\n if (!encrypted) {\n return {\n _noydb: 1,\n _v: 1,\n _ts: entry.timestamp,\n _iv: '',\n _data: json,\n }\n }\n const dek = await getDEK(CONSENT_AUDIT_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n return {\n _noydb: 1,\n _v: 1,\n _ts: entry.timestamp,\n _iv: iv,\n _data: data,\n }\n}\n\nasync function decryptEntry(\n envelope: EncryptedEnvelope,\n encrypted: boolean,\n getDEK: (collection: string) => Promise<CryptoKey>,\n): Promise<ConsentAuditEntry> {\n const json = encrypted\n ? await decrypt(envelope._iv, envelope._data, await getDEK(CONSENT_AUDIT_COLLECTION))\n : envelope._data\n return JSON.parse(json) as ConsentAuditEntry\n}\n\nfunction matchesFilter(entry: ConsentAuditEntry, f: ConsentAuditFilter): boolean {\n if (f.since && entry.timestamp < f.since) return false\n if (f.until && entry.timestamp > f.until) return false\n if (f.collection && entry.collection !== f.collection) return false\n if (f.actor && entry.actor !== f.actor) return false\n if (f.purpose && entry.purpose !== f.purpose) return false\n return true\n}\n","/**\n * Foreign-key references — the soft-FK mechanism.\n *\n * A collection declares its references as metadata at construction\n * time:\n *\n * ```ts\n * import { ref } from '@noy-db/hub'\n *\n * const invoices = company.collection<Invoice>('invoices', {\n * refs: {\n * clientId: ref('clients'), // default: strict\n * categoryId: ref('categories', 'warn'),\n * parentId: ref('invoices', 'cascade'), // self-reference OK\n * },\n * })\n * ```\n *\n * Three modes:\n *\n * - **strict** — the default. `put()` rejects records whose\n * reference target doesn't exist, and `delete()` of the target\n * rejects if any strict-referencing records still exist.\n * Matches SQL's default FK semantics.\n *\n * - **warn** — both operations succeed unconditionally. Broken\n * references surface only through\n * `vault.checkIntegrity()`, which walks every collection\n * and reports orphans. Use when you want soft validation for\n * imports from messy sources.\n *\n * - **cascade** — `put()` is same as warn. `delete()` of the\n * target deletes every referencing record. Cycles are detected\n * and broken via an in-progress set, so mutual cascades\n * terminate instead of recursing forever.\n *\n * Cross-vault refs are explicitly rejected: if the target\n * name contains a `/`, `ref()` throws `RefScopeError`. Cross-\n * vault refs need an auth story (multi-keyring reads) that\n * doesn't ship — tracked for.\n */\n\nimport { NoydbError } from './errors.js'\n\n/** The three enforcement modes. Default for new refs is `'strict'`. 
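The mode is the\n * optional second argument to `ref()`, so `ref('clients')` is equivalent\n * to `ref('clients', 'strict')`. 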
*/\nexport type RefMode = 'strict' | 'warn' | 'cascade'\n\n/**\n * Descriptor returned by `ref()`. Collections accept a\n * `Record<string, RefDescriptor>` in their options. The key is the\n * field name on the record (top-level only — dotted paths are out of\n * scope), the value describes which target collection the\n * field references and under what mode.\n *\n * The descriptor carries only plain data so it can be serialized,\n * passed around, and introspected without any class machinery.\n */\nexport interface RefDescriptor {\n readonly target: string\n readonly mode: RefMode\n}\n\n/**\n * Thrown when a strict reference is violated — either `put()` with a\n * missing target id, or `delete()` of a target that still has\n * strict-referencing records.\n *\n * Carries structured detail so UI code (and a potential future\n * devtools panel) can render \"client X cannot be deleted because\n * invoices 1, 2, and 3 reference it\" instead of a bare error string.\n */\nexport class RefIntegrityError extends NoydbError {\n readonly collection: string\n readonly id: string\n readonly field: string\n readonly refTo: string\n readonly refId: string | null\n\n constructor(opts: {\n collection: string\n id: string\n field: string\n refTo: string\n refId: string | null\n message: string\n }) {\n super('REF_INTEGRITY', opts.message)\n this.name = 'RefIntegrityError'\n this.collection = opts.collection\n this.id = opts.id\n this.field = opts.field\n this.refTo = opts.refTo\n this.refId = opts.refId\n }\n}\n\n/**\n * Thrown when `ref()` is called with a target name that looks like\n * a cross-vault reference (contains a `/`). Separate error\n * class because the fix is different: RefIntegrityError means \"data\n * is wrong\"; RefScopeError means \"the ref declaration is wrong\".\n */\nexport class RefScopeError extends NoydbError {\n constructor(target: string) {\n super(\n 'REF_SCOPE',\n `Cross-vault references are not supported in — got target \"${target}\". ` +\n `Use a simple collection name (e.g. \"clients\"), not a path. ` +\n `Cross-vault refs are tracked for a future release.`,\n )\n this.name = 'RefScopeError'\n }\n}\n\n/**\n * Helper constructor. Thin wrapper around the object literal so user\n * code reads like `ref('clients')` instead of `{ target: 'clients',\n * mode: 'strict' }` — this is the only ergonomics reason it exists.\n *\n * Validates the target name eagerly so a misconfigured ref declaration\n * fails at collection construction time, not at the first put.\n */\nexport function ref(target: string, mode: RefMode = 'strict'): RefDescriptor {\n if (target.includes('/')) {\n throw new RefScopeError(target)\n }\n if (!target || target.startsWith('_')) {\n throw new Error(\n `ref(): target collection name must be non-empty and cannot start with '_' (reserved for internal collections). Got \"${target}\".`,\n )\n }\n return { target, mode }\n}\n\n/**\n * Per-vault registry of reference declarations.\n *\n * The registry is populated by `Collection` constructors (which pass\n * their `refs` option through the Vault) and consulted by the\n * Vault on every `put` / `delete` and by `checkIntegrity`. A\n * single instance lives on the Vault for its lifetime; there's\n * no global state.\n *\n * The data structure is two parallel maps:\n *\n * - `outbound`: `collection → { field → RefDescriptor }` — what\n * refs does `collection` declare? 
Used on put to check\n * strict-target-exists and on checkIntegrity to walk each\n * collection's outbound refs.\n *\n * - `inbound`: `target → Array<{ collection, field, mode }>` —\n * which collections reference `target`? Used on delete to find\n * the records that might be affected by cascade / strict.\n *\n * The two views are kept in sync by `register()` and never mutated\n * otherwise — refs can't be unregistered at runtime in.\n */\nexport class RefRegistry {\n private readonly outbound = new Map<string, Record<string, RefDescriptor>>()\n private readonly inbound = new Map<\n string,\n Array<{ collection: string; field: string; mode: RefMode }>\n >()\n\n /**\n * Register the refs declared by a single collection. Idempotent in\n * the happy path — calling twice with the same data is a no-op.\n * Calling twice with DIFFERENT data throws, because silent\n * overrides would be confusing (\"I changed the ref and it doesn't\n * update\" vs \"I declared the same collection twice with different\n * refs and the second call won\").\n */\n register(collection: string, refs: Record<string, RefDescriptor>): void {\n const existing = this.outbound.get(collection)\n if (existing) {\n // Compare shallowly — if any field disagrees, reject.\n const existingKeys = Object.keys(existing).sort()\n const newKeys = Object.keys(refs).sort()\n if (existingKeys.join(',') !== newKeys.join(',')) {\n throw new Error(\n `RefRegistry: conflicting ref declarations for collection \"${collection}\"`,\n )\n }\n for (const k of existingKeys) {\n const a = existing[k]\n const b = refs[k]\n if (!a || !b || a.target !== b.target || a.mode !== b.mode) {\n throw new Error(\n `RefRegistry: conflicting ref declarations for collection \"${collection}\" field \"${k}\"`,\n )\n }\n }\n return\n }\n this.outbound.set(collection, { ...refs })\n for (const [field, desc] of Object.entries(refs)) {\n const list = this.inbound.get(desc.target) ?? []\n list.push({ collection, field, mode: desc.mode })\n this.inbound.set(desc.target, list)\n }\n }\n\n /** Get the outbound refs declared by a collection (or `{}` if none). */\n getOutbound(collection: string): Record<string, RefDescriptor> {\n return this.outbound.get(collection) ?? {}\n }\n\n /** Get the inbound refs that target a given collection (or `[]`). */\n getInbound(\n target: string,\n ): ReadonlyArray<{ collection: string; field: string; mode: RefMode }> {\n return this.inbound.get(target) ?? []\n }\n\n /**\n * Iterate every (collection → refs) pair that has at least one\n * declared reference. Used by `checkIntegrity` to walk the full\n * universe of outbound refs without needing to track collection\n * names elsewhere.\n */\n entries(): Array<[string, Record<string, RefDescriptor>]> {\n return [...this.outbound.entries()]\n }\n\n /** Clear the registry. Test-only escape hatch; never called from production code. */\n clear(): void {\n this.outbound.clear()\n this.inbound.clear()\n }\n}\n\n/**\n * Shape of a single violation reported by `vault.checkIntegrity()`.\n *\n * `refId` is the value we saw in the referencing field — it's the\n * ID we expected to find in `refTo`, but didn't. 
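An illustrative violation:\n * `{ collection: 'invoices', id: 'inv-7', field: 'clientId',\n * refTo: 'clients', refId: 'c-42', mode: 'strict' }`.\n *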
Left as `unknown`\n * because records are loosely typed at the integrity-check layer.\n */\nexport interface RefViolation {\n readonly collection: string\n readonly id: string\n readonly field: string\n readonly refTo: string\n readonly refId: unknown\n readonly mode: RefMode\n}\n","import type { NoydbStore, KeyringFile, Role, Permissions, GrantOptions, RevokeOptions, UserInfo, EncryptedEnvelope, ExportCapability, ExportFormat, ImportCapability } from '../types.js'\nimport { NOYDB_KEYRING_VERSION, NOYDB_FORMAT_VERSION } from '../types.js'\nimport {\n deriveKey,\n generateDEK,\n generateSalt,\n wrapKey,\n unwrapKey,\n encrypt,\n decrypt,\n bufferToBase64,\n base64ToBuffer,\n} from '../crypto.js'\nimport { NoAccessError, PermissionDeniedError, PrivilegeEscalationError, KeyringExpiredError } from '../errors.js'\n\n// ─── Roles that can grant/revoke ───────────────────────────────────────\n\n/**\n * Roles that an `admin` is allowed to grant and revoke.\n *\n * Includes `'admin'` itself: the model bottlenecked all admin\n * onboarding through the single `owner` principal, which made lateral\n * delegation impossible and left a single-owner bus-factor risk\n * unresolved even when multiple trusted humans existed. opens up\n * admin↔admin lateral delegation, with two guardrails:\n *\n * 1. **No privilege escalation.** Enforced in `grant()`: every DEK\n * wrapped into the new admin's keyring must be present in the\n * grantor's own DEK set. Today this is structurally trivially\n * true (admin grants always inherit the full caller DEK set),\n * but the check is wired in so future per-collection admin scoping\n * cannot accidentally bypass it. See `PrivilegeEscalationError`.\n *\n * 2. **Cascade on revoke.** Enforced in `revoke()`: when an admin is\n * revoked, every admin they (transitively) granted is either\n * revoked too (`cascade: 'strict'`, default) or left in place with\n * a console warning (`cascade: 'warn'`). The walk uses the\n * `granted_by` field on each keyring file as the parent pointer.\n */\nconst ADMIN_GRANTABLE_TARGETS: readonly Role[] = ['operator', 'viewer', 'client', 'admin']\n\nfunction canGrant(callerRole: Role, targetRole: Role): boolean {\n if (callerRole === 'owner') return true\n if (callerRole === 'admin') return ADMIN_GRANTABLE_TARGETS.includes(targetRole)\n return false\n}\n\nfunction canRevoke(callerRole: Role, targetRole: Role): boolean {\n if (targetRole === 'owner') return false // owner cannot be revoked\n if (callerRole === 'owner') return true\n if (callerRole === 'admin') return ADMIN_GRANTABLE_TARGETS.includes(targetRole)\n return false\n}\n\n// ─── Unlocked Keyring ──────────────────────────────────────────────────\n\n/** In-memory representation of an unlocked keyring. */\nexport interface UnlockedKeyring {\n readonly userId: string\n readonly displayName: string\n readonly role: Role\n readonly permissions: Permissions\n readonly deks: Map<string, CryptoKey>\n readonly kek: CryptoKey\n readonly salt: Uint8Array\n /**\n * `@noy-db/as-*` export capability. Absent when the\n * keyring was written before this RFC landed — role-based defaults\n * apply via `hasExportCapability`.\n */\n readonly exportCapability?: ExportCapability\n /**\n * `@noy-db/as-*` import capability (issue ). 
Absent when the\n * keyring was written before landed — default-closed semantics\n * apply via `hasImportCapability` (no plaintext format granted, no\n * bundle import granted, regardless of role).\n */\n readonly importCapability?: ImportCapability\n}\n\n// ─── Load / Create ─────────────────────────────────────────────────────\n\n/** Load and unlock a user's keyring for a vault. */\nexport async function loadKeyring(\n adapter: NoydbStore,\n vault: string,\n userId: string,\n passphrase: string,\n): Promise<UnlockedKeyring> {\n const envelope = await adapter.get(vault, '_keyring', userId)\n\n if (!envelope) {\n throw new NoAccessError(`No keyring found for user \"${userId}\" in vault \"${vault}\"`)\n }\n\n const keyringFile = JSON.parse(envelope._data) as KeyringFile\n\n // — refuse to unwrap an expired slot. Check happens before any\n // KEK derivation so an expired slot doesn't leak timing on the\n // passphrase. Comparison uses Date.parse → ms-since-epoch; an\n // unparseable expires_at is treated as \"no expiry\" so a malformed\n // value can't silently lock users out (it'll surface in tests).\n if (keyringFile.expires_at !== undefined) {\n const cutoff = Date.parse(keyringFile.expires_at)\n if (Number.isFinite(cutoff) && Date.now() >= cutoff) {\n throw new KeyringExpiredError({ userId: keyringFile.user_id, expiresAt: keyringFile.expires_at })\n }\n }\n\n const salt = base64ToBuffer(keyringFile.salt)\n const kek = await deriveKey(passphrase, salt)\n\n const deks = new Map<string, CryptoKey>()\n for (const [collName, wrappedDek] of Object.entries(keyringFile.deks)) {\n const dek = await unwrapKey(wrappedDek, kek)\n deks.set(collName, dek)\n }\n\n return {\n userId: keyringFile.user_id,\n displayName: keyringFile.display_name,\n role: keyringFile.role,\n permissions: keyringFile.permissions,\n deks,\n kek,\n salt,\n ...(keyringFile.export_capability !== undefined && { exportCapability: keyringFile.export_capability }),\n ...(keyringFile.import_capability !== undefined && { importCapability: keyringFile.import_capability }),\n }\n}\n\n/** Create the initial owner keyring for a new vault. */\nexport async function createOwnerKeyring(\n adapter: NoydbStore,\n vault: string,\n userId: string,\n passphrase: string,\n): Promise<UnlockedKeyring> {\n const salt = generateSalt()\n const kek = await deriveKey(passphrase, salt)\n\n const keyringFile: KeyringFile = {\n _noydb_keyring: NOYDB_KEYRING_VERSION,\n user_id: userId,\n display_name: userId,\n role: 'owner',\n permissions: {},\n deks: {},\n salt: bufferToBase64(salt),\n created_at: new Date().toISOString(),\n granted_by: userId,\n }\n\n await writeKeyringFile(adapter, vault, userId, keyringFile)\n\n return {\n userId,\n displayName: userId,\n role: 'owner',\n permissions: {},\n deks: new Map(),\n kek,\n salt,\n }\n}\n\n// ─── Grant ─────────────────────────────────────────────────────────────\n\n/** Grant access to a new user. Caller must have grant privilege. 
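A hedged example\n * (argument values illustrative; the permission value shape is an\n * assumption):\n *\n * ```ts\n * await grant(adapter, 'acme', ownerKeyring, {\n *   userId: 'op-1',\n *   displayName: 'Dana',\n *   role: 'operator',\n *   passphrase: 'correct horse battery staple',\n *   permissions: { invoices: 'rw' }, // value shape illustrative\n * })\n * ```\n *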
*/\nexport async function grant(\n adapter: NoydbStore,\n vault: string,\n callerKeyring: UnlockedKeyring,\n options: GrantOptions,\n): Promise<void> {\n if (!canGrant(callerKeyring.role, options.role)) {\n throw new PermissionDeniedError(\n `Role \"${callerKeyring.role}\" cannot grant role \"${options.role}\"`,\n )\n }\n\n // Determine which collections the new user gets access to\n const permissions = resolvePermissions(options.role, options.permissions)\n\n // Derive the new user's KEK from their passphrase\n const newSalt = generateSalt()\n const newKek = await deriveKey(options.passphrase, newSalt)\n\n // Wrap the appropriate DEKs with the new user's KEK\n const wrappedDeks: Record<string, string> = {}\n for (const collName of Object.keys(permissions)) {\n const dek = callerKeyring.deks.get(collName)\n if (dek) {\n wrappedDeks[collName] = await wrapKey(dek, newKek)\n }\n }\n\n // For owner/admin/viewer roles, wrap ALL known DEKs\n if (options.role === 'owner' || options.role === 'admin' || options.role === 'viewer') {\n for (const [collName, dek] of callerKeyring.deks) {\n if (!(collName in wrappedDeks)) {\n wrappedDeks[collName] = await wrapKey(dek, newKek)\n }\n }\n }\n\n // For ALL roles, propagate system-prefixed collection DEKs\n // (`_ledger`, `_history`, `_sync`, …). These are internal collections\n // that any user with access to the vault must be able to\n // read and write — for example, the hash-chained ledger writes\n // an entry on every put/delete, so operators and clients with write\n // access to a single data collection still need the `_ledger` DEK.\n //\n // Trade-off: a granted user can decrypt every system-collection\n // entry, including ones they would not otherwise have access to\n // (e.g., an operator on `invoices` can read ledger entries for\n // mutations in `salaries`). This is a metadata leak, not a\n // plaintext leak — the ledger entries record collection names,\n // record ids, and ciphertext hashes, but never plaintext records.\n // Per-collection ledger DEKs are tracked as a follow-up.\n for (const [collName, dek] of callerKeyring.deks) {\n if (collName.startsWith('_') && !(collName in wrappedDeks)) {\n wrappedDeks[collName] = await wrapKey(dek, newKek)\n }\n }\n\n // Anti-privilege-escalation check. Every DEK we just\n // wrapped into the new keyring must come from the caller's own DEK\n // set — the grantor cannot give the grantee access to a collection\n // they themselves can't read. Today this is structurally trivially\n // satisfied because every wrapped DEK was looked up in\n // `callerKeyring.deks` above, but the explicit check is wired in\n // so a future change (per-collection admin scoping, escrow-based\n // re-wrapping, etc.) cannot accidentally let a widening grant\n // through. 
See `PrivilegeEscalationError` for the rationale.\n for (const collName of Object.keys(wrappedDeks)) {\n if (!callerKeyring.deks.has(collName)) {\n throw new PrivilegeEscalationError(collName)\n }\n }\n\n const keyringFile: KeyringFile = {\n _noydb_keyring: NOYDB_KEYRING_VERSION,\n user_id: options.userId,\n display_name: options.displayName,\n role: options.role,\n permissions,\n deks: wrappedDeks,\n salt: bufferToBase64(newSalt),\n created_at: new Date().toISOString(),\n granted_by: callerKeyring.userId,\n ...(options.exportCapability !== undefined && { export_capability: options.exportCapability }),\n ...(options.importCapability !== undefined && { import_capability: options.importCapability }),\n }\n\n await writeKeyringFile(adapter, vault, options.userId, keyringFile)\n}\n\n// ─── Revoke ────────────────────────────────────────────────────────────\n\n/**\n * Walk every keyring in the vault to find admins that the given\n * `rootUserId` (transitively) granted, via the `granted_by` parent\n * pointer recorded on each keyring file.\n *\n * Returns the set of descendant admin user-ids in DFS order, NOT\n * including the root itself. Non-admin descendants are excluded\n * because operators/viewers/clients cannot grant other users — they\n * are leaves in the delegation tree and cleaning them up is the\n * caller's job (or the next rotate, since they'd lose key access\n * anyway when the cascading admin's collections rotate).\n *\n * The walk uses a visited set keyed by user-id so cycles introduced\n * by re-grants (admin-A revoked, then re-granted later by admin-B who\n * was originally granted by A) terminate cleanly.\n */\nasync function findAdminDescendants(\n adapter: NoydbStore,\n vault: string,\n rootUserId: string,\n): Promise<string[]> {\n const allUserIds = await adapter.list(vault, '_keyring')\n\n // Build a map: parentUserId → child KeyringFiles. We only ever\n // descend into admins, so non-admin children are skipped at the\n // edge level rather than after a recursive call.\n const childrenByParent = new Map<string, string[]>()\n for (const userId of allUserIds) {\n const env = await adapter.get(vault, '_keyring', userId)\n if (!env) continue\n const kf = JSON.parse(env._data) as KeyringFile\n if (kf.role !== 'admin') continue // only admins can grant — leaves are uninteresting\n if (kf.user_id === rootUserId) continue // self-edges are noise\n const list = childrenByParent.get(kf.granted_by) ?? []\n list.push(kf.user_id)\n childrenByParent.set(kf.granted_by, list)\n }\n\n const visited = new Set<string>()\n const order: string[] = []\n const stack: string[] = [...(childrenByParent.get(rootUserId) ?? [])]\n while (stack.length > 0) {\n const next = stack.pop()!\n if (visited.has(next)) continue\n visited.add(next)\n order.push(next)\n for (const grandchild of childrenByParent.get(next) ?? []) {\n if (!visited.has(grandchild)) stack.push(grandchild)\n }\n }\n return order\n}\n\n/** Revoke a user's access. Optionally rotate keys for affected collections. 
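For instance,\n * `await revoke(adapter, 'acme', ownerKeyring, { userId: 'admin-2' })`\n * revokes `admin-2`, cascades (by default) to every admin they\n * transitively granted, and then rotates keys for the affected\n * collections. 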
*/\nexport async function revoke(\n adapter: NoydbStore,\n vault: string,\n callerKeyring: UnlockedKeyring,\n options: RevokeOptions,\n): Promise<void> {\n // Load the target's keyring to check their role\n const targetEnvelope = await adapter.get(vault, '_keyring', options.userId)\n if (!targetEnvelope) {\n throw new NoAccessError(`User \"${options.userId}\" has no keyring in vault \"${vault}\"`)\n }\n\n const targetKeyring = JSON.parse(targetEnvelope._data) as KeyringFile\n\n if (!canRevoke(callerKeyring.role, targetKeyring.role)) {\n throw new PermissionDeniedError(\n `Role \"${callerKeyring.role}\" cannot revoke role \"${targetKeyring.role}\"`,\n )\n }\n\n // Cascade-on-revoke. Only meaningful when the target is\n // an admin — operators/viewers/clients cannot grant other users so\n // they have no delegation subtree to walk.\n const cascadeMode = options.cascade ?? 'strict'\n const usersToRevoke: string[] = [options.userId]\n const affectedCollections = new Set(Object.keys(targetKeyring.deks))\n\n if (targetKeyring.role === 'admin') {\n const descendants = await findAdminDescendants(adapter, vault, options.userId)\n if (descendants.length > 0) {\n if (cascadeMode === 'warn') {\n // Diagnostic mode: leave the descendants in place but make\n // them visible. The owner / a different admin can clean up\n // manually. The single console.warn is intentionally noisy\n // (a list, not a count) so the operator sees exactly which\n // keyrings will become orphans.\n console.warn(\n `[noy-db] revoke(${options.userId}): cascade='warn' — leaving ` +\n `${descendants.length} descendant admin(s) in place: ` +\n `${descendants.join(', ')}. These admins were granted by the revoked user ` +\n `(transitively) and will become orphans in the delegation tree.`,\n )\n } else {\n // Strict mode (default): pull every descendant into the\n // revoke set. We collect their affected collections too so\n // the single rotation pass at the end covers everything.\n for (const userId of descendants) {\n const descEnv = await adapter.get(vault, '_keyring', userId)\n if (!descEnv) continue\n const descKf = JSON.parse(descEnv._data) as KeyringFile\n usersToRevoke.push(userId)\n for (const c of Object.keys(descKf.deks)) affectedCollections.add(c)\n }\n }\n }\n }\n\n // Delete every keyring in the revoke set. Order doesn't matter\n // because each keyring file is independent on disk; we don't have\n // referential integrity to maintain across deletes.\n for (const userId of usersToRevoke) {\n await adapter.delete(vault, '_keyring', userId)\n }\n\n // Single rotation pass at the end. The cost is O(records in\n // affected collections), NOT O(records × cascade depth) — every\n // descendant's collections were unioned into `affectedCollections`\n // before we got here, so the rotation re-encrypts each affected\n // record exactly once regardless of how deep the cascade went.\n if (options.rotateKeys !== false && affectedCollections.size > 0) {\n await rotateKeys(adapter, vault, callerKeyring, [...affectedCollections])\n }\n}\n\n// ─── Key Rotation ──────────────────────────────────────────────────────\n\n/**\n * Rotate DEKs for specified collections:\n * 1. Generate new DEKs\n * 2. Re-encrypt all records in affected collections\n * 3. 
Update the caller's keyring with the new DEKs\n * 4. Remove the rotated collections' DEKs from remaining users'\n * keyrings — the owner must re-grant (see the Phase 2 note below)\n */\nexport async function rotateKeys(\n adapter: NoydbStore,\n vault: string,\n callerKeyring: UnlockedKeyring,\n collections: string[],\n): Promise<void> {\n // Generate new DEKs for each affected collection\n const newDeks = new Map<string, CryptoKey>()\n for (const collName of collections) {\n newDeks.set(collName, await generateDEK())\n }\n\n // Re-encrypt all records in affected collections\n for (const collName of collections) {\n const oldDek = callerKeyring.deks.get(collName)\n const newDek = newDeks.get(collName)!\n if (!oldDek) continue\n\n const ids = await adapter.list(vault, collName)\n for (const id of ids) {\n const envelope = await adapter.get(vault, collName, id)\n if (!envelope || !envelope._iv) continue\n\n // Decrypt with old DEK\n const plaintext = await decrypt(envelope._iv, envelope._data, oldDek)\n\n // Re-encrypt with new DEK\n const { iv, data } = await encrypt(plaintext, newDek)\n const newEnvelope: EncryptedEnvelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: envelope._v,\n _ts: new Date().toISOString(),\n _iv: iv,\n _data: data,\n }\n await adapter.put(vault, collName, id, newEnvelope)\n }\n }\n\n // Update caller's keyring with new DEKs\n for (const [collName, newDek] of newDeks) {\n callerKeyring.deks.set(collName, newDek)\n }\n await persistKeyring(adapter, vault, callerKeyring)\n\n // Remove the rotated collections' DEKs from every remaining user's keyring\n const userIds = await adapter.list(vault, '_keyring')\n for (const userId of userIds) {\n if (userId === callerKeyring.userId) continue\n\n const userEnvelope = await adapter.get(vault, '_keyring', userId)\n if (!userEnvelope) continue\n\n const userKeyringFile = JSON.parse(userEnvelope._data) as KeyringFile\n // We cannot re-wrap the new DEKs for other users: each user's DEKs\n // are wrapped with a KEK derived from THAT user's passphrase, which\n // the caller never holds. Two alternatives were considered and\n // deferred because both change the KeyringFile format:\n // 1. KEK escrow — during grant(), store the new user's KEK wrapped\n // with the granter's KEK so rotation can re-wrap later;\n // 2. rekey token — wrap the new DEK with the OLD DEK, which the\n // user can still unwrap from their existing keyring.\n //\n // PHASE 2 APPROACH: remove the affected collections' DEKs (and\n // permissions) from remaining users' keyrings. Correct and secure —\n // revoked keys are gone — but inconvenient: the owner must re-run\n // grant() for those collections.\n const updatedDeks = { ...userKeyringFile.deks }\n for (const collName of collections) {\n delete updatedDeks[collName]\n }\n\n const updatedPermissions = { ...userKeyringFile.permissions }\n for (const collName of collections) {\n delete updatedPermissions[collName]\n }\n\n const updatedKeyring: KeyringFile = {\n ...userKeyringFile,\n deks: updatedDeks,\n permissions: updatedPermissions,\n }\n\n await writeKeyringFile(adapter, vault, userId, updatedKeyring)\n }\n}\n\n// ─── Change Secret ─────────────────────────────────────────────────────\n\n/** Change the user's passphrase. Re-wraps all DEKs with the new KEK. */\nexport async function changeSecret(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n newPassphrase: string,\n): Promise<UnlockedKeyring> {\n const newSalt = generateSalt()\n const newKek = await deriveKey(newPassphrase, newSalt)\n\n // Re-wrap all DEKs with the new KEK\n const wrappedDeks: Record<string, string> = {}\n for (const [collName, dek] of keyring.deks) {\n wrappedDeks[collName] = await wrapKey(dek, newKek)\n }\n\n const keyringFile: KeyringFile = {\n _noydb_keyring: NOYDB_KEYRING_VERSION,\n user_id: keyring.userId,\n display_name: keyring.displayName,\n role: keyring.role,\n permissions: keyring.permissions,\n deks: wrappedDeks,\n salt: bufferToBase64(newSalt),\n created_at: new Date().toISOString(),\n granted_by: keyring.userId, // self — UnlockedKeyring does not carry the original granter\n }\n\n await writeKeyringFile(adapter, vault, keyring.userId, keyringFile)\n\n return {\n userId: keyring.userId,\n displayName: keyring.displayName,\n role: keyring.role,\n permissions: keyring.permissions,\n deks: keyring.deks, // Same DEKs, different wrapping\n kek: newKek,\n salt: newSalt,\n }\n}\n\n// ─── Bundle recipients ──────────────────────────────────────────\n\n/**\n * Recipient slot in a re-keyed `.noydb` bundle. 
Each slot becomes its\n * own keyring file inside the bundle, sealed with its own passphrase.\n * Same role/permission semantics as `db.grant()` but no adapter side\n * effect — the slot only exists inside the bundle bytes.\n *\n * @public\n */\nexport interface BundleRecipient {\n /** User id stamped onto the keyring file in the bundle. */\n readonly id: string\n /** Optional display name. Defaults to `id`. */\n readonly displayName?: string\n /** Passphrase the recipient will type to unlock. */\n readonly passphrase: string\n /** Role on the destination vault. Defaults to `'viewer'`. */\n readonly role?: Role\n /**\n * Per-collection permissions. When omitted, role defaults apply.\n * Restricting permissions here ALSO restricts which DEKs are wrapped\n * into the slot — a slot with `{ invoices: 'ro' }` cannot decrypt\n * other collections even though their ciphertext sits in the bundle.\n */\n readonly permissions?: Permissions\n /**\n * Optional `as-*` export grants on the destination vault.\n * Mirrors the `exportCapability` field on a live keyring.\n */\n readonly exportCapability?: ExportCapability\n /**\n * Optional `as-*` import grants on the destination vault.\n * Mirrors the `importCapability` field on a live keyring.\n * Default-closed: no plaintext format granted, no bundle import.\n */\n readonly importCapability?: ImportCapability\n /**\n * Optional bundle-slot expiry. ISO-8601 timestamp; past the\n * cutoff this slot's keyring refuses to load with\n * `KeyringExpiredError`. Time-boxed audit access pattern: \"this\n * slot works for 30 days then becomes opaque to its holder.\"\n */\n readonly expiresAt?: string\n}\n\n/**\n * Build a `KeyringFile` for one bundle recipient, given the source\n * vault's unwrapped DEKs. Mirrors `grant()` minus the adapter write —\n * the produced file is meant to be embedded in the bundle's\n * `keyrings` map, never persisted to the source vault.\n *\n * Privilege-escalation check still runs: every DEK wrapped into the\n * recipient's keyring must come from the source's own DEK set.\n *\n * @internal\n */\nexport async function buildRecipientKeyringFile(\n callerKeyring: UnlockedKeyring,\n recipient: BundleRecipient,\n): Promise<KeyringFile> {\n const role: Role = recipient.role ?? 'viewer'\n const permissions = resolvePermissions(role, recipient.permissions)\n\n const newSalt = generateSalt()\n const newKek = await deriveKey(recipient.passphrase, newSalt)\n\n const wrappedDeks: Record<string, string> = {}\n\n // Collections the recipient was explicitly granted permission to.\n for (const collName of Object.keys(permissions)) {\n const dek = callerKeyring.deks.get(collName)\n if (dek) {\n wrappedDeks[collName] = await wrapKey(dek, newKek)\n }\n }\n\n // owner / admin / viewer: wrap every known DEK (matches grant).\n if (role === 'owner' || role === 'admin' || role === 'viewer') {\n for (const [collName, dek] of callerKeyring.deks) {\n if (!(collName in wrappedDeks)) {\n wrappedDeks[collName] = await wrapKey(dek, newKek)\n }\n }\n }\n\n // Always propagate system-prefixed collection DEKs (`_ledger`, etc.) —\n // the recipient needs them to verify the bundle on import.\n for (const [collName, dek] of callerKeyring.deks) {\n if (collName.startsWith('_') && !(collName in wrappedDeks)) {\n wrappedDeks[collName] = await wrapKey(dek, newKek)\n }\n }\n\n // Anti-privilege-escalation: every wrapped DEK must come from the\n // caller's own DEK set. 
Belt-and-braces with the lookups above.\n for (const collName of Object.keys(wrappedDeks)) {\n if (!callerKeyring.deks.has(collName)) {\n throw new PrivilegeEscalationError(collName)\n }\n }\n\n return {\n _noydb_keyring: NOYDB_KEYRING_VERSION,\n user_id: recipient.id,\n display_name: recipient.displayName ?? recipient.id,\n role,\n permissions,\n deks: wrappedDeks,\n salt: bufferToBase64(newSalt),\n created_at: new Date().toISOString(),\n granted_by: callerKeyring.userId,\n ...(recipient.exportCapability !== undefined\n ? { export_capability: recipient.exportCapability }\n : {}),\n ...(recipient.importCapability !== undefined\n ? { import_capability: recipient.importCapability }\n : {}),\n ...(recipient.expiresAt !== undefined\n ? { expires_at: recipient.expiresAt }\n : {}),\n }\n}\n\n// ─── List Users ────────────────────────────────────────────────────────\n\n/** List all users with access to a vault. */\nexport async function listUsers(\n adapter: NoydbStore,\n vault: string,\n): Promise<UserInfo[]> {\n const userIds = await adapter.list(vault, '_keyring')\n const users: UserInfo[] = []\n\n for (const userId of userIds) {\n const envelope = await adapter.get(vault, '_keyring', userId)\n if (!envelope) continue\n const kf = JSON.parse(envelope._data) as KeyringFile\n users.push({\n userId: kf.user_id,\n displayName: kf.display_name,\n role: kf.role,\n permissions: kf.permissions,\n createdAt: kf.created_at,\n grantedBy: kf.granted_by,\n })\n }\n\n return users\n}\n\n// ─── DEK Management ────────────────────────────────────────────────────\n\n/** Build a DEK getter for a vault. The returned function ensures a DEK exists for a collection, generating (and persisting) one if new. */\nexport async function ensureCollectionDEK(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n): Promise<(collectionName: string) => Promise<CryptoKey>> {\n // Dedupe concurrent first-time DEK creates per collection. Without\n // this, two concurrent `getDEK('foo')` calls both pass the `existing`\n // check (the Map is empty), both generate fresh DEKs, and the second\n // `set` overwrites the first — making any envelope encrypted with\n // the discarded DEK fail to decrypt later (TamperedError on read).\n // Pre-existing race exposed by the multi-writer ledger work in #296.\n const inFlight = new Map<string, Promise<CryptoKey>>()\n return async (collectionName: string): Promise<CryptoKey> => {\n const existing = keyring.deks.get(collectionName)\n if (existing) return existing\n const pending = inFlight.get(collectionName)\n if (pending) return pending\n\n const promise = (async () => {\n const dek = await generateDEK()\n keyring.deks.set(collectionName, dek)\n await persistKeyring(adapter, vault, keyring)\n return dek\n })()\n inFlight.set(collectionName, promise)\n try {\n return await promise\n } finally {\n inFlight.delete(collectionName)\n }\n }\n}\n\n// ─── Permission Checks ─────────────────────────────────────────────────\n\n/** Check if a user has write permission for a collection. */\nexport function hasWritePermission(keyring: UnlockedKeyring, collectionName: string): boolean {\n if (keyring.role === 'owner' || keyring.role === 'admin') return true\n if (keyring.role === 'viewer' || keyring.role === 'client') return false\n return keyring.permissions[collectionName] === 'rw'\n}\n\n/** Check if a user has any access to a collection. 
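\n *\n * A small sketch of the role semantics (the keyrings are assumed to\n * be unlocked `UnlockedKeyring` values):\n * @example\n * ```ts\n * // owner/admin/viewer read everything; operator/client need an\n * // explicit per-collection permission entry.\n * hasAccess(viewerKeyring, 'invoices') // true — viewer reads all\n * hasAccess(clientKeyring, 'invoices') // true only if listed in permissions\n * hasWritePermission(viewerKeyring, 'invoices') // false — read-only role\n * hasWritePermission(operatorKeyring, 'invoices') // true iff entry is 'rw'\n * ```\n 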
*/\nexport function hasAccess(keyring: UnlockedKeyring, collectionName: string): boolean {\n if (keyring.role === 'owner' || keyring.role === 'admin' || keyring.role === 'viewer') return true\n return collectionName in keyring.permissions\n}\n\n// ─── Helpers ───────────────────────────────────────────────────────────\n\n/** Persist a keyring file to the adapter. */\nexport async function persistKeyring(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n): Promise<void> {\n const wrappedDeks: Record<string, string> = {}\n for (const [collName, dek] of keyring.deks) {\n wrappedDeks[collName] = await wrapKey(dek, keyring.kek)\n }\n\n const keyringFile: KeyringFile = {\n _noydb_keyring: NOYDB_KEYRING_VERSION,\n user_id: keyring.userId,\n display_name: keyring.displayName,\n role: keyring.role,\n permissions: keyring.permissions,\n deks: wrappedDeks,\n salt: bufferToBase64(keyring.salt),\n created_at: new Date().toISOString(),\n granted_by: keyring.userId,\n ...(keyring.exportCapability !== undefined && { export_capability: keyring.exportCapability }),\n ...(keyring.importCapability !== undefined && { import_capability: keyring.importCapability }),\n }\n\n await writeKeyringFile(adapter, vault, keyring.userId, keyringFile)\n}\n\n// ─── Export capability ──────────────────────────────────────\n\n/**\n * Role-based default policy for the encrypted-bundle capability.\n *\n * Applied when `keyring.exportCapability` is absent or\n * `exportCapability.bundle` is undefined:\n *\n * - `owner` / `admin` → `true` (happy-path backup without friction)\n * - `operator` / `viewer` / `client` → `false` (explicit grant required)\n *\n * Rationale: a bundle is inert without the KEK, so an owner backing up\n * their own vault doesn't need friction; a non-admin role producing a\n * bundle for an external party does, because the bundle outlives\n * keyring revocation.\n */\nfunction defaultBundleCapability(role: Role): boolean {\n return role === 'owner' || role === 'admin'\n}\n\n/**\n * Check whether a keyring is authorised for a given `@noy-db/as-*`\n * export tier.\n *\n * - `tier: 'plaintext'` — returns true iff `exportCapability.plaintext`\n * contains the requested `format` or the `'*'` wildcard. Default for\n * every role is empty — no grant, no plaintext export.\n * - `tier: 'bundle'` — returns `exportCapability.bundle` if present, or\n * the role-based default otherwise (owner/admin → true, else false).\n *\n * `@noy-db/as-*` packages MUST call this before invoking the underlying\n * export primitive. Rogue forks that skip the check are caught by code\n * review — the single-entry-point contract is a convention, not a\n * runtime invariant. Vault-level gated wrappers\n * (`vault.exportRecords` / `exportBlobs` / `writeBundle`) will land in a\n * follow-up PR to enforce at the primitive level.\n */\nexport function hasExportCapability(\n keyring: UnlockedKeyring,\n tier: 'plaintext',\n format: ExportFormat,\n): boolean\nexport function hasExportCapability(\n keyring: UnlockedKeyring,\n tier: 'bundle',\n): boolean\nexport function hasExportCapability(\n keyring: UnlockedKeyring,\n tier: 'plaintext' | 'bundle',\n format?: ExportFormat,\n): boolean {\n const cap = keyring.exportCapability\n if (tier === 'plaintext') {\n const allowed = cap?.plaintext ?? []\n return allowed.includes('*') || (format !== undefined && allowed.includes(format))\n }\n // tier === 'bundle'\n return cap?.bundle ?? 
defaultBundleCapability(keyring.role)\n}\n\n/**\n * Same-shape inspector for an `ExportCapability` value that isn't yet\n * attached to a keyring (e.g. for previewing a grant before applying).\n * Role must be supplied separately so bundle defaults can be computed.\n */\nexport function evaluateExportCapability(\n capability: ExportCapability | undefined,\n role: Role,\n tier: 'plaintext',\n format: ExportFormat,\n): boolean\nexport function evaluateExportCapability(\n capability: ExportCapability | undefined,\n role: Role,\n tier: 'bundle',\n): boolean\nexport function evaluateExportCapability(\n capability: ExportCapability | undefined,\n role: Role,\n tier: 'plaintext' | 'bundle',\n format?: ExportFormat,\n): boolean {\n if (tier === 'plaintext') {\n const allowed = capability?.plaintext ?? []\n return allowed.includes('*') || (format !== undefined && allowed.includes(format))\n }\n return capability?.bundle ?? defaultBundleCapability(role)\n}\n\n// ─── Import capability (issue ) ────────────────────────────────────\n\n/**\n * Check whether a keyring is authorised for a given `@noy-db/as-*`\n * import tier (issue ).\n *\n * - `tier: 'plaintext'` — true iff `importCapability.plaintext`\n * contains the requested `format` or the `'*'` wildcard.\n * - `tier: 'bundle'` — true iff `importCapability.bundle === true`.\n *\n * **Default-closed for every role on every dimension** — including\n * owner. Import is more dangerous than export (corrupts vs leaks), so\n * the policy refuses to assume intent. Owners must positively grant\n * the capability via `vault.grant({ importCapability: ... })`.\n */\nexport function hasImportCapability(\n keyring: UnlockedKeyring,\n tier: 'plaintext',\n format: ExportFormat,\n): boolean\nexport function hasImportCapability(\n keyring: UnlockedKeyring,\n tier: 'bundle',\n): boolean\nexport function hasImportCapability(\n keyring: UnlockedKeyring,\n tier: 'plaintext' | 'bundle',\n format?: ExportFormat,\n): boolean {\n const cap = keyring.importCapability\n if (tier === 'plaintext') {\n const allowed = cap?.plaintext ?? []\n return allowed.includes('*') || (format !== undefined && allowed.includes(format))\n }\n // tier === 'bundle' — closed default for every role\n return cap?.bundle === true\n}\n\n/**\n * Same-shape inspector for an `ImportCapability` value that isn't yet\n * attached to a keyring (e.g. previewing a grant before applying).\n * `role` is accepted for symmetry with `evaluateExportCapability` even\n * though the import policy ignores it — bundle defaults are\n * role-agnostic and closed.\n */\nexport function evaluateImportCapability(\n capability: ImportCapability | undefined,\n role: Role,\n tier: 'plaintext',\n format: ExportFormat,\n): boolean\nexport function evaluateImportCapability(\n capability: ImportCapability | undefined,\n role: Role,\n tier: 'bundle',\n): boolean\nexport function evaluateImportCapability(\n capability: ImportCapability | undefined,\n _role: Role,\n tier: 'plaintext' | 'bundle',\n format?: ExportFormat,\n): boolean {\n if (tier === 'plaintext') {\n const allowed = capability?.plaintext ?? []\n return allowed.includes('*') || (format !== undefined && allowed.includes(format))\n }\n return capability?.bundle === true\n}\n\nfunction resolvePermissions(role: Role, explicit?: Permissions): Permissions {\n if (role === 'owner' || role === 'admin' || role === 'viewer') return {}\n return explicit ?? 
{}\n}\n\nasync function writeKeyringFile(\n adapter: NoydbStore,\n vault: string,\n userId: string,\n keyringFile: KeyringFile,\n): Promise<void> {\n const envelope = {\n _noydb: 1 as const,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: '',\n _data: JSON.stringify(keyringFile),\n }\n await adapter.put(vault, '_keyring', userId, envelope)\n}\n","import type {\n NoydbOptions,\n NoydbEventMap,\n GrantOptions,\n RevokeOptions,\n UserInfo,\n PushResult,\n PullResult,\n PushOptions,\n PullOptions,\n SyncStatus,\n SyncTarget,\n NoydbStore,\n Role,\n AccessibleVault,\n ListAccessibleVaultsOptions,\n QueryAcrossOptions,\n QueryAcrossResult,\n ReAuthOperation,\n TranslatorAuditEntry,\n} from './types.js'\nimport { ValidationError, NoAccessError, InvalidKeyError, StoreCapabilityError } from './errors.js'\nimport { Vault } from './vault.js'\nimport { NoydbEventEmitter } from './events.js'\nimport {\n loadKeyring,\n createOwnerKeyring,\n grant as keyringGrant,\n revoke as keyringRevoke,\n rotateKeys as keyringRotate,\n changeSecret as keyringChangeSecret,\n listUsers as keyringListUsers,\n} from './team/keyring.js'\nimport type { UnlockedKeyring } from './team/keyring.js'\nimport type { SyncEngine } from './team/sync.js'\nimport type { SyncTransaction } from './team/sync-transaction.js'\nimport { NO_SYNC, type SyncStrategy } from './team/sync-strategy.js'\nimport type { TxContext } from './tx/transaction.js'\nimport { NO_TX, type TxStrategy } from './tx/strategy.js'\nimport { INDEXED_STORE_POLICY } from './store/sync-policy.js'\nimport type { PolicyEnforcer } from './session/session-policy.js'\nimport { NO_SESSION, type SessionStrategy } from './session/strategy.js'\n\n/**\n * Privilege rank used by `listAccessibleVaults({ minRole })` to\n * filter the result. Higher number = more privileged. Owner is at the\n * top; client is at the bottom. Viewer outranks client because viewer\n * has read-all access while client has only explicit-collection read\n * — the ordering reflects \"how much can this principal see,\" not\n * \"how much can this principal modify.\"\n */\nconst ROLE_RANK: Record<Role, number> = {\n client: 1,\n viewer: 2,\n operator: 3,\n admin: 4,\n owner: 5,\n}\n\n/** Dummy keyring for unencrypted mode. */\nfunction createPlaintextKeyring(userId: string): UnlockedKeyring {\n return {\n userId,\n displayName: userId,\n role: 'owner',\n permissions: {},\n deks: new Map(),\n kek: null as unknown as CryptoKey,\n salt: new Uint8Array(0),\n }\n}\n\n/** The top-level NOYDB instance. */\nexport class Noydb {\n private readonly options: NoydbOptions\n private readonly emitter = new NoydbEventEmitter()\n private readonly vaultCache = new Map<string, Vault>()\n private readonly keyringCache = new Map<string, UnlockedKeyring>()\n private readonly syncEngines = new Map<string, SyncEngine>()\n private closed = false\n private sessionTimer: ReturnType<typeof setTimeout> | null = null\n /** Per-vault policy enforcers. */\n private readonly policyEnforcers = new Map<string, PolicyEnforcer>()\n private readonly txStrategy: TxStrategy\n private readonly sessionStrategy: SessionStrategy\n private readonly syncStrategy: SyncStrategy\n\n // ─── plaintextTranslator state ─────────────────────────\n /**\n * In-process translation cache. Key is `\"${field}\\x00${collection}\\x00${from}\\x00${to}\\x00${text}\"`.\n * Cleared on `close()` alongside the KEK and DEKs.\n */\n private readonly translatorCache = new Map<string, string>()\n /** Audit log for all translator invocations in this session. 
Cleared on `close()`. */\n private readonly _translatorAuditLog: TranslatorAuditEntry[] = []\n\n constructor(options: NoydbOptions) {\n this.options = options\n this.txStrategy = options.txStrategy ?? NO_TX\n this.sessionStrategy = options.sessionStrategy ?? NO_SESSION\n this.syncStrategy = options.syncStrategy ?? NO_SYNC\n // Validate sessionPolicy at construction time (developer error if invalid).\n // The strategy's stub throws with a pointer at the subpath if the\n // consumer set a policy without opting in.\n if (options.sessionPolicy) {\n this.sessionStrategy.validateSessionPolicy(options.sessionPolicy)\n }\n this.resetSessionTimer()\n }\n\n private resetSessionTimer(): void {\n if (this.sessionTimer) clearTimeout(this.sessionTimer)\n // Honor the new sessionPolicy.idleTimeoutMs if present, falling back\n // to the legacy sessionTimeout for backwards compatibility.\n const idleMs = this.options.sessionPolicy?.idleTimeoutMs ?? this.options.sessionTimeout\n if (idleMs && idleMs > 0) {\n this.sessionTimer = setTimeout(() => {\n this.close()\n }, idleMs)\n }\n }\n\n /**\n * Attach a policy enforcer for a vault.\n * Called internally when a session is started for a vault; the\n * enforcer handles idle/absolute timeouts and background-lock behavior.\n */\n private attachPolicyEnforcer(vault: string, sessionId: string): void {\n const policy = this.options.sessionPolicy\n if (!policy) return\n\n // Tear down any previous enforcer for this vault\n this.policyEnforcers.get(vault)?.destroy()\n\n const enforcer = this.sessionStrategy.createEnforcer({\n policy,\n sessionId,\n onRevoke: (_reason) => {\n this.keyringCache.delete(vault)\n this.vaultCache.delete(vault)\n this.policyEnforcers.delete(vault)\n },\n })\n this.policyEnforcers.set(vault, enforcer)\n }\n\n /**\n * Touch the policy enforcer for a vault (records activity, resets\n * idle timer). Also touches the legacy session timer. No-op if no enforcer.\n */\n private touchPolicy(vault?: string): void {\n this.resetSessionTimer()\n if (vault) {\n this.policyEnforcers.get(vault)?.touch()\n }\n }\n\n /**\n * Check that a policy-guarded operation is permitted.\n * Throws `SessionPolicyError` if re-auth is required.\n */\n private checkPolicyOperation(vault: string, op: ReAuthOperation): void {\n this.policyEnforcers.get(vault)?.checkOperation(op)\n }\n\n /**\n * Open a vault by name.\n *\n * @param name Vault identifier.\n * @param opts Optional settings for this session.\n * @param opts.locale Default locale for i18n/dictKey field resolution.\n * Set here to avoid passing `{ locale }`\n * on every individual `get()`/`list()` call.\n */\n async openVault(\n name: string,\n opts?: { locale?: string },\n ): Promise<Vault> {\n if (this.closed) throw new ValidationError('Instance is closed')\n this.touchPolicy(name)\n\n let comp = this.vaultCache.get(name)\n if (comp) {\n // Update locale on existing cached vault if specified\n if (opts?.locale !== undefined) {\n comp.setLocale(opts.locale)\n }\n return comp\n }\n\n const keyring = await this.getKeyring(name)\n\n // Set up sync engine(s) — handles bare NoydbStore, SyncTarget, or SyncTarget[]\n let syncEngine: SyncEngine | undefined\n const targets = normalizeSyncTargets(this.options.sync)\n if (targets.length > 0) {\n // Primary sync engine is the first sync-peer (or first target if none)\n const primary = targets.find(t => t.role === 'sync-peer') ?? targets[0]!\n const effectivePolicy = this.options.syncPolicy ?? primary.policy ?? 
INDEXED_STORE_POLICY\n syncEngine = this.syncStrategy.buildSyncEngine({\n local: this.options.store,\n remote: primary.store,\n vault: name,\n strategy: this.options.conflict ?? 'version',\n emitter: this.emitter,\n syncPolicy: effectivePolicy,\n role: primary.role,\n ...(primary.label !== undefined ? { label: primary.label } : {}),\n })\n this.syncEngines.set(name, syncEngine)\n\n // Additional targets get their own engines (backup/archive are push-only)\n for (const target of targets) {\n if (target === primary) continue\n const targetPolicy = target.policy ?? this.options.syncPolicy ?? INDEXED_STORE_POLICY\n const engine = this.syncStrategy.buildSyncEngine({\n local: this.options.store,\n remote: target.store,\n vault: name,\n strategy: this.options.conflict ?? 'version',\n emitter: this.emitter,\n syncPolicy: targetPolicy,\n role: target.role,\n ...(target.label !== undefined ? { label: target.label } : {}),\n })\n const key = `${name}::${target.label ?? target.role}`\n this.syncEngines.set(key, engine)\n }\n }\n\n comp = new Vault({\n adapter: this.options.store,\n name,\n noydb: this,\n keyring,\n encrypted: this.options.encrypt !== false,\n emitter: this.emitter,\n onDirty: targets.length > 0\n ? async (coll, id, action, version) => {\n // Fan out dirty tracking to all sync engines for this vault\n for (const [key, engine] of this.syncEngines) {\n if (key === name || key.startsWith(`${name}::`)) {\n void engine.trackChange(coll, id, action, version)\n }\n }\n }\n : undefined,\n onRegisterConflictResolver: syncEngine\n ? (resolverName, resolver) => syncEngine.registerConflictResolver(resolverName, resolver)\n : undefined,\n syncAdapter: targets.length > 0 ? targets[0]!.store : undefined,\n historyConfig: this.options.history,\n ...(this.options.blobStrategy !== undefined ? { blobStrategy: this.options.blobStrategy } : {}),\n ...(this.options.indexStrategy !== undefined ? { indexStrategy: this.options.indexStrategy } : {}),\n ...(this.options.aggregateStrategy !== undefined ? { aggregateStrategy: this.options.aggregateStrategy } : {}),\n ...(this.options.crdtStrategy !== undefined ? { crdtStrategy: this.options.crdtStrategy } : {}),\n ...(this.options.consentStrategy !== undefined ? { consentStrategy: this.options.consentStrategy } : {}),\n ...(this.options.periodsStrategy !== undefined ? { periodsStrategy: this.options.periodsStrategy } : {}),\n ...(this.options.shadowStrategy !== undefined ? { shadowStrategy: this.options.shadowStrategy } : {}),\n ...(this.options.historyStrategy !== undefined ? { historyStrategy: this.options.historyStrategy } : {}),\n ...(this.options.i18nStrategy !== undefined ? { i18nStrategy: this.options.i18nStrategy } : {}),\n ...(this.options.syncStrategy !== undefined ? { syncStrategy: this.options.syncStrategy } : {}),\n locale: opts?.locale,\n // Thread the translator hook so Collection.put() can invoke it\n plaintextTranslator: this.options.plaintextTranslator\n ? (text, from, to, field, collection) =>\n this.invokeTranslator(text, from, to, field, collection)\n : undefined,\n // Refresh callback used by Vault.load() to re-derive\n // the in-memory keyring from a freshly-loaded keyring file.\n // Encrypted compartments need this so post-load decrypts work\n // against the loaded session's wrapped DEKs; plaintext\n // compartments leave it null and load() skips the refresh.\n reloadKeyring:\n this.options.encrypt !== false && this.options.secret\n ? 
async () => {\n // Drop the cached keyring so the next loadKeyring\n // call reads fresh from the adapter, then update the\n // cache so subsequent openVault calls see the\n // refreshed keyring too.\n this.keyringCache.delete(name)\n const refreshed = await loadKeyring(\n this.options.store,\n name,\n this.options.user,\n this.options.secret as string,\n )\n this.keyringCache.set(name, refreshed)\n return refreshed\n }\n : undefined,\n })\n this.vaultCache.set(name, comp)\n return comp\n }\n\n /** Synchronous vault access (must call openVault first, or auto-opens). */\n vault(name: string): Vault {\n const cached = this.vaultCache.get(name)\n if (cached) return cached\n\n // For backwards compat: if not opened yet, create with cached keyring or plaintext\n if (this.options.encrypt === false) {\n const keyring = createPlaintextKeyring(this.options.user)\n const comp = new Vault({\n adapter: this.options.store,\n name,\n noydb: this,\n keyring,\n encrypted: false,\n emitter: this.emitter,\n historyConfig: this.options.history,\n ...(this.options.blobStrategy !== undefined ? { blobStrategy: this.options.blobStrategy } : {}),\n ...(this.options.indexStrategy !== undefined ? { indexStrategy: this.options.indexStrategy } : {}),\n ...(this.options.aggregateStrategy !== undefined ? { aggregateStrategy: this.options.aggregateStrategy } : {}),\n ...(this.options.crdtStrategy !== undefined ? { crdtStrategy: this.options.crdtStrategy } : {}),\n ...(this.options.consentStrategy !== undefined ? { consentStrategy: this.options.consentStrategy } : {}),\n ...(this.options.periodsStrategy !== undefined ? { periodsStrategy: this.options.periodsStrategy } : {}),\n ...(this.options.shadowStrategy !== undefined ? { shadowStrategy: this.options.shadowStrategy } : {}),\n ...(this.options.historyStrategy !== undefined ? { historyStrategy: this.options.historyStrategy } : {}),\n ...(this.options.i18nStrategy !== undefined ? { i18nStrategy: this.options.i18nStrategy } : {}),\n ...(this.options.syncStrategy !== undefined ? { syncStrategy: this.options.syncStrategy } : {}),\n })\n this.vaultCache.set(name, comp)\n return comp\n }\n\n const keyring = this.keyringCache.get(name)\n if (!keyring) {\n throw new ValidationError(\n `Vault \"${name}\" not opened. Use await db.openVault(\"${name}\") first.`,\n )\n }\n\n const comp = new Vault({\n adapter: this.options.store,\n name,\n noydb: this,\n keyring,\n encrypted: true,\n historyConfig: this.options.history,\n ...(this.options.blobStrategy !== undefined ? { blobStrategy: this.options.blobStrategy } : {}),\n ...(this.options.indexStrategy !== undefined ? { indexStrategy: this.options.indexStrategy } : {}),\n ...(this.options.aggregateStrategy !== undefined ? { aggregateStrategy: this.options.aggregateStrategy } : {}),\n ...(this.options.crdtStrategy !== undefined ? { crdtStrategy: this.options.crdtStrategy } : {}),\n ...(this.options.consentStrategy !== undefined ? { consentStrategy: this.options.consentStrategy } : {}),\n ...(this.options.periodsStrategy !== undefined ? { periodsStrategy: this.options.periodsStrategy } : {}),\n ...(this.options.shadowStrategy !== undefined ? { shadowStrategy: this.options.shadowStrategy } : {}),\n ...(this.options.historyStrategy !== undefined ? { historyStrategy: this.options.historyStrategy } : {}),\n ...(this.options.i18nStrategy !== undefined ? { i18nStrategy: this.options.i18nStrategy } : {}),\n ...(this.options.syncStrategy !== undefined ? 
{ syncStrategy: this.options.syncStrategy } : {}),\n emitter: this.emitter,\n })\n this.vaultCache.set(name, comp)\n return comp\n }\n\n /** Grant access to a user for a vault. */\n async grant(vault: string, options: GrantOptions): Promise<void> {\n this.checkPolicyOperation(vault, 'grant')\n const keyring = await this.getKeyring(vault)\n await keyringGrant(this.options.store, vault, keyring, options)\n }\n\n /** Revoke a user's access to a vault. */\n async revoke(vault: string, options: RevokeOptions): Promise<void> {\n this.checkPolicyOperation(vault, 'revoke')\n const keyring = await this.getKeyring(vault)\n await keyringRevoke(this.options.store, vault, keyring, options)\n }\n\n /**\n * Rotate the DEKs for the given collections in a vault.\n *\n * Generates fresh DEKs, re-encrypts every record in each collection,\n * and updates the caller's keyring in place. The old DEKs become\n * unreachable — useful as a defense-in-depth measure after a\n * suspected key leak, or as the scheduled half of a key-rotation\n * policy.\n *\n * Unlike `revoke({ rotateKeys: true })`, this call does NOT remove\n * any users from the vault — their keyrings survive, but the Phase 2\n * rotation drops their entries for the rotated collections, so the\n * owner must re-grant access to those collections (see `rotateKeys`\n * in `team/keyring.ts`). This is the \"just rotate\" path; the\n * \"revoke and rotate\" path still lives in `revoke()`.\n *\n * Exposed on Noydb (rather than only on the lower-level keyring\n * module) so CLI and admin tooling can trigger rotation without\n * reaching into internals. See `noy-db rotate` for the CLI wrapper.\n */\n async rotate(vault: string, collections: string[]): Promise<void> {\n this.checkPolicyOperation(vault, 'rotate')\n const keyring = await this.getKeyring(vault)\n await keyringRotate(this.options.store, vault, keyring, collections)\n // Refresh the cached keyring so subsequent operations see the\n // freshly-rotated DEKs. Without this, `ensureCollectionDEK` on\n // the next Collection access would still hold the old ones.\n this.keyringCache.set(vault, keyring)\n }\n\n /** List all users with access to a vault. */\n async listUsers(vault: string): Promise<UserInfo[]> {\n return keyringListUsers(this.options.store, vault)\n }\n\n // ─── Cross-vault queries ──────────────────────\n\n /**\n * Enumerate every vault the calling principal can unwrap,\n * optionally filtered by minimum role.\n *\n * The walk is a two-step pipeline: first ask the adapter for the\n * universe of compartments it stores, then for each one attempt to\n * load the calling user's keyring with the in-memory passphrase.\n * Compartments where the user has no keyring file (`NoAccessError`)\n * or where the passphrase doesn't unwrap (`InvalidKeyError`) are\n * silently dropped from the result — the existence of those\n * compartments is **not** confirmed in the return value.\n *\n * Requires the optional `NoydbStore.listVaults()` capability.\n * Throws `StoreCapabilityError` against stores that don't\n * implement it (today: store-aws-dynamo, store-aws-s3,\n * store-browser-local, store-browser-idb). For those backends the\n * consumer should either pass an explicit candidate list to\n * `queryAcross()` directly, or maintain a vault index out of\n * band.\n *\n * **Privacy note.** This method's return value never reveals the\n * existence of a vault the caller cannot unwrap. The adapter\n * sees the enumeration call (it has to — it owns the storage), but\n * downstream consumers of `listAccessibleVaults()` only see\n * the filtered list. 
That's the boundary the existence-leak\n * guarantee draws.\n *\n * **Known edge case.** A vault whose keyring file\n * happens to have an empty wrapped-DEKs map (because the owner\n * granted access before any collection was created) will pass the\n * `loadKeyring` probe with *any* passphrase — there are no DEKs to\n * unwrap, so the integrity-checked unwrap that normally rejects\n * wrong passphrases never runs. The result is that an unrelated\n * principal who happens to know the user-id and the vault\n * name can show up in `listAccessibleVaults()` as having\n * access to that empty vault. They cannot read any actual\n * data (their DEK set is empty), so this is a metadata leak\n * (vault name + user-id), not a content leak. Hardening this\n * via a passphrase canary in the keyring file is a deferred\n * follow-up.\n *\n * **Cost.** O(compartments × keyring-load) — one `loadKeyring`\n * attempt per vault in the universe. Each attempt does one\n * adapter `get` + one PBKDF2 derivation + N AES-KW unwraps. For\n * dozens of compartments this is fine; for thousands the consumer\n * should cache the result and refresh on grant/revoke events. A\n * future optimization could batch the keyring reads via\n * `loadAll('_keyring')` if such a thing existed at the adapter\n * layer, but the contract doesn't expose that.\n *\n * @example\n * ```ts\n * // All compartments I can unwrap\n * const all = await db.listAccessibleVaults()\n *\n * // Only compartments where I'm at least admin\n * const admin = await db.listAccessibleVaults({ minRole: 'admin' })\n *\n * // Only compartments I own\n * const owned = await db.listAccessibleVaults({ minRole: 'owner' })\n * ```\n */\n async listAccessibleVaults(\n options: ListAccessibleVaultsOptions = {},\n ): Promise<AccessibleVault[]> {\n if (this.closed) throw new ValidationError('Instance is closed')\n this.resetSessionTimer()\n\n const adapter = this.options.store\n if (typeof adapter.listVaults !== 'function') {\n throw new StoreCapabilityError(\n 'listVaults',\n 'Noydb.listAccessibleVaults()',\n adapter.name,\n )\n }\n\n if (this.options.encrypt === false) {\n // Plaintext mode: no keyrings exist; every vault the\n // adapter knows about is \"accessible\" trivially as owner.\n const all = await adapter.listVaults()\n return all.map((id) => ({ id, role: 'owner' as Role }))\n }\n\n if (!this.options.secret) {\n throw new ValidationError(\n 'Noydb.listAccessibleVaults(): a secret (passphrase) is required ' +\n 'when encryption is enabled.',\n )\n }\n\n const minRank = ROLE_RANK[options.minRole ?? 'client']\n const universe = await adapter.listVaults()\n const accessible: AccessibleVault[] = []\n\n for (const vault of universe) {\n // Probe with loadKeyring directly (NOT getKeyring, which would\n // auto-create a fresh owner keyring on miss — that would\n // silently grant access to every empty vault in the\n // universe and is exactly the wrong shape for an enumeration\n // API). 
The two expected failure modes — no keyring file, or\n // wrong passphrase — are caught and silently dropped so the\n // return value never leaks existence.\n let keyring: UnlockedKeyring\n try {\n keyring = await loadKeyring(\n adapter,\n vault,\n this.options.user,\n this.options.secret,\n )\n } catch (err) {\n if (err instanceof NoAccessError || err instanceof InvalidKeyError) {\n continue // silent: caller has no key material for this vault\n }\n throw err // unexpected error — surface it\n }\n\n if (ROLE_RANK[keyring.role] < minRank) continue\n accessible.push({ id: vault, role: keyring.role })\n\n // Opportunistically prime the keyring cache so a subsequent\n // openVault() doesn't have to re-derive the KEK. The cost\n // is one Map.set per vault we already paid to unwrap.\n this.keyringCache.set(vault, keyring)\n }\n\n return accessible\n }\n\n /**\n * Run a per-vault callback against a list of compartments and\n * collect the results.\n *\n * Pure orchestration — there is no new crypto, no new sync, no new\n * authorization layer. Each vault is opened via the existing\n * `openVault()` path (which honors the cache primed by\n * `listAccessibleVaults`), the callback runs against the\n * resulting `Vault` instance, and the result (or thrown\n * error) is captured into the per-vault slot.\n *\n * **Per-vault errors do not abort the fan-out.** If one\n * vault's callback throws, that vault's slot carries\n * the error and the remaining compartments still run. The caller\n * decides how to handle the partition between success and failure.\n * This is the right shape for cross-tenant reports where one\n * tenant's outage shouldn't hide the other tenants' data.\n *\n * **Concurrency** is opt-in via `options.concurrency`. The default\n * is `1` (sequential) — conservative because per-vault\n * callbacks typically do their own I/O and an unbounded fan-out\n * can exhaust adapter connections (DynamoDB throughput, S3 socket\n * limits, browser fetch concurrency). Bump to 4-8 for cloud-backed\n * adapters where parallelism is the whole point.\n *\n * @example\n * ```ts\n * // Cross-tenant invoice totals as a flat list\n * const accessible = await db.listAccessibleVaults({ minRole: 'admin' })\n * const results = await db.queryAcross(\n * accessible.map((c) => c.id),\n * async (comp) => {\n * return comp.collection<Invoice>('invoices').query()\n * .where('month', '==', '2026-03')\n * .toArray()\n * },\n * { concurrency: 4 },\n * )\n * // results: Array<{ vault, result?: Invoice[], error?: Error }>\n *\n * // Compose with exportStream() — cross-vault plaintext export\n * const exports = await db.queryAcross(accessible.map((c) => c.id), async (comp) => {\n * const out: unknown[] = []\n * for await (const chunk of comp.exportStream()) out.push(chunk)\n * return out\n * })\n * ```\n */\n async queryAcross<T>(\n vaultIds: string[],\n fn: (vault: Vault) => Promise<T>,\n options: QueryAcrossOptions = {},\n ): Promise<QueryAcrossResult<T>[]> {\n if (this.closed) throw new ValidationError('Instance is closed')\n this.resetSessionTimer()\n\n const concurrency = Math.max(1, options.concurrency ?? 1)\n const results: QueryAcrossResult<T>[] = new Array(vaultIds.length)\n\n // Tiny inline p-limit. Maintains a sliding window of `concurrency`\n // in-flight promises and schedules the next vault as each\n // one settles. No external dep. 
Index-keyed result array so the\n // output preserves caller-supplied order even when concurrency\n // > 1 lets later compartments finish before earlier ones.\n let nextIndex = 0\n const inFlight: Set<Promise<void>> = new Set()\n\n const launch = (): Promise<void> | null => {\n if (nextIndex >= vaultIds.length) return null\n const idx = nextIndex++\n const vaultId = vaultIds[idx]!\n const task = (async () => {\n try {\n const comp = await this.openVault(vaultId)\n const result = await fn(comp)\n results[idx] = { vault: vaultId, result }\n } catch (err) {\n results[idx] = {\n vault: vaultId,\n error: err instanceof Error ? err : new Error(String(err)),\n }\n }\n })()\n inFlight.add(task)\n // Fire-and-forget cleanup. The task itself never rejects (the\n // try/catch above swallows everything into the result slot), so\n // there's no rejection to handle here — `void` tells the linter\n // we know what we're doing.\n void task.finally(() => inFlight.delete(task))\n return task\n }\n\n // Prime the window.\n for (let i = 0; i < concurrency; i++) {\n if (launch() === null) break\n }\n\n // Drain. As each task settles, kick off the next one until the\n // input is exhausted. `Promise.race` against the live set is the\n // simplest way to \"wake up on whichever finishes first\" without\n // pulling in p-limit / async-pool / etc.\n while (inFlight.size > 0) {\n await Promise.race(inFlight)\n while (inFlight.size < concurrency && nextIndex < vaultIds.length) {\n if (launch() === null) break\n }\n }\n\n return results\n }\n\n /** Change the current user's passphrase for a vault. */\n async changeSecret(vault: string, newPassphrase: string): Promise<void> {\n this.checkPolicyOperation(vault, 'changeSecret')\n const keyring = await this.getKeyring(vault)\n const updated = await keyringChangeSecret(\n this.options.store,\n vault,\n keyring,\n newPassphrase,\n )\n this.keyringCache.set(vault, updated)\n }\n\n // ─── Sync ──────────────────────────────────────────────────────\n\n /** Push local changes to remote for a vault. */\n async push(vault: string, options?: PushOptions): Promise<PushResult> {\n const engine = this.getSyncEngine(vault)\n return engine.push(options)\n }\n\n /** Pull remote changes to local for a vault. */\n async pull(vault: string, options?: PullOptions): Promise<PullResult> {\n const engine = this.getSyncEngine(vault)\n return engine.pull(options)\n }\n\n /**\n * Bidirectional sync: pull then push for all targets.\n * `sync-peer` targets do pull+push; `backup`/`archive` targets do push-only.\n */\n async sync(vault: string, options?: { push?: PushOptions; pull?: PullOptions }): Promise<{ pull: PullResult; push: PushResult }> {\n const primary = this.getSyncEngine(vault)\n const result = await primary.sync(options)\n\n // Fan out push to backup/archive targets (fire-and-mark-dirty)\n for (const [key, engine] of this.syncEngines) {\n if (key === vault) continue\n if (!key.startsWith(`${vault}::`)) continue\n if (engine.role === 'sync-peer') {\n await engine.sync(options).catch((err: Error) => {\n this.emitter.emit('sync:backup-error', {\n vault,\n target: engine.label ?? engine.role,\n error: err,\n })\n })\n } else {\n // backup/archive: push-only\n await engine.push(options?.push).catch((err: Error) => {\n this.emitter.emit('sync:backup-error', {\n vault,\n target: engine.label ?? 
engine.role,\n error: err,\n })\n })\n }\n }\n\n return result\n }\n\n /**\n * Multi-record atomic transaction.\n *\n * The callback stages writes across any number of vaults /\n * collections; on return the hub pre-flights version checks, then\n * commits every staged op. If the body throws, nothing is\n * persisted. If any staged op fails its `expectedVersion` check,\n * the batch throws `ConflictError` with zero writes performed. If a\n * mid-commit failure occurs after one or more ops have already\n * written, each executed op is reverted best-effort (see\n * `runTransaction` for the crash-window caveat).\n *\n * Distinct from `transaction(vault: string) → SyncTransaction`\n * which batches push/pull across sync peers.\n */\n transaction<T>(fn: (tx: TxContext) => Promise<T> | T): Promise<T>\n /**\n * Create a sync transaction for the given vault.\n * The vault must already be open via `openVault()`.\n * Call `tx.put()` / `tx.delete()` to stage changes, then `tx.commit()`\n * to write all locally and push atomically to remote.\n */\n transaction(vault: string): SyncTransaction\n transaction<T>(\n arg: string | ((tx: TxContext) => Promise<T> | T),\n ): SyncTransaction | Promise<T> {\n if (typeof arg === 'function') {\n return this.txStrategy.runTransaction(this, arg)\n }\n const vault = arg\n const comp = this.vaultCache.get(vault)\n if (!comp) {\n throw new ValidationError(\n `Vault \"${vault}\" is not open. Call openVault() first.`,\n )\n }\n const engine = this.getSyncEngine(vault)\n return this.syncStrategy.buildSyncTransaction(comp, engine)\n }\n\n /**\n * Internal accessor for the primary store — used by the tx\n * executor to perform raw adapter reads for pre-flight CAS and\n * raw writes for rollback. Not part of the public API.\n *\n * @internal\n */\n get _store(): NoydbStore {\n return this.options.store\n }\n\n /** Get sync status for a vault. */\n syncStatus(vault: string): SyncStatus {\n const engine = this.syncEngines.get(vault)\n if (!engine) {\n return { dirty: 0, lastPush: null, lastPull: null, online: true }\n }\n return engine.status()\n }\n\n private getSyncEngine(vault: string): SyncEngine {\n const engine = this.syncEngines.get(vault)\n if (!engine) {\n throw new ValidationError('No sync adapter configured. Pass a `sync` adapter to createNoydb().')\n }\n return engine\n }\n\n // ─── Events ────────────────────────────────────────────────────\n\n on<K extends keyof NoydbEventMap>(event: K, handler: (data: NoydbEventMap[K]) => void): void {\n this.emitter.on(event, handler)\n }\n\n off<K extends keyof NoydbEventMap>(event: K, handler: (data: NoydbEventMap[K]) => void): void {\n this.emitter.off(event, handler)\n }\n\n close(): void {\n this.closed = true\n if (this.sessionTimer) {\n clearTimeout(this.sessionTimer)\n this.sessionTimer = null\n }\n // Destroy all policy enforcers (cancels timers + visibility listeners)\n for (const enforcer of this.policyEnforcers.values()) {\n enforcer.destroy()\n }\n this.policyEnforcers.clear()\n // Revoke all in-memory session keys\n this.sessionStrategy.revokeAllSessions()\n // Stop all sync engines\n for (const engine of this.syncEngines.values()) {\n engine.stopAutoSync()\n }\n this.syncEngines.clear()\n this.keyringCache.clear()\n this.vaultCache.clear()\n this.emitter.removeAllListeners()\n // Clear translator state — same lifetime as KEK/DEKs\n this.translatorCache.clear()\n this._translatorAuditLog.length = 0\n }\n\n /**\n * Returns a snapshot of all translator invocations since the last\n * `close()`. 
Useful for testing and compliance auditing. The log is\n * in-memory only — it is cleared when `db.close()` is called.\n *\n * Entries deliberately omit content hashes. See `TranslatorAuditEntry`\n * and issue for the rationale.\n */\n translatorAuditLog(): readonly TranslatorAuditEntry[] {\n return [...this._translatorAuditLog]\n }\n\n /**\n * Invoke the configured `plaintextTranslator` (or serve from cache).\n * Records one `TranslatorAuditEntry` per call regardless of cache hit.\n * Called by `Vault` during `put()` for `autoTranslate: true` fields.\n *\n * @internal — not part of the public API surface\n */\n async invokeTranslator(\n text: string,\n from: string,\n to: string,\n field: string,\n collection: string,\n ): Promise<string> {\n const cacheKey = `${field}\\x00${collection}\\x00${from}\\x00${to}\\x00${text}`\n const translatorName = this.options.plaintextTranslatorName ?? 'anonymous'\n\n const cached = this.translatorCache.get(cacheKey)\n if (cached !== undefined) {\n this._translatorAuditLog.push({\n type: 'translator-invocation',\n field,\n collection,\n fromLocale: from,\n toLocale: to,\n translatorName,\n timestamp: new Date().toISOString(),\n cached: true,\n })\n return cached\n }\n\n const result = await this.options.plaintextTranslator!({ text, from, to, field, collection })\n this.translatorCache.set(cacheKey, result)\n this._translatorAuditLog.push({\n type: 'translator-invocation',\n field,\n collection,\n fromLocale: from,\n toLocale: to,\n translatorName,\n timestamp: new Date().toISOString(),\n })\n return result\n }\n\n /** Get or load the keyring for a vault. */\n private async getKeyring(vault: string): Promise<UnlockedKeyring> {\n if (this.options.encrypt === false) {\n return createPlaintextKeyring(this.options.user)\n }\n\n const cached = this.keyringCache.get(vault)\n if (cached) return cached\n\n if (!this.options.secret) {\n throw new ValidationError('A secret (passphrase) is required when encryption is enabled')\n }\n\n let keyring: UnlockedKeyring\n try {\n keyring = await loadKeyring(this.options.store, vault, this.options.user, this.options.secret)\n } catch (err) {\n // Only create a new keyring if no keyring exists (NoAccessError).\n // If the keyring exists but the passphrase is wrong (InvalidKeyError), propagate the error.\n if (err instanceof NoAccessError) {\n keyring = await createOwnerKeyring(this.options.store, vault, this.options.user, this.options.secret)\n } else {\n throw err\n }\n }\n\n this.keyringCache.set(vault, keyring)\n return keyring\n }\n}\n\n/** Create a new NOYDB instance. 
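\n *\n * A minimal sketch — `myStore` stands in for any `NoydbStore`\n * adapter:\n * @example\n * ```ts\n * const db = await createNoydb({\n * store: myStore,\n * user: 'alice',\n * secret: 'correct horse battery staple',\n * })\n * const vault = await db.openVault('acme')\n * ```\n 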
*/\nexport async function createNoydb(options: NoydbOptions): Promise<Noydb> {\n const encrypted = options.encrypt !== false\n\n if (encrypted && !options.secret) {\n throw new ValidationError('A secret (passphrase) is required when encryption is enabled')\n }\n\n return new Noydb(options)\n}\n\n// ─── Internal helpers ─────────────────────────────────────────────────\n\n/**\n * Normalize `NoydbOptions.sync` to a `SyncTarget[]`.\n * Accepts a bare NoydbStore, a SyncTarget, or an array.\n */\nfunction normalizeSyncTargets(\n sync: NoydbOptions['sync'],\n): SyncTarget[] {\n if (!sync) return []\n if (Array.isArray(sync)) return sync\n // SyncTarget has a `role` property; bare NoydbStore does not\n if ('role' in sync && typeof sync.role === 'string') {\n return [sync]\n }\n // Bare NoydbStore — wrap as sync-peer\n return [{ store: sync as NoydbStore, role: 'sync-peer' }]\n}\n","import type {\n NoydbStore,\n EncryptedEnvelope,\n VaultBackup,\n VaultSnapshot,\n HistoryConfig,\n ExportStreamOptions,\n ExportChunk,\n CollectionConflictResolver,\n CrossTierAccessEvent,\n TierMode,\n Role,\n} from './types.js'\nimport type { Noydb } from './noydb.js'\nimport type { IssueDelegationOptions, DelegationToken } from './team/delegation.js'\nimport { NOYDB_BACKUP_VERSION, NOYDB_FORMAT_VERSION } from './types.js'\nimport { Collection } from './collection.js'\nimport type { CacheOptions } from './collection.js'\nimport type { IndexDef } from './indexing/eager-indexes.js'\nimport type { JoinableSource } from './query/index.js'\nimport type { OnDirtyCallback } from './collection.js'\nimport type { UnlockedKeyring, BundleRecipient } from './team/keyring.js'\nimport { buildRecipientKeyringFile } from './team/keyring.js'\nimport { ensureCollectionDEK, hasAccess, hasExportCapability, hasImportCapability } from './team/keyring.js'\nimport type { ExportFormat, KeyringFile } from './types.js'\nimport {\n ExportCapabilityError,\n ImportCapabilityError,\n ValidationError,\n AlreadyElevatedError,\n ElevationExpiredError,\n TierNotGrantedError,\n} from './errors.js'\nimport type { NoydbEventEmitter } from './events.js'\nimport { BackupLedgerError, BackupCorruptedError } from './errors.js'\nimport type { StandardSchemaV1 } from './schema.js'\nimport type { BlobStrategy } from './blobs/strategy.js'\nimport type { IndexStrategy } from './indexing/strategy.js'\nimport type { AggregateStrategy } from './aggregate/strategy.js'\nimport type { CrdtStrategy } from './crdt/strategy.js'\n// — import from leaf modules (NOT from ./history/ledger/index.js\n// or store.js) so the LedgerStore class never reaches the floor\n// bundle. 
The leaf files hold pure constants + a tiny hash helper;\n// the class lives behind the history strategy seam.\nimport type { LedgerStore } from './history/ledger/store.js'\nimport { LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION } from './history/ledger/constants.js'\nimport { sha256Hex } from './history/ledger/entry.js'\nimport type { VaultInstant } from './history/time-machine.js'\nimport { NO_HISTORY, type HistoryStrategy } from './history/strategy.js'\nimport type { VaultFrame } from './shadow/vault-frame.js'\nimport { NO_SHADOW, type ShadowStrategy } from './shadow/strategy.js'\nimport type { ConsentContext, ConsentAuditEntry, ConsentAuditFilter, ConsentOp } from './consent/consent.js'\nimport { NO_CONSENT, type ConsentStrategy } from './consent/strategy.js'\nimport { NO_PERIODS, type PeriodsStrategy } from './periods/strategy.js'\nimport {\n RefRegistry,\n RefIntegrityError,\n type RefDescriptor,\n type RefViolation,\n} from './refs.js'\nimport type { DictionaryHandle, DictionaryOptions, DictKeyDescriptor } from './i18n/dictionary.js'\nimport { isDictCollectionName } from './i18n/dictionary.js'\nimport type { I18nTextDescriptor } from './i18n/core.js'\nimport { NO_I18N, type I18nStrategy } from './i18n/strategy.js'\nimport { NO_SYNC, type SyncStrategy } from './team/sync-strategy.js'\nimport type { LocaleReadOptions, ConflictPolicy } from './types.js'\nimport type { CrdtMode } from './crdt/crdt.js'\nimport { ReservedCollectionNameError } from './errors.js'\nimport {\n PERIODS_COLLECTION,\n type PeriodRecord,\n type ClosePeriodOptions,\n type OpenPeriodOptions,\n} from './periods/index.js'\nimport { encrypt, decrypt } from './crypto.js'\nimport {\n createExportBlobsHandle,\n EXPORT_AUDIT_COLLECTION,\n type ExportBlobsOptions,\n type ExportBlobsHandle,\n type ExportBlobsAuditEntry,\n} from './blobs/export-blobs.js'\nimport { runCompaction, type BlobFieldsConfig, type CompactRunOptions, type CompactionResult } from './blobs/blob-compaction.js'\nimport {\n writeMagicLinkGrant,\n type IssueMagicLinkGrantOptions,\n type MagicLinkGrantRecord,\n} from './team/magic-link-grant.js'\n\n/** A vault (tenant namespace) containing collections. */\nexport class Vault {\n private readonly adapter: NoydbStore\n /** The vault's name as passed to `openVault()`. Stable for the instance lifetime. */\n public readonly name: string\n /**\n * Backreference to the parent `Noydb`. Lets vault-scoped subsystems\n * (e.g. `as-*` reader `apply()` paths gating on `withTransactions()`)\n * reach the strategy seam without threading `db` through every API.\n *\n * Type-only Noydb import keeps the module graph acyclic at runtime.\n */\n public readonly noydb: Noydb\n /**\n * The active in-memory keyring. NOT readonly because `load()`\n * needs to refresh it after restoring a different keyring file —\n * otherwise the in-memory DEKs (from the pre-load session) and\n * the on-disk wrapped DEKs (from the loaded backup) drift apart\n * and every subsequent decrypt fails with TamperedError.\n */\n private keyring: UnlockedKeyring\n private readonly encrypted: boolean\n private readonly emitter: NoydbEventEmitter\n private readonly onDirty: OnDirtyCallback | undefined\n private readonly onRegisterConflictResolver: ((name: string, resolver: CollectionConflictResolver) => void) | undefined\n private readonly syncAdapter: NoydbStore | undefined\n private readonly historyConfig: HistoryConfig\n /**\n * tree-shake seam for the optional blob subsystem. 
Undefined\n * means \"blobs are off for this vault\"; every `collection.blob(id)`\n * call throws with a pointer at `@noy-db/hub/blobs`.\n */\n private readonly blobStrategy: BlobStrategy | undefined\n private readonly indexStrategy: IndexStrategy | undefined\n private readonly aggregateStrategy: AggregateStrategy | undefined\n private readonly crdtStrategy: CrdtStrategy | undefined\n private readonly consentStrategy: ConsentStrategy\n private readonly periodsStrategy: PeriodsStrategy\n private readonly shadowStrategy: ShadowStrategy\n private readonly historyStrategy: HistoryStrategy\n private readonly i18nStrategy: I18nStrategy\n private readonly syncStrategy: SyncStrategy\n private getDEK: (collectionName: string) => Promise<CryptoKey>\n\n /**\n * Optional callback that re-derives an UnlockedKeyring from the\n * adapter using the active user's passphrase. Called by `load()`\n * after the on-disk keyring file has been replaced — refreshes\n * `this.keyring` so the next DEK access uses the loaded wrapped\n * DEKs instead of the stale pre-load ones.\n *\n * Provided by Noydb at openVault() time. Tests that\n * construct Vault directly can pass `undefined`; load()\n * skips the refresh in that case (which is fine for plaintext\n * compartments — there's nothing to re-unwrap).\n */\n private readonly reloadKeyring: (() => Promise<UnlockedKeyring>) | undefined\n private readonly collectionCache = new Map<string, Collection<unknown>>()\n\n /**\n * per-collection `blobFields` retention/TTL config.\n * Populated on `collection({ blobFields })` and read by\n * `vault.compact()`. Indexed by collection name.\n */\n private readonly blobFieldsRegistry = new Map<string, BlobFieldsConfig<unknown>>()\n\n /**\n * Per-vault ledger store. Lazy-initialized on first\n * `collection()` call (which passes it through to the Collection)\n * or on first `ledger()` call from user code.\n *\n * One LedgerStore is shared across all collections in a vault\n * because the hash chain is vault-scoped: the chain head is a\n * single \"what did this vault do last\" identifier, not a\n * per-collection one. Two collections appending concurrently is the\n * single-writer concurrency concern documented in the LedgerStore\n * docstring.\n */\n private ledgerStore: LedgerStore | null = null\n\n /**\n * Per-vault foreign-key reference registry. Collections\n * register their `refs` option here on construction; the\n * vault uses the registry on every put/delete/checkIntegrity\n * call. One instance lives for the compartment's lifetime.\n */\n private readonly refRegistry = new RefRegistry()\n\n /**\n * Set of collection record-ids currently being deleted as part of\n * a cascade. Populated on entry to `enforceRefsOnDelete` and\n * drained on exit. Used to break mutual-cascade cycles: deleting\n * A → cascade to B → cascade back to A would otherwise recurse\n * forever, so we short-circuit when we see an already-in-progress\n * delete on the same (collection, id) pair.\n */\n private readonly cascadeInProgress = new Set<string>()\n\n /**\n * Vault-default locale. Set via\n * `openVault(name, { locale })`. Used as the fallback locale\n * when per-call `{ locale }` options are not specified on individual\n * `get()`/`list()` calls.\n */\n private locale: string | undefined\n\n /**\n * Current consent scope. Set by `withConsent()` and\n * restored in its finally block. 
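A nesting sketch (`outer`, `inner`,\n * and `work` are placeholders):\n *\n * ```ts\n * await vault.withConsent(outer, async () => {\n * await vault.withConsent(inner, work) // inner scope is active here\n * // back here, outer is active again: the finally block restored it\n * })\n * ```\n *\n * 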
When non-null, every collection\n * access inside the scope writes one entry to `_consent_audit`.\n *\n * Single-slot by design — two concurrent withConsent calls on the\n * same Vault stomp each other. Adopters needing per-flight scoping\n * should use separate Vault instances.\n */\n private consentContext: ConsentContext | null = null\n\n /**\n * Cache of closed/opened accounting periods.\n * Populated on first `closePeriod` / `openPeriod` / `listPeriods` /\n * per-collection write call. Kept in memory as an ordered list (by\n * `closedAt`) so the `periodGuard` hook runs synchronously against\n * each collection's put/delete path.\n *\n * Sentinel `null` means \"not yet loaded\" — the first consumer\n * triggers a one-time `loadPeriods()` pass. Every subsequent\n * closure/opening pushes into the cache in-place so the next write\n * sees the updated chain without re-reading the adapter.\n */\n private periodCache: PeriodRecord[] | null = null\n\n /**\n * Registry of dictKey fields declared across all collections in this\n * vault. Keyed by collection name → field name → dictionary name.\n * Used by `DictionaryHandle.rename()` to find and update all records\n * referencing a renamed key.\n *\n * Populated by `collection()` when the `dictKeyFields` option is passed.\n */\n private readonly dictKeyFieldRegistry = new Map<\n string, // collection name\n Record<string, string> // field name → dictionary name\n >()\n\n /**\n * Registry of i18nText fields declared across all collections. Keyed\n * by collection name → field name → I18nTextDescriptor. Used by\n * `applyI18nLocale` on reads and by `validateI18nTextValue` on puts.\n *\n * Populated by `collection()` when the `i18nFields` option is passed.\n */\n private readonly i18nFieldRegistry = new Map<\n string, // collection name\n Record<string, I18nTextDescriptor>\n >()\n\n /** Cache of DictionaryHandle instances, one per dictionary name. */\n private readonly dictionaryCache = new Map<string, DictionaryHandle>()\n\n /** — subscribers for cross-tier access events. */\n private readonly crossTierSubs = new Set<(event: CrossTierAccessEvent) => void>()\n\n /** — currently-active elevation, or null. One per vault. */\n private activeElevation: {\n readonly tier: number\n readonly expiresAt: number\n readonly reason: string\n readonly handle: ElevatedHandle\n } | null = null\n\n /**\n * Optional translator callback threaded from `Noydb.invokeTranslator`.\n * Present only when `plaintextTranslator` was configured on `createNoydb()`.\n */\n private readonly translateText:\n | ((text: string, from: string, to: string, field: string, collection: string) => Promise<string>)\n | undefined\n\n constructor(opts: {\n adapter: NoydbStore\n name: string\n noydb: Noydb\n keyring: UnlockedKeyring\n encrypted: boolean\n emitter: NoydbEventEmitter\n onDirty?: OnDirtyCallback | undefined\n historyConfig?: HistoryConfig | undefined\n reloadKeyring?: (() => Promise<UnlockedKeyring>) | undefined\n /** Vault-default locale. */\n locale?: string | undefined\n /** Translator callback from Noydb. */\n plaintextTranslator?:\n | ((text: string, from: string, to: string, field: string, collection: string) => Promise<string>)\n | undefined\n /**\n * callback to register a per-collection envelope-level\n * conflict resolver with the SyncEngine. Present when sync is configured.\n */\n onRegisterConflictResolver?: ((name: string, resolver: CollectionConflictResolver) => void) | undefined\n /** — optional remote/sync adapter for presence broadcasting. 
*/\n syncAdapter?: NoydbStore | undefined\n /**\n * tree-shake seam — strategy for optional blob storage.\n * Passed through to every `Collection` built by `vault.collection()`.\n * `undefined` => every `collection.blob(id)` throws with a pointer\n * at `@noy-db/hub/blobs`.\n */\n blobStrategy?: BlobStrategy | undefined\n indexStrategy?: IndexStrategy | undefined\n aggregateStrategy?: AggregateStrategy | undefined\n crdtStrategy?: CrdtStrategy | undefined\n consentStrategy?: ConsentStrategy | undefined\n periodsStrategy?: PeriodsStrategy | undefined\n shadowStrategy?: ShadowStrategy | undefined\n historyStrategy?: HistoryStrategy | undefined\n i18nStrategy?: I18nStrategy | undefined\n syncStrategy?: SyncStrategy | undefined\n }) {\n this.adapter = opts.adapter\n this.name = opts.name\n this.noydb = opts.noydb\n this.keyring = opts.keyring\n this.encrypted = opts.encrypted\n this.emitter = opts.emitter\n this.onDirty = opts.onDirty\n this.onRegisterConflictResolver = opts.onRegisterConflictResolver\n this.syncAdapter = opts.syncAdapter\n this.blobStrategy = opts.blobStrategy\n this.indexStrategy = opts.indexStrategy\n this.aggregateStrategy = opts.aggregateStrategy\n this.crdtStrategy = opts.crdtStrategy\n this.consentStrategy = opts.consentStrategy ?? NO_CONSENT\n this.periodsStrategy = opts.periodsStrategy ?? NO_PERIODS\n this.shadowStrategy = opts.shadowStrategy ?? NO_SHADOW\n this.historyStrategy = opts.historyStrategy ?? NO_HISTORY\n this.i18nStrategy = opts.i18nStrategy ?? NO_I18N\n this.syncStrategy = opts.syncStrategy ?? NO_SYNC\n this.historyConfig = opts.historyConfig ?? { enabled: true }\n this.reloadKeyring = opts.reloadKeyring\n this.locale = opts.locale\n this.translateText = opts.plaintextTranslator\n\n // Build the lazy DEK resolver. Pulled out into a private method\n // so `load()` can rebuild it after a keyring refresh — the\n // closure captures `this.keyring` by reference, so changing the\n // field is enough, but resetting the cached `getDEKFn` ensures\n // ensureCollectionDEK runs again against the freshly-loaded\n // wrapped DEKs.\n this.getDEK = this.makeGetDEK()\n }\n\n /**\n * Construct (or reconstruct) the lazy DEK resolver. Captures the\n * CURRENT value of `this.keyring` and `this.adapter` in a closure,\n * memoizing the inner getDEKFn after first use so subsequent\n * lookups are O(1).\n *\n * `load()` calls this after refreshing `this.keyring` to discard\n * the prior session's cached DEKs.\n */\n private makeGetDEK(): (collectionName: string) => Promise<CryptoKey> {\n let getDEKFn: ((collectionName: string) => Promise<CryptoKey>) | null = null\n return async (collectionName: string): Promise<CryptoKey> => {\n if (!getDEKFn) {\n getDEKFn = await ensureCollectionDEK(this.adapter, this.name, this.keyring)\n }\n return getDEKFn(collectionName)\n }\n }\n\n /**\n * Open a typed collection within this vault.\n *\n * - `options.indexes` declares secondary indexes for the query DSL.\n * Indexes are computed in memory after decryption; adapters never\n * see plaintext index data.\n * - `options.prefetch` (default `true`) controls hydration. Eager mode\n * loads everything on first access; lazy mode (`prefetch: false`)\n * loads records on demand and bounds memory via the LRU cache.\n * - `options.cache` configures the LRU bounds. Required in lazy mode.\n * Accepts `{ maxRecords, maxBytes: '50MB' | 1024 }`.\n * - `options.schema` attaches a Standard Schema v1 validator (Zod,\n * Valibot, ArkType, Effect Schema, etc.). 
Every `put()` is validated\n * before encryption; every read is validated after decryption.\n * Failing records throw `SchemaValidationError`.\n * - `options.i18nFields` declares per-field `i18nText()` descriptors\n *. Validated on `put()` and locale-resolved on reads.\n * - `options.dictKeyFields` declares per-field `dictKey()` descriptors\n *. `put()` validates keys against the declared set; reads\n * with `{ locale }` add `<field>Label` virtual fields.\n *\n * Throws `ReservedCollectionNameError` for names starting with `_dict_`.\n * Use `vault.dictionary(name)` to access dictionary collections.\n *\n * Lazy mode + indexes is rejected at construction time — see the\n * Collection constructor for the rationale.\n */\n collection<T>(collectionName: string, options?: {\n indexes?: IndexDef[]\n /** — auto-reconcile policy for persisted-index drift. */\n reconcileOnOpen?: 'off' | 'dry-run' | 'auto'\n prefetch?: boolean\n cache?: CacheOptions\n schema?: StandardSchemaV1<unknown, T>\n refs?: Record<string, RefDescriptor>\n /** — declare i18nText fields for locale-aware reads. */\n i18nFields?: Record<string, I18nTextDescriptor>\n /** — declare dictKey fields for label resolution on reads. */\n dictKeyFields?: Record<string, DictKeyDescriptor>\n /** — per-collection conflict resolution policy. */\n conflictPolicy?: ConflictPolicy<T>\n /** — CRDT mode for collaborative editing without conflicts. */\n crdt?: CrdtMode\n /**\n * declare deterministic-encryption fields for blind\n * equality search. See `Collection` constructor docs for the full\n * trade-off. Requires `acknowledgeDeterministicRisk: true`.\n */\n deterministicFields?: readonly string[]\n /** — explicit ack that deterministic encryption leaks equality. */\n acknowledgeDeterministicRisk?: boolean\n /**\n * declarative blob retention / TTL policy per slot\n * name. Values are `{ retainDays?, evictWhen? }`. Evaluated only\n * when `vault.compact()` runs.\n */\n blobFields?: BlobFieldsConfig<T>\n /** — declared tiers for this collection. */\n tiers?: readonly number[]\n /** — how lower-tier reads see above-tier records. */\n tierMode?: TierMode\n }): Collection<T> {\n // Guard: reject reserved _dict_* names\n if (isDictCollectionName(collectionName)) {\n throw new ReservedCollectionNameError(collectionName)\n }\n\n let coll = this.collectionCache.get(collectionName)\n if (!coll) {\n // Register ref declarations (if any) with the vault-level\n // registry BEFORE constructing the Collection. 
This way the\n // first put() on the new collection already sees its refs via\n // vault.enforceRefsOnPut.\n if (options?.refs) {\n this.refRegistry.register(collectionName, options.refs)\n }\n\n // Register i18nText fields\n if (options?.i18nFields) {\n this.i18nFieldRegistry.set(collectionName, options.i18nFields)\n }\n\n // register blobFields retention/TTL policy\n if (options?.blobFields) {\n this.blobFieldsRegistry.set(collectionName, options.blobFields as BlobFieldsConfig<unknown>)\n }\n\n // Register dictKey fields: store field → dictionary name mapping\n if (options?.dictKeyFields) {\n const dictFieldMap: Record<string, string> = {}\n for (const [field, desc] of Object.entries(options.dictKeyFields)) {\n dictFieldMap[field] = desc.name\n }\n this.dictKeyFieldRegistry.set(collectionName, dictFieldMap)\n }\n\n const collOpts: ConstructorParameters<typeof Collection<T>>[0] = {\n adapter: this.adapter,\n vault: this.name,\n name: collectionName,\n keyring: this.keyring,\n encrypted: this.encrypted,\n emitter: this.emitter,\n getDEK: this.getDEK,\n onDirty: this.onDirty,\n historyConfig: this.historyConfig,\n // thread the vault-wide blob strategy into every\n // collection. `undefined` is intentionally preserved so the\n // Collection constructor uses its NO_BLOBS default.\n ...(this.blobStrategy !== undefined ? { blobStrategy: this.blobStrategy } : {}),\n ...(this.indexStrategy !== undefined ? { indexStrategy: this.indexStrategy } : {}),\n ...(this.aggregateStrategy !== undefined ? { aggregateStrategy: this.aggregateStrategy } : {}),\n ...(this.crdtStrategy !== undefined ? { crdtStrategy: this.crdtStrategy } : {}),\n historyStrategy: this.historyStrategy,\n i18nStrategy: this.i18nStrategy,\n syncStrategy: this.syncStrategy,\n ledger: this.getLedgerOrNull() ?? 
undefined,\n refEnforcer: this,\n joinResolver: this,\n defaultLocale: this.locale,\n onRegisterConflictResolver: this.onRegisterConflictResolver,\n onAccess: (op, id) => this._logConsent(op, collectionName, id),\n periodGuard: (existing, incoming) => this._assertTsWritable(existing, incoming),\n }\n if (options?.indexes !== undefined) collOpts.indexes = options.indexes\n if (options?.reconcileOnOpen !== undefined) collOpts.reconcileOnOpen = options.reconcileOnOpen\n if (options?.prefetch !== undefined) collOpts.prefetch = options.prefetch\n if (options?.cache !== undefined) collOpts.cache = options.cache\n if (options?.schema !== undefined) collOpts.schema = options.schema\n if (options?.conflictPolicy !== undefined) collOpts.conflictPolicy = options.conflictPolicy\n if (options?.crdt !== undefined) collOpts.crdt = options.crdt\n if (options?.deterministicFields !== undefined) {\n collOpts.deterministicFields = options.deterministicFields\n }\n if (options?.acknowledgeDeterministicRisk !== undefined) {\n collOpts.acknowledgeDeterministicRisk = options.acknowledgeDeterministicRisk\n }\n if (options?.tiers !== undefined) collOpts.tiers = options.tiers\n if (options?.tierMode !== undefined) collOpts.tierMode = options.tierMode\n collOpts.onCrossTierAccess = (event) => this.emitCrossTier(event)\n if (this.syncAdapter !== undefined) collOpts.syncAdapter = this.syncAdapter\n if (options?.i18nFields !== undefined) collOpts.i18nFields = options.i18nFields\n if (options?.dictKeyFields !== undefined) {\n // Build the label resolver callback for this collection\n collOpts.dictLabelResolver = async (dictName, key, locale, fallback) => {\n const handle = this.dictionary(dictName)\n return handle.resolveLabel(key, locale, fallback)\n }\n collOpts.dictKeyFields = options.dictKeyFields\n }\n // i18n validation on put — enforced via the compartment's put hook\n if (options?.i18nFields !== undefined || options?.dictKeyFields !== undefined) {\n collOpts.i18nPutValidator = (record: unknown) => {\n this.enforceI18nOnPut(collectionName, record)\n }\n }\n // Wire the translator for autoTranslate: true fields\n if (options?.i18nFields !== undefined && this.translateText) {\n collOpts.autoTranslateHook = this.translateText\n }\n coll = new Collection<T>(collOpts)\n this.collectionCache.set(collectionName, coll)\n }\n return coll as Collection<T>\n }\n\n /**\n * Validate i18nText fields on a `put()`. Called by Collection just\n * before the adapter write, after schema validation. Throws\n * `MissingTranslationError` when a required translation is absent.\n */\n enforceI18nOnPut(collectionName: string, record: unknown): void {\n const i18nFields = this.i18nFieldRegistry.get(collectionName)\n if (!i18nFields || Object.keys(i18nFields).length === 0) return\n if (!record || typeof record !== 'object') return\n\n const obj = record as Record<string, unknown>\n for (const [field, descriptor] of Object.entries(i18nFields)) {\n const value = obj[field]\n if (value === undefined || value === null) continue\n this.i18nStrategy.validateI18nTextValue(value, field, descriptor)\n }\n }\n\n /**\n * Apply locale resolution to a record for the given collection.\n *\n * Called by Collection after decryption when locale options are present.\n * Returns a new object (never mutates the cached record).\n */\n async applyLocale(\n collectionName: string,\n record: Record<string, unknown>,\n localeOpts: LocaleReadOptions,\n ): Promise<Record<string, unknown>> {\n const locale = localeOpts.locale ?? 
this.locale\n if (!locale) return record\n\n let result = record\n\n // 1. i18nText resolution\n const i18nFields = this.i18nFieldRegistry.get(collectionName)\n if (i18nFields && Object.keys(i18nFields).length > 0) {\n result = this.i18nStrategy.applyI18nLocale(result, i18nFields, locale, localeOpts.fallback)\n }\n\n // 2. dictKey label resolution — add <field>Label virtual fields\n const dictFields = this.dictKeyFieldRegistry.get(collectionName)\n if (dictFields && Object.keys(dictFields).length > 0 && locale !== 'raw') {\n const withLabels = { ...result }\n for (const [field, dictName] of Object.entries(dictFields)) {\n const key = result[field]\n if (typeof key !== 'string') continue\n const handle = this.dictionary(dictName)\n const label = await handle.resolveLabel(key, locale, localeOpts.fallback)\n if (label !== undefined) {\n withLabels[`${field}Label`] = label\n }\n }\n result = withLabels\n }\n\n return result\n }\n\n /**\n * Open a dictionary by name. Returns a `DictionaryHandle` for CRUD\n * operations on the `_dict_<name>/` reserved collection.\n *\n * The handle is cached — multiple calls with the same name return the\n * same instance.\n *\n * @param name The dictionary name (e.g. `'status'` → `_dict_status/`).\n * @param options Optional ACL overrides (default `writableBy: 'admin'`).\n *\n * @example\n * ```ts\n * await company.dictionary('status').putAll({\n * draft: { en: 'Draft', th: 'ฉบับร่าง' },\n * paid: { en: 'Paid', th: 'ชำระแล้ว' },\n * })\n * ```\n */\n dictionary<Keys extends string = string>(\n name: string,\n options: DictionaryOptions = {},\n ): DictionaryHandle<Keys> {\n let handle = this.dictionaryCache.get(name)\n if (!handle) {\n handle = this.i18nStrategy.buildDictionaryHandle<Keys>({\n adapter: this.adapter,\n compartmentName: this.name,\n dictionaryName: name,\n keyring: this.keyring,\n getDEK: this.getDEK,\n encrypted: this.encrypted,\n ledger: this.getLedgerOrNull() ?? undefined,\n options,\n // findAndUpdateReferences: rewrite dictKey fields in all\n // registered collections when rename() is called\n findAndUpdateReferences: async (dictionaryName, oldKey, newKey) => {\n for (const [collectionName, dictFields] of this.dictKeyFieldRegistry) {\n // Find fields that point at this dictionary\n const fields = Object.entries(dictFields)\n .filter(([, dn]) => dn === dictionaryName)\n .map(([field]) => field)\n if (fields.length === 0) continue\n\n const coll = this.collection<Record<string, unknown>>(collectionName)\n const records = await coll.list()\n for (const record of records) {\n let changed = false\n const updated = { ...record }\n for (const field of fields) {\n if (updated[field] === oldKey) {\n updated[field] = newKey\n changed = true\n }\n }\n if (changed) {\n const id = (record['id'] as string | undefined)\n if (id !== undefined) {\n await coll.put(id, updated)\n }\n }\n }\n }\n },\n emitter: this.emitter,\n })\n this.dictionaryCache.set(name, handle)\n }\n return handle as DictionaryHandle<Keys>\n }\n\n /**\n * Build a `JoinableSource` for a dictKey field, for use in dict joins.\n * Returns a source whose snapshot contains\n * `{ key, labels, ...labels }` records — one per dictionary entry —\n * keyed by the stable key.\n *\n * The snapshot is built synchronously from the DictionaryHandle's\n * write-through cache, which is populated on every `put()`, `rename()`,\n * `delete()`, and `list()` call. For pre-existing data not yet touched\n * this session, call `await vault.dictionary(name).list()` first\n * to warm the cache.\n *\n * Returns `null` when `field` is not a dictKey in `leftCollection`.\n * For empty dictionaries this returns an empty snapshot rather than\n * `null`.\n */\n resolveDictSource(leftCollection: string, field: string): JoinableSource | null {\n const dictFields = this.dictKeyFieldRegistry.get(leftCollection)\n if (!dictFields || !(field in dictFields)) return null\n const dictName = dictFields[field]\n if (!dictName) return null\n const handle = this.dictionary(dictName)\n return {\n snapshot(): readonly unknown[] {\n return handle.snapshotEntries()\n },\n lookupById(id: string): unknown {\n const entries = handle.snapshotEntries()\n return entries.find((e) => e['key'] === id)\n },\n }\n }\n\n /**\n * Set or update the vault-default locale at runtime.\n * Useful when the user switches their preferred language after opening\n * the vault.\n */\n setLocale(locale: string | undefined): void {\n this.locale = locale\n }\n\n /** Return the current vault-default locale. */\n getLocale(): string | undefined {\n return this.locale\n }\n\n /**\n * The user id of the keyring backing this vault session. Useful for\n * UI affordances (\"you are alice\"), audit trails, and orchestration\n * composables that need to stamp records with the current actor.\n */\n get userId(): string {\n return this.keyring.userId\n }\n\n /**\n * The role of the keyring backing this vault session — one of\n * `owner | admin | operator | viewer | client`. Useful for UI\n * affordance gates and approval workflows that need to confirm\n * the caller can perform a given action before attempting it.\n */\n get role(): Role {\n return this.keyring.role\n }\n\n /**\n * Build keyring files for bundle recipients without persisting them\n * to the source vault. Used by `writeNoydbBundle()` when the bundle\n * is re-keyed for distinct recipients.\n *\n * Each recipient becomes its own `KeyringFile` sealed with that\n * recipient's passphrase. The DEKs wrapped into each slot are\n * exactly those the recipient's role + permissions justify, and\n * never wider than the source keyring's own DEK set\n * (privilege-escalation check).\n *\n * Returns a `Record<userId, KeyringFile>` ready to substitute for\n * the `keyrings` field of a `vault.dump()` JSON. Adapter is never\n * touched; the produced files exist only in the bundle bytes.\n *\n * @public\n */\n async buildBundleRecipientKeyrings(\n recipients: readonly BundleRecipient[],\n ): Promise<Record<string, KeyringFile>> {\n const result: Record<string, KeyringFile> = {}\n for (const recipient of recipients) {\n if (recipient.id in result) {\n throw new Error(`buildBundleRecipientKeyrings: duplicate recipient id \"${recipient.id}\"`)\n }\n result[recipient.id] = await buildRecipientKeyringFile(this.keyring, recipient)\n }\n return result\n }\n\n /**\n * Authorize an `@noy-db/as-*` export against the current keyring's\n * `exportCapability`. 
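A call-site sketch (the format name\n * is illustrative):\n *\n * ```ts\n * // an as-* exporter gates BEFORE touching any plaintext\n * vault.assertCanExport('plaintext', 'xlsx')\n * ```\n *\n * 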
Throws `ExportCapabilityError` if\n * the invoking keyring is not authorised.\n *\n * `as-*` packages MUST call this before invoking the underlying\n * export primitive (`exportStream()` / `writeNoydbBundle()` / …).\n *\n * - `assertCanExport('plaintext', 'xlsx')` — check plaintext tier\n * for a specific format. Defaults to empty for every role; owner\n * must positively grant.\n * - `assertCanExport('bundle')` — check encrypted-bundle tier.\n * Defaults to on for owner/admin, off for others.\n *\n * See `docs/patterns/as-exports.md` for the full policy.\n */\n assertCanExport(tier: 'plaintext', format: ExportFormat): void\n assertCanExport(tier: 'bundle'): void\n assertCanExport(tier: 'plaintext' | 'bundle', format?: ExportFormat): void {\n if (tier === 'plaintext') {\n if (format === undefined) {\n throw new Error('vault.assertCanExport: plaintext tier requires a format')\n }\n if (!hasExportCapability(this.keyring, 'plaintext', format)) {\n throw new ExportCapabilityError({\n tier: 'plaintext',\n userId: this.keyring.userId,\n format,\n })\n }\n return\n }\n if (!hasExportCapability(this.keyring, 'bundle')) {\n throw new ExportCapabilityError({\n tier: 'bundle',\n userId: this.keyring.userId,\n })\n }\n }\n\n /**\n * Authorize an `@noy-db/as-*` import against the current keyring's\n * `importCapability` (issue ). Throws `ImportCapabilityError` if\n * the invoking keyring is not authorised.\n *\n * `as-*` reader entry-points (`fromString` / `fromBytes`) MUST call\n * this before parsing or building an `ImportPlan`.\n *\n * - `assertCanImport('plaintext', 'csv')` — check plaintext-tier\n * import for a specific format. Default-closed for every role.\n * - `assertCanImport('bundle')` — check `.noydb` bundle-import gate.\n * Default-closed for every role, including owner — import is more\n * dangerous than export (corrupts vs leaks).\n *\n * An owner who wants to import must re-grant their own keyring with\n * `importCapability` set explicitly.\n */\n assertCanImport(tier: 'plaintext', format: ExportFormat): void\n assertCanImport(tier: 'bundle'): void\n assertCanImport(tier: 'plaintext' | 'bundle', format?: ExportFormat): void {\n if (tier === 'plaintext') {\n if (format === undefined) {\n throw new Error('vault.assertCanImport: plaintext tier requires a format')\n }\n if (!hasImportCapability(this.keyring, 'plaintext', format)) {\n throw new ImportCapabilityError({\n tier: 'plaintext',\n userId: this.keyring.userId,\n format,\n })\n }\n return\n }\n if (!hasImportCapability(this.keyring, 'bundle')) {\n throw new ImportCapabilityError({\n tier: 'bundle',\n userId: this.keyring.userId,\n })\n }\n }\n\n /**\n * Evict blob slots per the per-collection `blobFields` retention\n * policy.\n *\n * Iterates every collection declared with `{ blobFields: {...} }`.\n * For each record, checks every configured slot against its\n * policy — `retainDays` (age-based TTL) and/or `evictWhen(record)`\n * (predicate) — and evicts matching slots. Every eviction writes\n * one entry to `_blob_eviction_audit` (actor + eTag + reason +\n * timestamp, no plaintext). Consumer-scheduled; noy-db never runs\n * this on its own.\n *\n * ```ts\n * await vault.compact() // run full pass\n * await vault.compact({ dryRun: true }) // preview counts\n * await vault.compact({ maxEvictions: 1000 }) // cap batch\n * ```\n */\n async compact(options: CompactRunOptions = {}): Promise<CompactionResult> {\n return runCompaction({\n adapter: this.adapter,\n vault: this.name,\n actor: this.keyring.userId,\n encrypted: this.encrypted,\n getDEK: this.getDEK,\n getBlobFields: <T>(name: string): BlobFieldsConfig<T> | null =>\n (this.blobFieldsRegistry.get(name) as BlobFieldsConfig<T> | undefined) ?? null,\n listCollections: () => this.collections(),\n listRecords: (name: string) => this.adapter.list(this.name, name),\n getRecord: async <T>(name: string, id: string) => {\n const coll = this.collection<T>(name)\n return coll.get(id)\n },\n listSlots: async (name: string, id: string) => {\n const coll = this.collection(name)\n return coll.blob(id).list()\n },\n deleteSlot: async (name: string, id: string, slotName: string) => {\n const coll = this.collection(name)\n await coll.blob(id).delete(slotName)\n },\n }, options)\n }\n\n /**\n * Bulk blob extraction primitive.\n *\n * Returns an async-iterable handle over every blob attached to\n * records in the vault. Single capability check (`plaintext/blob`)\n * at handle creation; single audit entry to `_export_audit` before\n * the first yield. Per-blob decryption happens lazily as the\n * consumer pulls tuples.\n *\n * ```ts\n * const handle = vault.exportBlobs({\n * collections: ['invoiceScans'],\n * where: (rec) => (rec as { clientId?: string }).clientId === 'c-123',\n * })\n * for await (const { bytes, meta, recordRef } of handle) {\n * await uploadToColdStorage(bytes, recordRef)\n * }\n * ```\n *\n * @see `@noy-db/hub/store/export-blobs` for the full option surface.\n */\n exportBlobs(options: ExportBlobsOptions = {}): ExportBlobsHandle {\n this.assertCanExport('plaintext', 'blob')\n return createExportBlobsHandle(\n this.keyring.userId,\n () => this.collections(),\n (name) => this.collection(name),\n (entry) => this.writeExportAudit(entry),\n options,\n )\n }\n\n private async writeExportAudit(entry: ExportBlobsAuditEntry): Promise<void> {\n const json = JSON.stringify(entry)\n const envelope: EncryptedEnvelope = this.encrypted\n ? await (async () => {\n const dek = await this.getDEK(EXPORT_AUDIT_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n return { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: entry.startedAt, _iv: iv, _data: data, _by: entry.actor }\n })()\n : { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: entry.startedAt, _iv: '', _data: json, _by: entry.actor }\n await this.adapter.put(this.name, EXPORT_AUDIT_COLLECTION, entry.id, envelope)\n }\n\n /**\n * Read-only accessor for the invoking keyring's export capability,\n * with role-based defaults resolved. 
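A minimal affordance sketch (format\n * name illustrative):\n *\n * ```ts\n * const canXlsx = vault.canExport('plaintext', 'xlsx') // never throws\n * const canBundle = vault.canExport('bundle')\n * ```\n *\n * 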
Useful for UI affordances\n * (grey out the export button if no capability) without throwing.\n */\n canExport(tier: 'plaintext', format: ExportFormat): boolean\n canExport(tier: 'bundle'): boolean\n canExport(tier: 'plaintext' | 'bundle', format?: ExportFormat): boolean {\n if (tier === 'plaintext') {\n if (format === undefined) return false\n return hasExportCapability(this.keyring, 'plaintext', format)\n }\n return hasExportCapability(this.keyring, 'bundle')\n }\n\n /**\n * Decrypt a single envelope using the per-collection DEK, returning\n * the parsed plaintext record. Internal helper for bundle-pipeline\n * plaintext filters — keeps DEK access encapsulated\n * inside Vault so callers don't reach into private state.\n *\n * @internal\n */\n async _decryptEnvelopeForBundleFilter(\n env: EncryptedEnvelope,\n collectionName: string,\n ): Promise<unknown> {\n if (!this.encrypted) {\n return JSON.parse(env._data)\n }\n const dek = await this.getDEK(collectionName)\n const json = await decrypt(env._iv, env._data, dek)\n return JSON.parse(json)\n }\n\n /**\n * Read-only accessor for the invoking keyring's import capability\n * (issue ). UI affordance — returns false in every default-closed\n * case (every role with no explicit `importCapability` grant).\n */\n canImport(tier: 'plaintext', format: ExportFormat): boolean\n canImport(tier: 'bundle'): boolean\n canImport(tier: 'plaintext' | 'bundle', format?: ExportFormat): boolean {\n if (tier === 'plaintext') {\n if (format === undefined) return false\n return hasImportCapability(this.keyring, 'plaintext', format)\n }\n return hasImportCapability(this.keyring, 'bundle')\n }\n\n /**\n * Enforce strict outbound refs on a `put()`. Called by Collection\n * just before it writes to the adapter. For every strict ref\n * declared on the collection, check that the target id exists in\n * the target collection; throw `RefIntegrityError` if not.\n *\n * `warn` and `cascade` modes don't affect put semantics — they're\n * enforced at delete time or via `checkIntegrity()`.\n */\n async enforceRefsOnPut(collectionName: string, record: unknown): Promise<void> {\n const outbound = this.refRegistry.getOutbound(collectionName)\n if (Object.keys(outbound).length === 0) return\n if (!record || typeof record !== 'object') return\n const obj = record as Record<string, unknown>\n\n for (const [field, descriptor] of Object.entries(outbound)) {\n if (descriptor.mode !== 'strict') continue\n const rawId = obj[field]\n // Nullish ref values are allowed — treat them as \"no reference\".\n // Users who want \"always required\" should express it in their\n // Standard Schema validator via a non-optional field.\n if (rawId === null || rawId === undefined) continue\n // Refs must be strings or numbers — anything else (object,\n // array, boolean) is a programming error and should fail\n // loudly rather than serialize as \"[object Object]\".\n if (typeof rawId !== 'string' && typeof rawId !== 'number') {\n throw new RefIntegrityError({\n collection: collectionName,\n id: (obj['id'] as string | undefined) ?? '<unknown>',\n field,\n refTo: descriptor.target,\n refId: null,\n message:\n `Ref field \"${collectionName}.${field}\" must be a string or number, got ${typeof rawId}.`,\n })\n }\n const refId = String(rawId)\n const target = this.collection<Record<string, unknown>>(descriptor.target)\n const exists = await target.get(refId)\n if (!exists) {\n throw new RefIntegrityError({\n collection: collectionName,\n id: (obj['id'] as string | undefined) ?? 
'<unknown>',\n field,\n refTo: descriptor.target,\n refId,\n message:\n `Strict ref \"${collectionName}.${field}\" → \"${descriptor.target}\" ` +\n `cannot be satisfied: target id \"${refId}\" not found in \"${descriptor.target}\".`,\n })\n }\n }\n }\n\n /**\n * Enforce inbound ref modes on a `delete()`. Called by Collection\n * just before it deletes from the adapter. Walks every inbound\n * ref that targets this (collection, id) and:\n *\n * - `strict`: throws if any referencing records exist\n * - `cascade`: deletes every referencing record\n * - `warn`: no-op (checkIntegrity picks it up)\n *\n * Cascade cycles are broken via `cascadeInProgress` — re-entering\n * for the same (collection, id) returns immediately so two\n * mutually-cascading collections don't recurse forever.\n */\n async enforceRefsOnDelete(collectionName: string, id: string): Promise<void> {\n const key = `${collectionName}/${id}`\n if (this.cascadeInProgress.has(key)) return\n this.cascadeInProgress.add(key)\n\n try {\n const inbound = this.refRegistry.getInbound(collectionName)\n for (const rule of inbound) {\n const fromCollection = this.collection<Record<string, unknown>>(rule.collection)\n // Scan the referencing collection for records whose ref\n // field matches this id. For eager-mode collections this\n // is an in-memory filter; for lazy-mode it requires a scan.\n const allRecords = await fromCollection.list()\n const matches = allRecords.filter((rec) => {\n const raw = rec[rule.field]\n // Same string/number-only restriction as enforceRefsOnPut.\n // Anything else can't have been a valid ref to begin with,\n // so it can't match.\n if (typeof raw !== 'string' && typeof raw !== 'number') return false\n return String(raw) === id\n })\n if (matches.length === 0) continue\n\n if (rule.mode === 'strict') {\n const first = matches[0]\n throw new RefIntegrityError({\n collection: rule.collection,\n id: (first?.['id'] as string | undefined) ?? '<unknown>',\n field: rule.field,\n refTo: collectionName,\n refId: id,\n message:\n `Cannot delete \"${collectionName}\"/\"${id}\": ` +\n `${matches.length} record(s) in \"${rule.collection}\" still reference it via strict ref \"${rule.field}\".`,\n })\n }\n if (rule.mode === 'cascade') {\n for (const match of matches) {\n const matchId = (match['id'] as string | undefined) ?? null\n if (matchId === null) continue\n // Recursive delete — the cycle breaker above catches\n // infinite loops.\n await fromCollection.delete(matchId)\n }\n }\n // warn: no-op\n }\n } finally {\n this.cascadeInProgress.delete(key)\n }\n }\n\n // ─── Join resolver ────────────────────\n\n /**\n * Look up the `RefDescriptor` the left collection declared for a\n * given field name. Returns `null` when the field has no ref\n * declaration — the Query builder turns that into an actionable\n * error at plan time (before any records are touched).\n *\n * Implements the `joinResolver.resolveRef` half of the structural\n * interface that `Collection.query()` consumes. See\n * `query/join.ts` for the full design.\n */\n resolveRef(leftCollection: string, field: string): RefDescriptor | null {\n const outbound = this.refRegistry.getOutbound(leftCollection)\n return outbound[field] ?? null\n }\n\n /**\n * Resolve a right-side join source by target collection name.\n * Returns `null` for unknown collections so the Query executor can\n * surface an actionable error naming the missing target.\n *\n * Implements the `joinResolver.resolveSource` half of the\n * structural interface. The returned JoinableSource is a thin\n * wrapper that reads the target collection's in-memory cache via\n * `list()` / `get()` synchronously — the cache is populated by an\n * earlier `ensureHydrated()` call through the target's query/list\n * path. If the target has not been opened yet in this session the\n * join will see an empty snapshot; consumers who hit this can\n * open the target collection explicitly before running the query;\n * see the sketch below.
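\n *\n * A minimal warm-up sketch (`clients` is an illustrative target):\n *\n * ```ts\n * // hydrate the join target once so its snapshot is populated\n * await vault.collection('clients').list()\n * ```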
\n *\n * Only same-vault targets are resolvable — cross-vault\n * joins are explicitly forbidden by the architecture.\n */\n resolveSource(collectionName: string): JoinableSource | null {\n // Reject internal / reserved collection names — joins against\n // `_ledger/`, `_keyring/`, `_deltas/`, etc. are never legitimate.\n if (collectionName.startsWith('_')) return null\n const coll = this.collectionCache.get(collectionName)\n if (!coll) return null\n // Collection exposes a structural `querySourceForJoin()` method\n // that returns a lightweight snapshot/lookupById view backed by\n // its in-memory cache. Typed as unknown here because\n // Collection<T> is covariant on T — the join executor only\n // reads fields by name and doesn't care about the concrete type.\n return (coll as unknown as {\n querySourceForJoin(): JoinableSource\n }).querySourceForJoin()\n }\n\n /**\n * Walk every collection that has declared refs, load its records,\n * and report any reference whose target id is missing. Modes are\n * reported alongside each violation so the caller can distinguish\n * \"this is a warning the user asked for\" from \"this should never\n * have happened\" (strict violations produced by out-of-band\n * writes).\n *\n * Returns `{ violations: [...] }` instead of throwing — the whole\n * point of `checkIntegrity()` is to surface a list for display\n * or repair, not to fail noisily.\n */\n async checkIntegrity(): Promise<{ violations: RefViolation[] }> {\n const violations: RefViolation[] = []\n for (const [collectionName, refs] of this.refRegistry.entries()) {\n const coll = this.collection<Record<string, unknown>>(collectionName)\n const records = await coll.list()\n for (const record of records) {\n const recId = (record['id'] as string | undefined) ?? '<unknown>'\n for (const [field, descriptor] of Object.entries(refs)) {\n const rawId = record[field]\n if (rawId === null || rawId === undefined) continue\n // Non-scalar ref values are flagged as a violation rather\n // than thrown — `checkIntegrity` is a \"report what's wrong\"\n // tool, not a \"block on first failure\" tool. The thrown\n // version lives in `enforceRefsOnPut`.\n if (typeof rawId !== 'string' && typeof rawId !== 'number') {\n violations.push({\n collection: collectionName,\n id: recId,\n field,\n refTo: descriptor.target,\n refId: rawId,\n mode: descriptor.mode,\n })\n continue\n }\n const refId = String(rawId)\n const target = this.collection<Record<string, unknown>>(descriptor.target)\n const exists = await target.get(refId)\n if (!exists) {\n violations.push({\n collection: collectionName,\n id: recId,\n field,\n refTo: descriptor.target,\n refId: rawId,\n mode: descriptor.mode,\n })\n }\n }\n }\n }\n return { violations }\n }\n\n /**\n * Return this compartment's hash-chained audit log.\n *\n * The ledger is lazy-initialized on first access and cached for the\n * lifetime of the Vault instance. 
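A verification sketch:\n *\n * ```ts\n * const ledger = vault.ledger()\n * await ledger.verify() // walk the hash chain end-to-end\n * const head = await ledger.head() // current chain head\n * ```\n *\n * 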
Every LedgerStore instance\n * shares the same adapter and DEK resolver, so `vault.ledger()`\n * can be called repeatedly without performance cost.\n *\n * The LedgerStore itself is the public API: consumers call\n * `.append()` (via Collection internals), `.head()`, `.verify()`,\n * and `.entries({ from, to })`. See the LedgerStore docstring for\n * the full surface and the concurrency caveats.\n */\n ledger(): LedgerStore {\n const store = this.getLedgerOrNull()\n if (!store) {\n throw new Error(\n 'vault.ledger() requires the history strategy. Import ' +\n '`{ withHistory }` from \"@noy-db/hub/history\" and pass it to ' +\n '`createNoydb({ historyStrategy: withHistory() })`.',\n )\n }\n return store\n }\n\n /**\n * Internal accessor — returns the LedgerStore if the history\n * strategy is opted in, or `null` otherwise. Used by dump/restore/\n * verifyBackupIntegrity and by Collection write paths that already\n * gate on `if (this.ledger)`. The public `ledger()` accessor above\n * throws on null; this one stays silent so the off-path no-ops.\n */\n private getLedgerOrNull(): LedgerStore | null {\n if (!this.ledgerStore) {\n this.ledgerStore = this.historyStrategy.buildLedger({\n adapter: this.adapter,\n vault: this.name,\n encrypted: this.encrypted,\n getDEK: this.getDEK,\n actor: this.keyring.userId,\n })\n }\n return this.ledgerStore\n }\n\n /**\n * Return a read-only view of this vault as it existed at\n * `timestamp`. Time-machine queries are reconstructed from the\n * per-version history snapshots persisted by every `put()`, then\n * cross-checked against the ledger for deletes that happened\n * between the snapshot and the target timestamp.\n *\n * ```ts\n * const q1End = vault.at('2026-03-31T23:59:59Z')\n * const invoice = await q1End.collection<Invoice>('invoices').get('inv-001')\n * // → the record as it stood at the close of Q1 2026\n * ```\n *\n * `timestamp` accepts an ISO-8601 string or a `Date`. Time-machine\n * views are read-only — writes throw {@link ReadOnlyAtInstantError}.\n * Accuracy bounded by history retention: if `historyConfig.maxVersions`\n * pruned earlier versions, queries before the oldest retained\n * snapshot return null even for records that existed.\n */\n at(timestamp: string | Date): VaultInstant {\n const iso = timestamp instanceof Date ? timestamp.toISOString() : timestamp\n return this.historyStrategy.buildVaultInstant(\n {\n adapter: this.adapter,\n name: this.name,\n encrypted: this.encrypted,\n getDEK: this.getDEK,\n getLedger: () => (this.historyConfig.enabled === false ? null : this.getLedgerOrNull()),\n },\n iso,\n )\n }\n\n /**\n * Return a read-only \"shadow\" view of this vault. Every read method\n * on the returned {@link VaultFrame} delegates to the underlying\n * live collection; every write method throws\n * {@link ReadOnlyFrameError}.\n *\n * ```ts\n * const presentation = vault.frame()\n * const invoices = await presentation.collection<Invoice>('invoices').list()\n * ```\n *\n * Use for screen-sharing a live vault, demo mode, or compliance\n * review where the reviewer should not be able to edit. Writes are\n * blocked at the JavaScript layer — the keyring DEKs are unchanged,\n * so this is **not** a cryptographic security boundary against a\n * hostile caller in the same process. See {@link VaultFrame} for\n * the full caveat.\n */\n frame(): VaultFrame {\n return this.shadowStrategy.buildFrame(this)\n }\n\n /**\n * Run `fn` under a consent scope. 
Every `get` / `put` / `delete`\n * that happens inside `fn` writes one entry to `_consent_audit`\n * with the supplied `purpose` and `consentHash`. Outside a scope,\n * no entries are written — consent logging is opt-in by design.\n *\n * ```ts\n * await vault.withConsent(\n * { purpose: 'quarterly-review', consentHash: '7f3a...' },\n * async () => {\n * const invoices = await vault.collection<Invoice>('invoices').list()\n * return invoices\n * },\n * )\n * ```\n *\n * The scope is a single slot on this Vault instance — two\n * concurrent `withConsent` calls stomp each other. Use separate\n * Vault instances (or an external `AsyncLocalStorage` shim) for\n * per-flight scoping.\n */\n async withConsent<T>(ctx: ConsentContext, fn: () => Promise<T>): Promise<T> {\n const prior = this.consentContext\n this.consentContext = ctx\n try {\n return await fn()\n } finally {\n this.consentContext = prior\n }\n }\n\n /**\n * Query the consent-audit log. Returns every entry matching the\n * filter; newest-first ordering isn't enforced — entries carry ULID\n * ids, so sorting by id is insertion-order stable. Callers may sort\n * further.\n */\n async consentAudit(filter: ConsentAuditFilter = {}): Promise<ConsentAuditEntry[]> {\n return this.consentStrategy.read(this.adapter, this.name, this.encrypted, this.getDEK, filter)\n }\n\n /**\n * Called by Collection after every access when a consent scope is\n * active. Internal — not part of the public API.\n *\n * @internal\n */\n async _logConsent(op: ConsentOp, collection: string, recordId: string): Promise<void> {\n const ctx = this.consentContext\n if (!ctx) return\n await this.consentStrategy.write(\n this.adapter,\n this.name,\n this.encrypted,\n {\n actor: this.keyring.userId,\n purpose: ctx.purpose,\n consentHash: ctx.consentHash,\n op,\n collection,\n recordId,\n },\n this.getDEK,\n )\n }\n\n // ─── Hierarchical access ─────────────────────────\n\n /**\n * Subscribe to cross-tier access events. The callback fires every\n * time a record at a tier above the caller's inherent clearance is\n * read, written, elevated, or demoted successfully via this vault.\n * Returns an unsubscribe function.\n */\n onCrossTierAccess(\n listener: (event: CrossTierAccessEvent) => void,\n ): () => void {\n this.crossTierSubs.add(listener)\n return () => this.crossTierSubs.delete(listener)\n }\n\n private emitCrossTier(event: CrossTierAccessEvent): void {\n for (const sub of this.crossTierSubs) {\n try {\n sub(event)\n } catch {\n // subscriber failures are swallowed — audit sinks must be best-effort\n }\n }\n }\n\n /**\n * issue a time-boxed cross-tier delegation. Writes an\n * encrypted envelope to the reserved `_delegations` collection that\n * the target user's runtime will pick up next time they open the\n * vault.\n *\n * Caller must hold the tier DEK for the requested tier and\n * collection.\n */\n async delegate(opts: IssueDelegationOptions): Promise<DelegationToken> {\n const { issueDelegation, DELEGATIONS_COLLECTION } = await import('./team/delegation.js')\n // The target user's KEK is derived from THEIR keyring — we read\n // the keyring file to pick up the wrapped DEKs and their KEK salt,\n // but we cannot derive their KEK from our side (we don't have\n // their passphrase). 
For now, the delegation wraps against the\n // grantor's own KEK as a simpler first cut; swapping to a proper\n // per-target KEK exchange (via `on-magic-link` or OIDC) is a\n // follow-up tracked in the design doc.\n const targetKek = this.keyring.kek\n const delegationsDek = await this.getDEK(DELEGATIONS_COLLECTION)\n return issueDelegation(\n this.adapter,\n this.name,\n this.keyring,\n targetKek,\n delegationsDek,\n opts,\n )\n }\n\n /**\n * revoke an issued delegation by id. Safe to call even\n * if the id does not exist.\n */\n async revokeDelegation(id: string): Promise<void> {\n const { revokeDelegation, DELEGATIONS_COLLECTION } = await import('./team/delegation.js')\n await revokeDelegation(this.adapter, this.name, id)\n // DELEGATIONS_COLLECTION is unused on this path; void it so the\n // destructured import passes the unused-variable lint.\n void DELEGATIONS_COLLECTION\n }\n\n // ─── Scoped tier elevation ───────────────────────────\n\n /**\n * Briefly elevate this vault to a higher tier and return a scoped\n * handle whose writes land at that tier. Reads on the original\n * vault continue at the caller's inherent tier; only the returned\n * handle is privileged. Auto-reverts when `release()` is called or\n * `ttlMs` elapses, whichever comes first.\n *\n * Capability semantics:\n * - The keyring must already carry a wrap for the target tier on\n * at least one collection (or be `owner` / `admin`, who can\n * auto-mint). Otherwise throws {@link TierNotGrantedError}.\n * - Per-collection capability gates (`canExportPlaintext`,\n * `canExportBundle`) are NOT bypassed — elevation is a tier\n * projection, not a privilege escalation path.\n * - Only one elevation can be active per vault at a time.\n * Calling `elevate(...)` while another is live throws\n * {@link AlreadyElevatedError}.\n *\n * Audit:\n * - One `_elevation_audit` envelope is written at start with\n * `{ id, actor, tier, reason, ttlMs, startedAt, expiresAt }`.\n * - Each write through the elevated handle additionally fires a\n * {@link CrossTierAccessEvent} with `authorization: 'elevation'`,\n * stamped with `reason` and `elevatedFrom`.\n */\n async elevate(\n tier: number,\n options: { ttlMs: number; reason: string },\n ): Promise<ElevatedHandle> {\n if (!Number.isInteger(tier) || tier <= 0) {\n throw new ValidationError(`elevate: tier must be a positive integer, got ${tier}`)\n }\n if (!options || typeof options.reason !== 'string' || options.reason.length === 0) {\n throw new ValidationError('elevate: reason is required (non-empty string)')\n }\n if (typeof options.ttlMs !== 'number' || options.ttlMs <= 0) {\n throw new ValidationError('elevate: ttlMs must be a positive number')\n }\n if (this.activeElevation) {\n throw new AlreadyElevatedError(this.activeElevation.tier)\n }\n // Construction-time tier-reach check: scan keyring for any\n // `*#${tier}` DEK. 
Owners and admins skip — they auto-mint at\n // write time per the existing `assertTierAccess` rules.\n if (this.keyring.role !== 'owner' && this.keyring.role !== 'admin') {\n const suffix = `#${tier}`\n let found = false\n for (const k of this.keyring.deks.keys()) {\n if (k.endsWith(suffix)) { found = true; break }\n }\n if (!found) {\n // Match the existing error class so adopters with one catch()\n // for tier-related failures don't need a second branch.\n throw new TierNotGrantedError('(any collection)', tier)\n }\n }\n\n const startedAt = new Date()\n const expiresAt = startedAt.getTime() + options.ttlMs\n const reason = options.reason\n\n const handle = new ElevatedHandle({\n vault: this,\n tier,\n reason,\n expiresAt,\n onRelease: () => {\n if (this.activeElevation && this.activeElevation.handle === handle) {\n this.activeElevation = null\n }\n },\n })\n\n this.activeElevation = { tier, expiresAt, reason, handle }\n await this.writeElevationAudit({\n actor: this.keyring.userId,\n tier,\n reason,\n ttlMs: options.ttlMs,\n startedAt: startedAt.toISOString(),\n expiresAt: new Date(expiresAt).toISOString(),\n })\n return handle\n }\n\n /**\n * Internal — invoked by an `ElevatedHandle.collection().put()` call.\n * Routes through the existing `Collection.putAtTier` code path with\n * the elevation context attached so the cross-tier event reflects\n * the right authorization class.\n */\n async _elevatedPut<T>(\n collectionName: string,\n id: string,\n record: T,\n tier: number,\n reason: string,\n ): Promise<void> {\n const coll = this.collection<T>(collectionName)\n await coll.putAtTier(id, record, tier, {\n elevation: { reason, fromTier: 0 },\n })\n }\n\n private async writeElevationAudit(entry: {\n actor: string\n tier: number\n reason: string\n ttlMs: number\n startedAt: string\n expiresAt: string\n }): Promise<void> {\n const id = `elev-${Date.now().toString(36)}-${Math.random().toString(16).slice(2, 10)}`\n const json = JSON.stringify({ id, ...entry })\n const envelope: EncryptedEnvelope = this.encrypted\n ? await (async () => {\n const dek = await this.getDEK(ELEVATION_AUDIT_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: entry.startedAt,\n _iv: iv,\n _data: data,\n _by: entry.actor,\n }\n })()\n : {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: entry.startedAt,\n _iv: '',\n _data: json,\n _by: entry.actor,\n }\n await this.adapter.put(this.name, ELEVATION_AUDIT_COLLECTION, id, envelope)\n }\n\n /**\n * low-level escape hatch used by `@noy-db/on-magic-link`\n * to persist a magic-link-bound grant after the auth package has\n * derived the content key + KEK from `(serverSecret, token, vault)`.\n *\n * Callers outside of `@noy-db/on-magic-link` should use\n * `issueMagicLinkDelegation()` from that package instead — it handles\n * the HKDF derivation, record-id composition, and batch logic so the\n * grantor doesn't touch this method directly.\n */\n async writeMagicLinkGrant(\n contentKey: CryptoKey,\n grantKek: CryptoKey,\n recordId: string,\n opts: IssueMagicLinkGrantOptions,\n ): Promise<MagicLinkGrantRecord> {\n return writeMagicLinkGrant(\n this.adapter,\n this.name,\n this.keyring,\n contentKey,\n grantKek,\n recordId,\n opts,\n )\n }\n\n // ─── Accounting periods ────────────────────────\n\n /**\n * Close an accounting period. 
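A minimal quarter-close sketch (period\n * name and dates illustrative):\n *\n * ```ts\n * await vault.closePeriod({ name: '2026-Q1', endDate: '2026-03-31T23:59:59Z' })\n * ```\n *\n * 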
After this call every record whose\n * envelope `_ts` is at or before `endDate` is write-locked: further\n * `put` or `delete` calls against such records throw\n * {@link PeriodClosedError}. New records (with fresh timestamps)\n * remain freely writable, and records last written AFTER `endDate`\n * are unaffected.\n *\n * Each closure writes a `PeriodRecord` to the reserved `_periods`\n * collection. The record carries the hash of the prior period's\n * record, so a tamper with any closure breaks the chain visible to\n * {@link listPeriods} + `vault.ledger().verify()`.\n *\n * Correctness is tied to the `_ts` field the hub assigns on every\n * write. Backdating records by editing the envelope directly is\n * outside the threat model — see SPEC § zero-knowledge envelopes.\n */\n async closePeriod(options: ClosePeriodOptions): Promise<PeriodRecord> {\n const existing = await this._loadPeriodsCache()\n this.periodsStrategy.validatePeriodName(options.name, existing)\n if (typeof options.endDate !== 'string' || options.endDate.length === 0) {\n throw new ValidationError('closePeriod: endDate must be a non-empty ISO string.')\n }\n const anchor = await this.periodsStrategy.chainAnchor(existing)\n const record: PeriodRecord = {\n name: options.name,\n kind: 'closed',\n endDate: options.endDate,\n closedAt: new Date().toISOString(),\n closedBy: this.keyring.userId,\n priorPeriodHash: anchor.priorPeriodHash,\n ...(anchor.priorPeriodName !== undefined && { priorPeriodName: anchor.priorPeriodName }),\n ...(options.dateField !== undefined && { dateField: options.dateField }),\n }\n const envelope = await this._writePeriodRecord(record)\n await this.periodsStrategy.appendPeriodLedgerEntry(this.getLedgerOrNull(), this.keyring.userId, envelope, record.name)\n existing.push(record)\n this.periodCache = existing\n return record\n }\n\n /**\n * Open a new period that carries forward from a prior closed one.\n * The `carryForward` callback receives a read-only\n * {@link VaultInstant} view anchored at the prior period's\n * `endDate` — use it to compute opening balances, closing-trial\n * snapshots, or any aggregate the new period should inherit. The\n * returned `{ [collection]: { [id]: record } }` map is written\n * before the new `PeriodRecord` lands, so the opening entries\n * materialise with fresh `_ts` values that fall outside every\n * closed period (the guard lets them through).\n *\n * The new period is stored with `kind: 'opened'` and hash-chained\n * to the same chain the close calls build — `listPeriods()` sees\n * both closed and opened entries in `closedAt` order.\n */\n async openPeriod<TCollections extends Record<string, Record<string, unknown>>>(\n options: OpenPeriodOptions<TCollections>,\n ): Promise<PeriodRecord> {\n const existing = await this._loadPeriodsCache()\n this.periodsStrategy.validatePeriodName(options.name, existing)\n const prior = existing.find((p) => p.name === options.fromPeriod)\n if (!prior) {\n throw new ValidationError(\n `openPeriod: fromPeriod \"${options.fromPeriod}\" does not exist in this vault.`,\n )\n }\n if (prior.kind !== 'closed') {\n throw new ValidationError(\n `openPeriod: fromPeriod \"${options.fromPeriod}\" is of kind \"${prior.kind}\" — only closed periods can be carried forward.`,\n )\n }\n\n // Build a read-only facade over CURRENT state + the prior\n // period's endDate; after close, records dated <= endDate are\n // frozen so current state equals closing state. 
The caller\n // filters by business date via their own query against this\n // facade.\n const ctx = {\n priorEndDate: prior.endDate,\n collection: <T = unknown>(name: string) => {\n const c = this.collection<T>(name)\n return {\n get: (id: string) => c.get(id),\n list: () => c.list(),\n }\n },\n }\n const openings = await options.carryForward(ctx)\n\n // Write opening entries via the normal Collection path so they\n // get encryption, ledger entries, and change events. Each record\n // is timestamped NOW (outside every closed period) — that's why\n // the guard permits them.\n const openingCollections: string[] = []\n for (const [collName, records] of Object.entries(openings)) {\n if (!records || typeof records !== 'object') continue\n const recordEntries = Object.entries(records)\n if (recordEntries.length === 0) continue\n const coll = this.collection(collName)\n for (const [id, record] of recordEntries) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n await coll.put(id, record as any)\n }\n openingCollections.push(collName)\n }\n\n const anchor = await this.periodsStrategy.chainAnchor(existing)\n const record: PeriodRecord = {\n name: options.name,\n kind: 'opened',\n startDate: options.startDate,\n endDate: prior.endDate, // sealing boundary inherited from prior close\n closedAt: new Date().toISOString(),\n closedBy: this.keyring.userId,\n priorPeriodHash: anchor.priorPeriodHash,\n priorPeriodName: anchor.priorPeriodName ?? prior.name,\n ...(openingCollections.length > 0 && { openingCollections }),\n }\n const envelope = await this._writePeriodRecord(record)\n await this.periodsStrategy.appendPeriodLedgerEntry(this.getLedgerOrNull(), this.keyring.userId, envelope, record.name)\n existing.push(record)\n this.periodCache = existing\n return record\n }\n\n /** Return every closed / opened period in `closedAt` order. */\n async listPeriods(): Promise<readonly PeriodRecord[]> {\n return [...(await this._loadPeriodsCache())]\n }\n\n /** Look up a single period by name. Returns `null` if not found. */\n async getPeriod(name: string): Promise<PeriodRecord | null> {\n const all = await this._loadPeriodsCache()\n return all.find((p) => p.name === name) ?? null\n }\n\n /** @internal — periodGuard callback installed on every Collection. 
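A carry-forward sketch against the `ctx` facade built above (`priorEndDate` plus read-only `collection(name).get/list`). The `Account` type and the collection names are illustrative.

```ts
// Illustrative types and names; the ctx surface is the facade built above.
interface Account { balance: number }

await vault.openPeriod({
  name: '2026-Q1',
  fromPeriod: '2025-Q4', // must exist and be kind: 'closed'
  startDate: '2026-01-01T00:00:00.000Z',
  carryForward: async (ctx) => {
    const closing = await ctx.collection<Account>('accounts').get('acct-cash')
    // The returned map is written through the normal put() path, so the
    // opening entries get fresh _ts values outside every closed period.
    return closing
      ? { openingBalances: { 'acct-cash': { balance: closing.balance } } }
      : {}
  },
})
```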
*/\n async _assertTsWritable(\n existing: { ts: string | null; record: Record<string, unknown> | null } | null,\n incoming: Record<string, unknown> | null,\n ): Promise<void> {\n // Fast path: nothing to check, and no periods ever touched this\n // vault — avoid a full adapter scan for every put.\n if (existing === null && incoming === null) return\n if (this.periodCache === null) {\n this.periodCache = await this.periodsStrategy.loadPeriods(\n this.adapter,\n this.name,\n (env) => this._decryptPeriodRecord(env),\n )\n }\n if (this.periodCache.length === 0) return\n this.periodsStrategy.assertTsWritable(existing, incoming, this.periodCache)\n }\n\n private async _loadPeriodsCache(): Promise<PeriodRecord[]> {\n if (this.periodCache !== null) return this.periodCache\n const loaded = await this.periodsStrategy.loadPeriods(\n this.adapter,\n this.name,\n (env: EncryptedEnvelope) => this._decryptPeriodRecord(env),\n )\n this.periodCache = loaded\n return loaded\n }\n\n private async _writePeriodRecord(record: PeriodRecord): Promise<EncryptedEnvelope> {\n const json = JSON.stringify(record)\n let envelope: EncryptedEnvelope\n if (this.encrypted) {\n const dek = await this.getDEK(PERIODS_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: iv,\n _data: data,\n _by: this.keyring.userId,\n }\n } else {\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: '',\n _data: json,\n _by: this.keyring.userId,\n }\n }\n await this.adapter.put(this.name, PERIODS_COLLECTION, record.name, envelope)\n return envelope\n }\n\n private async _decryptPeriodRecord(envelope: EncryptedEnvelope): Promise<PeriodRecord> {\n let json: string\n if (this.encrypted) {\n const dek = await this.getDEK(PERIODS_COLLECTION)\n json = await decrypt(envelope._iv, envelope._data, dek)\n } else {\n json = envelope._data\n }\n return JSON.parse(json) as PeriodRecord\n }\n\n /** List all collection names in this vault. */\n async collections(): Promise<string[]> {\n const snapshot = await this.adapter.loadAll(this.name)\n return Object.keys(snapshot)\n }\n\n /**\n * Return the stable opaque bundle handle for this vault,\n * generating and persisting a fresh ULID on first call.\n *\n * used by `writeNoydbBundle()` to identify the\n * vault in the unencrypted bundle header without\n * exposing the vault name. The handle is persisted in\n * the reserved `_meta` internal collection so subsequent\n * exports of the same vault produce the same handle —\n * bundle adapters (Drive, Dropbox, iCloud) will use it\n * as their primary key.\n *\n * **Storage path:** the handle is written via the adapter\n * directly with collection name `_meta` and id `handle`. The\n * envelope's `_data` field contains a plain JSON\n * `{ handle: '...' }` payload — the handle is opaque, doesn't\n * need encryption, and the bundle header exposes the same\n * value anyway. This mirrors the storage approach `_keyring`\n * uses for its plain-JSON wrapped-DEK envelopes (also bypasses\n * the AES-GCM layer; the `_iv` field is left empty).\n *\n * **Cross-process stability:** the handle survives process\n * restarts because it's persisted on the adapter, not just\n * cached in memory. A new Vault instance opened on the\n * same adapter sees the same `_meta/handle` envelope and\n * returns the same ULID.\n *\n * **Round-trip after restore:** the receiving vault of a\n * `load()` call generates its OWN handle on first export. 
The\n * dump body does not include `_meta`, because handle stability\n * is per-vault-instance, not per-vault-content. Two\n * separate restorations of the same backup produce two\n * distinct handles, which is the right behavior — they're\n * separate vault instances now.\n */\n async getBundleHandle(): Promise<string> {\n const existing = await this.adapter.get(this.name, '_meta', 'handle')\n if (existing) {\n try {\n const parsed = JSON.parse(existing._data) as unknown\n if (parsed !== null && typeof parsed === 'object' && 'handle' in parsed) {\n const handle = (parsed as { handle: unknown }).handle\n if (typeof handle === 'string' && /^[0-9A-HJKMNP-TV-Z]{26}$/.test(handle)) {\n return handle\n }\n }\n } catch {\n // Fall through to regenerate — corrupted handle envelope\n // is treated as missing, not as an error. The new handle\n // overwrites the bad one.\n }\n }\n // Lazy import to avoid a top-of-file circular dependency:\n // bundle/bundle.ts imports from vault.ts (the\n // Vault type), and vault.ts can't statically\n // import from bundle/* without forming a cycle. The dynamic\n // import is invoked once per fresh handle generation, which\n // is rare enough that the cost doesn't matter.\n const { generateULID } = await import('./bundle/ulid.js')\n const handle = generateULID()\n const envelope: EncryptedEnvelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: '',\n _data: JSON.stringify({ handle }),\n }\n await this.adapter.put(this.name, '_meta', 'handle', envelope)\n return handle\n }\n\n /**\n * Dump vault as a verifiable encrypted JSON backup string.\n *\n * backups embed the current ledger head and the full\n * `_ledger` + `_ledger_deltas` internal collections so the\n * receiver can run `verifyBackupIntegrity()` after `load()` and\n * detect any tampering between dump and restore. Backups produced\n * without a ledger (older formats or hub instances built without\n * the history strategy) skip the integrity check with a warning —\n * both modes round-trip cleanly.\n */\n async dump(): Promise<string> {\n const snapshot = await this.adapter.loadAll(this.name)\n\n // Load keyrings (separate path because loadAll filters them out\n // along with all other underscore-prefixed internal collections).\n const keyringIds = await this.adapter.list(this.name, '_keyring')\n const keyrings: Record<string, unknown> = {}\n for (const keyringId of keyringIds) {\n const envelope = await this.adapter.get(this.name, '_keyring', keyringId)\n if (envelope) {\n keyrings[keyringId] = JSON.parse(envelope._data)\n }\n }\n\n // Load the ledger entries + deltas so the receiver can replay\n // the chain after restore. Without this, `load()` would have an\n // empty ledger and `verifyBackupIntegrity()` would have nothing\n // to compare against.\n const internalSnapshot: VaultSnapshot = {}\n for (const internalName of [LEDGER_COLLECTION, LEDGER_DELTAS_COLLECTION]) {\n const ids = await this.adapter.list(this.name, internalName)\n if (ids.length === 0) continue\n const records: Record<string, EncryptedEnvelope> = {}\n for (const id of ids) {\n const envelope = await this.adapter.get(this.name, internalName, id)\n if (envelope) records[id] = envelope\n }\n internalSnapshot[internalName] = records\n }\n\n // Embed the ledger head if there's a chain. An empty ledger\n // (fresh vault) leaves `ledgerHead` undefined, which\n // load() treats the same as a legacy backup (no integrity\n // check, console warning). 
If history is not opted in,\n // `getLedgerOrNull` returns null and we skip embedding entirely\n // — the backup is still valid, just without the integrity head.\n const ledgerForHead = this.getLedgerOrNull()\n const head = ledgerForHead ? await ledgerForHead.head() : null\n const backup: VaultBackup = {\n _noydb_backup: NOYDB_BACKUP_VERSION,\n _compartment: this.name,\n _exported_at: new Date().toISOString(),\n _exported_by: this.keyring.userId,\n keyrings: keyrings as VaultBackup['keyrings'],\n collections: snapshot,\n ...(Object.keys(internalSnapshot).length > 0\n ? { _internal: internalSnapshot }\n : {}),\n ...(head\n ? {\n ledgerHead: {\n hash: head.hash,\n index: head.entry.index,\n ts: head.entry.ts,\n },\n }\n : {}),\n }\n\n return JSON.stringify(backup)\n }\n\n /**\n * Restore a vault from a verifiable backup.\n *\n * After loading, runs `verifyBackupIntegrity()` to confirm:\n * 1. The hash chain is intact (no `prevHash` mismatches)\n * 2. The chain head matches the embedded `ledgerHead.hash`\n * from the backup\n * 3. Every data envelope's `payloadHash` matches the\n * corresponding ledger entry — i.e. nobody swapped\n * ciphertext between dump and restore\n *\n * On any failure, throws `BackupLedgerError` (chain or head\n * mismatch) or `BackupCorruptedError` (data envelope mismatch).\n * The vault state on the adapter has already been written\n * by the time we throw, so the caller is responsible for either\n * accepting the suspect state or wiping it and trying a different\n * backup.\n *\n * Legacy backups (no `ledgerHead` field, no `_internal`) load\n * with a console warning and skip the integrity check entirely\n * — there's no chain to verify against.\n */\n async load(backupJson: string): Promise<void> {\n const backup = JSON.parse(backupJson) as VaultBackup\n\n // 1. Restore data collections.\n await this.adapter.saveAll(this.name, backup.collections)\n\n // 2. Restore keyrings.\n for (const [userId, keyringFile] of Object.entries(backup.keyrings)) {\n const envelope = {\n _noydb: 1 as const,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: '',\n _data: JSON.stringify(keyringFile),\n }\n await this.adapter.put(this.name, '_keyring', userId, envelope)\n }\n\n // 3. Restore internal collections (`_ledger`, `_ledger_deltas`).\n // Required so verifyBackupIntegrity has the chain to walk.\n if (backup._internal) {\n for (const [internalName, records] of Object.entries(backup._internal)) {\n for (const [id, envelope] of Object.entries(records)) {\n await this.adapter.put(this.name, internalName, id, envelope)\n }\n }\n }\n\n // 4. Refresh the in-memory keyring from the freshly-loaded\n // keyring file. Without this, the Vault's getDEK\n // closure still holds the OLD session's DEKs, and every\n // decrypt of a loaded ledger entry / data envelope fails\n // with TamperedError because the DEK doesn't match the\n // ciphertext that was encrypted with the SOURCE user's DEK.\n // Skipped for plaintext vaults and for tests that\n // construct Vault without a reloadKeyring callback.\n if (this.reloadKeyring) {\n this.keyring = await this.reloadKeyring()\n // Rebuild the DEK resolver against the refreshed keyring so\n // the next ensureCollectionDEK call sees the loaded wrapped\n // DEKs, not the cached pre-load ones.\n this.getDEK = this.makeGetDEK()\n }\n\n // 5. Clear collection cache + reset the ledger store so the\n // next ledger() call rebuilds its head cache from the\n // freshly-loaded entries.\n this.collectionCache.clear()\n this.ledgerStore = null\n\n // 6. 
Run the verification gate. Legacy backups (no ledgerHead)\n // skip this with a one-line warning so existing consumers can\n // still read them while migrating.\n if (!backup.ledgerHead) {\n console.warn(\n `[noy-db] Loaded a legacy backup with no ledgerHead — ` +\n `verifiable-backup integrity check skipped. ` +\n `Re-export with a ledger-aware build to get tamper detection.`,\n )\n return\n }\n\n const result = await this.verifyBackupIntegrity()\n if (!result.ok) {\n // Surface the most specific error class we can. The result\n // shape carries enough info for callers to inspect.\n if (result.kind === 'data') {\n throw new BackupCorruptedError(\n result.collection,\n result.id,\n result.message,\n )\n }\n throw new BackupLedgerError(result.message, result.divergedAt)\n }\n\n // 7. Cross-check: the freshly-verified head must match the\n // value embedded at dump time. A mismatch means someone\n // truncated or extended the chain after dump.\n if (result.head !== backup.ledgerHead.hash) {\n throw new BackupLedgerError(\n `Backup ledger head mismatch: embedded \"${backup.ledgerHead.hash}\" ` +\n `but reconstructed \"${result.head}\".`,\n )\n }\n }\n\n /**\n * End-to-end backup integrity check. Runs both:\n *\n * 1. `ledger.verify()` — walks the hash chain and confirms\n * every `prevHash` matches the recomputed hash of its\n * predecessor.\n *\n * 2. **Data envelope cross-check** — for every (collection, id)\n * that has a current value, find the most recent ledger\n * entry recording a `put` for that pair, recompute the\n * sha256 of the stored envelope's `_data`, and compare to\n * the entry's `payloadHash`. Any mismatch means an\n * out-of-band write modified the data without updating the\n * ledger.\n *\n * Returns a discriminated union so callers can handle the two\n * failure modes differently:\n * - `{ ok: true, head, length }` — chain verified and all\n * data matches; safe to use.\n * - `{ ok: false, kind: 'chain', divergedAt, message }` — the\n * chain itself is broken at the given index.\n * - `{ ok: false, kind: 'data', collection, id, message }` —\n * a specific data envelope doesn't match its ledger entry.\n *\n * This method is exposed so users can call it any time, not just\n * during `load()`. A scheduled background check is the simplest\n * way to detect tampering of an in-place vault.\n */\n async verifyBackupIntegrity(): Promise<\n | { readonly ok: true; readonly head: string; readonly length: number }\n | {\n readonly ok: false\n readonly kind: 'chain'\n readonly divergedAt: number\n readonly message: string\n }\n | {\n readonly ok: false\n readonly kind: 'data'\n readonly collection: string\n readonly id: string\n readonly message: string\n }\n > {\n // Step 1: chain verification. Without the history strategy there\n // is no ledger; an unaudited backup verifies trivially as `ok`\n // because there's nothing to diverge from.\n const ledgerForVerify = this.getLedgerOrNull()\n if (!ledgerForVerify) {\n return { ok: true, head: '', length: 0 }\n }\n const chainResult = await ledgerForVerify.verify()\n if (!chainResult.ok) {\n return {\n ok: false,\n kind: 'chain',\n divergedAt: chainResult.divergedAt,\n message:\n `Ledger chain diverged at index ${chainResult.divergedAt}: ` +\n `expected prevHash \"${chainResult.expected}\" but found \"${chainResult.actual}\".`,\n }\n }\n\n // Step 2: data envelope cross-check. Walk every entry in the\n // ledger and, for the LATEST `put` per (collection, id), recompute\n // the data envelope's payloadHash and compare. 
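A round-trip sketch of the dump/load/verify surface defined here. `sourceVault`, `targetVault`, and `alertOps` are illustrative bindings; the error classes and the result union are the ones in this file.

```ts
const backup = await sourceVault.dump()

try {
  await targetVault.load(backup) // runs the verification gate internally
} catch (err) {
  if (err instanceof BackupCorruptedError) {
    // a specific data envelope no longer matches its ledger entry
  } else if (err instanceof BackupLedgerError) {
    // the chain is broken, or the head diverged from the embedded one
  }
  throw err
}

// The same gate is callable at any time, e.g. from a periodic job:
const result = await targetVault.verifyBackupIntegrity()
if (!result.ok) {
  alertOps(
    result.kind === 'chain'
      ? `ledger chain diverged at index ${result.divergedAt}`
      : `tampered envelope ${result.collection}/${result.id}`,
  )
}
```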
Earlier puts of the\n // same id are skipped because the data collection only holds the\n // current version — historical envelopes live in the deltas\n // collection (which is itself protected by the chain).\n // Reuse the ledger we already resolved in step 1.\n const allEntries = await ledgerForVerify.loadAllEntries()\n\n // Find the latest non-delete entry per (collection, id). Walk\n // the entries in reverse so we hit the latest first; mark each\n // (collection, id) as seen and skip subsequent entries.\n const seen = new Set<string>()\n const latest = new Map<\n string,\n { collection: string; id: string; expectedHash: string }\n >()\n for (let i = allEntries.length - 1; i >= 0; i--) {\n const entry = allEntries[i]\n if (!entry) continue\n const key = `${entry.collection}/${entry.id}`\n if (seen.has(key)) continue\n seen.add(key)\n // For deletes the data collection should NOT have the record,\n // so we skip — there's nothing to cross-check.\n if (entry.op === 'delete') continue\n latest.set(key, {\n collection: entry.collection,\n id: entry.id,\n expectedHash: entry.payloadHash,\n })\n }\n\n for (const { collection, id, expectedHash } of latest.values()) {\n const envelope = await this.adapter.get(this.name, collection, id)\n if (!envelope) {\n return {\n ok: false,\n kind: 'data',\n collection,\n id,\n message:\n `Ledger expects data record \"${collection}/${id}\" to exist, ` +\n `but the adapter has no envelope for it.`,\n }\n }\n const actualHash = await sha256Hex(envelope._data)\n if (actualHash !== expectedHash) {\n return {\n ok: false,\n kind: 'data',\n collection,\n id,\n message:\n `Data envelope \"${collection}/${id}\" has been tampered with: ` +\n `expected payloadHash \"${expectedHash}\", got \"${actualHash}\".`,\n }\n }\n }\n\n return {\n ok: true,\n head: chainResult.head,\n length: chainResult.length,\n }\n }\n\n /**\n * Stream every collection in this vault as decrypted, ACL-scoped\n * chunks.\n *\n * ⚠ **This method decrypts your records.** noy-db's threat model assumes\n * that records on disk are encrypted; the values yielded here are\n * plaintext. The consumer is responsible for ensuring the yielded data\n * is handled in a way that matches the data's sensitivity. If your goal\n * is encrypted backup or transport between noy-db instances, use\n * `dump()` instead — it produces a tamper-evident encrypted envelope and\n * never exposes plaintext.\n *\n * ## Behavior\n *\n * - **ACL-scoped.** Collections the calling principal cannot read are\n * silently skipped (same rule as `Collection.list()`). An operator\n * with `{ invoices: 'rw', clients: 'ro' }` permissions on a\n * five-collection vault exports only `invoices` and `clients`,\n * with no error on the others.\n * - **Streaming.** Returns an `AsyncIterableIterator` so consumers can\n * process chunks as they arrive without holding the full export in\n * memory. Note: the underlying adapter call (`loadAll`) is still a\n * single bulk read — the streaming benefit is on the *output* side.\n * True per-record adapter streaming arrives with the query DSL.\n * - **Schema + refs surfaced** as metadata on every chunk so downstream\n * serializers (`@noy-db/as-csv`, `@noy-db/as-xlsx`, custom\n * exporters) can produce schema-aware output without reaching into\n * collection internals.\n * - **Internal collections filtered.** `_ledger`, `_keyring`, etc. are\n * never yielded — they're noy-db's own bookkeeping and have no value\n * in a plaintext export. 
Use `dump()` for full backup including\n * internal collections.\n *\n * ## Composition\n *\n * Once cross-vault queries land, fanning this out across\n * every vault the caller can unlock is `queryAcross(ids, c =>\n * c.exportStream())` — no new primitive needed. That's part of why this\n * method belongs in core: it's the single decrypt+ACL+metadata path\n * that every export-format package will build on, and pushing it into\n * a `@noy-db/as-*` package would force every format to re-solve\n * the same problems independently.\n *\n * @example\n * ```ts\n * for await (const chunk of company.exportStream()) {\n * // chunk.collection: 'invoices'\n * // chunk.schema: ZodObject | null\n * // chunk.refs: { clientId: { target: 'clients', mode: 'strict' } }\n * // chunk.records: Invoice[]\n * }\n * ```\n *\n * @example\n * ```ts\n * // Per-record streaming for arbitrarily large collections.\n * for await (const chunk of company.exportStream({ granularity: 'record' })) {\n * // chunk.records is always length 1\n * await writer.write(serialize(chunk.records[0]))\n * }\n * ```\n */\n async *exportStream(opts: ExportStreamOptions = {}): AsyncIterableIterator<ExportChunk> {\n const granularity = opts.granularity ?? 'collection'\n\n // One bulk read to enumerate collections. `loadAll` filters out\n // underscore-prefixed internal collections, which is exactly what we\n // want — internal bookkeeping has no place in a plaintext export.\n const snapshot = await this.adapter.loadAll(this.name)\n const collectionNames = Object.keys(snapshot).sort()\n\n // Resolve the ledger head once if requested. The head is identical\n // across every yielded chunk (one ledger per vault) — we copy\n // it onto each chunk so consumers doing per-record streaming don't\n // have to thread state across yields, and so the chunk shape stays\n // forward-compatible with future per-partition ledgers where the\n // head genuinely will differ per chunk.\n const ledgerHead = opts.withLedgerHead\n ? await (async () => {\n const ledger = this.getLedgerOrNull()\n if (!ledger) return undefined\n const head = await ledger.head()\n return head\n ? { hash: head.hash, index: head.entry.index, ts: head.entry.ts }\n : undefined\n })()\n : undefined\n\n // Capture ALL dictionary snapshots upfront before the first yield.\n // Building all snapshots eagerly before yielding anything ensures that\n // concurrent mutations during streaming do not affect the snapshot — any\n // dictionary.put() that happens after the first yield sees the pre-yield\n // state here. Keyed by collection name.\n const dictSnapshotCache = new Map<\n string, // collection name\n Record<string, Record<string, Record<string, string>>> // field → key → locale → label\n >()\n for (const collectionName of collectionNames) {\n const dictFields = this.dictKeyFieldRegistry.get(collectionName)\n if (dictFields && Object.keys(dictFields).length > 0) {\n const snap: Record<string, Record<string, Record<string, string>>> = {}\n for (const [fieldName, dictName] of Object.entries(dictFields)) {\n const entries = await this.dictionary(dictName).list()\n const keyMap: Record<string, Record<string, string>> = {}\n for (const entry of entries) {\n keyMap[entry.key] = entry.labels\n }\n snap[fieldName] = keyMap\n }\n dictSnapshotCache.set(collectionName, snap)\n }\n }\n\n for (const collectionName of collectionNames) {\n // ACL gate. 
The same `hasAccess` check that `Collection.list()`\n // honors — silent skip, no error, matches the \"operator can read\n // some but not all\" pattern.\n if (!hasAccess(this.keyring, collectionName)) continue\n\n const coll = this.collection(collectionName)\n const schema = coll.getSchema() ?? null\n const refs = this.refRegistry.getOutbound(collectionName)\n const ids = Object.keys(snapshot[collectionName] ?? {})\n\n const dictionaries = dictSnapshotCache.get(collectionName)\n\n if (granularity === 'collection') {\n // Decrypt every record in the collection, then yield once.\n // Using `coll.get(id)` rather than the loadAll envelope directly\n // because `get()` is the canonical decrypt+schema-validate path\n // and any future cache/index plumbing rides through it.\n const records: unknown[] = []\n for (const id of ids) {\n const record = await coll.get(id)\n if (record !== null) records.push(record)\n }\n const chunk: ExportChunk = {\n collection: collectionName,\n schema,\n refs,\n records,\n ...(dictionaries !== undefined ? { dictionaries } : {}),\n ...(ledgerHead ? { ledgerHead } : {}),\n }\n yield chunk\n } else {\n // Per-record yield. Memory profile: O(1 record) at a time.\n // The schema/refs metadata is repeated on every chunk so\n // consumers don't have to thread state across yields.\n for (const id of ids) {\n const record = await coll.get(id)\n if (record === null) continue\n const chunk: ExportChunk = {\n collection: collectionName,\n schema,\n refs,\n records: [record],\n ...(dictionaries !== undefined ? { dictionaries } : {}),\n ...(ledgerHead ? { ledgerHead } : {}),\n }\n yield chunk\n }\n }\n }\n }\n\n /**\n * Convenience wrapper that consumes `exportStream()` and serializes the\n * result to a single JSON string.\n *\n * ⚠ **`exportJSON()` decrypts your records and produces plaintext.**\n *\n * noy-db's threat model assumes that records on disk are encrypted.\n * This function deliberately violates that assumption: it produces a\n * JSON string in plaintext, which the consumer is then responsible for\n * protecting (filesystem permissions, full-disk encryption, secure\n * transfer, secure deletion).\n *\n * Use this function only when:\n * - You are the authorized owner of the data, **and**\n * - You have a legitimate downstream tool that requires plaintext\n * JSON, **and**\n * - You have a documented plan for how the resulting plaintext will be\n * protected and eventually destroyed.\n *\n * If your goal is encrypted backup or transport between noy-db\n * instances, use `dump()` instead — it produces a tamper-evident\n * encrypted envelope, never plaintext.\n *\n * ## Why `Promise<string>` instead of writing to a file path\n *\n * Core has zero `node:` imports — it runs unchanged in browsers, Node,\n * Bun, Deno, and edge runtimes. Accepting a file path would force a\n * `node:fs` import (breaks browsers) or a runtime dynamic import\n * (doesn't tree-shake, inflates bundles). Returning a string lets the\n * consumer choose any sink and forces the destination decision to be\n * explicit at the call site — which is also better for the security\n * warning.\n *\n * @example\n * ```ts\n * // Node: write to a file\n * import { writeFile } from 'node:fs/promises'\n * await writeFile('./backup.json', await company.exportJSON())\n * ```\n *\n * @example\n * ```ts\n * // Browser: download as a file\n * const json = await company.exportJSON()\n * const blob = new Blob([json], { type: 'application/json' })\n * const url = URL.createObjectURL(blob)\n * // ... 
attach to an <a download> and click\n * ```\n *\n * @example\n * ```ts\n * // Stream upload to a server\n * await fetch('/upload', {\n * method: 'POST',\n * body: await company.exportJSON(),\n * })\n * ```\n *\n * ## On-disk shape\n *\n * ```json\n * {\n * \"_noydb_export\": 1,\n * \"_compartment\": \"acme\",\n * \"_exported_at\": \"2026-04-07T12:00:00.000Z\",\n * \"_exported_by\": \"alice@acme.example\",\n * \"collections\": {\n * \"invoices\": {\n * \"schema\": null,\n * \"refs\": { \"clientId\": { \"target\": \"clients\", \"mode\": \"strict\" } },\n * \"records\": [ ... ]\n * }\n * },\n * \"ledgerHead\": { \"hash\": \"...\", \"index\": 42, \"ts\": \"...\" }\n * }\n * ```\n *\n * `schema` is included for forward compatibility but is currently\n * always `null` because Standard Schema validators are not JSON-\n * serializable. Format-package serializers that need the schema\n * should use `exportStream()` directly and read `chunk.schema` (which\n * is the live validator object, not a serialization of it).\n */\n async exportJSON(opts: ExportStreamOptions = {}): Promise<string> {\n // Force per-collection granularity regardless of caller setting:\n // record-by-record output doesn't make sense in a single string.\n const collections: Record<\n string,\n {\n schema: null\n refs: Record<string, { target: string; mode: 'strict' | 'warn' | 'cascade' }>\n records: unknown[]\n }\n > = {}\n let ledgerHead: ExportChunk['ledgerHead'] | undefined\n // Merged dictionary snapshot across all collections.\n // Only populated when `resolveLabels` is not set.\n const allDictionaries: Record<\n string, // collection name\n Record<string, Record<string, Record<string, string>>>\n > = {}\n\n for await (const chunk of this.exportStream({\n granularity: 'collection',\n withLedgerHead: opts.withLedgerHead === true,\n })) {\n collections[chunk.collection] = {\n schema: null, // Standard Schema validators are not JSON-serializable\n refs: chunk.refs,\n records: chunk.records,\n }\n if (chunk.ledgerHead) ledgerHead = chunk.ledgerHead\n // Collect dictionary snapshots unless resolveLabels is set\n if (!opts.resolveLabels && chunk.dictionaries) {\n allDictionaries[chunk.collection] = chunk.dictionaries\n }\n }\n\n const hasDictionaries = Object.keys(allDictionaries).length > 0\n return JSON.stringify({\n _noydb_export: 1,\n _compartment: this.name,\n _exported_at: new Date().toISOString(),\n _exported_by: this.keyring.userId,\n collections,\n ...(hasDictionaries ? { _dictionaries: allDictionaries } : {}),\n ...(ledgerHead ? { ledgerHead } : {}),\n })\n }\n}\n\n// ─── Elevation handle ────────────────────────────────────\n\n/**\n * Reserved collection that holds the audit ledger of elevation\n * sessions. One envelope per `vault.elevate(...)` call.\n */\nexport const ELEVATION_AUDIT_COLLECTION = '_elevation_audit'\n\n/**\n * Scoped handle returned by `vault.elevate(...)`. Writes through this\n * handle land at the elevated tier with `authorization: 'elevation'`\n * stamped on the audit event; reads stay on the original `Vault`.\n *\n * The handle lazily checks its TTL on every operation, so a\n * forgotten `release()` cannot keep elevated writes alive past\n * `expiresAt` — the next call simply throws\n * {@link ElevationExpiredError}.\n *\n * Naming note: the issue's spec text used `elevated.session`\n * for this field; we name the field `handle` to avoid conflicting\n * with the codebase's existing `SessionToken` value type. 
The\n * semantics are unchanged.\n */\nexport class ElevatedHandle {\n /** Target tier this handle writes at. */\n readonly tier: number\n /** Audit string stamped on every cross-tier event. */\n readonly reason: string\n /** Absolute expiration in ms (Date.now()). */\n readonly expiresAt: number\n private released = false\n private readonly vault: Vault\n private readonly onRelease: () => void\n\n constructor(opts: {\n vault: Vault\n tier: number\n reason: string\n expiresAt: number\n onRelease: () => void\n }) {\n this.vault = opts.vault\n this.tier = opts.tier\n this.reason = opts.reason\n this.expiresAt = opts.expiresAt\n this.onRelease = opts.onRelease\n }\n\n /**\n * Scoped collection accessor. Returns a thin wrapper exposing the\n * single elevated operation (`put`). Reads, deletes, queries —\n * everything else — should go through the original `vault`'s\n * `collection(...)`, which keeps \"writes elevated, reads\n * unprivileged\" trivially true.\n */\n collection<T>(name: string): { put(id: string, record: T): Promise<void> } {\n // Don't gate the wrapper itself — just the operation. Adopters\n // commonly cache `const docs = elev.collection('docs')` and the\n // lazy-check still works correctly because assertActive runs at\n // every `put` call, against a fresh `Date.now()`.\n return {\n put: async (id: string, record: T): Promise<void> => {\n this.assertActive()\n await this.vault._elevatedPut<T>(name, id, record, this.tier, this.reason)\n },\n }\n }\n\n /**\n * Manually revert the elevation. Idempotent — calling twice (or\n * after the TTL expired) is a safe no-op. The vault's\n * active-elevation slot is cleared so a subsequent\n * `vault.elevate(...)` succeeds without throwing\n * {@link AlreadyElevatedError}.\n */\n async release(): Promise<void> {\n if (this.released) return\n this.released = true\n this.onRelease()\n }\n\n private assertActive(): void {\n if (this.released) {\n throw new ElevationExpiredError({ tier: this.tier, expiresAt: this.expiresAt })\n }\n if (Date.now() > this.expiresAt) {\n // Auto-release on first use past TTL so the vault's active\n // slot frees up without requiring the caller to think about\n // explicit release on expiry.\n this.released = true\n this.onRelease()\n throw new ElevationExpiredError({ tier: this.tier, expiresAt: this.expiresAt })\n }\n }\n}\n","import type { NoydbStore, EncryptedEnvelope, ChangeEvent, HistoryConfig, HistoryOptions, HistoryEntry, PruneOptions, ListPageResult, LocaleReadOptions, ConflictPolicy, CollectionConflictResolver, PutManyItemOptions, PutManyOptions, PutManyResult, DeleteManyResult } from './types.js'\nimport { NOYDB_FORMAT_VERSION } from './types.js'\nimport type { CrdtMode, CrdtState, LwwMapState, RgaState } from './crdt/crdt.js'\nimport { NO_CRDT, type CrdtStrategy } from './crdt/strategy.js'\nimport type { I18nTextDescriptor } from './i18n/core.js'\nimport type { DictKeyDescriptor } from './i18n/dictionary.js'\nimport { NO_I18N, type I18nStrategy } from './i18n/strategy.js'\nimport { encrypt, decrypt, encryptDeterministic } from './crypto.js'\nimport { ConflictError, ReadOnlyError, TranslatorNotConfiguredError, TierDemoteDeniedError } from './errors.js'\nimport { dekKey, assertTierAccess } from './team/tiers.js'\nimport type { GhostRecord, TierMode, CrossTierAccessEvent } from './types.js'\nimport type { UnlockedKeyring } from './team/keyring.js'\nimport { hasWritePermission } from './team/keyring.js'\nimport type { NoydbEventEmitter } from './events.js'\nimport type { StandardSchemaV1 } from 
'./schema.js'\nimport { validateSchemaInput, validateSchemaOutput } from './schema.js'\nimport type { LedgerStore } from './history/ledger/index.js'\nimport type { DiffEntry } from './history/diff.js'\nimport { NO_HISTORY, type HistoryStrategy } from './history/strategy.js'\nimport { Query, ScanBuilder } from './query/index.js'\nimport type { QuerySource, JoinContext, JoinableSource } from './query/index.js'\nimport type { CollectionIndexes, IndexDef } from './indexing/eager-indexes.js'\nimport { encodeIdxId, decodeIdxId } from './indexing/persisted-indexes.js'\nimport type { PersistedCollectionIndex, PersistedIndexDef } from './indexing/persisted-indexes.js'\nimport { LazyQuery } from './indexing/lazy-builder.js'\nimport type { LazyQuerySource } from './indexing/lazy-builder.js'\nimport { NO_INDEXING, type IndexStrategy, type IndexState } from './indexing/strategy.js'\nimport { IndexWriteFailureError } from './errors.js'\nimport type { RefDescriptor } from './refs.js'\nimport { Lru, parseBytes, estimateRecordBytes, type LruStats } from './cache/index.js'\nimport { generateULID } from './bundle/ulid.js'\nimport type { PresenceHandle, PresenceHandleOpts } from './team/presence.js'\nimport { NO_SYNC, type SyncStrategy } from './team/sync-strategy.js'\nimport type { BlobSet } from './blobs/blob-set.js'\nimport { NO_BLOBS, type BlobStrategy } from './blobs/strategy.js'\nimport { NO_AGGREGATE, type AggregateStrategy } from './aggregate/strategy.js'\n\n/** Callback for dirty tracking (sync engine integration). */\nexport type OnDirtyCallback = (collection: string, id: string, action: 'put' | 'delete', version: number) => Promise<void>\n\n/**\n * Event delivered to a `collection.subscribe()` callback. Distinct\n * from the hub-level `ChangeEvent` — this one is bound to a single\n * collection's type `T` and hydrates the record from cache on put.\n *\n * - `type: 'put'` — `record` is the current decrypted value, or\n * `null` in the rare case where another op deleted the record\n * between the emit and the handler firing.\n * - `type: 'delete'` — `record` is always `null`; the deletion is\n * the only information.\n */\nexport interface CollectionChangeEvent<T> {\n readonly type: 'put' | 'delete'\n readonly id: string\n readonly record: T | null\n}\n\n/**\n * Per-collection cache configuration. Only meaningful when paired with\n * `prefetch: false` (lazy mode); eager mode keeps the entire decrypted\n * cache in memory and ignores these bounds.\n */\nexport interface CacheOptions {\n /** Maximum number of records to keep in memory before LRU eviction. */\n maxRecords?: number\n /**\n * Maximum total decrypted byte size before LRU eviction. Accepts a raw\n * number or a human-friendly string: `'50KB'`, `'50MB'`, `'1GB'`.\n * Eviction picks the least-recently-used entry until both budgets\n * (maxRecords AND maxBytes, if both are set) are satisfied.\n */\n maxBytes?: number | string\n}\n\n/** Statistics exposed via `Collection.cacheStats()`. */\nexport interface CacheStats extends LruStats {\n /** True if this collection is in lazy mode. */\n lazy: boolean\n}\n\n/**\n * Track which adapter names have already triggered the listPage fallback\n * warning. 
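A lazy-mode configuration sketch. The collection-accessor options shape is an assumption (this file only shows the constructor opts); `CacheOptions` and `cacheStats()` are as defined above, and `LogEntry` is illustrative.

```ts
// Assumed accessor shape; CacheOptions and cacheStats() are real.
interface LogEntry { msg: string }

const logs = vault.collection<LogEntry>('logs', {
  prefetch: false, // lazy: no bulk hydration, per-id get() loads on demand
  cache: {
    maxRecords: 1_000,
    maxBytes: '50MB', // human-friendly string; eviction runs until BOTH budgets hold
  },
})

// list()/query() throw in lazy mode; use per-id get() or scan() instead:
const entry = await logs.get('evt-0001') // decrypted once, then LRU-cached
const stats = logs.cacheStats()          // → { lazy: true, ...LruStats }
```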
We only emit once per adapter per process so consumers see the\n * heads-up without log spam.\n */\nconst fallbackWarned = new Set<string>()\nfunction warnOnceFallback(adapterName: string): void {\n if (fallbackWarned.has(adapterName)) return\n fallbackWarned.add(adapterName)\n // Only warn in non-test environments — vitest runs are noisy enough.\n if (typeof process !== 'undefined' && process.env['NODE_ENV'] === 'test') return\n console.warn(\n `[noy-db] Adapter \"${adapterName}\" does not implement listPage(); ` +\n `Collection.scan()/listPage() are using a synthetic fallback (slower). ` +\n `Add a listPage method to opt into the streaming fast path.`,\n )\n}\n\n/** A typed collection of records within a vault. */\nexport class Collection<T> {\n private readonly adapter: NoydbStore\n private readonly vault: string\n private readonly name: string\n private readonly keyring: UnlockedKeyring\n private readonly encrypted: boolean\n private readonly emitter: NoydbEventEmitter\n private readonly getDEK: (collectionName: string) => Promise<CryptoKey>\n private readonly onDirty: OnDirtyCallback | undefined\n private readonly historyConfig: HistoryConfig\n\n /**\n * tree-shake seam — the strategy that backs `collection.blob(id)`.\n * Defaults to `NO_BLOBS`, a ~10-line stub that throws with an actionable\n * message. Consumers opt into real blob storage by importing\n * `{ blobs }` from `@noy-db/hub/blobs` and passing the returned\n * strategy to `createNoydb({ blobStrategy: blobs() })`. With the\n * default stub, none of the BlobSet / chunk / MIME-magic machinery\n * reaches the bundle.\n */\n private readonly blobStrategy: BlobStrategy\n private readonly aggregateStrategy: AggregateStrategy\n private readonly crdtStrategy: CrdtStrategy\n private readonly historyStrategy: HistoryStrategy\n private readonly i18nStrategy: I18nStrategy\n private readonly syncStrategy: SyncStrategy\n\n // In-memory cache of decrypted records (eager mode only). Lazy mode\n // uses `lru` instead. Both fields exist so a single Collection instance\n // doesn't need a runtime branch on every cache access.\n private readonly cache = new Map<string, { record: T; version: number }>()\n private hydrated = false\n\n /**\n * Lazy mode flag. `true` when constructed with `prefetch: false`.\n * In lazy mode the cache is bounded by an LRU and `list()`/`query()`\n * throw — callers must use `scan()` or per-id `get()` instead.\n */\n private readonly lazy: boolean\n\n /**\n * LRU cache for lazy mode. Only allocated when `prefetch: false` is set.\n * Stores `{ record, version }` entries the same shape as `this.cache`.\n * Tree-shaking note: importing Collection without setting `prefetch:false`\n * still pulls in the Lru class today; future bundle-size work could\n * lazy-import the cache module.\n */\n private readonly lru: Lru<string, { record: T; version: number }> | null\n\n /**\n * tree-shake seam — per-Collection indexing state. Owned by the\n * `IndexStrategy` passed through from `createNoydb({ indexStrategy })`.\n * Defaults to a disabled state (both accessors return null) so the\n * `CollectionIndexes` / `PersistedCollectionIndex` / `LazyQuery`\n * classes never reach the bundle when indexing is unused.\n *\n * Accessor helpers below (`get indexes()`, `get persistedIndexes()`)\n * preserve the field-access ergonomics without changing every\n * caller site.\n */\n private readonly indexState: IndexState\n\n /**\n * True once `_idx/*` side-cars have been bulk-loaded into\n * `persistedIndexes`. 
Flipped by `ensurePersistedIndexesLoaded()` on\n * first lazy-mode query so subsequent queries skip the adapter round\n * trip. Invalidation (remote sync, rotation) resets it alongside\n * `persistedIndexes.clear()`.\n */\n private persistedIndexesLoaded = false\n\n /**\n * Accessor for the in-memory eager-mode index mirror. Returns `null`\n * when indexing is disabled on this Noydb instance (the\n * `NO_INDEXING` default) or when the collection is in lazy mode\n * (which uses the persisted mirror instead).\n */\n private get indexes(): CollectionIndexes | null {\n return this.indexState.getEagerIndexes()\n }\n\n /**\n * Accessor for the persisted-mirror (lazy-mode) index. Returns `null`\n * when indexing is disabled or the collection is in eager mode.\n */\n private get persistedIndexes(): PersistedCollectionIndex | null {\n return this.indexState.getPersistedIndexes()\n }\n\n /**\n * per-collection reconcile-on-open policy. Read once\n * from `CollectionOptions.reconcileOnOpen` and applied by\n * `ensurePersistedIndexesLoaded()` on the first lazy-mode query.\n */\n private readonly reconcileOnOpen: 'off' | 'dry-run' | 'auto'\n\n /**\n * Re-entrancy guard for the auto-reconcile path. `reconcileIndex`\n * reloads the mirror after applying fixes, which re-enters\n * `ensurePersistedIndexesLoaded`; without this flag we'd trigger a\n * second auto-reconcile pass and potentially infinite recursion.\n */\n private autoReconciling = false\n\n /**\n * Optional Standard Schema v1 validator. When set, every `put()` runs\n * the input through `validateSchemaInput` before encryption, and every\n * record coming OUT of `decryptRecord` runs through\n * `validateSchemaOutput`. A rejected input throws\n * `SchemaValidationError` with `direction: 'input'`; drifted stored\n * data throws with `direction: 'output'`. Both carry the rich issue\n * list from the validator so UI code can render field-level messages.\n *\n * The schema is stored as `StandardSchemaV1<unknown, T>` because the\n * collection type parameter `T` is the OUTPUT type — whatever the\n * validator produces after transforms and coercion. Users who pass a\n * schema to `defineNoydbStore` (or `Collection.constructor`) get their\n * `T` inferred automatically via `InferOutput<Schema>`.\n */\n private readonly schema: StandardSchemaV1<unknown, T> | undefined\n\n /**\n * Vault-default locale. Used as the fallback when no per-call\n * locale option is passed to `get()`/`list()`. Provided by Vault\n * at collection construction time via the `collection({ locale })` or\n * `openVault(name, { locale })` path.\n *\n * `undefined` means \"no default locale set\" — i18nText fields will\n * throw `LocaleNotSpecifiedError` unless a per-call locale is passed.\n */\n private readonly defaultLocale: string | undefined\n\n /**\n * Map of field name → `I18nTextDescriptor` for fields declared with\n * `i18nText()`. Used by:\n * - `put()` via `i18nPutValidator` to enforce required translations\n * - `get()`/`list()` to apply locale resolution after decryption\n *\n * Declared via the `i18nFields` collection option.\n */\n private readonly i18nFields: Record<string, I18nTextDescriptor> | undefined\n\n /**\n * Map of field name → `DictKeyDescriptor` for fields declared with\n * `dictKey()`. 
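A schema-validation sketch using Zod, which implements Standard Schema v1 (any conforming validator works). The accessor options shape is an assumption; the direction semantics are the ones documented above.

```ts
import { z } from 'zod'

const Invoice = z.object({
  id: z.string(),
  total: z.number().nonnegative(),
})

// T is inferred from the schema's OUTPUT type.
const invoices = vault.collection('invoices', { schema: Invoice })

await invoices.put('inv-1', { id: 'inv-1', total: -5 })
// → SchemaValidationError with direction: 'input' plus the validator's
//   field-level issues; nothing is encrypted or written. Stored data that
//   has drifted throws the same error with direction: 'output' on read.
```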
Used by `get()`/`list()` to add `<field>Label` virtual\n * fields when a locale is requested.\n */\n private readonly dictKeyFields: Record<string, DictKeyDescriptor> | undefined\n\n /**\n * Async callback provided by the Vault that resolves a dict key\n * to its label for a given locale. Used by the locale-read path for\n * dictKey fields.\n *\n * Signature: `(dictName, key, locale, fallback?) => Promise<string | undefined>`\n */\n private readonly dictLabelResolver:\n | ((\n dictName: string,\n key: string,\n locale: string,\n fallback?: string | readonly string[],\n ) => Promise<string | undefined>)\n | undefined\n\n /**\n * Synchronous callback provided by the Vault that validates\n * i18nText fields on `put()`. Throws `MissingTranslationError` when\n * a required translation is absent. Called after schema validation,\n * before encryption.\n */\n private readonly i18nPutValidator: ((record: unknown) => void) | undefined\n\n /**\n * declared deterministic fields. `null` when the feature\n * is inactive for this collection; a frozen `Set` otherwise.\n */\n private readonly deterministicFields: ReadonlySet<string> | null\n\n /**\n * declared tiers for this collection. `null` when\n * tier-aware methods are disabled. Tier 0 is implicit and never\n * stored here.\n */\n private readonly tiers: ReadonlySet<number> | null\n private readonly tierMode: TierMode\n private readonly onCrossTierAccess: ((event: CrossTierAccessEvent) => void) | undefined\n\n /**\n * Async translator callback provided by Noydb via Vault for\n * `i18nText` fields with `autoTranslate: true`. Called\n * before i18n validation so translated values are present when the\n * validator runs. `undefined` when no `plaintextTranslator` was\n * configured on `createNoydb()`.\n */\n private readonly autoTranslateHook:\n | ((text: string, from: string, to: string, field: string, collection: string) => Promise<string>)\n | undefined\n\n /**\n * Optional reference to the vault-level hash-chained audit\n * log. When present, every successful `put()` and `delete()` appends\n * an entry to the ledger AFTER the adapter write succeeds (so a\n * failed adapter write never produces an orphan ledger entry).\n *\n * The ledger is always a vault-wide singleton — all\n * collections in the same vault share the same LedgerStore.\n * Vault.ledger() does the lazy init; this field just holds\n * the reference so Collection doesn't need to reach back up to the\n * vault on every mutation.\n *\n * `undefined` means \"no ledger attached\" — supported for tests that\n * construct a Collection directly without a vault, and for\n * future backwards-compat scenarios. Production usage always has a\n * ledger because Vault.collection() passes one through.\n */\n private readonly ledger: LedgerStore | undefined\n\n /** — per-collection CRDT mode, or undefined for normal LWW-at-record-level. */\n private readonly crdtMode: CrdtMode | undefined\n\n /** — optional remote/sync adapter for presence broadcasting. */\n private readonly syncAdapter: NoydbStore | undefined\n\n /** — consent-audit hook, no-op when no scope is active. */\n private readonly onAccess:\n | ((op: 'get' | 'put' | 'delete', id: string) => Promise<void>)\n | undefined\n\n /**\n * accounting-period write guard. 
Called BEFORE any\n * adapter write with:\n * - `existing` — the prior envelope's `_ts` and decrypted record\n * (or `null` if no prior envelope exists)\n * - `incoming` — the record being written (or `null` for delete)\n *\n * Throws `PeriodClosedError` if either side falls inside a closed\n * period. Installed by Vault; no-op when no period has been closed.\n * Async so the Vault can lazy-load the period list from the\n * adapter on first use.\n */\n private readonly periodGuard:\n | ((\n existing: { ts: string | null; record: Record<string, unknown> | null } | null,\n incoming: Record<string, unknown> | null,\n ) => Promise<void>)\n | undefined\n\n /**\n * Optional back-reference to the owning compartment's ref\n * enforcer. When present, `Collection.put` calls\n * `refEnforcer.enforceRefsOnPut(name, record)` before the adapter\n * write, and `Collection.delete` calls\n * `refEnforcer.enforceRefsOnDelete(name, id)` before its own\n * adapter delete. The Vault handles the actual registry\n * lookup and cross-collection enforcement — Collection just\n * notifies it at the right points in the lifecycle.\n *\n * Typed as a structural interface rather than `Vault`\n * directly to avoid a circular import. Vault implements\n * these two methods; any other object with the same shape would\n * work too (used only in unit tests).\n */\n private readonly refEnforcer:\n | {\n enforceRefsOnPut(collectionName: string, record: unknown): Promise<void>\n enforceRefsOnDelete(collectionName: string, id: string): Promise<void>\n }\n | undefined\n\n /**\n * Optional back-reference to the owning compartment's join resolver.\n * When present,\n * `Collection.query()` builds a `JoinContext` that lets the Query\n * resolve `.join(field)` calls into target collections via this\n * resolver.\n *\n * Two methods:\n * - `resolveSource(name)` — fetch a `JoinableSource` for the\n * right-side collection by name. Returning `null` means \"no\n * such collection in this compartment\" — the executor then\n * throws an actionable error naming the missing target.\n * - `resolveRef(leftCollection, field)` — look up the ref\n * descriptor the left collection declared for this field.\n * `null` when the field has no ref, which makes `.join()`\n * throw at plan time before any records are touched.\n *\n * Typed structurally rather than as `Vault` to avoid a\n * circular import. Vault implements these two methods; any\n * other object with the same shape works too (used only in unit\n * tests against a plain object).\n */\n private readonly joinResolver:\n | {\n resolveSource(collectionName: string): JoinableSource | null\n resolveRef(leftCollection: string, field: string): RefDescriptor | null\n resolveDictSource?: (leftCollection: string, field: string) => JoinableSource | null\n }\n | undefined\n\n constructor(opts: {\n adapter: NoydbStore\n vault: string\n name: string\n keyring: UnlockedKeyring\n encrypted: boolean\n emitter: NoydbEventEmitter\n getDEK: (collectionName: string) => Promise<CryptoKey>\n historyConfig?: HistoryConfig | undefined\n onDirty?: OnDirtyCallback | undefined\n /**\n * tree-shake seam. When omitted, `collection.blob(id)` throws\n * with a pointer at the `@noy-db/hub/blobs` subpath. 
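For reference, the enabling side of this seam, using the exact names the docstring cites; everything else (adapter, keys, the BlobSet surface) is elided or lives in the subpath and is not shown here.

```ts
// `blobs` and the blobStrategy option are the names the docstring cites;
// the main-entry import path for createNoydb is assumed.
import { createNoydb } from '@noy-db/hub'
import { blobs } from '@noy-db/hub/blobs'

const db = createNoydb({
  blobStrategy: blobs(), // without this, collection.blob(id) throws
  // ...adapter, keys, and other options elided
})
```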
When set (via\n * `createNoydb({ blobStrategy: blobs() })`), blob storage is live.\n * `@internal` by virtue of `BlobStrategy` being `@internal`.\n */\n blobStrategy?: BlobStrategy | undefined\n aggregateStrategy?: AggregateStrategy | undefined\n crdtStrategy?: CrdtStrategy | undefined\n /**\n * tree-shake seam — strategy for optional history/ledger/\n * time-machine. When omitted, history snapshots and ledger appends\n * become silent no-ops (data still writes); the read APIs\n * (`history`, `getVersion`, `revert`, `diff`, `clearHistory`,\n * `pruneRecordHistory`) throw with a pointer at `@noy-db/hub/history`.\n */\n historyStrategy?: HistoryStrategy | undefined\n i18nStrategy?: I18nStrategy | undefined\n syncStrategy?: SyncStrategy | undefined\n /**\n * tree-shake seam. When omitted, indexing is off for this\n * collection — every `.lazyQuery()` call throws, `.rebuildIndexes()`\n * is a no-op, and `indexes: [...]` declarations are ignored. Enable\n * by passing `withIndexing()` from `@noy-db/hub/indexing` at\n * `createNoydb` time.\n */\n indexStrategy?: IndexStrategy | undefined\n indexes?: IndexDef[] | undefined\n /**\n * Auto-reconcile behavior for persisted-index drift on lazy-mode\n * collections. Defaults to `'off'` — operators call\n * `collection.reconcileIndex(field)` explicitly.\n *\n * - `'off'` (default): no implicit work. Same semantics as before.\n * - `'dry-run'`: on first lazy-mode query, run\n * `reconcileIndex(field, { dryRun: true })` per declared field\n * and emit `index:reconciled` with the diff. Nothing is written.\n * - `'auto'`: same walk as `'dry-run'` but with `dryRun: false`.\n * Drift is repaired in-place and the fix count surfaces on the\n * event.\n *\n * Unattended long-lived processes (Workers, Node services with no\n * human operator) should set `'auto'`. Attended desktop apps should\n * leave it `'off'` and surface a manual \"rebuild indexes\" button.\n */\n reconcileOnOpen?: 'off' | 'dry-run' | 'auto'\n /**\n * Hydration mode. Eager (`prefetch: true`, the default) loads\n * everything into memory on first access — matches the existing\n * behavior exactly. Lazy (`prefetch: false`) defers loads\n * to per-id `get()` calls and bounds memory via the `cache` option.\n */\n prefetch?: boolean\n /**\n * LRU cache options. Only meaningful when `prefetch: false`. At least\n * one of `maxRecords` or `maxBytes` must be set in lazy mode — an\n * unbounded lazy cache defeats the purpose.\n */\n cache?: CacheOptions | undefined\n /**\n * Optional Standard Schema v1 validator (Zod, Valibot, ArkType,\n * Effect Schema, etc.). When set, every `put()` is validated before\n * encryption and every read is validated after decryption. See the\n * `schema` field docstring for the error semantics.\n */\n schema?: StandardSchemaV1<unknown, T> | undefined\n /**\n * Optional reference to the compartment's hash-chained ledger.\n * When present, successful mutations append a ledger entry via\n * `LedgerStore.append()`. Constructed at the Vault level and\n * threaded through — see the Vault.collection() source for\n * the wiring.\n */\n ledger?: LedgerStore | undefined\n /**\n * Optional back-reference to the owning compartment's ref\n * enforcer.\n * Collection.put calls `enforceRefsOnPut` before the adapter\n * write; Collection.delete calls `enforceRefsOnDelete` before\n * its own adapter delete. 
See the `refEnforcer` field docstring\n * for the full protocol.\n */\n refEnforcer?:\n | {\n enforceRefsOnPut(collectionName: string, record: unknown): Promise<void>\n enforceRefsOnDelete(collectionName: string, id: string): Promise<void>\n }\n | undefined\n /**\n * Optional back-reference to the owning compartment's join\n * resolver. When present, `query()` builds a\n * `JoinContext` so `.join(field)` can resolve through the\n * existing `ref()` declaration into the target collection.\n * Absent in tests that construct a Collection directly without\n * a vault; production usage always has one because\n * Vault.collection() passes `this` through.\n */\n joinResolver?:\n | {\n resolveSource(collectionName: string): JoinableSource | null\n resolveRef(leftCollection: string, field: string): RefDescriptor | null\n }\n | undefined\n /** — i18nText field descriptors for locale-aware reads. */\n i18nFields?: Record<string, I18nTextDescriptor> | undefined\n /** — dictKey field descriptors for label resolution on reads. */\n dictKeyFields?: Record<string, DictKeyDescriptor> | undefined\n /**\n * async callback that resolves a dict key to its label\n * for a given locale. Provided by the Vault.\n */\n dictLabelResolver?:\n | ((\n dictName: string,\n key: string,\n locale: string,\n fallback?: string | readonly string[],\n ) => Promise<string | undefined>)\n | undefined\n /**\n * synchronous callback that validates i18nText fields\n * on put. Provided by the Vault. Throws MissingTranslationError.\n */\n i18nPutValidator?: ((record: unknown) => void) | undefined\n /**\n * translator callback from Noydb. When present, missing\n * translations for `autoTranslate: true` i18nText fields are generated\n * before the i18n validator runs.\n */\n autoTranslateHook?:\n | ((text: string, from: string, to: string, field: string, collection: string) => Promise<string>)\n | undefined\n /**\n * vault-default locale, inherited from\n * `openVault(name, { locale })` or `vault.setLocale()`.\n */\n defaultLocale?: string | undefined\n /**\n * collection-level conflict resolution policy.\n * Overrides the db-level `conflict` option for this collection only.\n */\n conflictPolicy?: ConflictPolicy<T> | undefined\n /**\n * callback to register an envelope-level resolver with the\n * SyncEngine. Provided by the Vault (wired from the SyncEngine).\n */\n onRegisterConflictResolver?: ((name: string, resolver: CollectionConflictResolver) => void) | undefined\n /**\n * CRDT mode for this collection. When set, `put()` stores\n * CRDT state in the envelope and `get()` returns the resolved snapshot.\n * `getRaw(id)` returns the full CRDT state for merge operations.\n */\n crdt?: CrdtMode | undefined\n /**\n * optional remote/sync adapter. When present, `presence()`\n * writes heartbeats to this adapter so other devices can read them.\n * If the adapter implements pub/sub, presence updates are real-time.\n */\n syncAdapter?: NoydbStore | undefined\n /**\n * called by the collection after every successful\n * `get` / `put` / `delete`. The Vault installs a callback that\n * appends a consent-audit entry when `withConsent` is active;\n * outside a consent scope the callback is a no-op. Awaited so a\n * thrown audit write surfaces to the caller.\n */\n onAccess?: (op: 'get' | 'put' | 'delete', id: string) => Promise<void>\n /**\n * invoked by `put`/`delete` before any adapter\n * write. Receives the prior envelope timestamp + decrypted\n * record (or `null` if no prior) and the incoming record (or\n * `null` for delete). 
\n /**\n * opt-in deterministic-encryption index.\n *\n * Field names listed here get a deterministic AES-GCM ciphertext\n * attached to every envelope's `_det` map, which enables blind\n * equality search via `collection.findByDet(field, value)`.\n *\n * **Leaks equality.** Two records with the same value in a\n * deterministic field produce identical ciphertexts, so anyone\n * with store access can tell which records share a value without\n * learning the value itself. This is the textbook trade-off of\n * deterministic encryption — strictly opt-in for that reason.\n *\n * Declaring any field here without also passing\n * `acknowledgeDeterministicRisk: true` throws at construction,\n * so the risk must be explicitly acknowledged.\n */\n deterministicFields?: readonly string[] | undefined\n /**\n * gate for `deterministicFields`. Must be `true` when\n * any deterministic field is declared. Any other value throws.\n */\n acknowledgeDeterministicRisk?: boolean | undefined\n /**\n * declared tiers this collection supports. An\n * undefined or empty list disables the hierarchical-tier surface\n * on this collection (`putAtTier`, `getAtTier`, `elevate`, `demote`\n * throw). Tier 0 is implicit and always available.\n */\n tiers?: readonly number[] | undefined\n /**\n * what a lower-tier caller sees for above-tier\n * records. Default `'invisibility'`.\n */\n tierMode?: TierMode | undefined\n /**\n * optional callback fired on every cross-tier access.\n * Provided by the Vault; collects notification events and writes\n * to the ledger.\n */\n onCrossTierAccess?: ((event: CrossTierAccessEvent) => void) | undefined\n /**\n * invoked by `put`/`delete` before any adapter\n * write. Receives the prior envelope timestamp + decrypted\n * record (or `null` if no prior) and the incoming record (or\n * `null` for delete). Throws `PeriodClosedError` to abort.\n */\n periodGuard?: (\n existing: { ts: string | null; record: Record<string, unknown> | null } | null,\n incoming: Record<string, unknown> | null,\n ) => Promise<void>\n }) {\n this.adapter = opts.adapter\n this.vault = opts.vault\n this.name = opts.name\n this.keyring = opts.keyring\n this.encrypted = opts.encrypted\n this.emitter = opts.emitter\n this.blobStrategy = opts.blobStrategy ?? NO_BLOBS\n this.aggregateStrategy = opts.aggregateStrategy ?? NO_AGGREGATE\n this.crdtStrategy = opts.crdtStrategy ?? NO_CRDT\n this.historyStrategy = opts.historyStrategy ?? NO_HISTORY\n this.i18nStrategy = opts.i18nStrategy ?? NO_I18N\n this.syncStrategy = opts.syncStrategy ?? NO_SYNC\n this.reconcileOnOpen = opts.reconcileOnOpen ?? 'off'\n this.getDEK = opts.getDEK\n this.onDirty = opts.onDirty\n this.historyConfig = opts.historyConfig ?? { enabled: true }\n this.schema = opts.schema\n this.ledger = opts.ledger\n this.refEnforcer = opts.refEnforcer\n this.joinResolver = opts.joinResolver\n this.i18nFields = opts.i18nFields\n this.dictKeyFields = opts.dictKeyFields\n this.dictLabelResolver = opts.dictLabelResolver\n this.i18nPutValidator = opts.i18nPutValidator\n this.autoTranslateHook = opts.autoTranslateHook\n this.defaultLocale = opts.defaultLocale\n this.crdtMode = opts.crdt\n this.syncAdapter = opts.syncAdapter\n this.onAccess = opts.onAccess\n this.periodGuard = opts.periodGuard\n\n // hierarchical-tier wiring\n this.tiers = opts.tiers && opts.tiers.length > 0 ? new Set(opts.tiers) : null\n this.tierMode = opts.tierMode ?? 
'invisibility'\n this.onCrossTierAccess = opts.onCrossTierAccess\n\n // deterministic-encryption wiring\n if (opts.deterministicFields && opts.deterministicFields.length > 0) {\n if (opts.acknowledgeDeterministicRisk !== true) {\n throw new Error(\n `Collection \"${opts.name}\": deterministicFields requires \\`acknowledgeDeterministicRisk: true\\`. ` +\n `Deterministic encryption leaks equality between records — two records with the same field value ` +\n `produce identical ciphertexts visible to anyone with store access. If that trade-off is acceptable ` +\n `for your threat model, set \\`acknowledgeDeterministicRisk: true\\` to enable.`,\n )\n }\n this.deterministicFields = Object.freeze(new Set(opts.deterministicFields))\n } else {\n this.deterministicFields = null\n }\n\n // register CRDT conflict resolver with SyncEngine\n if (opts.crdt && opts.onRegisterConflictResolver) {\n const crdtMode = opts.crdt\n const crdtResolver: CollectionConflictResolver = async (_id, local, remote) => {\n if (crdtMode === 'yjs') {\n // Core cannot merge Yjs without the yjs package — take the higher version\n return local._v >= remote._v ? local : remote\n }\n const localJson = await this.decryptJsonString(local)\n const remoteJson = await this.decryptJsonString(remote)\n const localState = JSON.parse(localJson) as CrdtState\n const remoteState = JSON.parse(remoteJson) as CrdtState\n const merged = this.crdtStrategy.mergeCrdtStates(localState, remoteState)\n const mergedVersion = Math.max(local._v, remote._v) + 1\n return this.encryptJsonString(JSON.stringify(merged), mergedVersion)\n }\n opts.onRegisterConflictResolver(this.name, crdtResolver)\n }\n\n // build and register per-collection conflict resolver with SyncEngine\n if (opts.conflictPolicy !== undefined && opts.onRegisterConflictResolver) {\n const policy = opts.conflictPolicy\n const compartmentName = this.vault\n const collectionName = this.name\n const emitter = this.emitter\n let resolver: CollectionConflictResolver\n\n if (policy === 'last-writer-wins') {\n resolver = async (_id, local, remote) => (local._ts >= remote._ts ? local : remote)\n } else if (policy === 'first-writer-wins') {\n resolver = async (_id, local, remote) => (local._v <= remote._v ? local : remote)\n } else if (policy === 'manual') {\n resolver = (id, local, remote) =>\n new Promise<EncryptedEnvelope | null>(resolvePromise => {\n let settled = false\n const resolveCallback = (winner: EncryptedEnvelope | null) => {\n if (!settled) {\n settled = true\n resolvePromise(winner)\n }\n }\n emitter.emit('sync:conflict', {\n vault: compartmentName,\n collection: collectionName,\n id,\n local,\n remote,\n localVersion: local._v,\n remoteVersion: remote._v,\n resolve: resolveCallback,\n })\n // Defer if no handler called resolve synchronously\n if (!settled) {\n settled = true\n resolvePromise(null)\n }\n })\n } else {\n // Custom merge fn: decrypt both → merge → re-encrypt\n const mergeFn = policy as (local: T, remote: T) => T\n resolver = async (_id, local, remote) => {\n const localRecord = await this.decryptRecord(local, { skipValidation: true })\n const remoteRecord = await this.decryptRecord(remote, { skipValidation: true })\n const merged = mergeFn(localRecord, remoteRecord)\n const mergedVersion = Math.max(local._v, remote._v) + 1\n return this.encryptRecord(merged, mergedVersion)\n }\n }\n\n opts.onRegisterConflictResolver(collectionName, resolver)\n }\n\n // Default `prefetch: true` keeps semantics. 
Only opt-in to lazy\n // mode when the consumer explicitly sets `prefetch: false`.\n this.lazy = opts.prefetch === false\n\n if (this.lazy) {\n if (!opts.cache || (opts.cache.maxRecords === undefined && opts.cache.maxBytes === undefined)) {\n throw new Error(\n `Collection \"${this.name}\": lazy mode (prefetch: false) requires a cache option ` +\n `with maxRecords and/or maxBytes. An unbounded lazy cache defeats the purpose.`,\n )\n }\n const lruOptions: { maxRecords?: number; maxBytes?: number } = {}\n if (opts.cache.maxRecords !== undefined) lruOptions.maxRecords = opts.cache.maxRecords\n if (opts.cache.maxBytes !== undefined) lruOptions.maxBytes = parseBytes(opts.cache.maxBytes)\n this.lru = new Lru<string, { record: T; version: number }>(lruOptions)\n this.hydrated = true // lazy mode is always \"hydrated\" — no bulk load\n } else {\n this.lru = null\n }\n\n // delegate mirror construction + declaration to the active\n // indexing strategy. `NO_INDEXING` returns a state whose accessors\n // both return null; the active strategy (from `@noy-db/hub/indexing`)\n // constructs the appropriate mirror based on lazy vs eager mode and\n // declares every IndexDef. With NO_INDEXING the heavy index classes\n // never reach the bundle.\n const strategy = opts.indexStrategy ?? NO_INDEXING\n this.indexState = strategy.createState({\n defs: opts.indexes ?? [],\n lazy: this.lazy,\n })\n }\n\n /**\n * Return the Standard Schema validator attached to this collection,\n * or `undefined` if none was provided at construction time.\n *\n * Exposed (read-only) for the Vault-level export primitive,\n * which surfaces each collection's schema in the per-chunk metadata\n * so downstream serializers (`@noy-db/as-*` packages, custom\n * exporters) can produce schema-aware output without poking at\n * collection internals. The validator object is returned by\n * reference — callers must treat it as immutable.\n */\n getSchema(): StandardSchemaV1<unknown, T> | undefined {\n return this.schema\n }\n\n /**\n * Get a single record by ID.\n *\n * @param id Record identifier.\n * @param locale Optional locale options. When provided,\n * `i18nText` fields are resolved to the requested locale\n * string, and `dictKey` fields get a `<field>Label`\n * virtual field added. Pass `{ locale: 'raw' }` to\n * return the full `{ [locale]: string }` map instead.\n *\n * @returns The decrypted (and optionally locale-resolved) record, or\n * `null` if not found.\n */\n async get(id: string, locale?: LocaleReadOptions): Promise<T | null> {\n let record: T | null\n\n if (this.lazy && this.lru) {\n // Cache hit: promote and return.\n const cached = this.lru.get(id)\n if (cached) {\n record = cached.record\n } else {\n // Cache miss: hit the adapter, decrypt, populate the LRU.\n const envelope = await this.adapter.get(this.vault, this.name, id)\n if (!envelope) return null\n record = await this.decryptRecord(envelope)\n this.lru.set(id, { record, version: envelope._v }, estimateRecordBytes(record))\n }\n } else {\n // Eager mode: load everything once, then serve from the in-memory map.\n await this.ensureHydrated()\n const entry = this.cache.get(id)\n record = entry ? 
entry.record : null\n }\n\n if (record === null) return null\n await this.onAccess?.('get', id)\n return this.applyLocaleToRecord(record, locale)\n }\n\n /**\n * Return the raw CRDT state for a record.\n * Only available on collections configured with `crdt: 'lww-map' | 'rga' | 'yjs'`.\n * Use this for merge operations or to pass to `@noy-db/yjs`.\n * Throws if the collection is not in CRDT mode.\n */\n async getRaw(id: string): Promise<CrdtState | null> {\n if (!this.crdtMode) {\n throw new Error(\n `Collection \"${this.name}\": getRaw() is only available when the collection ` +\n `is created with a 'crdt' option ('lww-map', 'rga', or 'yjs').`,\n )\n }\n const envelope = await this.adapter.get(this.vault, this.name, id)\n if (!envelope) return null\n const json = await this.decryptJsonString(envelope)\n return JSON.parse(json) as CrdtState\n }\n\n /**\n * Return a presence handle for this collection.\n *\n * The handle manages an encrypted ephemeral presence channel keyed by an\n * HKDF derivation of this collection's DEK. Presence payloads are invisible\n * to the adapter.\n *\n * @param opts.staleMs Milliseconds before a peer is considered inactive.\n * Default: 30 000.\n * @param opts.pollIntervalMs Milliseconds between storage polls (fallback mode).\n * Default: 5 000.\n */\n presence<P = unknown>(opts?: { staleMs?: number; pollIntervalMs?: number }): PresenceHandle<P> {\n const presenceOpts: PresenceHandleOpts = {\n adapter: this.adapter,\n vault: this.vault,\n collectionName: this.name,\n userId: this.keyring.userId,\n encrypted: this.encrypted,\n getDEK: this.getDEK,\n }\n if (this.syncAdapter !== undefined) presenceOpts.syncAdapter = this.syncAdapter\n if (opts?.staleMs !== undefined) presenceOpts.staleMs = opts.staleMs\n if (opts?.pollIntervalMs !== undefined) presenceOpts.pollIntervalMs = opts.pollIntervalMs\n return this.syncStrategy.buildPresence<P>(presenceOpts)\n }\n\n /** Create or update a record. */\n async put(id: string, record: T): Promise<void> {\n if (!hasWritePermission(this.keyring, this.name)) {\n throw new ReadOnlyError()\n }\n\n // accounting-period guard. Runs BEFORE any other\n // work so a closed-period write fails fast and leaves no partial\n // trace (no schema work, no i18n translation, no history). Reads\n // the existing envelope + decrypts the prior record so\n // business-date comparison against the closed period's\n // `dateField` can use the stored value (late entries don't slip\n // through a write-time check). For first-time inserts the prior\n // is null.\n if (this.periodGuard !== undefined) {\n const existingEnv = await this.adapter.get(this.vault, this.name, id)\n let priorRecord: Record<string, unknown> | null = null\n if (existingEnv) {\n try {\n priorRecord = (await this.decryptRecord(existingEnv, { skipValidation: true })) as unknown as Record<string, unknown>\n } catch {\n priorRecord = null\n }\n }\n await this.periodGuard(\n existingEnv ? { ts: existingEnv._ts, record: priorRecord } : null,\n record as unknown as Record<string, unknown>,\n )\n }\n\n // Schema validation — runs BEFORE encryption so invalid records are\n // rejected at the store boundary. 
The validator may transform the\n // input (e.g., coerce strings → numbers, strip unknown fields), in\n // which case we persist the validated value rather than the raw one.\n // Users who pass a bad shape get a SchemaValidationError with a\n // structured issue list, not a stack trace from deep inside the\n // encrypt path.\n if (this.schema !== undefined) {\n record = await validateSchemaInput(this.schema, record, `put(${id})`)\n }\n\n // Auto-translate missing i18nText translations.\n // Runs BEFORE i18n validation so translated values satisfy the\n // required-locale constraint. Throws TranslatorNotConfiguredError\n // when a field has autoTranslate: true but no hook was configured.\n if (this.i18nFields) {\n const obj = record as Record<string, unknown>\n for (const [field, descriptor] of Object.entries(this.i18nFields)) {\n if (!descriptor.options.autoTranslate) continue\n const value = obj[field]\n if (!value || typeof value !== 'object' || Array.isArray(value)) continue\n const map = value as Record<string, string>\n // Determine which locales need translation. For 'all', translate all\n // declared languages that are missing. For 'any', only translate if\n // none are present. For string[], translate the listed required ones.\n const { languages, required } = descriptor.options\n const missing: string[] = languages.filter(\n (lang) => !(lang in map) || map[lang] === '',\n )\n if (missing.length === 0) continue\n // Find a source locale (first present non-empty value)\n const sourceLocale = languages.find((l) => l in map && map[l] !== '')\n if (!sourceLocale) continue\n if (!this.autoTranslateHook) {\n throw new TranslatorNotConfiguredError(field, this.name)\n }\n // Only translate locales that are actually needed\n const toTranslate =\n required === 'any'\n ? [] // 'any' is already satisfied since sourceLocale exists\n : required === 'all'\n ? missing\n : missing.filter((l) => required.includes(l))\n const translated = { ...map }\n for (const targetLocale of toTranslate) {\n translated[targetLocale] = await this.autoTranslateHook(\n map[sourceLocale]!,\n sourceLocale,\n targetLocale,\n field,\n this.name,\n )\n }\n ;(record as Record<string, unknown>)[field] = translated\n }\n }\n\n // i18nText validation — runs AFTER schema validation so\n // the record shape is trustworthy. Throws MissingTranslationError\n // when required translations are absent.\n if (this.i18nPutValidator !== undefined) {\n this.i18nPutValidator(record)\n }\n\n // Foreign-key ref enforcement. Runs AFTER schema\n // validation (so the record shape is trustworthy) but BEFORE\n // any write (so a failed strict ref leaves no trace on disk,\n // in history, or in the ledger). The Vault handles the\n // actual target lookups — see `enforceRefsOnPut` over there.\n if (this.refEnforcer !== undefined) {\n await this.refEnforcer.enforceRefsOnPut(this.name, record)\n }\n\n // ─── CRDT mode ─────────────────────────────────────────\n // In CRDT mode we always read the raw envelope from the adapter to get\n // the existing CRDT state, merge the incoming record into it, then\n // encrypt the merged CRDT state — bypassing the normal version path.\n if (this.crdtMode) {\n const existingEnvelope = await this.adapter.get(this.vault, this.name, id)\n const existingVersion = existingEnvelope?._v ?? 
0\n const now = new Date().toISOString()\n\n let crdtState: CrdtState\n\n if (this.crdtMode === 'lww-map') {\n let existingState: LwwMapState | undefined\n if (existingEnvelope) {\n const prevJson = await this.decryptJsonString(existingEnvelope)\n const prevParsed = JSON.parse(prevJson) as unknown\n if (prevParsed !== null && typeof prevParsed === 'object' && '_crdt' in prevParsed) {\n existingState = prevParsed as LwwMapState\n }\n }\n crdtState = this.crdtStrategy.buildLwwMapState(record as Record<string, unknown>, existingState, now)\n } else if (this.crdtMode === 'rga') {\n let existingState: RgaState | undefined\n if (existingEnvelope) {\n const prevJson = await this.decryptJsonString(existingEnvelope)\n const prevParsed = JSON.parse(prevJson) as unknown\n if (prevParsed !== null && typeof prevParsed === 'object' && '_crdt' in prevParsed) {\n existingState = prevParsed as RgaState\n }\n }\n const arr = Array.isArray(record) ? record : [record]\n crdtState = this.crdtStrategy.buildRgaState(arr, existingState, generateULID)\n } else {\n // yjs: record is the base64 update string (produced by @noy-db/yjs)\n crdtState = { _crdt: 'yjs', update: record as unknown as string }\n }\n\n const version = existingVersion + 1\n const envelope = await this.encryptJsonString(JSON.stringify(crdtState), version)\n await this.adapter.put(this.vault, this.name, id, envelope)\n\n // Resolve snapshot for cache and history\n const resolvedRecord = this.crdtStrategy.resolveCrdtSnapshot(crdtState) as T\n const existingResolved = existingEnvelope\n ? { record: await this.decryptRecord(existingEnvelope, { skipValidation: true }), version: existingVersion }\n : undefined\n\n if (existingResolved && this.historyConfig.enabled !== false) {\n const histEnvelope = await this.encryptRecord(existingResolved.record, existingResolved.version)\n await this.historyStrategy.saveHistory(this.adapter, this.vault, this.name, id, histEnvelope)\n this.emitter.emit('history:save', { vault: this.vault, collection: this.name, id, version: existingResolved.version })\n if (this.historyConfig.maxVersions) {\n await this.historyStrategy.pruneHistory(this.adapter, this.vault, this.name, id, { keepVersions: this.historyConfig.maxVersions })\n }\n }\n\n if (this.ledger) {\n const appendInput: Parameters<typeof this.ledger.append>[0] = {\n op: 'put', collection: this.name, id, version, actor: this.keyring.userId,\n payloadHash: await this.historyStrategy.envelopePayloadHash(envelope),\n }\n if (existingResolved) appendInput.delta = this.historyStrategy.computePatch(resolvedRecord, existingResolved.record)\n await this.ledger.append(appendInput)\n }\n\n if (this.lazy && this.lru) {\n this.lru.set(id, { record: resolvedRecord, version }, estimateRecordBytes(resolvedRecord))\n await this.maintainPersistedIndexesOnPut(\n id,\n resolvedRecord,\n existingResolved ? existingResolved.record : null,\n version,\n )\n } else {\n this.cache.set(id, { record: resolvedRecord, version })\n this.indexes?.upsert(id, resolvedRecord, existingResolved ? existingResolved.record : null)\n }\n\n await this.onDirty?.(this.name, id, 'put', version)\n this.emitter.emit('change', { vault: this.vault, collection: this.name, id, action: 'put' } satisfies ChangeEvent)\n await this.onAccess?.('put', id)\n return\n }\n // ─── End CRDT mode ──────────────────────────────────────────────────\n\n // Resolve the previous record. 
In eager mode this comes from the\n // in-memory map (no I/O); in lazy mode we have to ask the adapter\n // because the record may have been evicted (or never loaded).\n let existing: { record: T; version: number } | undefined\n if (this.lazy && this.lru) {\n existing = this.lru.get(id)\n if (!existing) {\n const previousEnvelope = await this.adapter.get(this.vault, this.name, id)\n if (previousEnvelope) {\n const previousRecord = await this.decryptRecord(previousEnvelope)\n existing = { record: previousRecord, version: previousEnvelope._v }\n }\n }\n } else {\n await this.ensureHydrated()\n existing = this.cache.get(id)\n }\n\n const version = existing ? existing.version + 1 : 1\n\n // Save history snapshot of the PREVIOUS version before overwriting\n if (existing && this.historyConfig.enabled !== false) {\n const historyEnvelope = await this.encryptRecord(existing.record, existing.version)\n await this.historyStrategy.saveHistory(this.adapter, this.vault, this.name, id, historyEnvelope)\n\n this.emitter.emit('history:save', {\n vault: this.vault,\n collection: this.name,\n id,\n version: existing.version,\n })\n\n // Auto-prune if maxVersions configured\n if (this.historyConfig.maxVersions) {\n await this.historyStrategy.pruneHistory(this.adapter, this.vault, this.name, id, {\n keepVersions: this.historyConfig.maxVersions,\n })\n }\n }\n\n const envelope = await this.encryptRecord(record, version)\n await this.adapter.put(this.vault, this.name, id, envelope)\n\n // Ledger append — AFTER the adapter write succeeds so a failed\n // write never produces an orphan ledger entry. Computing the\n // payloadHash here uses the envelope we just wrote, which is the\n // exact bytes the adapter now holds. The ledger entry records\n // only metadata (collection, id, version, hash) — NOT the record\n // itself — and is then encrypted with the compartment's ledger\n // DEK, preserving zero-knowledge. See `LedgerStore.append`.\n //\n // **Delta history**: if there was a previous version, we\n // compute a JSON Patch from it to the new record and pass it\n // through `append.delta`. The LedgerStore stores the patch in\n // the sibling `_ledger_deltas/` collection and records its hash\n // in the entry's `deltaHash` field. Genesis puts (no existing\n // record) leave `delta` undefined — there's nothing to diff\n // against — and the ledger entry has no `deltaHash`.\n if (this.ledger) {\n const appendInput: Parameters<typeof this.ledger.append>[0] = {\n op: 'put',\n collection: this.name,\n id,\n version,\n actor: this.keyring.userId,\n payloadHash: await this.historyStrategy.envelopePayloadHash(envelope),\n }\n if (existing) {\n // REVERSE patch: describes how to undo this put — i.e., how\n // to transform the NEW record back into the PREVIOUS one.\n // Storing reverse patches lets `ledger.reconstruct()` walk\n // backward from the current state (readily available in the\n // data collection) without needing a forward-walking base\n // snapshot, which would double the storage cost of the\n // delta scheme. See `LedgerStore.reconstruct` for the walk.\n appendInput.delta = this.historyStrategy.computePatch(record, existing.record)\n }\n await this.ledger.append(appendInput)\n }\n\n if (this.lazy && this.lru) {\n this.lru.set(id, { record, version }, estimateRecordBytes(record))\n // Maintain persisted-index side-cars. 
Lazy mode is the\n // only place `persistedIndexes` is populated; eager mode uses the\n // in-memory `CollectionIndexes` above.\n await this.maintainPersistedIndexesOnPut(id, record, existing ? existing.record : null, version)\n } else {\n this.cache.set(id, { record, version })\n // Update secondary indexes incrementally — no-op if no indexes are\n // declared. Pass the previous record (if any) so old buckets are\n // cleaned up before the new value is added.\n this.indexes?.upsert(id, record, existing ? existing.record : null)\n }\n\n await this.onDirty?.(this.name, id, 'put', version)\n\n this.emitter.emit('change', {\n vault: this.vault,\n collection: this.name,\n id,\n action: 'put',\n } satisfies ChangeEvent)\n\n await this.onAccess?.('put', id)\n }\n\n /** Delete a record by ID. */\n async delete(id: string): Promise<void> {\n if (!hasWritePermission(this.keyring, this.name)) {\n throw new ReadOnlyError()\n }\n\n // accounting-period guard (same contract as put;\n // incoming is null because this is a delete).\n if (this.periodGuard !== undefined) {\n const existingEnv = await this.adapter.get(this.vault, this.name, id)\n let priorRecord: Record<string, unknown> | null = null\n if (existingEnv) {\n try {\n priorRecord = (await this.decryptRecord(existingEnv, { skipValidation: true })) as unknown as Record<string, unknown>\n } catch {\n priorRecord = null\n }\n }\n await this.periodGuard(\n existingEnv ? { ts: existingEnv._ts, record: priorRecord } : null,\n null,\n )\n }\n\n // Foreign-key ref enforcement on delete. Runs BEFORE\n // the adapter delete so a `strict` inbound ref with existing\n // references blocks the delete entirely (no partial state, no\n // history churn, no ledger entry for a rejected op). `cascade`\n // recursively deletes the referencing records first, then falls\n // through to the normal delete path below. `warn` is a no-op\n // here — violations surface through `checkIntegrity()`.\n if (this.refEnforcer !== undefined) {\n await this.refEnforcer.enforceRefsOnDelete(this.name, id)\n }\n\n // In lazy mode the record may not be cached; ask the adapter so we\n // can still write a history snapshot if history is enabled.\n let existing: { record: T; version: number } | undefined\n if (this.lazy && this.lru) {\n existing = this.lru.get(id)\n if (!existing && this.historyConfig.enabled !== false) {\n const previousEnvelope = await this.adapter.get(this.vault, this.name, id)\n if (previousEnvelope) {\n const previousRecord = await this.decryptRecord(previousEnvelope)\n existing = { record: previousRecord, version: previousEnvelope._v }\n }\n }\n } else {\n existing = this.cache.get(id)\n }\n\n // Save history snapshot before deleting\n if (existing && this.historyConfig.enabled !== false) {\n const historyEnvelope = await this.encryptRecord(existing.record, existing.version)\n await this.historyStrategy.saveHistory(this.adapter, this.vault, this.name, id, historyEnvelope)\n }\n\n // Capture the previous envelope's payloadHash BEFORE delete so we\n // have a stable reference for the ledger entry. The hash is of\n // whatever was last visible to readers — for a `delete` of a\n // never-existed record, we use the empty string (which the\n // ledger entry's `payloadHash` field tolerates).\n const previousEnvelope = await this.adapter.get(this.vault, this.name, id)\n const previousPayloadHash = await this.historyStrategy.envelopePayloadHash(previousEnvelope)\n\n await this.adapter.delete(this.vault, this.name, id)\n\n // Ledger append — same after-write timing as put(). 
The recorded\n // version is the version that WAS deleted (existing?.version), not\n // a successor. A delete of a missing record still appends an\n // entry with version 0 so the chain captures the intent.\n if (this.ledger) {\n await this.ledger.append({\n op: 'delete',\n collection: this.name,\n id,\n version: existing?.version ?? 0,\n actor: this.keyring.userId,\n payloadHash: previousPayloadHash,\n })\n }\n\n if (this.lazy && this.lru) {\n this.lru.remove(id)\n // Tear down persisted-index side-cars for any declared fields on\n // this record. No-op when no fields are declared or the record\n // had never been indexed (e.g. a delete of a missing id).\n if (existing) {\n await this.maintainPersistedIndexesOnDelete(id, existing.record)\n }\n } else {\n this.cache.delete(id)\n // Remove from secondary indexes — no-op if no indexes are declared\n // or the record wasn't previously indexed.\n if (existing) {\n this.indexes?.remove(id, existing.record)\n }\n }\n\n await this.onDirty?.(this.name, id, 'delete', existing?.version ?? 0)\n\n this.emitter.emit('change', {\n vault: this.vault,\n collection: this.name,\n id,\n action: 'delete',\n } satisfies ChangeEvent)\n\n await this.onAccess?.('delete', id)\n }\n\n /**\n * List all records in the collection.\n *\n * Throws in lazy mode — bulk listing defeats the purpose of lazy\n * hydration. Use `scan()` to iterate over the full collection\n * page-by-page without holding more than `pageSize` records in memory.\n *\n * @param locale Optional locale options. When provided,\n * each record is locale-resolved before being returned.\n */\n async list(locale?: LocaleReadOptions): Promise<T[]> {\n if (this.lazy) {\n throw new Error(\n `Collection \"${this.name}\": list() is not available in lazy mode (prefetch: false). ` +\n `Use collection.scan({ pageSize }) to iterate over the full collection.`,\n )\n }\n await this.ensureHydrated()\n const records = [...this.cache.values()].map(e => e.record)\n if (!locale) return records\n return Promise.all(records.map(r => this.applyLocaleToRecord(r, locale)))\n }\n\n // ─── Bulk operations ─────────────────────────────────────\n\n /**\n * Put many records in one call. Each item is processed sequentially\n * through the normal `put()` path — meaning per-item validation,\n * history snapshots, ledger appends, and change events all still\n * fire. The round-trip saving comes from the adapter staying hot\n * across the batch (no connection re-open, no keyring re-unlock).\n *\n * ## Semantics\n *\n * **Best-effort with per-item results.** If item 5 of 10 fails, items\n * 1–4 are already persisted and items 6–10 are still attempted.\n * The returned {@link PutManyResult} lists every success and failure\n * individually so the caller can decide whether to roll forward\n * (retry the failures) or roll back (manually delete the successes).\n *\n * **True tx-atomic putMany** — pass `{ atomic: true }` to switch\n * to the transaction executor: pre-flight CAS against every\n * item's `expectedVersion`, then commit all ops with best-effort\n * revert on mid-batch failure. Atomic mode throws on failure rather\n * than returning a mixed-results object.\n *\n * ## Change events\n *\n * One `change` event per successfully-written record, same as N\n * single-record puts. 
Subscribers don't need to special-case bulk.\n */\n async putMany(\n entries: ReadonlyArray<readonly [id: string, record: T, opts?: PutManyItemOptions]>,\n options?: PutManyOptions,\n ): Promise<PutManyResult> {\n if (options?.atomic) {\n return this.putManyAtomic(entries)\n }\n const success: string[] = []\n const failures: Array<{ id: string; error: Error }> = []\n for (const entry of entries) {\n const [id, record] = entry\n try {\n await this.put(id, record)\n success.push(id)\n } catch (error) {\n failures.push({ id, error: error as Error })\n }\n }\n return { ok: failures.length === 0, success, failures }\n }\n\n /**\n * Atomic-mode implementation of {@link putMany}. Pre-flights every\n * `expectedVersion`, executes all puts in declaration order, and\n * reverts executed ops via the raw adapter on mid-batch failure.\n * See `runTransaction` for the shared semantics + crash-window caveat.\n *\n * @internal\n */\n private async putManyAtomic(\n entries: ReadonlyArray<readonly [id: string, record: T, opts?: PutManyItemOptions]>,\n ): Promise<PutManyResult> {\n // Phase 1 — pre-flight CAS + prior-envelope snapshot for revert.\n const priors = new Map<string, EncryptedEnvelope | null>()\n for (const [id, , opts] of entries) {\n if (!priors.has(id)) {\n priors.set(id, await this.adapter.get(this.vault, this.name, id))\n }\n if (opts?.expectedVersion !== undefined) {\n const env = priors.get(id) ?? null\n const actual = env?._v ?? 0\n if (actual !== opts.expectedVersion) {\n throw new ConflictError(\n actual,\n `putMany atomic: ${this.vault}/${this.name}/${id} ` +\n `expected v${opts.expectedVersion}, found v${actual}`,\n )\n }\n }\n }\n // Phase 2 — execute; revert on failure.\n const executed: Array<{ id: string; prior: EncryptedEnvelope | null }> = []\n try {\n for (const [id, record] of entries) {\n await this.put(id, record)\n executed.push({ id, prior: priors.get(id) ?? null })\n }\n return { ok: true, success: executed.map((e) => e.id), failures: [] }\n } catch (err) {\n for (const { id, prior } of executed.slice().reverse()) {\n try {\n if (prior) await this.adapter.put(this.vault, this.name, id, prior)\n else await this.adapter.delete(this.vault, this.name, id)\n } catch { /* best-effort */ }\n }\n throw err\n }\n }\n\n /**\n * Get many records in one call. Returns a `Map<id, T | null>` —\n * the result always carries one entry per requested id, with\n * \"not found\" surfaced as an explicit `null` rather than a missing\n * key. Order-stable iteration (Map preserves insertion order =\n * input `ids` order).\n *\n * Reads go through the per-id `get()` path, which means the cache\n * / hydration logic stays consistent with single-record reads.\n */\n async getMany(ids: readonly string[]): Promise<Map<string, T | null>> {\n const result = new Map<string, T | null>()\n for (const id of ids) {\n result.set(id, await this.get(id))\n }\n return result\n }
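\n\n /*\n * Illustrative sketch of the bulk surface (the `invoices` collection\n * and the record values are assumed). `deleteMany` is documented just\n * below.\n *\n * ```ts\n * // Best-effort batch: inspect per-item results.\n * const res = await invoices.putMany([\n * ['inv-001', draftA],\n * ['inv-002', draftB],\n * ])\n * if (!res.ok) retryLater(res.failures)\n *\n * // Atomic batch with optimistic concurrency: throws instead of\n * // returning a mixed-results object.\n * await invoices.putMany(\n * [['inv-001', draftA2, { expectedVersion: 1 }]],\n * { atomic: true },\n * )\n *\n * const byId = await invoices.getMany(['inv-001', 'inv-002']) // Map<id, T | null>\n * await invoices.deleteMany(['inv-001', 'inv-002'])\n * ```\n */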
\n\n /**\n * Delete many records in one call. Same best-effort contract as\n * {@link putMany}: if item 5 fails, items 1–4 are already deleted\n * and items 6–10 are still attempted.\n *\n * Deleting a non-existent id is not a failure — matches the\n * idempotent semantics of single-record `delete()`.\n */\n async deleteMany(ids: readonly string[]): Promise<DeleteManyResult> {\n const success: string[] = []\n const failures: Array<{ id: string; error: Error }> = []\n for (const id of ids) {\n try {\n await this.delete(id)\n success.push(id)\n } catch (error) {\n failures.push({ id, error: error as Error })\n }\n }\n return { ok: failures.length === 0, success, failures }\n }\n\n /**\n * Build a chainable query against the collection. Returns a `Query<T>`\n * builder when called with no arguments.\n *\n * Backward-compatible overload: passing a predicate function returns\n * the filtered records directly (the legacy API). Prefer the chainable\n * form for new code.\n *\n * @example\n * ```ts\n * // New chainable API:\n * const overdue = invoices.query()\n * .where('status', '==', 'open')\n * .where('dueDate', '<', new Date())\n * .orderBy('dueDate')\n * .toArray();\n *\n * // Legacy predicate form (still supported):\n * const drafts = invoices.query(i => i.status === 'draft');\n * ```\n */\n query(): Query<T>\n query(predicate: (record: T) => boolean): T[]\n query(predicate?: (record: T) => boolean): Query<T> | T[] {\n if (this.lazy) {\n throw new Error(\n `Collection \"${this.name}\": query() is not available in lazy mode (prefetch: false). ` +\n `Use collection.lazyQuery() for indexed reads, or collection.scan({ pageSize }) ` +\n `and filter the streamed records with a regular for-await loop.`,\n )\n }\n if (predicate !== undefined) {\n // Legacy form: synchronous predicate filter against the cache.\n return [...this.cache.values()].map(e => e.record).filter(predicate)\n }\n // New form: return a chainable builder bound to this collection's cache.\n const source: QuerySource<T> = {\n snapshot: () => [...this.cache.values()].map(e => e.record),\n subscribe: (cb: () => void) => {\n const handler = (event: ChangeEvent): void => {\n if (event.vault === this.vault && event.collection === this.name) {\n cb()\n }\n }\n this.emitter.on('change', handler)\n return () => this.emitter.off('change', handler)\n },\n // Index-aware fast path for `==` and `in` operators on indexed\n // fields. The Query builder consults these when present and falls\n // back to a linear scan otherwise.\n getIndexes: () => this.getIndexes(),\n lookupById: (id: string) => this.cache.get(id)?.record,\n }\n // Build a JoinContext if the vault passed a join resolver.\n // Without one, .join() on the resulting Query will throw with an\n // actionable error — the case is unreachable in production but\n // matters for unit tests that construct Collection directly.\n const resolver = this.joinResolver\n const leftCollection = this.name\n const joinContext: JoinContext | undefined = resolver\n ? {\n leftCollection,\n resolveRef: (field: string) => resolver.resolveRef(leftCollection, field),\n resolveSource: (collectionName: string) => resolver.resolveSource(collectionName),\n ...(resolver.resolveDictSource\n ? { resolveDictSource: (field: string) => resolver.resolveDictSource!(leftCollection, field) }\n : {}),\n }\n : undefined\n return new Query<T>(source, undefined, joinContext, this.aggregateStrategy)\n }\n\n /**\n * Subscribe to every put/delete on this collection. 
Returns an\n * unsubscribe function.\n *\n * Fires **after** the store write has committed — subscribers see\n * only materialised state, never in-flight or rolled-back writes.\n *\n * This is an event stream, not a reactive value. For reactive\n * \"current array state\" semantics use `query().live()`. Typical\n * use cases for `subscribe()`:\n * - audit-trail / activity-feed UI that lists events as they happen\n * - Pinia-per-collection wiring where each store subscribes once\n * - outbox-style workers that process every new record\n *\n * The callback receives a `CollectionChangeEvent<T>`:\n * - `{ type: 'put', id, record }` — record is the current\n * decrypted value. May be `null` if another op deleted the\n * record between the emit and the handler firing (rare race).\n * - `{ type: 'delete', id, record: null }` — deletion event;\n * the record content is gone by the time the handler runs.\n *\n * The callback is invoked synchronously *with respect to the emit\n * moment*, but the record lookup is async (cache hit for eager\n * collections; one `get()` for lazy collections). If your handler\n * does not need the record, simply ignore it — the lookup is\n * still performed, but it's cheap on the hydrated path.\n *\n * An ergonomic wrapper over `db.on('change', …)` that\n * filters to this collection and hydrates the record.\n */\n subscribe(cb: (event: CollectionChangeEvent<T>) => void): () => void {\n const handler = (event: ChangeEvent): void => {\n if (event.vault !== this.vault || event.collection !== this.name) return\n if (event.action === 'put') {\n // Cache hit in eager mode; get() in lazy mode.\n void this.get(event.id).then(record => {\n cb({ type: 'put', id: event.id, record: record ?? null })\n }).catch(() => {\n // Record vanished between emit + lookup (race). Emit with null\n // so subscribers still see the event they were promised.\n cb({ type: 'put', id: event.id, record: null })\n })\n } else {\n // delete\n cb({ type: 'delete', id: event.id, record: null })\n }\n }\n this.emitter.on('change', handler)\n return () => {\n this.emitter.off('change', handler)\n }\n }
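\n\n /*\n * Illustrative sketch (the `invoices` collection and `feed` sink are\n * assumed): wiring an activity feed to the event stream above.\n *\n * ```ts\n * const stop = invoices.subscribe((ev) => {\n * if (ev.type === 'put') feed.upsert(ev.id, ev.record)\n * else feed.remove(ev.id)\n * })\n * // ...later, when tearing the view down:\n * stop()\n * ```\n */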
\n\n /**\n * Return a minimal JoinableSource view of this collection's\n * in-memory cache. Used by the Vault's `resolveSource`\n * method when another collection's `.join()` needs to probe this\n * one as the right side.\n *\n * The returned object captures the cache reference through a\n * closure, so subsequent mutations to the cache are visible to\n * the joined query. That's intentional: a join that fires after\n * the right-side collection has been updated should see the\n * fresh data.\n *\n * Throws in lazy mode because the cache is bounded and could\n * silently miss records — consistent with the `query()` /\n * `list()` lazy-mode policy. If this becomes a blocker for a\n * real consumer, the fix is to add an async `scan()`-backed\n * variant of this method, which is exactly what streaming\n * joins will need anyway.\n */\n querySourceForJoin(): JoinableSource {\n if (this.lazy) {\n throw new Error(\n `Collection \"${this.name}\": .join() cannot use a lazy-mode ` +\n `collection as the right side. Opening it in eager mode ` +\n `(prefetch: true, default) makes it joinable. Streaming joins ` +\n `over lazy collections are not yet supported.`,\n )\n }\n // Structural source — the join executor calls snapshot() and\n // lookupById(); the live-join executor additionally calls\n // subscribe() so right-side mutations propagate. We capture\n // `this.cache` and `this.emitter` by closure so later mutations\n // are visible to the snapshot view AND drive live re-fires.\n return {\n snapshot: () => [...this.cache.values()].map(e => e.record),\n lookupById: (id: string) => this.cache.get(id)?.record,\n subscribe: (cb: () => void) => {\n const handler = (event: ChangeEvent): void => {\n if (event.vault === this.vault && event.collection === this.name) {\n cb()\n }\n }\n this.emitter.on('change', handler)\n return () => this.emitter.off('change', handler)\n },\n }\n }\n\n /**\n * Cache statistics — useful for devtools, monitoring, and verifying\n * that LRU eviction is happening as expected in lazy mode.\n *\n * In eager mode, returns size only (no hits/misses are tracked because\n * every read is a cache hit by construction). In lazy mode, returns\n * the full LRU stats: `{ hits, misses, evictions, size, bytes }`.\n */\n cacheStats(): CacheStats {\n if (this.lazy && this.lru) {\n return { ...this.lru.stats(), lazy: true }\n }\n return {\n hits: 0,\n misses: 0,\n evictions: 0,\n size: this.cache.size,\n bytes: 0,\n lazy: false,\n }\n }\n\n // ─── History Methods ────────────────────────────────────────────\n\n /** Get version history for a record, newest first. */\n async history(id: string, options?: HistoryOptions): Promise<HistoryEntry<T>[]> {\n const envelopes = await this.historyStrategy.getHistoryEntries(\n this.adapter, this.vault, this.name, id, options,\n )\n\n const entries: HistoryEntry<T>[] = []\n for (const env of envelopes) {\n // History reads skip schema validation — see getVersion() docs.\n const record = await this.decryptRecord(env, { skipValidation: true })\n entries.push({\n version: env._v,\n timestamp: env._ts,\n userId: env._by ?? '',\n record,\n })\n }\n return entries\n }\n\n /**\n * Get a specific past version of a record.\n *\n * History reads intentionally **skip schema validation** — historical\n * records predate the current schema by definition, so validating them\n * against today's shape would be a false positive on any schema\n * evolution. If a caller needs validated history, they should filter\n * and re-put the records through the normal `put()` path.\n */\n async getVersion(id: string, version: number): Promise<T | null> {\n const envelope = await this.historyStrategy.getVersionEnvelope(\n this.adapter, this.vault, this.name, id, version,\n )\n if (!envelope) return null\n return this.decryptRecord(envelope, { skipValidation: true })\n }\n\n /** Revert a record to a past version. Creates a new version with the old content. */\n async revert(id: string, version: number): Promise<void> {\n const oldRecord = await this.getVersion(id, version)\n if (!oldRecord) {\n throw new Error(`Version ${version} not found for record \"${id}\"`)\n }\n await this.put(id, oldRecord)\n }\n\n /**\n * Compare two versions of a record and return the differences.\n * Use version 0 to represent \"before creation\" (empty).\n * Omit versionB to compare against the current version.\n */\n async diff(id: string, versionA: number, versionB?: number): Promise<DiffEntry[]> {\n const recordA = versionA === 0 ? null : await this.resolveVersion(id, versionA)\n const recordB = versionB === 0\n ? null\n : versionB === undefined\n ? await this.resolveCurrentOrVersion(id)\n : await this.resolveVersion(id, versionB)\n return this.historyStrategy.diff(recordA, recordB)\n }
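\n\n /*\n * Illustrative sketch of the history surface (the `invoices`\n * collection is assumed):\n *\n * ```ts\n * const entries = await invoices.history('inv-001') // newest first\n * const v3 = await invoices.getVersion('inv-001', 3) // or null\n * const drift = await invoices.diff('inv-001', 3) // v3 vs current\n * await invoices.revert('inv-001', 3) // re-put v3 as a new version\n * ```\n */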
\n\n /** Resolve a version: try history first, then check if it's the current version. */\n private async resolveVersion(id: string, version: number): Promise<T | null> {\n // Check history\n const fromHistory = await this.getVersion(id, version)\n if (fromHistory) return fromHistory\n // Check if it's the current live version\n await this.ensureHydrated()\n const current = this.cache.get(id)\n if (current && current.version === version) return current.record\n return null\n }\n\n private async resolveCurrentOrVersion(id: string): Promise<T | null> {\n await this.ensureHydrated()\n return this.cache.get(id)?.record ?? null\n }\n\n /** Prune history entries for a record (or all records if id is undefined). */\n async pruneRecordHistory(id: string | undefined, options: PruneOptions): Promise<number> {\n const pruned = await this.historyStrategy.pruneHistory(\n this.adapter, this.vault, this.name, id, options,\n )\n if (pruned > 0) {\n this.emitter.emit('history:prune', {\n vault: this.vault,\n collection: this.name,\n id: id ?? '*',\n pruned,\n })\n }\n return pruned\n }\n\n /** Clear all history for this collection (or a specific record). */\n async clearHistory(id?: string): Promise<number> {\n return this.historyStrategy.clearHistory(this.adapter, this.vault, this.name, id)\n }\n\n // ─── Core Methods ─────────────────────────────────────────────\n\n /**\n * Count records in the collection.\n *\n * In eager mode this returns the in-memory cache size (instant). In\n * lazy mode it asks the adapter via `list()` to enumerate ids — slower\n * but still correct, and avoids loading any record bodies into memory.\n * Reserved ids (persisted-index side-cars and other `_`-prefixed\n * entries) are excluded so only canonical records are counted.\n */\n async count(): Promise<number> {\n if (this.lazy) {\n // adapter.list() returns every id in the namespace, including\n // `_idx/` side-cars in lazy mode (see rebuildIndexes below), so\n // count only canonical record ids.\n const ids = await this.adapter.list(this.vault, this.name)\n return ids.filter((id) => !decodeIdxId(id) && !id.startsWith('_')).length\n }\n await this.ensureHydrated()\n return this.cache.size\n }\n\n // ─── Pagination & Streaming ───────────────────────────────────\n\n /**\n * Fetch a single page of records via the adapter's optional `listPage`\n * extension. Returns the decrypted records for this page plus an opaque\n * cursor for the next page.\n *\n * Pass `cursor: undefined` (or omit it) to start from the beginning.\n * The final page returns `nextCursor: null`.\n *\n * If the adapter does NOT implement `listPage`, this falls back to a\n * synthetic implementation: it loads all ids via `list()`, sorts them,\n * and slices a window. The first call emits a one-time console.warn so\n * developers can spot adapters that should opt into the fast path.\n */\n async listPage(opts: { cursor?: string; limit?: number } = {}): Promise<{\n items: T[]\n nextCursor: string | null\n }> {\n const limit = opts.limit ?? 100\n\n if (this.adapter.listPage) {\n const result = await this.adapter.listPage(this.vault, this.name, opts.cursor, limit)\n const decrypted: T[] = []\n for (const { record, version, id } of await this.decryptPage(result.items)) {\n // Update cache opportunistically — if the page-fetched record isn't\n // in cache yet, populate it. This makes a subsequent .get(id) free.\n // In LAZY mode we deliberately do NOT populate the LRU here:\n // streaming a 100K-record collection should not turn the LRU into\n // a giant write-once buffer that immediately evicts everything.\n // Random-access workloads via .get() are what the LRU is for.\n if (!this.lazy && !this.cache.has(id)) {\n this.cache.set(id, { record, version })\n }\n decrypted.push(record)\n }\n return { items: decrypted, nextCursor: result.nextCursor }\n }\n\n // Fallback: synthetic pagination over list() + get(). 
Slower than the\n // native path because every id requires its own round-trip, but\n // correct for adapters that haven't opted in.\n warnOnceFallback(this.adapter.name ?? 'unknown')\n // Same reserved-id filtering as count(): `_idx/` side-cars and other\n // `_`-prefixed entries are not records and must not be paged out.\n const ids = (await this.adapter.list(this.vault, this.name))\n .filter((id) => !decodeIdxId(id) && !id.startsWith('_'))\n .sort()\n const start = opts.cursor ? parseInt(opts.cursor, 10) : 0\n const end = Math.min(start + limit, ids.length)\n const items: T[] = []\n for (let i = start; i < end; i++) {\n const id = ids[i]!\n const envelope = await this.adapter.get(this.vault, this.name, id)\n if (envelope) {\n const record = await this.decryptRecord(envelope)\n items.push(record)\n // Same lazy-mode skip as the native path: don't pollute the LRU\n // with sequential scan results.\n if (!this.lazy && !this.cache.has(id)) {\n this.cache.set(id, { record, version: envelope._v })\n }\n }\n }\n return {\n items,\n nextCursor: end < ids.length ? String(end) : null,\n }\n }\n\n /**\n * Stream every record in the collection page-by-page as an async\n * iterable, with chainable `.where()` / `.filter()` clauses and a\n * memory-bounded `.aggregate(spec)` terminal.\n *\n * The whole point: process collections larger than RAM without\n * ever holding more than `pageSize` records decrypted at once.\n *\n * @example\n * ```ts\n * // Backward-compatible iteration — unchanged from the previous\n * // async-generator shape. `ScanBuilder` implements AsyncIterable.\n * for await (const record of invoices.scan({ pageSize: 500 })) {\n * await processOne(record)\n * }\n *\n * // Streaming aggregation with O(reducers) memory.\n * const { total, n } = await invoices.scan()\n * .where('year', '==', 2025)\n * .aggregate({ total: sum('amount'), n: count() })\n * ```\n *\n * Returns a `ScanBuilder<T>` instead of the raw async iterator\n * that previous versions used. The builder implements\n * `AsyncIterable<T>`, so every existing `for await … of` call\n * continues to work unchanged. Direct `.next()` calls on the\n * iterator — not idiomatic, not used in the codebase — are no\n * longer supported; upgrade to `for await` or call the new\n * `.aggregate()` terminal.\n *\n * Uses `adapter.listPage` when available; otherwise falls back\n * to the synthetic pagination path with the same one-time\n * warning (`listPage()` routes through that fallback internally).\n */\n scan(opts: { pageSize?: number } = {}): ScanBuilder<T> {\n const pageSize = opts.pageSize ?? 100\n // Build a JoinContext if the vault passed a join resolver\n // — same machinery as `query()`. Without one, `.join()`\n // on the resulting ScanBuilder will throw with an actionable\n // error. The no-resolver case is unreachable in production but\n // matters for unit tests that construct Collection directly.\n const resolver = this.joinResolver\n const leftCollection = this.name\n const joinContext: JoinContext | undefined = resolver\n ? {\n leftCollection,\n resolveRef: (field: string) => resolver.resolveRef(leftCollection, field),\n resolveSource: (collectionName: string) => resolver.resolveSource(collectionName),\n ...(resolver.resolveDictSource\n ? { resolveDictSource: (field: string) => resolver.resolveDictSource!(leftCollection, field) }\n : {}),\n }\n : undefined\n // The page provider closure is bound to this collection's\n // listPage method so the builder is free of any `this`\n // coupling. 
Rebinding through the arrow keeps the unbound-\n // method lint rule happy — matches the pattern used in\n // builder.ts's candidateRecords helper.\n return new ScanBuilder<T>(\n {\n listPage: (listOpts) => this.listPage(listOpts),\n },\n pageSize,\n [],\n [],\n joinContext,\n )\n }\n\n /** Decrypt a page of envelopes returned by `adapter.listPage`. */\n private async decryptPage(\n items: ListPageResult['items'],\n ): Promise<Array<{ id: string; record: T; version: number }>> {\n const out: Array<{ id: string; record: T; version: number }> = []\n for (const { id, envelope } of items) {\n const record = await this.decryptRecord(envelope)\n out.push({ id, record, version: envelope._v })\n }\n return out\n }\n\n // ─── Internal ──────────────────────────────────────────────────\n\n /** Load all records from adapter into memory cache. */\n private async ensureHydrated(): Promise<void> {\n if (this.hydrated) return\n\n const ids = await this.adapter.list(this.vault, this.name)\n for (const id of ids) {\n const envelope = await this.adapter.get(this.vault, this.name, id)\n if (envelope) {\n const record = await this.decryptRecord(envelope)\n this.cache.set(id, { record, version: envelope._v })\n }\n }\n this.hydrated = true\n this.rebuildEagerIndexesFromCache()\n }\n\n /** Hydrate from a pre-loaded snapshot (used by Vault). */\n async hydrateFromSnapshot(records: Record<string, EncryptedEnvelope>): Promise<void> {\n for (const [id, envelope] of Object.entries(records)) {\n const record = await this.decryptRecord(envelope)\n this.cache.set(id, { record, version: envelope._v })\n }\n this.hydrated = true\n this.rebuildEagerIndexesFromCache()\n }\n\n /**\n * Rebuild secondary indexes from the current in-memory cache.\n *\n * Called after any bulk hydration. Incremental put/delete updates\n * are handled by `indexes.upsert()` / `indexes.remove()` directly,\n * so this only fires for full reloads.\n *\n * Synchronous and O(N × indexes.size); for the target scale of\n * 1K–50K records this completes in single-digit milliseconds.\n */\n private rebuildEagerIndexesFromCache(): void {\n const eager = this.indexes\n if (!eager || eager.fields().length === 0) return\n const snapshot: Array<{ id: string; record: T }> = []\n for (const [id, entry] of this.cache) {\n snapshot.push({ id, record: entry.record })\n }\n eager.build(snapshot)\n }\n\n /**\n * Rebuild every declared index from scratch.\n *\n * Eager mode: refreshes the in-memory `CollectionIndexes` from the\n * current cache — O(records × declaredFields).\n *\n * Lazy mode: tears down every `_idx/<field>/<recordId>`\n * side-car, walks the canonical record namespace, and materialises\n * fresh side-cars for every declared field. The in-memory mirror is\n * cleared and re-ingested. Intended for two scenarios:\n * 1. Adding a new indexed field to a collection that already holds\n * records — after the schema change, call `rebuildIndexes()` to\n * backfill the side-cars.\n * 2. Recovery from a catastrophic drift (audit noticed many\n * `index:write-partial` events, operator wants a clean slate).\n *\n * The rebuild is NOT incremental — it's a full bulk-replace. For\n * per-field drift repair, use `reconcileIndex(field)` instead.\n */\n async rebuildIndexes(): Promise<void> {\n if (!this.lazy) {\n await this.ensureHydrated()\n this.rebuildEagerIndexesFromCache()\n return\n }\n\n const persisted = this.persistedIndexes\n if (!persisted) return\n const fields = persisted.fields()\n if (fields.length === 0) return\n\n // 1. 
Collect canonical ids (skip every reserved-namespace id —\n // `_idx/`, `_keyring`, `_history/`, `_ledger_deltas/`, `_meta/`,\n // `_ledger`, `_blob_`, etc. User records may not start with `_`\n // per the monorepo convention used across the hub).\n const allIds = await this.adapter.list(this.vault, this.name)\n const canonicalIds: string[] = []\n const staleIdxIds: string[] = []\n for (const id of allIds) {\n if (decodeIdxId(id)) {\n staleIdxIds.push(id)\n } else if (!id.startsWith('_')) {\n canonicalIds.push(id)\n }\n }\n\n // 2. Drop every existing side-car. Errors here are tolerated — the\n // next step overwrites any remnants. If a side-car is for a\n // field that is no longer declared, the delete still removes\n // the stale row from storage.\n for (const id of staleIdxIds) {\n try { await this.adapter.delete(this.vault, this.name, id) } catch { /* ignore */ }\n }\n persisted.clear()\n\n // 3. Walk records and write fresh side-cars for every declared field.\n for (const recordId of canonicalIds) {\n const envelope = await this.adapter.get(this.vault, this.name, recordId)\n if (!envelope) continue\n const record = await this.decryptRecord(envelope, { skipValidation: true })\n await this.maintainPersistedIndexesOnPut(recordId, record, null, envelope._v)\n }\n\n this.persistedIndexesLoaded = true\n }\n\n /**\n * Compare the persisted `_idx/<field>/*` side-cars against the\n * canonical records for a single field, reporting the drift (and\n * optionally repairing it).\n *\n * Lazy mode only. Eager mode throws — the in-memory index cannot\n * drift.\n *\n * `missing` — record ids whose value is indexable but no side-car\n * exists. Happens when a `put()` succeeded but the side-car put\n * failed (surfaced as `index:write-partial`).\n * `stale` — side-car ids pointing to a record that no longer exists\n * or whose current value no longer matches the side-car body.\n * `applied` — number of writes that were actually applied (always 0\n * when `dryRun: true`).\n *\n * Design reference: acceptance criteria.\n */\n async reconcileIndex(\n field: string,\n opts: { dryRun?: boolean } = {},\n ): Promise<{ field: string; missing: string[]; stale: string[]; applied: number }> {\n if (!this.lazy) {\n throw new Error(\n `Collection \"${this.name}\": reconcileIndex is only meaningful in lazy mode ` +\n `(prefetch: false). Eager mode maintains indexes in memory with no drift.`,\n )\n }\n const persisted = this.persistedIndexes\n if (!persisted) {\n throw new Error(\n `Collection \"${this.name}\": indexing is disabled on this Noydb instance. ` +\n `Pass \\`withIndexing()\\` from \"@noy-db/hub/indexing\" to \\`createNoydb({ indexStrategy })\\`.`,\n )\n }\n if (!persisted.has(field)) {\n throw new Error(\n `Collection \"${this.name}\": field \"${field}\" is not declared in indexes. ` +\n `Declare it in the collection options before reconciling.`,\n )\n }\n\n const dryRun = opts.dryRun === true\n const allIds = await this.adapter.list(this.vault, this.name)\n\n // Map side-car recordId → stored value (if readable). 
Also capture\n // \"stale\" side-cars whose field matches but whose record is gone.\n const sidecar = new Map<string, unknown>()\n const sidecarIds = new Map<string, string>() // recordId -> sidecar id\n for (const id of allIds) {\n const decoded = decodeIdxId(id)\n if (!decoded || decoded.field !== field) continue\n sidecarIds.set(decoded.recordId, id)\n const env = await this.adapter.get(this.vault, this.name, id)\n if (!env) continue\n try {\n const body = JSON.parse(await this.decryptJsonString(env)) as { value: unknown }\n sidecar.set(decoded.recordId, body.value)\n } catch {\n // Unreadable — treat as stale so it gets rewritten.\n sidecar.set(decoded.recordId, undefined)\n }\n }\n\n // Walk canonical records and compare against side-car state.\n const missing: string[] = []\n const stale: string[] = []\n const fixesPut: Array<{ recordId: string; record: T; version: number }> = []\n for (const id of allIds) {\n if (decodeIdxId(id)) continue\n if (id.startsWith('_')) continue\n const env = await this.adapter.get(this.vault, this.name, id)\n if (!env) continue\n const record = await this.decryptRecord(env, { skipValidation: true })\n const live = readPersistedValue(record as unknown as Record<string, unknown>, field)\n const stored = sidecar.get(id)\n const hasSidecar = sidecarIds.has(id)\n const indexable = live !== null && live !== undefined\n\n if (indexable && !hasSidecar) {\n missing.push(id)\n fixesPut.push({ recordId: id, record, version: env._v })\n } else if (indexable && hasSidecar && !valuesMatch(stored, live)) {\n // Side-car body drifted from live value (e.g. partial write\n // after an update). Rewrite so lookups agree with reality.\n missing.push(id)\n fixesPut.push({ recordId: id, record, version: env._v })\n } else if (!indexable && hasSidecar) {\n // Record exists but its value is no longer indexable (null/\n // undefined). The side-car is stale.\n stale.push(sidecarIds.get(id)!)\n }\n sidecarIds.delete(id)\n }\n // Any side-car whose canonical record vanished is stale.\n for (const [, idxId] of sidecarIds) stale.push(idxId)\n\n let applied = 0\n if (!dryRun) {\n for (const idxId of stale) {\n try {\n await this.adapter.delete(this.vault, this.name, idxId)\n applied++\n } catch { /* ignore — next reconcile picks it up */ }\n }\n for (const fix of fixesPut) {\n await this.maintainPersistedIndexesOnPut(fix.recordId, fix.record, null, fix.version)\n applied++\n }\n // In-memory mirror is authoritative for query dispatch — make\n // sure it matches what's on disk now.\n persisted.clear()\n this.persistedIndexesLoaded = false\n await this.ensurePersistedIndexesLoaded()\n }\n\n return { field, missing, stale, applied }\n }
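\n\n /*\n * Illustrative sketch (lazy-mode `invoices` collection assumed):\n * drift inspection and repair with the method above.\n *\n * ```ts\n * // Report drift without writing anything:\n * const report = await invoices.reconcileIndex('status', { dryRun: true })\n * console.log(report.missing.length, report.stale.length) // applied === 0\n *\n * // Repair in place (what `reconcileOnOpen: 'auto'` runs for each\n * // declared field on the first lazy-mode query):\n * await invoices.reconcileIndex('status')\n * ```\n */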
eager : null\n }\n\n /**\n * Return a `BlobSet` for the given record id.\n *\n * No I/O is performed until you call a method on the handle.\n *\n * ```ts\n * const blobs = invoices.blob('inv-001')\n *\n * // Upload a PDF (deduplicates automatically, MIME auto-detected)\n * await blobs.put('receipt.pdf', pdfBytes)\n *\n * // List slots\n * const files = await blobs.list() // SlotInfo[]\n *\n * // Serve as HTTP response (Content-Type, ETag, streaming body)\n * const res = await blobs.response('receipt.pdf', { inline: true })\n *\n * // Publish a named version (amendment versioning)\n * await blobs.publish('receipt.pdf', 'issued-2025-01')\n *\n * // Raw bytes\n * const bytes = await blobs.get('receipt.pdf')\n * ```\n *\n * Blobs are stored in internal collections (`_blob_slots_*`, `_blob_index`,\n * `_blob_chunks`, `_blob_versions_*`) that are excluded from queries and\n * `list()`. Slot metadata uses this collection's DEK; chunk data uses a\n * vault-shared `_blob` DEK (enabling cross-collection deduplication).\n */\n blob(id: string): BlobSet {\n // tree-shake refactor: delegate to `blobStrategy`. The default\n // is `NO_BLOBS` (throws with a message pointing at the `@noy-db/hub/blobs`\n // subpath). Users who want blob storage pass `blobs()` from that\n // subpath into `createNoydb({ blobStrategy: blobs() })`, which\n // threads the active strategy through Vault → Collection.\n return this.blobStrategy.openSlot({\n store: this.adapter,\n vault: this.vault,\n collection: this.name,\n recordId: id,\n getDEK: this.getDEK,\n encrypted: this.encrypted,\n userId: this.keyring.userId,\n })\n }\n\n /** Get all records as encrypted envelopes (for dump). */\n async dumpEnvelopes(): Promise<Record<string, EncryptedEnvelope>> {\n await this.ensureHydrated()\n const result: Record<string, EncryptedEnvelope> = {}\n for (const [id, entry] of this.cache) {\n result[id] = await this.encryptRecord(entry.record, entry.version)\n }\n return result\n }\n\n /**\n * Apply locale resolution to a record.\n *\n * Called from `get()` and `list()` when locale options are present.\n * Uses the effective locale: per-call `locale` takes precedence over\n * `this.defaultLocale`.\n *\n * - i18nText fields: replaced with the resolved string (or the full\n * map when `locale === 'raw'`).\n * - dictKey fields: `<field>Label` virtual fields added.\n *\n * Returns the record unchanged when no locale is active and no i18n/dict\n * fields are registered.\n */\n private async applyLocaleToRecord(\n record: T,\n localeOpts?: LocaleReadOptions,\n ): Promise<T> {\n const hasI18n = this.i18nFields && Object.keys(this.i18nFields).length > 0\n const hasDict = this.dictKeyFields && Object.keys(this.dictKeyFields).length > 0\n if (!hasI18n && !hasDict) return record\n\n const locale = localeOpts?.locale ?? this.defaultLocale\n if (!locale) return record\n\n let result = record as unknown as Record<string, unknown>\n\n // 1. i18nText resolution\n if (hasI18n && this.i18nFields) {\n result = this.i18nStrategy.applyI18nLocale(result, this.i18nFields, locale, localeOpts?.fallback)\n }\n\n // 2. 
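dictKey label resolution. An illustrative before/after\n // (hypothetical fields; `title` is declared i18nText, `status` is\n // declared dictKey with a French label of 'Ouvert'):\n //\n //   stored:       { title: { en: 'Invoice', fr: 'Facture' }, status: 'open' }\n //   locale 'fr':  { title: 'Facture', status: 'open', statusLabel: 'Ouvert' }\n //   locale 'raw': { title: { en: 'Invoice', fr: 'Facture' }, status: 'open' }\n //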
\n if (hasDict && this.dictKeyFields && this.dictLabelResolver && locale !== 'raw') {\n const withLabels = { ...result }\n for (const [field, desc] of Object.entries(this.dictKeyFields)) {\n const key = result[field]\n if (typeof key !== 'string') continue\n const label = await this.dictLabelResolver(\n desc.name,\n key,\n locale,\n localeOpts?.fallback,\n )\n if (label !== undefined) {\n withLabels[`${field}Label`] = label\n }\n }\n result = withLabels\n }\n\n return result as T\n }\n\n /**\n * Write / update / delete the `_idx/<field>/<recordId>` side-cars for\n * every declared persistence-index field on this collection after a\n * successful main-record `put()`.\n *\n * Timing: called AFTER `adapter.put()` of the main record succeeds, so\n * a failed main write never leaves a stale index entry. Side-car write\n * failures do NOT fail the overall `put()` — the main record is already\n * durably committed. Per-field failures surface as\n * `IndexWriteFailureError` on the emitter's `index:write-partial`\n * channel and the operator runs a reconcile pass later.\n *\n * Null/undefined field values are not indexed — matches the\n * `PersistedCollectionIndex.stringifyKey` contract. If the prior value\n * was non-null and the new value is null, the side-car is deleted.\n */\n private async maintainPersistedIndexesOnPut(\n id: string,\n newRecord: T,\n previousRecord: T | null,\n version: number,\n ): Promise<void> {\n const persisted = this.persistedIndexes\n if (!persisted) return\n const defs = persisted.definitions()\n if (defs.length === 0) return\n\n const newRec = newRecord as unknown as Record<string, unknown>\n const prevRec = previousRecord as unknown as Record<string, unknown> | null\n\n for (const def of defs) {\n const newValue = extractIndexValue(newRec, def)\n const previousValue = prevRec ? extractIndexValue(prevRec, def) : null\n\n // Update the in-memory mirror first — it's the authoritative source\n // for query dispatch. 
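\n //\n // e.g. (hypothetical ids) a put of record 'inv-7' whose clientId is\n // 'c-42' writes the side-car id `_idx/clientId/inv-7` with the\n // (encrypted) body { field: 'clientId', value: 'c-42',\n // recordId: 'inv-7', writtenAt: '<ISO timestamp>' }.\n //\n // 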
If the adapter write below fails, the mirror\n // still reflects intended state; the reconciler compares mirror\n // against side-cars on next run.\n persisted.upsert(id, def.key, newValue, previousValue)\n\n const idxId = encodeIdxId(def.key, id)\n try {\n if (newValue === null || newValue === undefined) {\n // Clear any pre-existing side-car for this (field, record).\n if (previousValue !== null && previousValue !== undefined) {\n await this.adapter.delete(this.vault, this.name, idxId)\n }\n } else {\n const body = JSON.stringify({\n field: def.key,\n value: serializeIndexValue(newValue),\n recordId: id,\n writtenAt: new Date().toISOString(),\n })\n const envelope = await this.encryptJsonString(body, version)\n await this.adapter.put(this.vault, this.name, idxId, envelope)\n }\n } catch (cause) {\n this.emitter.emit('index:write-partial', {\n vault: this.vault,\n collection: this.name,\n id,\n action: 'put',\n error: new IndexWriteFailureError({ recordId: id, field: def.key, op: 'put', cause }),\n })\n }\n }\n }\n\n /**\n * Tear down `_idx/<field>/<recordId>` side-cars for a deleted record.\n * Mirror state updates regardless of adapter outcome; adapter failures\n * surface on `index:write-partial` the same way put does.\n */\n private async maintainPersistedIndexesOnDelete(id: string, previousRecord: T): Promise<void> {\n const persisted = this.persistedIndexes\n if (!persisted) return\n const defs = persisted.definitions()\n if (defs.length === 0) return\n\n const prevRec = previousRecord as unknown as Record<string, unknown>\n for (const def of defs) {\n const previousValue = extractIndexValue(prevRec, def)\n if (previousValue !== null && previousValue !== undefined) {\n persisted.remove(id, def.key, previousValue)\n }\n\n const idxId = encodeIdxId(def.key, id)\n try {\n await this.adapter.delete(this.vault, this.name, idxId)\n } catch (cause) {\n this.emitter.emit('index:write-partial', {\n vault: this.vault,\n collection: this.name,\n id,\n action: 'delete',\n error: new IndexWriteFailureError({ recordId: id, field: def.key, op: 'delete', cause }),\n })\n }\n }\n }\n\n /**\n * Bulk-load the persisted-index mirror from `_idx/<field>/*` side-cars\n * on first lazy-mode query. Idempotent — subsequent calls short-circuit\n * on the `persistedIndexesLoaded` flag.\n *\n * Listing the whole id namespace is acceptable here because the caller\n * has already decided to pay a first-query cost (this is the indexed\n * equivalent of lazy-mode hydration, not a per-query scan).\n */\n private async ensurePersistedIndexesLoaded(): Promise<void> {\n if (this.persistedIndexesLoaded) return\n const persisted = this.persistedIndexes\n if (!persisted || persisted.fields().length === 0) {\n this.persistedIndexesLoaded = true\n return\n }\n\n const ids = await this.adapter.list(this.vault, this.name)\n const byField = new Map<string, Array<{ recordId: string; value: unknown }>>()\n for (const id of ids) {\n const decoded = decodeIdxId(id)\n if (!decoded) continue\n if (!persisted.has(decoded.field)) continue\n const envelope = await this.adapter.get(this.vault, this.name, id)\n if (!envelope) continue\n try {\n const json = await this.decryptJsonString(envelope)\n const body = JSON.parse(json) as { value: unknown; recordId: string }\n if (typeof body.recordId !== 'string') continue\n const rows = byField.get(decoded.field) ?? 
[]\n rows.push({ recordId: body.recordId, value: body.value })\n byField.set(decoded.field, rows)\n } catch {\n // Skip unreadable side-cars — the reconciler picks them up later.\n }\n }\n for (const [field, rows] of byField) {\n persisted.ingest(field, rows)\n }\n this.persistedIndexesLoaded = true\n\n // auto-reconcile on first query. The mirror is now\n // populated from whatever side-cars existed; reconcileIndex will\n // diff that against the canonical records and repair (or just\n // report) drift per-field. Skip on the inner reload triggered by\n // reconcileIndex itself — see `autoReconciling` guard.\n if (this.reconcileOnOpen !== 'off' && !this.autoReconciling) {\n await this.autoReconcile()\n }\n }\n\n /**\n * Walk every declared persisted-index field, run `reconcileIndex`\n * per the configured policy, and emit `index:reconciled` for each.\n * Called internally by `ensurePersistedIndexesLoaded()` — exposed as\n * a private helper for readability, not as a public API (the public\n * entry points are `reconcileIndex` and `rebuildIndexes`).\n */\n private async autoReconcile(): Promise<void> {\n const persisted = this.persistedIndexes\n if (!persisted) return\n this.autoReconciling = true\n try {\n const dryRun = this.reconcileOnOpen === 'dry-run'\n for (const def of persisted.definitions()) {\n try {\n const report = await this.reconcileIndex(def.key, { dryRun })\n this.emitter.emit('index:reconciled', {\n vault: this.vault,\n collection: this.name,\n field: def.key,\n missing: report.missing,\n stale: report.stale,\n applied: report.applied,\n skipped: false,\n })\n } catch {\n // Tolerate a single field's failure — a broken reconcile\n // shouldn't prevent the rest of the collection from\n // working. The `index:write-partial` channel captures\n // per-field failures during put/delete; this is its\n // sibling for the reconcile path.\n }\n }\n } finally {\n this.autoReconciling = false\n }\n }\n\n /**\n * Construct a `LazyQuery<T>` bound to this collection. Used by the\n * lazy-mode branch of `query()` and kept private because callers should\n * always go through `query()` to pick up the eager/lazy dispatch.\n */\n /**\n * Build a chainable indexed-read query against a lazy-mode collection.\n *\n * Companion to `query()`, which is eager-mode only and materialises a\n * snapshot. `lazyQuery()` dispatches every read through the persisted\n * index side-cars — no bulk decrypt, no snapshot. Every field touched by\n * `.where(...)` or `.orderBy(...)` MUST be declared in `indexes`;\n * otherwise `.toArray()` throws `IndexRequiredError`.\n *\n * The returned builder is always Promise-returning on its terminals\n * (`toArray`, `first`, `count`) because candidate records are decrypted\n * from the adapter on demand.\n *\n * @example\n * ```ts\n * const disbursements = vault.collection<Disbursement>('disbursements', {\n * prefetch: false,\n * cache: { maxRecords: 1000 },\n * indexes: ['clientId', 'period'],\n * })\n * const rows = await disbursements.lazyQuery()\n * .where('clientId', '==', 'c-42')\n * .orderBy('period', 'desc')\n * .limit(50)\n * .toArray()\n * ```\n *\n * Throws at call time when the collection is in eager mode — use\n * `query()` there. Throws if no index is declared, because a lazy\n * query with no index would need to enumerate the whole collection.\n */\n lazyQuery(): LazyQuery<T> {\n if (!this.lazy) {\n throw new Error(\n `Collection \"${this.name}\": lazyQuery() is only available in lazy mode ` +\n `(prefetch: false). 
Use collection.query() for eager-mode chainable reads.`,\n )\n }\n const persisted = this.persistedIndexes\n if (!persisted) {\n throw new Error(\n `Collection \"${this.name}\": lazyQuery() requires indexing to be enabled. ` +\n `Pass \`withIndexing()\` from \"@noy-db/hub/indexing\" to ` +\n `\`createNoydb({ indexStrategy: withIndexing() })\`.`,\n )\n }\n if (persisted.fields().length === 0) {\n throw new Error(\n `Collection \"${this.name}\": lazyQuery() requires at least one field declared ` +\n `in \`indexes\`. Declare the fields you'll filter or sort by, or use ` +\n `collection.scan({ pageSize }) for non-indexed iteration.`,\n )\n }\n const source: LazyQuerySource<T> = {\n collectionName: this.name,\n persistedIndexes: persisted,\n ensurePersistedIndexesLoaded: () => this.ensurePersistedIndexesLoaded(),\n getRecord: (id: string) => this.get(id),\n }\n return new LazyQuery<T>(source)\n }\n\n /**\n * Low-level: encrypt a pre-serialised JSON string into an EncryptedEnvelope.\n * Used by both the normal record path and the CRDT path (which serialises\n * a CrdtState rather than a T).\n */\n private async encryptJsonString(json: string, version: number): Promise<EncryptedEnvelope> {\n const by = this.keyring.userId\n\n if (!this.encrypted) {\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: version,\n _ts: new Date().toISOString(),\n _iv: '',\n _data: json,\n _by: by,\n }\n }\n\n const dek = await this.getDEK(this.name)\n const { iv, data } = await encrypt(json, dek)\n\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: version,\n _ts: new Date().toISOString(),\n _iv: iv,\n _data: data,\n _by: by,\n }\n }\n\n private async encryptRecord(record: T, version: number): Promise<EncryptedEnvelope> {\n const base = await this.encryptJsonString(JSON.stringify(record), version)\n if (!this.deterministicFields || !this.encrypted) return base\n\n // Compute deterministic-ciphertext slots for every\n // declared field. Non-primitive values are JSON-stringified so\n // objects/arrays still dedupe on structural equality.\n const dek = await this.getDEK(this.name)\n const rec = record as unknown as Record<string, unknown>\n const det: Record<string, string> = {}\n for (const field of this.deterministicFields) {\n const value = rec[field]\n if (value === undefined || value === null) continue\n const plaintext = typeof value === 'string' ? value : JSON.stringify(value)\n const { iv, data } = await encryptDeterministic(plaintext, dek, `${this.name}/${field}`)\n det[field] = `${iv}:${data}`\n }\n if (Object.keys(det).length === 0) return base\n return { ...base, _det: det }\n }\n\n /**\n * Find the first record whose deterministic field matches\n * the given plaintext. Returns `null` when no match exists.\n *\n * Reads every envelope via the adapter and compares the stored\n * `_det[field]` to a freshly computed deterministic ciphertext — no\n * record bodies are decrypted during the search, which is the whole\n * point of a deterministic index.\n *\n * Throws when the field is not declared in `deterministicFields`, so a\n * typo fails loudly at the call site rather than silently returning\n * null forever.\n */\n async findByDet(field: string, value: unknown): Promise<T | null> {\n if (!this.deterministicFields || !this.deterministicFields.has(field)) {\n throw new Error(\n `Collection \"${this.name}\": field \"${field}\" is not declared in deterministicFields`,\n )\n }\n if (!this.encrypted) {\n throw new Error(\n `Collection \"${this.name}\": findByDet is only meaningful on encrypted collections`,\n )\n }\n const dek = await this.getDEK(this.name)\n const plaintext = typeof value === 'string' ? 
value : JSON.stringify(value)\n const { iv, data } = await encryptDeterministic(plaintext, dek, `${this.name}/${field}`)\n const target = `${iv}:${data}`\n\n const ids = await this.adapter.list(this.vault, this.name)\n for (const id of ids) {\n const env = await this.adapter.get(this.vault, this.name, id)\n if (!env || !env._det) continue\n if (env._det[field] === target) {\n return this.decryptRecord(env)\n }\n }\n return null\n }\n\n /**\n * return every record whose deterministic field matches.\n * Same semantics as {@link findByDet} but without the short-circuit.\n */\n async queryByDet(field: string, value: unknown): Promise<T[]> {\n if (!this.deterministicFields || !this.deterministicFields.has(field)) {\n throw new Error(\n `Collection \"${this.name}\": field \"${field}\" is not declared in deterministicFields`,\n )\n }\n if (!this.encrypted) {\n throw new Error(\n `Collection \"${this.name}\": queryByDet is only meaningful on encrypted collections`,\n )\n }\n const dek = await this.getDEK(this.name)\n const plaintext = typeof value === 'string' ? value : JSON.stringify(value)\n const { iv, data } = await encryptDeterministic(plaintext, dek, `${this.name}/${field}`)\n const target = `${iv}:${data}`\n\n const ids = await this.adapter.list(this.vault, this.name)\n const matches: T[] = []\n for (const id of ids) {\n const env = await this.adapter.get(this.vault, this.name, id)\n if (!env || !env._det) continue\n if (env._det[field] === target) {\n matches.push(await this.decryptRecord(env))\n }\n }\n return matches\n }\n\n // ─── Hierarchical Access ──────────────────────────\n\n private assertTiersEnabled(): void {\n if (!this.tiers) {\n throw new Error(\n `Collection \"${this.name}\": hierarchical tiers are not enabled. ` +\n `Pass { tiers: [0, 1, 2, …] } to vault.collection() to opt in.`,\n )\n }\n }\n\n private assertDeclaredTier(tier: number): void {\n if (tier < 0 || !Number.isInteger(tier)) {\n throw new Error(`Collection \"${this.name}\": tier must be a non-negative integer, got ${tier}`)\n }\n if (tier === 0) return\n if (!this.tiers || !this.tiers.has(tier)) {\n throw new Error(\n `Collection \"${this.name}\": tier ${tier} is not declared in { tiers: [...] }`,\n )\n }\n }\n\n /**\n * tier-aware put. Encrypts the record with the\n * collection's tier-N DEK and stamps `_tier: N` on the envelope. The\n * caller's keyring must hold the tier-N DEK (directly, by\n * delegation, or by virtue of being the grantor); otherwise throws\n * `TierNotGrantedError`.\n *\n * accepts an optional `elevation` context. When\n * present, the emitted cross-tier event is stamped with\n * `authorization: 'elevation'`, the elevation's reason, and the\n * caller's pre-elevation tier. `vault.elevate(...).collection().put`\n * threads this through; direct `putAtTier` calls leave it undefined\n * and fall back to the inherent-write event shape.\n */\n async putAtTier(\n id: string,\n record: T,\n tier: number,\n opts?: { elevation?: { reason: string; fromTier: number } },\n ): Promise<void> {\n this.assertTiersEnabled()\n this.assertDeclaredTier(tier)\n assertTierAccess(this.keyring, this.name, tier)\n\n const key = dekKey(this.name, tier)\n const dek = await this.getDEK(key)\n\n const existing = await this.adapter.get(this.vault, this.name, id)\n const version = existing ? 
existing._v + 1 : 1\n const json = JSON.stringify(record)\n const { iv, data } = await encrypt(json, dek)\n const envelope: EncryptedEnvelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: version,\n _ts: new Date().toISOString(),\n _iv: iv,\n _data: data,\n _by: this.keyring.userId,\n ...(tier > 0 && { _tier: tier }),\n }\n\n await this.adapter.put(this.vault, this.name, id, envelope)\n\n if (tier > 0) {\n this.emitCrossTierEvent({\n actor: this.keyring.userId,\n collection: this.name,\n id,\n tier,\n authorization: opts?.elevation ? 'elevation' : 'inherent',\n op: 'put',\n ts: envelope._ts,\n ...(opts?.elevation && {\n reason: opts.elevation.reason,\n elevatedFrom: opts.elevation.fromTier,\n }),\n })\n }\n }\n\n /**\n * tier-aware get. When the stored record is at a\n * tier the caller cannot decrypt:\n * - `'invisibility'` mode (default) → returns `null`.\n * - `'ghost'` mode → returns a `GhostRecord` placeholder with the\n * tier and the record id (the record exists but contents are\n * withheld).\n *\n * Fully-cleared reads return the plaintext record and fire a\n * cross-tier event when `_tier > 0`.\n */\n async getAtTier(id: string): Promise<T | GhostRecord | null> {\n this.assertTiersEnabled()\n const envelope = await this.adapter.get(this.vault, this.name, id)\n if (!envelope) return null\n const tier = envelope._tier ?? 0\n if (tier === 0) {\n return this.decryptRecord(envelope)\n }\n\n const key = dekKey(this.name, tier)\n if (!this.keyring.deks.has(key)) {\n if (this.tierMode === 'ghost') {\n return { _ghost: true, _tier: tier } as GhostRecord\n }\n return null\n }\n\n const dek = await this.getDEK(key)\n const plaintext = await decrypt(envelope._iv, envelope._data, dek)\n const record = JSON.parse(plaintext) as T\n\n this.emitCrossTierEvent({\n actor: this.keyring.userId,\n collection: this.name,\n id,\n tier,\n authorization: this.isElevatorOrOwner() ? 'inherent' : 'delegation',\n op: 'get',\n ts: new Date().toISOString(),\n })\n\n return record\n }\n\n /**\n * list ids grouped by the caller's readability.\n * Returns only ids whose tier the caller can read. Above-tier ids\n * are omitted in `'invisibility'` mode and included (with tier\n * metadata) in `'ghost'` mode.\n */\n async listAtTier(): Promise<Array<{ id: string; tier: number; readable: boolean }>> {\n this.assertTiersEnabled()\n const ids = await this.adapter.list(this.vault, this.name)\n const out: Array<{ id: string; tier: number; readable: boolean }> = []\n for (const id of ids) {\n const env = await this.adapter.get(this.vault, this.name, id)\n if (!env) continue\n const tier = env._tier ?? 0\n const readable = tier === 0 || this.keyring.deks.has(dekKey(this.name, tier))\n if (!readable && this.tierMode === 'invisibility') continue\n out.push({ id, tier, readable })\n }\n return out\n }\n\n /**\n * elevate a record to a higher tier. Re-encrypts with\n * the target tier's DEK. The caller must hold DEKs for both the\n * current tier (to decrypt) and the target tier (to re-encrypt).\n * Stamps `_elevatedBy` with the caller id so `demote()` can check\n * the reverse operation.\n */\n async elevate(id: string, toTier: number): Promise<void> {\n this.assertTiersEnabled()\n this.assertDeclaredTier(toTier)\n assertTierAccess(this.keyring, this.name, toTier)\n\n const envelope = await this.adapter.get(this.vault, this.name, id)\n if (!envelope) throw new Error(`Record \"${id}\" not found in collection \"${this.name}\"`)\n const fromTier = envelope._tier ?? 
0\n if (toTier === fromTier) return\n if (toTier < fromTier) {\n throw new Error(`Use demote() to lower the tier of \"${id}\" from ${fromTier} to ${toTier}`)\n }\n // Caller must have access at the existing tier to decrypt.\n if (fromTier > 0) assertTierAccess(this.keyring, this.name, fromTier)\n\n const fromKey = dekKey(this.name, fromTier)\n const toKey = dekKey(this.name, toTier)\n const fromDek = await this.getDEK(fromKey)\n const toDek = await this.getDEK(toKey)\n\n const plaintext = await decrypt(envelope._iv, envelope._data, fromDek)\n const { iv, data } = await encrypt(plaintext, toDek)\n const now = new Date().toISOString()\n const next: EncryptedEnvelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: envelope._v + 1,\n _ts: now,\n _iv: iv,\n _data: data,\n _by: this.keyring.userId,\n _tier: toTier,\n _elevatedBy: this.keyring.userId,\n }\n await this.adapter.put(this.vault, this.name, id, next)\n\n this.emitCrossTierEvent({\n actor: this.keyring.userId,\n collection: this.name,\n id,\n tier: toTier,\n authorization: 'elevation',\n op: 'elevate',\n ts: now,\n })\n }\n\n /**\n * demote a record to a lower tier. Allowed only for\n * the user who performed the last elevation or an owner.\n */\n async demote(id: string, toTier: number): Promise<void> {\n this.assertTiersEnabled()\n if (toTier < 0) throw new Error(`Cannot demote to negative tier ${toTier}`)\n\n const envelope = await this.adapter.get(this.vault, this.name, id)\n if (!envelope) throw new Error(`Record \"${id}\" not found in collection \"${this.name}\"`)\n const fromTier = envelope._tier ?? 0\n if (toTier === fromTier) return\n if (toTier > fromTier) {\n throw new Error(`Use elevate() to raise the tier of \"${id}\" from ${fromTier} to ${toTier}`)\n }\n const isOwner = this.keyring.role === 'owner'\n const isOriginalElevator = envelope._elevatedBy === this.keyring.userId\n if (!isOwner && !isOriginalElevator) {\n throw new TierDemoteDeniedError(id, fromTier)\n }\n // Caller must still hold the DEK of the current tier to decrypt.\n assertTierAccess(this.keyring, this.name, fromTier)\n if (toTier > 0) this.assertDeclaredTier(toTier)\n\n const fromDek = await this.getDEK(dekKey(this.name, fromTier))\n const toDek = await this.getDEK(dekKey(this.name, toTier))\n\n const plaintext = await decrypt(envelope._iv, envelope._data, fromDek)\n const { iv, data } = await encrypt(plaintext, toDek)\n const now = new Date().toISOString()\n const next: EncryptedEnvelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: envelope._v + 1,\n _ts: now,\n _iv: iv,\n _data: data,\n _by: this.keyring.userId,\n ...(toTier > 0 && { _tier: toTier }),\n }\n await this.adapter.put(this.vault, this.name, id, next)\n\n this.emitCrossTierEvent({\n actor: this.keyring.userId,\n collection: this.name,\n id,\n tier: fromTier,\n authorization: 'elevation',\n op: 'demote',\n ts: now,\n })\n }\n\n private isElevatorOrOwner(): boolean {\n return this.keyring.role === 'owner' || this.keyring.role === 'admin'\n }\n\n private emitCrossTierEvent(event: CrossTierAccessEvent): void {\n try {\n this.onCrossTierAccess?.(event)\n } catch {\n // notification sink failures must never block a tier operation\n }\n }\n\n /** Low-level: decrypt an envelope and return the raw JSON string. 
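\n *\n * On unencrypted collections the envelope stores plaintext JSON with an\n * empty `_iv`, so this returns `_data` as-is. An illustrative envelope\n * (hypothetical values):\n *\n * ```ts\n * { _noydb: NOYDB_FORMAT_VERSION, _v: 3, _ts: '2025-01-01T00:00:00.000Z',\n *   _iv: '', _data: '{\"id\":\"inv-7\"}', _by: 'user-1' }\n * ```\n 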
*/\n private async decryptJsonString(envelope: EncryptedEnvelope): Promise<string> {\n if (!this.encrypted) return envelope._data\n const dek = await this.getDEK(this.name)\n return decrypt(envelope._iv, envelope._data, dek)\n }\n\n /**\n * Decrypt an envelope into a record of type `T`.\n *\n * When a schema is attached, the decrypted value is validated before\n * being returned. A divergence between the stored bytes and the\n * current schema throws `SchemaValidationError` with\n * `direction: 'output'` — silently returning drifted data would\n * propagate garbage into the UI and break the whole point of having\n * a schema.\n *\n * `skipValidation` exists for history reads: when calling\n * `getVersion()` the caller is explicitly asking for an old snapshot\n * that may predate a schema change, so validating it would be a\n * false positive. Every non-history read leaves this flag `false`.\n */\n private async decryptRecord(\n envelope: EncryptedEnvelope,\n opts: { skipValidation?: boolean } = {},\n ): Promise<T> {\n const json = await this.decryptJsonString(envelope)\n let parsed: unknown = JSON.parse(json)\n\n // CRDT resolution: if this collection is in CRDT mode, the\n // stored JSON is a CrdtState, not T directly. Resolve to the snapshot.\n if (this.crdtMode && parsed !== null && typeof parsed === 'object' && '_crdt' in parsed) {\n parsed = this.crdtStrategy.resolveCrdtSnapshot(parsed as CrdtState)\n }\n\n let record = parsed as T\n\n if (this.schema !== undefined && !opts.skipValidation) {\n // Context string deliberately avoids leaking the record id — the\n // envelope only carries the version, not the id (the id lives in\n // the adapter-side key). `<collection>@v<n>` is enough for the\n // developer to find the offending record.\n record = await validateSchemaOutput(\n this.schema,\n record,\n `${this.name}@v${envelope._v}`,\n )\n }\n\n return record\n }\n}\n\n/**\n * Read a field value from a plain record for persisted-index maintenance.\n * Supports dotted paths so declarations like `indexes: ['billing.clientId']`\n * work the same way `readPath` handles them for the eager-mode builder.\n */\nfunction readPersistedValue(record: Record<string, unknown>, field: string): unknown {\n if (!field.includes('.')) return record[field]\n const segments = field.split('.')\n let cursor: unknown = record\n for (const segment of segments) {\n if (cursor === null || cursor === undefined) return undefined\n cursor = (cursor as Record<string, unknown>)[segment]\n }\n return cursor\n}\n\n/**\n * Canonicalize a typed value for storage inside the side-car body so it\n * round-trips through `JSON.parse` without losing fidelity. Dates are\n * serialised as ISO strings; everything else passes through.\n *\n * The in-memory mirror compares on the stringified bucket key, so the\n * exact storage form is not query-critical — this just protects the\n * reconciler, which compares the stored body against the\n * live record value and would otherwise mismatch on Date objects.\n */\nfunction serializeIndexValue(value: unknown): unknown {\n if (value instanceof Date) return value.toISOString()\n return value\n}\n\n/**\n * Extract the indexable value for a declaration — a scalar for\n * single-field, or a tuple array for composite. 
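\n * E.g. (hypothetical fields) a composite over `['clientId', 'period']`\n * extracts the tuple `['c-42', '2025-01']`, while a single declaration\n * over `'clientId'` extracts the scalar `'c-42'`.\n *\n * 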
Returns `null` when\n * the value is not indexable (single-field null/undefined, composite\n * with any null/undefined component — the whole composite is skipped\n * if any part is missing).\n */\nfunction extractIndexValue(\n record: Record<string, unknown>,\n def: PersistedIndexDef,\n): unknown {\n if (def.kind === 'single') {\n const v = readPersistedValue(record, def.field)\n return v === undefined || v === null ? null : v\n }\n const tuple: unknown[] = []\n for (const f of def.fields) {\n const v = readPersistedValue(record, f)\n if (v === undefined || v === null) return null\n tuple.push(v)\n }\n return tuple\n}\n\n/**\n * Compare the decrypted side-car body's `value` against the live record\n * field value, in the same canonical form used for storage. Handles the\n * Date-is-ISO-string round trip so reconcile doesn't flag a false drift.\n */\nfunction valuesMatch(stored: unknown, live: unknown): boolean {\n const serialized = serializeIndexValue(live)\n if (stored === serialized) return true\n if (stored === undefined || serialized === undefined) return stored === serialized\n // JSON-stringify both sides for structural equality on arrays/objects.\n try {\n return JSON.stringify(stored) === JSON.stringify(serialized)\n } catch {\n return false\n }\n}\n","/**\n * Strategy seam between core Collection and the optional CRDT\n * subsystem. Core imports `CrdtStrategy` as a TYPE-ONLY symbol and\n * `NO_CRDT` as a minimal runtime stub.\n *\n * The state-construction / merge / snapshot-resolution helpers —\n * `buildLwwMapState`, `buildRgaState`, `mergeCrdtStates`,\n * `resolveCrdtSnapshot` — are only reachable from `withCrdt()` in\n * `./active.ts`, which is only exported through the `@noy-db/hub/crdt`\n * subpath. Consumers without CRDT mode configured never pull the\n * ~221 LOC into their bundle.\n *\n * @internal\n */\n\nimport type { CrdtState, LwwMapState, RgaState } from './crdt.js'\n\n/**\n * Seam interface. `@internal`.\n *\n * @internal\n */\nexport interface CrdtStrategy {\n buildLwwMapState(\n record: Record<string, unknown>,\n previous: LwwMapState | undefined,\n now: string,\n ): LwwMapState\n buildRgaState(\n items: readonly unknown[],\n previous: RgaState | undefined,\n idGen: () => string,\n ): RgaState\n mergeCrdtStates(local: CrdtState, remote: CrdtState): CrdtState\n resolveCrdtSnapshot(state: CrdtState): unknown\n}\n\nconst NOT_ENABLED = new Error(\n 'CRDT mode requires the CRDT strategy. Import `{ withCrdt }` from ' +\n '\"@noy-db/hub/crdt\" and pass it to `createNoydb({ crdtStrategy: withCrdt() })`.',\n)\n\n/**\n * No-CRDT stub. Every method throws with a pointer at the subpath.\n * If a Collection declares `crdt: '...'` without this strategy wired,\n * the first put/sync-merge/read that hits the CRDT path surfaces the\n * error immediately.\n *\n * @internal\n */\nexport const NO_CRDT: CrdtStrategy = {\n buildLwwMapState() { throw NOT_ENABLED },\n buildRgaState() { throw NOT_ENABLED },\n mergeCrdtStates() { throw NOT_ENABLED },\n resolveCrdtSnapshot() { throw NOT_ENABLED },\n}\n","/**\n * Strategy seam for the optional i18n (multi-locale + dictionary)\n * subsystem. 
Core imports `I18nStrategy` type-only + `NO_I18N` stub;\n * real `applyI18nLocale` / `validateI18nTextValue` /\n * `DictionaryHandle` are only reachable via `withI18n()` in\n * `./active.ts`.\n *\n * Solo apps that don't use `i18nText()` fields, don't declare\n * `dictKey()` fields, and don't open a `vault.dictionary(...)` handle\n * ship none of the ~854 LOC behind this seam.\n *\n * Behavior under NO_I18N:\n *\n * - **applyI18nLocale** — returns the record unchanged. Apps without\n * any i18n descriptors never observe a difference; apps that\n * *did* declare i18nText/dictKey fields without opting into the\n * strategy still get raw values back (locale resolution silently\n * skipped). The validators below ensure the misconfiguration is\n * caught at write time instead.\n * - **validateI18nTextValue** — throws when called. Only fires when\n * a collection declared `i18nFields`; if you declared the field,\n * you must opt in.\n * - **buildDictionaryHandle** — throws when called. Only fires when\n * user code calls `vault.dictionary(...)`.\n *\n * @internal\n */\n\nimport type { NoydbStore } from '../types.js'\nimport type { LedgerStore } from '../history/ledger/store.js'\nimport type { UnlockedKeyring } from '../team/keyring.js'\nimport type { NoydbEventEmitter } from '../events.js'\nimport type { I18nTextDescriptor } from './core.js'\nimport type { DictionaryHandle, DictionaryOptions } from './dictionary.js'\n\n/**\n * Options accepted by `I18nStrategy.buildDictionaryHandle`. Mirrors\n * the `DictionaryHandle` constructor verbatim — kept here so core\n * code never imports the dictionary module at runtime.\n *\n * @internal\n */\nexport interface BuildDictionaryHandleOptions<Keys extends string = string> {\n adapter: NoydbStore\n compartmentName: string\n dictionaryName: string\n keyring: UnlockedKeyring\n getDEK: (collectionName: string) => Promise<CryptoKey>\n encrypted: boolean\n ledger: LedgerStore | undefined\n options: DictionaryOptions\n findAndUpdateReferences:\n | ((\n dictionaryName: string,\n oldKey: string,\n newKey: string,\n ) => Promise<void>)\n | undefined\n emitter: NoydbEventEmitter\n /**\n * Used by the active strategy to satisfy the generic-key parameter\n * on the returned handle. The NO_I18N stub never reads it.\n */\n // marker generic — runtime sees no value\n _keyMarker?: Keys\n}\n\n/**\n * @internal\n */\nexport interface I18nStrategy {\n /**\n * Resolve `i18nText` fields on a record to the requested locale and\n * return a new object. Returns the input unchanged under\n * `NO_I18N`.\n */\n applyI18nLocale(\n record: Record<string, unknown>,\n fields: Record<string, I18nTextDescriptor>,\n locale: string,\n fallback?: string | readonly string[],\n ): Record<string, unknown>\n\n /**\n * Validate that an i18nText field's value satisfies its descriptor\n * (required locales present, etc.). Throws under `NO_I18N` —\n * declaring i18nFields without opting in is a misconfiguration.\n */\n validateI18nTextValue(\n value: unknown,\n field: string,\n descriptor: I18nTextDescriptor,\n ): void\n\n /**\n * Construct a typed `DictionaryHandle` for the named dictionary.\n * Throws under `NO_I18N`.\n */\n buildDictionaryHandle<Keys extends string = string>(\n opts: BuildDictionaryHandleOptions<Keys>,\n ): DictionaryHandle<Keys>\n}\n\nfunction notEnabled(op: string): Error {\n return new Error(\n `${op} requires the i18n strategy. 
Import ` +\n '`{ withI18n }` from \"@noy-db/hub/i18n\" and pass it to ' +\n '`createNoydb({ i18nStrategy: withI18n() })`.',\n )\n}\n\n/**\n * No-i18n stub. Locale resolution is the identity; validation and\n * dictionary construction throw with an actionable pointer.\n *\n * @internal\n */\nexport const NO_I18N: I18nStrategy = {\n applyI18nLocale(record) { return record },\n validateI18nTextValue() { throw notEnabled('i18nText field validation') },\n buildDictionaryHandle() { throw notEnabled('vault.dictionary()') },\n}\n","/**\n * Strategy seam for the optional history + ledger + time-machine\n * subsystem. Core imports `HistoryStrategy` type-only + `NO_HISTORY`\n * stub; real implementations of `saveHistory`, `LedgerStore`,\n * `VaultInstant`, `computePatch`, `diff` etc. are only reachable via\n * `withHistory()` in `./active.ts`.\n *\n * Applications that don't track per-record versioning, don't need the\n * hash-chained audit ledger, and don't restore to past instants ship\n * none of the ~1,880 LOC behind this seam.\n *\n * Strategy contract:\n *\n * - **saveHistory / pruneHistory / clearHistory** — no-ops under\n * NO_HISTORY. Writes still succeed; no snapshot is captured.\n * - **getHistoryEntries / getVersionEnvelope / diff** — throw under\n * NO_HISTORY. These are read APIs the consumer would only call\n * after explicitly asking for history; the throw guides them to\n * `@noy-db/hub/history`.\n * - **envelopePayloadHash / computePatch** — return empty / `[]`\n * under NO_HISTORY. These are only used inside the\n * `if (this.ledger)` branch, which is itself gated by\n * `buildLedger()` returning null.\n * - **buildLedger** — returns `null` under NO_HISTORY. The Vault's\n * public `vault.ledger()` accessor throws when null.\n * - **buildVaultInstant** — throws under NO_HISTORY. `vault.at()`\n * propagates the throw.\n *\n * @internal\n */\n\nimport type {\n EncryptedEnvelope,\n NoydbStore,\n HistoryOptions,\n PruneOptions,\n} from '../types.js'\nimport type { LedgerStore } from './ledger/store.js'\nimport type { JsonPatch } from './ledger/patch.js'\nimport type { DiffEntry } from './diff.js'\nimport type { VaultInstant, VaultEngine } from './time-machine.js'\n\n/**\n * Options accepted by `HistoryStrategy.buildLedger`. Mirrors the\n * `LedgerStore` constructor verbatim — kept in this file so `core`\n * code never imports the LedgerStore module at runtime.\n *\n * @internal\n */\nexport interface BuildLedgerOptions {\n adapter: NoydbStore\n vault: string\n encrypted: boolean\n getDEK: (collectionName: string) => Promise<CryptoKey>\n actor: string\n}\n\n/**\n * @internal\n */\nexport interface HistoryStrategy {\n /**\n * Persist a full encrypted envelope snapshot of the prior version\n * under `_history/{collection}:{id}:{paddedVersion}`. No-op under\n * `NO_HISTORY`.\n */\n saveHistory(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string,\n envelope: EncryptedEnvelope,\n ): Promise<void>\n\n /**\n * List history envelopes for a record, newest first. Throws under\n * `NO_HISTORY` — callers reach this via `collection.history()` /\n * `collection.getVersion()` / `collection.diff()`, which only work\n * with the strategy enabled.\n */\n getHistoryEntries(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string,\n options?: HistoryOptions,\n ): Promise<EncryptedEnvelope[]>\n\n /**\n * Fetch a specific version's envelope. 
Throws under `NO_HISTORY`.\n */\n getVersionEnvelope(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string,\n version: number,\n ): Promise<EncryptedEnvelope | null>\n\n /**\n * Prune history entries by retention rule. Returns `0` under\n * `NO_HISTORY`.\n */\n pruneHistory(\n adapter: NoydbStore,\n vault: string,\n collection: string,\n recordId: string | undefined,\n options: PruneOptions,\n ): Promise<number>\n\n /**\n * Clear all history for vault/collection/record. Returns `0` under\n * `NO_HISTORY`.\n */\n clearHistory(\n adapter: NoydbStore,\n vault: string,\n collection?: string,\n recordId?: string,\n ): Promise<number>\n\n /**\n * Compute the SHA-256 hash of an envelope's encrypted payload, used\n * by `LedgerStore.append` to track tamper-evidence. Returns the\n * empty string under `NO_HISTORY` (the call site is gated by\n * `if (this.ledger)`, so the value is never observed).\n */\n envelopePayloadHash(envelope: EncryptedEnvelope | null): Promise<string>\n\n /**\n * Compute the JSON patch from `from` → `to`. Returns `[]` under\n * `NO_HISTORY`.\n */\n computePatch(from: unknown, to: unknown): JsonPatch\n\n /**\n * Compute the typed diff between two records. Throws under\n * `NO_HISTORY` — `collection.diff()` is a history-read API.\n */\n diff(recordA: unknown, recordB: unknown): DiffEntry[]\n\n /**\n * Construct (or return null) a `LedgerStore` for the vault. Returns\n * `null` under `NO_HISTORY`; the Vault treats null as \"no ledger\n * attached\" — collection write paths skip the append branch and the\n * public `vault.ledger()` accessor throws.\n */\n buildLedger(opts: BuildLedgerOptions): LedgerStore | null\n\n /**\n * Construct a `VaultInstant` for time-machine reads. Throws under\n * `NO_HISTORY`.\n */\n buildVaultInstant(engine: VaultEngine, timestamp: string): VaultInstant\n}\n\n/**\n * Error thrown when the consumer reaches a history-gated surface\n * without opting into the strategy. The message names the offending\n * operation and points to the subpath import.\n *\n * @internal\n */\nfunction notEnabled(op: string): Error {\n return new Error(\n `${op} requires the history strategy. Import ` +\n '`{ withHistory }` from \"@noy-db/hub/history\" and pass it to ' +\n '`createNoydb({ historyStrategy: withHistory() })`.',\n )\n}\n\n/**\n * No-history stub. Snapshots and prune/clear are no-ops; reads and\n * time-machine throw with an actionable message; ledger construction\n * returns null so the write-path's `if (this.ledger)` branch is dead\n * code in the bundle.\n *\n * @internal\n */\nexport const NO_HISTORY: HistoryStrategy = {\n async saveHistory() {},\n async getHistoryEntries() { throw notEnabled('collection.history()') },\n async getVersionEnvelope() { throw notEnabled('collection.getVersion()') },\n async pruneHistory() { return 0 },\n async clearHistory() { return 0 },\n async envelopePayloadHash() { return '' },\n computePatch() { return [] },\n diff() { throw notEnabled('collection.diff()') },\n buildLedger() { return null },\n buildVaultInstant() { throw notEnabled('vault.at() / vault.timeMachine()') },\n}\n","/**\n * Operator implementations for the query DSL.\n *\n * All predicates run client-side, AFTER decryption — they never see ciphertext.\n * This file is dependency-free and tree-shakeable.\n */\n\n/** Comparison operators supported by the where() builder. 
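\n * An illustrative chain (hypothetical collection and fields):\n *\n * ```ts\n * invoices.query()\n *   .where('status', 'in', ['open', 'overdue'])\n *   .where('total', 'between', [100, 500])\n *   .where('client.city', 'startsWith', 'Ber')\n * ```\n 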
*/\nexport type Operator =\n | '=='\n | '!='\n | '<'\n | '<='\n | '>'\n | '>='\n | 'in'\n | 'contains'\n | 'startsWith'\n | 'between'\n\n/**\n * A single field comparison clause inside a query plan.\n * Plans are JSON-serializable, so this type uses primitives only.\n */\nexport interface FieldClause {\n readonly type: 'field'\n readonly field: string\n readonly op: Operator\n readonly value: unknown\n}\n\n/**\n * A user-supplied predicate function escape hatch. Not serializable.\n *\n * The predicate accepts `unknown` at the type level so the surrounding\n * Clause type can stay non-parametric — this keeps Collection<T> covariant\n * in T at the public API surface. Builder methods cast user predicates\n * (typed `(record: T) => boolean`) into this shape on the way in.\n */\nexport interface FilterClause {\n readonly type: 'filter'\n readonly fn: (record: unknown) => boolean\n}\n\n/** A logical group of clauses combined by AND or OR. */\nexport interface GroupClause {\n readonly type: 'group'\n readonly op: 'and' | 'or'\n readonly clauses: readonly Clause[]\n}\n\nexport type Clause = FieldClause | FilterClause | GroupClause\n\n/**\n * Read a possibly nested field path like \"address.city\" from a record.\n * Returns undefined if any segment is missing.\n */\nexport function readPath(record: unknown, path: string): unknown {\n if (record === null || record === undefined) return undefined\n if (!path.includes('.')) {\n return (record as Record<string, unknown>)[path]\n }\n const segments = path.split('.')\n let cursor: unknown = record\n for (const segment of segments) {\n if (cursor === null || cursor === undefined) return undefined\n cursor = (cursor as Record<string, unknown>)[segment]\n }\n return cursor\n}\n\n/**\n * Evaluate a single field clause against a record.\n * Returns false on type mismatches rather than throwing — query results\n * exclude non-matching records by definition.\n */\nexport function evaluateFieldClause(record: unknown, clause: FieldClause): boolean {\n const actual = readPath(record, clause.field)\n const { op, value } = clause\n\n switch (op) {\n case '==':\n return actual === value\n case '!=':\n return actual !== value\n case '<':\n return isComparable(actual, value) && (actual as number) < (value as number)\n case '<=':\n return isComparable(actual, value) && (actual as number) <= (value as number)\n case '>':\n return isComparable(actual, value) && (actual as number) > (value as number)\n case '>=':\n return isComparable(actual, value) && (actual as number) >= (value as number)\n case 'in':\n return Array.isArray(value) && value.includes(actual)\n case 'contains':\n if (typeof actual === 'string') return typeof value === 'string' && actual.includes(value)\n if (Array.isArray(actual)) return actual.includes(value)\n return false\n case 'startsWith':\n return typeof actual === 'string' && typeof value === 'string' && actual.startsWith(value)\n case 'between': {\n if (!Array.isArray(value) || value.length !== 2) return false\n const [lo, hi] = value\n if (!isComparable(actual, lo) || !isComparable(actual, hi)) return false\n return (actual as number) >= (lo as number) && (actual as number) <= (hi as number)\n }\n default: {\n // Exhaustiveness — TS will error if a new operator is added without a case.\n const _exhaustive: never = op\n void _exhaustive\n return false\n }\n }\n}\n\n/**\n * Two values are \"comparable\" if they share an order-defined runtime type.\n * Strings compare lexicographically; numbers and Dates numerically; otherwise false.\n */\nfunction 
isComparable(a: unknown, b: unknown): boolean {\n if (typeof a === 'number' && typeof b === 'number') return true\n if (typeof a === 'string' && typeof b === 'string') return true\n if (a instanceof Date && b instanceof Date) return true\n return false\n}\n\n/**\n * Evaluate any clause (field / filter / group) against a record.\n * The recursion depth is bounded by the user's query expression — no risk of\n * blowing the stack on a 50K-record collection.\n */\nexport function evaluateClause(record: unknown, clause: Clause): boolean {\n switch (clause.type) {\n case 'field':\n return evaluateFieldClause(record, clause)\n case 'filter':\n return clause.fn(record)\n case 'group':\n if (clause.op === 'and') {\n for (const child of clause.clauses) {\n if (!evaluateClause(record, child)) return false\n }\n return true\n } else {\n for (const child of clause.clauses) {\n if (evaluateClause(record, child)) return true\n }\n return false\n }\n }\n}\n","/**\n * Query DSL `.join()` — eager, single-FK, intra-vault joins.\n *\n * Resolves a ref()-declared foreign key into an attached\n * right-side record under an alias, using one of two planner paths\n * selected automatically:\n *\n * - **nested-loop** — right-side source exposes `lookupById`, so\n * each left row costs O(1). This is the common path for joins\n * against a Collection, which backs `lookupById` with a Map\n * lookup.\n * - **hash** — right-side has only `snapshot()`. Build a\n * `Map<id, record>` once, probe per left row. Same asymptotic\n * cost for our collections, but the path exists as a fallback\n * for custom QuerySource implementations and as an explicit\n * test-only override via `{ strategy: 'hash' }`.\n *\n * Scope:\n *\n * - Equi-joins on declared `ref()` fields only. Joins on\n * undeclared fields throw at plan time with an actionable error\n * naming the field and collection.\n * - Same-vault only. Cross-vault correlation goes\n * through `queryAcross`; this is an architectural\n * invariant, not a limitation we plan to lift.\n * - Hard row ceiling via `JoinTooLargeError` — default 50k per\n * side, override via `{ maxRows }`. Warns at 80% of the ceiling\n * on the existing warn channel.\n * - Three ref-mode behaviors on dangling refs:\n * strict → `DanglingReferenceError`,\n * warn → attach `null` with a one-shot warning,\n * cascade → attach `null` silently (cascade is a delete-time\n * mode; any dangling refs still present at read time are\n * mid-flight cascades or orphans from earlier, not a DSL error).\n *\n * Partition-awareness seam:\n *\n * Every `JoinLeg` carries a `partitionScope` field that is always\n * `'all'` for now. The executor never reads this field.\n * Partition-aware joins will start populating it from `where()`\n * predicates on the partition key without changing the planner's\n * external shape — this is the whole reason it exists now.\n *\n * Joins stay OUT of the ledger: reads don't touch `_ledger/`,\n * including joined reads.\n */\n\nimport type { RefDescriptor, RefMode } from '../refs.js'\nimport { readPath } from './predicate.js'\nimport { JoinTooLargeError, DanglingReferenceError } from '../errors.js'\n\n/** Planner strategy for a single join leg. Auto-selected unless overridden. */\nexport type JoinStrategy = 'hash' | 'nested'\n\n/** Default per-side row ceiling before `.join()` throws `JoinTooLargeError`. */\nexport const DEFAULT_JOIN_MAX_ROWS = 50_000\n\n/**\n * Fraction of the row ceiling at which a one-shot warning is emitted.\n * At 80% we warn; at 100% we throw. 
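(With the default\n * `DEFAULT_JOIN_MAX_ROWS` of 50_000, that means warning above 40_000\n * rows on a side and throwing above 50_000.) 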
The warn gives consumers a\n * heads-up before the hard error so they can raise the ceiling or\n * filter further without first hitting a broken query.\n */\nconst JOIN_WARN_FRACTION = 0.8\n\n/**\n * Internal representation of a single join leg in the query plan.\n *\n * This is the primary place where constraint #1 is honored:\n * every leg carries a `partitionScope` field that is always `'all'`\n * for now and is never read by the executor. Partition-aware\n * joins will start populating it from `where()` predicates on the\n * partition key without changing the planner's external shape.\n */\nexport interface JoinLeg {\n /** Field on the left-side record holding the foreign key value. */\n readonly field: string\n /** Alias key under which the joined right-side record attaches. */\n readonly as: string\n /** Target collection name, resolved from the `ref()` declaration. */\n readonly target: string\n /** Ref mode controlling behavior on dangling refs at read time. */\n readonly mode: RefMode\n /** Manual planner strategy override. `undefined` → auto-select. */\n readonly strategy: JoinStrategy | undefined\n /** Per-side row ceiling override. `undefined` → DEFAULT_JOIN_MAX_ROWS. */\n readonly maxRows: number | undefined\n /**\n * Partition scope for future partition-aware joins. Always `'all'`\n * today — the executor never reads this field. Future versions will\n * populate it from `where()` predicates without breaking the\n * planner's external shape. Do not remove even though it looks\n * unused today — that's the whole point of having it.\n */\n readonly partitionScope: 'all' | readonly string[]\n /**\n * When `true`, this is a dictionary join. The executor\n * resolves the left-field value against the dict snapshot and\n * attaches `{ ...labels, key }` rather than a right-side record.\n * `target` holds the dictionary name (not a collection name).\n */\n readonly isDictJoin?: true\n}\n\n/**\n * Minimal shape of a joinable right-side record source.\n *\n * Collections implement this structurally via their `QuerySource`;\n * sources without `lookupById` force the hash-join fallback. Kept as\n * a thin interface so tests can wire up plain-object sources without\n * pulling in the full Collection class.\n *\n * The optional `subscribe` is used by `Query.live()` to merge\n * right-side change streams into the live re-run trigger. Sources\n * that omit `subscribe` still work for live joins — they just\n * don't drive re-fires when their right side mutates. Collection\n * implements `subscribe` by hooking into the existing per-\n * vault event emitter.\n */\nexport interface JoinableSource {\n snapshot(): readonly unknown[]\n lookupById?(id: string): unknown\n /**\n * Subscribe to mutations on this source. The callback fires\n * AFTER the underlying record set has been updated. Returns an\n * unsubscribe function. Optional — sources without this method\n * cannot trigger live-join re-fires from their side.\n */\n subscribe?(cb: () => void): () => void\n}\n\n/**\n * Join resolution context attached to a `Query` when it's constructed\n * from a `Collection`. Holds everything the `.join()` method needs to\n * translate a field name into a target collection + ref mode, and\n * everything the executor needs to read the right side.\n *\n * Kept as a structural interface so `Vault` can implement it\n * without `Query` needing to import `Vault` (circular-import\n * avoidance). 
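\n *\n * A right-side source can be as small as this sketch (hypothetical\n * data; `lookupById` is what unlocks the nested-loop path):\n *\n * ```ts\n * const clientsById = new Map<string, unknown>([\n *   ['c-42', { id: 'c-42', name: 'Acme' }],\n * ])\n * const clients: JoinableSource = {\n *   snapshot: () => [...clientsById.values()],\n *   lookupById: (id) => clientsById.get(id),\n * }\n * ```\n *\n * 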
The Collection wires this up in its `query()` method using\n * the `joinResolver` back-reference the Vault passes in.\n */\nexport interface JoinContext {\n /** Name of the left-side (owning) collection. */\n readonly leftCollection: string\n /** Look up a `RefDescriptor` by field name on the left collection. */\n resolveRef(field: string): RefDescriptor | null\n /** Resolve a right-side source by target collection name. */\n resolveSource(collectionName: string): JoinableSource | null\n /**\n * Resolve a dictKey join source. Returns a `JoinableSource`\n * whose snapshot exposes `{ key, ...labels }` records, keyed by the\n * stable dictionary key. `null` when the field is not a dictKey.\n *\n * The source is built from the compartment's in-memory dictionary\n * snapshot — same data as `DictionaryHandle.list()`, O(1) per lookup.\n */\n resolveDictSource?(field: string): JoinableSource | null\n}\n\n/**\n * Coerce an unknown FK value into a lookup key string.\n *\n * Legitimate ref values are strings or numbers — the same narrowing\n * the write-time `enforceRefsOnPut` path applies. Anything else\n * (objects, arrays, booleans, null, undefined) is treated as \"no\n * ref\" and returns `null`, so the join attaches `null` instead of\n * running `String({})` and producing `'[object Object]'` as a\n * bucket key. This matches the lint rule guidance and keeps\n * bizarre FK values from producing silently-wrong lookups.\n */\nfunction coerceRefKey(value: unknown): string | null {\n if (value === null || value === undefined) return null\n if (typeof value === 'string') return value\n if (typeof value === 'number' || typeof value === 'bigint') return String(value)\n return null\n}\n\n/**\n * Warn-channel deduplication for dangling-ref `'warn'` mode. Keyed\n * by `field → target:refId` so the same dangling ref only produces\n * one warning even across many rows or repeated queries.\n */\nconst warnedDanglingKeys = new Set<string>()\nfunction warnOnceDangling(field: string, target: string, refId: string): void {\n const key = `${field}→${target}:${refId}`\n if (warnedDanglingKeys.has(key)) return\n warnedDanglingKeys.add(key)\n console.warn(\n `[noy-db] .join() encountered dangling ref in 'warn' mode: ` +\n `field \"${field}\" → \"${target}:${refId}\" not found. Attaching null.`,\n )\n}\n\n/**\n * Track row-ceiling warnings to fire only once per (target, side).\n * Prevents per-query spam when a consumer is running the same query\n * repeatedly (e.g. in a reactive loop).\n */\nconst warnedCeilingKeys = new Set<string>()\nfunction warnCeilingApproaching(\n target: string,\n side: 'left' | 'right',\n rows: number,\n maxRows: number,\n): void {\n const key = `${target}:${side}`\n if (warnedCeilingKeys.has(key)) return\n warnedCeilingKeys.add(key)\n const pct = Math.round((rows / maxRows) * 100)\n console.warn(\n `[noy-db] .join() ${side} side is at ${pct}% of the ${maxRows}-row ` +\n `ceiling for target \"${target}\" (${rows} rows). Streaming joins over ` +\n `scan() are not yet supported for collections that need to exceed this.`,\n )\n}\n\n/**\n * Apply every join leg in the plan against a base set of left-side\n * rows. Called by the query executor after `where` / `orderBy` /\n * `offset` / `limit` have narrowed the left set.\n *\n * Each leg attaches a `leg.as` field to every row. 
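For instance\n * (hypothetical rows), a leg `{ field: 'clientId', as: 'client', … }`\n * turns `{ id: 'inv-7', clientId: 'c-42' }` into\n * `{ id: 'inv-7', clientId: 'c-42', client: { id: 'c-42', … } }`.\n * 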
Returns a new\n * array of plain objects — the original left rows are not mutated\n * (structural sharing is fine for the inner fields, but the\n * top-level object is a fresh clone so consumers can further mutate\n * safely).\n *\n * **Ordering:** joins run AFTER orderBy / limit / offset in v1.\n * This keeps the planner simple and means queries like \"top 10\n * invoices with client\" sort and paginate the left side first, then\n * join. Sorting *by* a joined field is out of scope for now — users\n * can post-sort the result array in userland, or wait for a later\n * release (multi-FK chaining) which can be layered on top.\n *\n * **Multi-FK chaining:** each leg's `maxRows` is enforced\n * against the current left-row count independently. Because\n * joins are equi-joins on the target's primary key (one-to-one or\n * one-to-null), the left row count is constant across legs — no\n * cartesian blowup. The per-leg left-side check is still necessary\n * so that a later leg with a tighter ceiling correctly fires on a\n * query like `.join('a', { maxRows: 100_000 }).join('b', { maxRows: 50 })`,\n * which should throw on the second leg if the left set exceeds 50.\n */\nexport function applyJoins(\n rows: readonly unknown[],\n joins: readonly JoinLeg[],\n context: JoinContext,\n): unknown[] {\n if (joins.length === 0) return [...rows]\n\n let result: unknown[] = [...rows]\n for (const leg of joins) {\n result = applyOneJoin(result, leg, context)\n }\n return result\n}\n\nfunction applyOneJoin(\n leftRows: readonly unknown[],\n leg: JoinLeg,\n context: JoinContext,\n): unknown[] {\n // Dict join path — resolve left-field value against the\n // dictionary snapshot and attach { key, ...labels } under leg.as.\n if (leg.isDictJoin) {\n const dictSource = context.resolveDictSource?.(leg.field)\n if (!dictSource) {\n throw new Error(\n `.join() field \"${leg.field}\" on \"${context.leftCollection}\" is declared as a ` +\n `dictKey join but the dict source could not be resolved. ` +\n `Ensure the dictionary has at least one entry.`,\n )\n }\n const out: unknown[] = []\n const snapshot = dictSource.snapshot()\n const dictMap = new Map<string, unknown>()\n for (const entry of snapshot) {\n const k = readPath(entry, 'key')\n if (typeof k === 'string') dictMap.set(k, entry)\n }\n for (const left of leftRows) {\n const rawId = readPath(left, leg.field)\n const key = coerceRefKey(rawId)\n const dictEntry = key === null ? undefined : dictMap.get(key)\n out.push({ ...(left as Record<string, unknown>), [leg.as]: dictEntry ?? null })\n }\n return out\n }\n\n const source = context.resolveSource(leg.target)\n if (!source) {\n throw new Error(\n `.join() cannot resolve target collection \"${leg.target}\" ` +\n `(referenced from field \"${leg.field}\" on \"${context.leftCollection}\"). ` +\n `Make sure the target collection has been opened via vault.collection() ` +\n `at least once before running the query.`,\n )\n }\n\n const maxRows = leg.maxRows ?? DEFAULT_JOIN_MAX_ROWS\n\n // Per-leg left-side ceiling check. In a\n // multi-FK chain, each leg's `maxRows` is enforced independently\n // against the current left-row count, so\n // `.join('a', { maxRows: 100_000 }).join('b', { maxRows: 50 })`\n // correctly throws on the second leg if the left set exceeds 50.\n if (leftRows.length > maxRows) {\n throw new JoinTooLargeError({\n leftRows: leftRows.length,\n rightRows: -1,\n maxRows,\n side: 'left',\n message:\n `.join() left side has ${leftRows.length} rows, exceeding the ${maxRows}-row ` +\n `ceiling for target \"${leg.target}\". 
Filter the left side further with ` +\n        `where()/limit() before joining, or raise the ceiling via { maxRows }. ` +\n        `Streaming joins over scan() are not yet supported.`,\n    })\n  }\n  if (leftRows.length > maxRows * JOIN_WARN_FRACTION) {\n    warnCeilingApproaching(leg.target, 'left', leftRows.length, maxRows)\n  }\n\n  const rightSnapshot = source.snapshot()\n  if (rightSnapshot.length > maxRows) {\n    throw new JoinTooLargeError({\n      leftRows: leftRows.length,\n      rightRows: rightSnapshot.length,\n      maxRows,\n      side: 'right',\n      message:\n        `.join() right side \"${leg.target}\" has ${rightSnapshot.length} rows, ` +\n        `exceeding the ${maxRows}-row ceiling. Raise the ceiling via { maxRows } ` +\n        `if the data genuinely fits in memory, or wait for streaming-join support.`,\n    })\n  }\n  if (rightSnapshot.length > maxRows * JOIN_WARN_FRACTION) {\n    warnCeilingApproaching(leg.target, 'right', rightSnapshot.length, maxRows)\n  }\n\n  // Strategy selection: explicit override wins; otherwise prefer\n  // nested-loop when the source exposes lookupById (O(1) per row),\n  // falling back to hash join when it doesn't.\n  const strategy: JoinStrategy =\n    leg.strategy ?? (source.lookupById ? 'nested' : 'hash')\n\n  if (strategy === 'nested' && source.lookupById) {\n    // Bind through an arrow so the `this` context of lookupById\n    // doesn't drift — same pattern as the existing candidateRecords\n    // helper in builder.ts.\n    const lookup = (id: string): unknown => source.lookupById?.(id)\n    return nestedLoopJoin(leftRows, leg, lookup)\n  }\n  return hashJoin(leftRows, leg, rightSnapshot)\n}\n\nfunction nestedLoopJoin(\n  leftRows: readonly unknown[],\n  leg: JoinLeg,\n  lookupById: (id: string) => unknown,\n): unknown[] {\n  const out: unknown[] = []\n  for (const left of leftRows) {\n    const rawId = readPath(left, leg.field)\n    const key = coerceRefKey(rawId)\n    const right = key === null ? undefined : lookupById(key)\n    out.push(attachJoin(left, leg, right, rawId))\n  }\n  return out\n}\n\nfunction hashJoin(\n  leftRows: readonly unknown[],\n  leg: JoinLeg,\n  rightSnapshot: readonly unknown[],\n): unknown[] {\n  // Build the right-side hash once per query execution. We key on\n  // the `id` field because ref() always points to a target's primary\n  // key — non-equi and non-id joins are out of scope for v1.\n  const rightMap = new Map<string, unknown>()\n  for (const record of rightSnapshot) {\n    const rawId = readPath(record, 'id')\n    const key = coerceRefKey(rawId)\n    if (key !== null) {\n      rightMap.set(key, record)\n    }\n  }\n  const out: unknown[] = []\n  for (const left of leftRows) {\n    const rawId = readPath(left, leg.field)\n    const key = coerceRefKey(rawId)\n    const right = key === null ? undefined : rightMap.get(key)\n    out.push(attachJoin(left, leg, right, rawId))\n  }\n  return out\n}\n\n/**\n * Attach the resolved right-side record (or null) to the left row\n * under the alias, applying ref-mode semantics for the dangling\n * case.\n *\n * A left-side record whose FK field is null/undefined is NOT a\n * dangling ref — it's \"no reference at all\", which is always\n * allowed regardless of mode. This matches the write-time\n * `enforceRefsOnPut` behavior: \"Nullish ref values are allowed —\n * treat them as 'no reference'.\"\n *\n * Only non-null FKs pointing at non-existent targets trigger the\n * mode behavior.\n */\nfunction attachJoin(\n  left: unknown,\n  leg: JoinLeg,\n  right: unknown,\n  rawId: unknown,\n): unknown {\n  if (left === null || typeof left !== 'object') {\n    // Pathological input — return as-is. 
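(For example, a bare\n    // string yielded by a hand-rolled test source — a hypothetical case.) 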
Shouldn't happen in\n    // practice because QuerySource yields objects, but defensive\n    // because plan execution is untyped at this layer.\n    return left\n  }\n  const merged: Record<string, unknown> = { ...(left as Record<string, unknown>) }\n\n  // \"No ref at all\" — null/undefined FK value, or a non-string/non-\n  // number FK that coerceRefKey treated as no-ref. Never throws\n  // regardless of mode; matches the write-time policy that nullish\n  // refs are allowed.\n  const refKey = coerceRefKey(rawId)\n  if (right === undefined) {\n    if (refKey !== null && leg.mode === 'strict') {\n      throw new DanglingReferenceError({\n        field: leg.field,\n        target: leg.target,\n        refId: refKey,\n        message:\n          `.join() strict dangling: record references \"${leg.target}:${refKey}\" ` +\n          `via field \"${leg.field}\", but no such record exists. Use ref() mode 'warn' ` +\n          `or 'cascade' if dangling refs are acceptable, or run ` +\n          `vault.checkIntegrity() to find and fix the orphans.`,\n      })\n    }\n    if (refKey !== null && leg.mode === 'warn') {\n      warnOnceDangling(leg.field, leg.target, refKey)\n    }\n    // For 'cascade' and null refs we attach null silently. Cascade\n    // is a delete-time mode; any dangling refs visible at read time\n    // are either mid-flight or pre-existing orphans, not a DSL error.\n    merged[leg.as] = null\n  } else {\n    merged[leg.as] = right\n  }\n  return merged\n}\n\n/**\n * Test-only: reset the join warning deduplication state between\n * tests. Production code never calls this — the dedup state is\n * intentionally process-scoped so a noisy query doesn't spam the\n * console once per component render.\n */\nexport function resetJoinWarnings(): void {\n  warnedDanglingKeys.clear()\n  warnedCeilingKeys.clear()\n}\n","/**\n * Reactive query primitive — `query.live()`.\n *\n * Produces a `LiveQuery<T>` that re-runs the query and\n * updates its `value` whenever any source feeding it (the left\n * collection AND every right-side collection a join leg points at)\n * mutates.\n *\n * Framework-agnostic by design. The Vue layer wraps a `LiveQuery`\n * in a Vue `Ref<T[]>` by subscribing once and copying `value` into\n * the ref on every notification. React/Solid/Svelte adapters do the\n * same with their own primitives. Core never depends on a UI\n * framework.\n *\n * **Error semantics.** A `.live()` query may throw at re-run time —\n * a strict-mode `DanglingReferenceError` is the most common case\n * (a right-side record was deleted out-of-band, leaving a left\n * row's FK pointing at nothing). When the re-run throws, the\n * `LiveQuery` catches the error and stores it in the `error`\n * field; it does NOT propagate the throw out of the source's\n * change handler, because doing so would tear down whatever\n * upstream emitter is dispatching. Listeners check `error` after\n * each notification and render an error state in the UI.\n *\n * **Dedup of right-side subscriptions.** A multi-FK chain that\n * joins the same target twice (e.g.\n * `.join('billingClientId').join('shippingClientId')`, both\n * pointing at `clients`) only subscribes to that target once. We\n * dedup by target collection name, on the assumption that\n * `resolveSource(name)` returns a single subscribable source per\n * vault + name. Vault's `resolveSource` reads from\n * `collectionCache` so this assumption holds.\n *\n * **What .live() does NOT do in v1:**\n * - No granular delta updates — the whole query re-runs on every\n *   change. 
Granular delta tracking is a v2 optimization once\n *   the API is stable.\n * - No batching of bursty changes — one event in, one re-run\n *   out. Batching with microtask coalescing is a v2 enhancement.\n * - No async notifications — every notification is synchronous\n *   within the source's change handler.\n * - No re-planning under live mutations — the planner picks once\n *   at subscription time and reuses the same plan for every\n *   re-run.\n */\n\n/**\n * The reactive primitive returned by `Query.live()`.\n *\n * Listeners can read the current `value` snapshot at any time and\n * subscribe to changes via `.subscribe(cb)`. The `error` field\n * carries the most recent re-run error, if any — read it after\n * each notification to render error state.\n *\n * Always call `stop()` when the live query is no longer needed.\n * Without it, the upstream change-stream subscriptions stay live\n * forever and the query keeps re-running on every mutation.\n */\nexport interface LiveQuery<T> {\n  /**\n   * Current snapshot of the query result. Updated in place on\n   * every upstream change. The reference returned is the same\n   * `readonly T[]` array — consumers that want change detection by\n   * reference should copy: `const arr = [...live.value]`.\n   */\n  readonly value: readonly T[]\n  /**\n   * Most recent re-run error, or `null` on success. Set when the\n   * executor throws (e.g. `DanglingReferenceError` in strict mode\n   * after a right-side delete). Cleared on the next successful\n   * re-run.\n   */\n  readonly error: Error | null\n  /**\n   * Register a notification callback. Fires AFTER `value` and\n   * `error` have been updated for a given upstream change.\n   * Returns an unsubscribe function.\n   *\n   * The first call to `subscribe` does NOT fire the callback\n   * immediately — call sites that want the initial value should\n   * read `live.value` directly before subscribing.\n   */\n  subscribe(cb: () => void): () => void\n  /**\n   * Tear down every upstream subscription and clear the listener\n   * set. Idempotent — calling twice is safe. After `stop()`, the\n   * query no longer re-runs and `subscribe()` becomes a no-op\n   * (the returned unsubscribe is still callable and is also a\n   * no-op).\n   */\n  stop(): void\n}\n\n/**\n * Internal subscription handle for an upstream source — left or\n * right side. The contract is just `subscribe(cb): unsubscribe`,\n * matching the existing `QuerySource.subscribe` and the new\n * `JoinableSource.subscribe`.\n */\nexport interface LiveUpstream {\n  subscribe(cb: () => void): () => void\n}\n\n/**\n * Build a LiveQuery from a `recompute` callback (typically the\n * Query's bound `toArray`) and a list of upstream sources to\n * subscribe to.\n *\n * The recompute fires once synchronously to populate the initial\n * value, then re-fires every time any upstream notifies. Errors\n * thrown by recompute are caught and stored in `error` instead of\n * propagating — see the file docstring for the rationale.\n */\nexport function buildLiveQuery<T>(\n  recompute: () => T[],\n  upstreams: readonly LiveUpstream[],\n): LiveQuery<T> {\n  return new LiveQueryImpl<T>(recompute, upstreams)\n}\n\nclass LiveQueryImpl<T> implements LiveQuery<T> {\n  private _value: readonly T[] = []\n  private _error: Error | null = null\n  private readonly listeners = new Set<() => void>()\n  private readonly unsubs: Array<() => void> = []\n  private stopped = false\n\n  constructor(\n    private readonly recompute: () => T[],\n    upstreams: readonly LiveUpstream[],\n  ) {\n    // Initial compute. 
If this throws, the constructor still\n // succeeds — we want consumers to be able to render an error\n // state from `live.error` rather than wrapping every\n // `query.live()` call in a try/catch.\n this.refresh()\n for (const upstream of upstreams) {\n try {\n this.unsubs.push(upstream.subscribe(this.onUpstreamChange))\n } catch (err) {\n // Upstream subscription failed — record it as the live\n // error and continue with the upstreams that did work.\n // The LiveQuery is now degraded (won't re-fire on this\n // upstream's changes) but isn't broken; consumers can\n // detect this via `live.error`.\n this._error = err instanceof Error ? err : new Error(String(err))\n }\n }\n }\n\n get value(): readonly T[] {\n return this._value\n }\n\n get error(): Error | null {\n return this._error\n }\n\n /**\n * Bound change handler — used as the callback passed to every\n * upstream's subscribe. Bound via class field so the `this`\n * context survives the indirect call from arbitrary upstreams.\n */\n private readonly onUpstreamChange = (): void => {\n this.refresh()\n for (const cb of this.listeners) {\n try {\n cb()\n } catch {\n // Listener errors are isolated — one buggy consumer\n // doesn't break the others or tear down the live query.\n }\n }\n }\n\n private refresh(): void {\n if (this.stopped) return\n try {\n this._value = this.recompute()\n this._error = null\n } catch (err) {\n this._error = err instanceof Error ? err : new Error(String(err))\n // Don't clobber the previous value on error — consumers\n // typically want to keep showing the last known good state\n // alongside the error message rather than flashing to an\n // empty list.\n }\n }\n\n subscribe(cb: () => void): () => void {\n if (this.stopped) return () => {}\n this.listeners.add(cb)\n return () => this.listeners.delete(cb)\n }\n\n stop(): void {\n if (this.stopped) return\n this.stopped = true\n for (const unsub of this.unsubs) {\n try {\n unsub()\n } catch {\n // Unsub errors are swallowed — at this point we're tearing\n // down anyway and the failure is noise.\n }\n }\n this.unsubs.length = 0\n this.listeners.clear()\n }\n}\n","/**\n * Strategy seam between the core Query / ScanBuilder chain and the\n * optional aggregate / groupBy subsystem. Core imports\n * `AggregateStrategy` as a TYPE-ONLY symbol and `NO_AGGREGATE` as a\n * tiny runtime stub.\n *\n * The heavy machinery — `Aggregation`, `GroupedQuery`, the\n * reducer-step logic — is only reachable from `withAggregate()` in\n * `./active.ts`, which is only exported through the\n * `@noy-db/hub/aggregate` subpath. Consumers that don't import the\n * subpath ship none of the ~886 LOC.\n *\n * @internal\n */\n\nimport type {\n Aggregation,\n AggregateSpec,\n AggregateResult,\n AggregationUpstream,\n} from './aggregation.js'\nimport type { GroupedQuery } from './groupby.js'\n\n/**\n * Seam interface. `@internal` — will promote to public only when the\n * aggregate subsystem is extracted into its own package.\n *\n * @internal\n */\nexport interface AggregateStrategy {\n /**\n * Build an `Aggregation<R>` for `Query.aggregate(spec)`. `executeRecords`\n * is a closure that produces the matching record set when the\n * aggregation runs. NO_AGGREGATE throws; the active strategy\n * constructs a real `Aggregation`.\n */\n aggregate<Spec extends AggregateSpec>(\n executeRecords: () => readonly unknown[],\n spec: Spec,\n upstreams: readonly AggregationUpstream[],\n ): Aggregation<AggregateResult<Spec>>\n\n /**\n * Build a `GroupedQuery<T, F>` for `Query.groupBy(field)`. 
Same\n * closure / upstream inputs as `aggregate` plus the group key field.\n */\n groupBy<T, F extends string>(\n executeRecords: () => readonly unknown[],\n field: F,\n upstreams: readonly AggregationUpstream[],\n dictLabelResolver?: (\n key: string,\n locale: string,\n fallback?: string | readonly string[],\n ) => Promise<string | undefined>,\n ): GroupedQuery<T, F>\n\n /**\n * Terminal streaming aggregator for `ScanBuilder.aggregate(spec)`.\n * Takes an async iterable of decrypted records + the spec and\n * returns the reduced result.\n */\n scanAggregate<Spec extends AggregateSpec>(\n iter: AsyncIterable<unknown>,\n spec: Spec,\n ): Promise<AggregateResult<Spec>>\n}\n\nconst NOT_ENABLED = new Error(\n 'Aggregate / groupBy is not enabled on this Noydb instance. ' +\n 'Import `{ withAggregate }` from \"@noy-db/hub/aggregate\" and pass it to ' +\n '`createNoydb({ aggregateStrategy: withAggregate() })`.',\n)\n\n/**\n * No-aggregate stub. Every `.aggregate()` / `.groupBy()` / streaming\n * `scan().aggregate()` call throws with a pointer at the subpath. The\n * real `Aggregation` / `GroupedQuery` classes are never referenced at\n * runtime, so the bundler drops the ~886 LOC.\n *\n * @internal\n */\nexport const NO_AGGREGATE: AggregateStrategy = {\n aggregate() { throw NOT_ENABLED },\n groupBy() { throw NOT_ENABLED },\n scanAggregate() { throw NOT_ENABLED },\n}\n","/**\n * Chainable, immutable query builder.\n *\n * Each builder operation returns a NEW Query — the underlying plan is never\n * mutated. This makes plans safe to share, cache, and serialize.\n */\n\nimport type { Clause, FieldClause, FilterClause, GroupClause, Operator } from './predicate.js'\nimport { evaluateClause } from './predicate.js'\nimport type { CollectionIndexes } from '../indexing/eager-indexes.js'\nimport type { JoinContext, JoinLeg, JoinStrategy } from './join.js'\nimport { applyJoins } from './join.js'\nimport type { LiveQuery, LiveUpstream } from './live.js'\nimport { buildLiveQuery } from './live.js'\nimport type { AggregateSpec, AggregateResult, AggregationUpstream, Aggregation } from '../aggregate/aggregation.js'\nimport type { GroupedQuery } from '../aggregate/groupby.js'\nimport { NO_AGGREGATE, type AggregateStrategy } from '../aggregate/strategy.js'\n\nexport interface OrderBy {\n readonly field: string\n readonly direction: 'asc' | 'desc'\n}\n\n/**\n * A complete query plan: zero-or-more clauses, optional ordering, pagination,\n * and optional joins.\n *\n * Plans are JSON-serializable as long as no FilterClause is present and no\n * join leg carries a manual `strategy` override (JoinLeg itself is plain\n * data, so it serializes cleanly).\n *\n * Plans are intentionally NOT parametric on T — see `predicate.ts` FilterClause\n * for the variance reasoning. The public `Query<T>` API attaches the type tag.\n */\nexport interface QueryPlan {\n readonly clauses: readonly Clause[]\n readonly orderBy: readonly OrderBy[]\n readonly limit: number | undefined\n readonly offset: number\n /**\n * Zero-or-more join legs to apply after where/orderBy/limit/offset.\n * Each leg attaches a resolved right-side record (or null) under its\n * alias. See `query/join.ts` for the full semantics.\n */\n readonly joins: readonly JoinLeg[]\n}\n\nconst EMPTY_PLAN: QueryPlan = {\n clauses: [],\n orderBy: [],\n limit: undefined,\n offset: 0,\n joins: [],\n}\n\n/**\n * Source of records that a query executes against.\n *\n * The interface is non-parametric to keep variance friendly: callers cast\n * their typed source (e.g. 
`QuerySource<Invoice>`) into this opaque shape.\n *\n * `getIndexes` and `lookupById` are optional fast-path hooks. When both are\n * present and a where clause matches an indexed field, the executor uses\n * the index to skip a linear scan. Sources without these methods (or with\n * `getIndexes` returning `null`) always fall back to a linear scan.\n */\nexport interface QuerySource<T> {\n /** Snapshot of all current records. The query never mutates this array. */\n snapshot(): readonly T[]\n /** Subscribe to mutations; returns an unsubscribe function. */\n subscribe?(cb: () => void): () => void\n /** Index store for the indexed-fast-path. Optional. */\n getIndexes?(): CollectionIndexes | null\n /** O(1) record lookup by id, used to materialize index hits. */\n lookupById?(id: string): T | undefined\n}\n\ninterface InternalSource {\n snapshot(): readonly unknown[]\n subscribe?(cb: () => void): () => void\n getIndexes?(): CollectionIndexes | null\n lookupById?(id: string): unknown\n}\n\n/**\n * The chainable builder. All methods return a new Query — the original\n * remains unchanged. Terminal methods (`toArray`, `first`, `count`,\n * `subscribe`) execute the plan against the source.\n *\n * Type parameter T flows through the public API for ergonomics, but the\n * internal storage uses `unknown` so Collection<T> stays covariant.\n *\n * The optional `joinContext` is attached when the Query is constructed\n * via `Collection.query()` (Collection passes in a context built from\n * the Vault's join resolver). A Query constructed via `new Query`\n * directly — e.g. from tests with a plain-object source — has no\n * joinContext, and calling `.join()` on it throws with an actionable\n * error. See `query/join.ts` for the full design.\n */\nexport class Query<T> {\n private readonly source: InternalSource\n private readonly plan: QueryPlan\n private readonly joinContext: JoinContext | undefined\n private readonly aggregateStrategy: AggregateStrategy\n\n constructor(\n source: QuerySource<T>,\n plan: QueryPlan = EMPTY_PLAN,\n joinContext?: JoinContext,\n aggregateStrategy: AggregateStrategy = NO_AGGREGATE,\n ) {\n this.source = source as InternalSource\n this.plan = plan\n this.joinContext = joinContext\n this.aggregateStrategy = aggregateStrategy\n }\n\n /** Add a field comparison. Multiple where() calls are AND-combined. */\n where(field: string, op: Operator, value: unknown): Query<T> {\n const clause: FieldClause = { type: 'field', field, op, value }\n return new Query<T>(\n this.source as QuerySource<T>,\n { ...this.plan, clauses: [...this.plan.clauses, clause] },\n this.joinContext,\n this.aggregateStrategy,\n )\n }\n\n /**\n * Logical OR group. Pass a callback that builds a sub-query.\n * Each clause inside the callback is OR-combined; the group itself\n * joins the parent plan with AND.\n */\n or(builder: (q: Query<T>) => Query<T>): Query<T> {\n const sub = builder(\n new Query<T>(this.source as QuerySource<T>, EMPTY_PLAN, this.joinContext, this.aggregateStrategy),\n )\n const group: GroupClause = {\n type: 'group',\n op: 'or',\n clauses: sub.plan.clauses,\n }\n return new Query<T>(\n this.source as QuerySource<T>,\n { ...this.plan, clauses: [...this.plan.clauses, group] },\n this.joinContext,\n this.aggregateStrategy,\n )\n }\n\n /**\n * Logical AND group. Same shape as `or()` but every clause inside the group\n * must match. 
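For example — \"status is\n   * open, OR (status is archived AND region is EU)\", with\n   * hypothetical fields:\n   *\n   * ```ts\n   * invoices.query().or((q) =>\n   *   q.where('status', '==', 'open')\n   *     .and((g) => g.where('status', '==', 'archived').where('region', '==', 'EU')),\n   * )\n   * ```\n   *\n   * 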
Useful for explicit grouping inside a larger OR.\n   */\n  and(builder: (q: Query<T>) => Query<T>): Query<T> {\n    const sub = builder(\n      new Query<T>(this.source as QuerySource<T>, EMPTY_PLAN, this.joinContext, this.aggregateStrategy),\n    )\n    const group: GroupClause = {\n      type: 'group',\n      op: 'and',\n      clauses: sub.plan.clauses,\n    }\n    return new Query<T>(\n      this.source as QuerySource<T>,\n      { ...this.plan, clauses: [...this.plan.clauses, group] },\n      this.joinContext,\n      this.aggregateStrategy,\n    )\n  }\n\n  /** Escape hatch: add an arbitrary predicate function. Not serializable. */\n  filter(fn: (record: T) => boolean): Query<T> {\n    const clause: FilterClause = {\n      type: 'filter',\n      fn: fn as (record: unknown) => boolean,\n    }\n    return new Query<T>(\n      this.source as QuerySource<T>,\n      { ...this.plan, clauses: [...this.plan.clauses, clause] },\n      this.joinContext,\n      this.aggregateStrategy,\n    )\n  }\n\n  /** Sort by a field. Subsequent calls are tie-breakers. */\n  orderBy(field: string, direction: 'asc' | 'desc' = 'asc'): Query<T> {\n    return new Query<T>(\n      this.source as QuerySource<T>,\n      { ...this.plan, orderBy: [...this.plan.orderBy, { field, direction }] },\n      this.joinContext,\n      this.aggregateStrategy,\n    )\n  }\n\n  /** Cap the result size. */\n  limit(n: number): Query<T> {\n    return new Query<T>(\n      this.source as QuerySource<T>,\n      { ...this.plan, limit: n },\n      this.joinContext,\n      this.aggregateStrategy,\n    )\n  }\n\n  /** Skip the first N matching records (after ordering). */\n  offset(n: number): Query<T> {\n    return new Query<T>(\n      this.source as QuerySource<T>,\n      { ...this.plan, offset: n },\n      this.joinContext,\n      this.aggregateStrategy,\n    )\n  }\n\n  /**\n   * Resolve a `ref()`-declared foreign key and attach the right-side\n   * record under `opts.as`. V1 ships eager, single-FK, intra-vault\n   * joins.\n   *\n   * ```ts\n   * const rows = invoices.query()\n   *   .where('status', '==', 'open')\n   *   .join('clientId', { as: 'client' })\n   *   .toArray()\n   * // → [{ id, amount, client: { id, name, ... } }, ...]\n   * ```\n   *\n   * Preconditions:\n   * - The Query must have a `joinContext` (constructed via\n   *   `Collection.query()`, not `new Query`).\n   * - `field` must have a matching `refs: { [field]: ref('<target>') }`\n   *   declaration on the left collection.\n   * - The target collection must be reachable via the vault\n   *   (either currently open or openable on demand).\n   *\n   * Strategy:\n   * - Nested-loop against `lookupById` when the target source\n   *   provides it (the common path for Collection targets).\n   * - Hash join otherwise, or when `{ strategy: 'hash' }` is\n   *   explicitly passed for test purposes.\n   *\n   * Ref-mode semantics on dangling refs (left record has a non-null\n   * FK value pointing at a right-side id that doesn't exist):\n   * - `strict` → throws `DanglingReferenceError` with the full\n   *   field / target / refId context.\n   * - `warn` → attaches `null` and emits a one-shot warning per\n   *   unique dangling pair.\n   * - `cascade` → attaches `null` silently. Cascade is a\n   *   delete-time mode; dangling refs visible at read time are\n   *   either mid-flight cascades or pre-existing orphans, not a\n   *   DSL-level error.\n   *\n   * A left-side record whose FK field is `null` / `undefined` is NOT\n   * a dangling ref — it's \"no reference at all\", always allowed\n   * regardless of mode.\n   *\n   * The return type widens `T` with `Record<As, R | null>`. 
The `R`\n   * parameter is optional — supply it explicitly for type-checked\n   * access to the joined fields:\n   *\n   * ```ts\n   * invoices.query().join<'client', Client>('clientId', { as: 'client' })\n   * //               ^^^^^^^^^^^^^^^^^^^ alias literal + right-side type\n   * ```\n   *\n   * Without the generic, the joined field is typed as `unknown`, which\n   * still works but requires a cast to access its properties.\n   *\n   * Joins stay intra-vault by construction — cross-vault\n   * correlation goes through `Noydb.queryAcross`, not\n   * `.join()`.\n   */\n  join<As extends string, R = unknown>(\n    field: string,\n    opts: { as: As; strategy?: JoinStrategy; maxRows?: number },\n  ): Query<T & Record<As, R | null>> {\n    if (!this.joinContext) {\n      throw new Error(\n        `Query.join() requires a join context. Use collection.query() ` +\n          `to construct a join-capable Query instead of the Query constructor ` +\n          `directly (the direct constructor is only used for tests with ` +\n          `plain-object sources).`,\n      )\n    }\n    const descriptor = this.joinContext.resolveRef(field)\n    // Check for dictKey join when no ref() is declared\n    const isDictJoinField = !descriptor && this.joinContext.resolveDictSource?.(field) != null\n    if (!descriptor && !isDictJoinField) {\n      throw new Error(\n        `Query.join(): no ref() declared for field \"${field}\" on collection ` +\n          `\"${this.joinContext.leftCollection}\". Add ` +\n          `refs: { ${field}: ref('<target-collection>') } to the collection ` +\n          `options, then retry. See the ref() docs for the full list of modes.`,\n      )\n    }\n    const leg: JoinLeg = descriptor\n      ? {\n          field,\n          as: opts.as,\n          target: descriptor.target,\n          mode: descriptor.mode,\n          strategy: opts.strategy,\n          maxRows: opts.maxRows,\n          // constraint #1 — always 'all' in v1. Do not remove.\n          partitionScope: 'all',\n        }\n      : {\n          // Dict join leg\n          field,\n          as: opts.as,\n          target: field, // dict name = field name for dictKey\n          mode: 'strict',\n          strategy: opts.strategy,\n          maxRows: opts.maxRows,\n          partitionScope: 'all',\n          isDictJoin: true,\n        }\n    return new Query<T & Record<As, R | null>>(\n      this.source as unknown as QuerySource<T & Record<As, R | null>>,\n      { ...this.plan, joins: [...this.plan.joins, leg] },\n      this.joinContext,\n      this.aggregateStrategy,\n    )\n  }\n\n  /**\n   * Execute the plan and return the matching records. When the plan\n   * carries any join legs, they are applied after `where` / `orderBy`\n   * / `limit` / `offset` narrow the left set. See the `.join()` doc\n   * for the ordering rationale.\n   */\n  toArray(): T[] {\n    const base = executePlanWithSource(this.source, this.plan)\n    if (this.plan.joins.length === 0) return base as T[]\n    if (!this.joinContext) {\n      // Unreachable in practice — .join() throws if joinContext is\n      // missing — but belt-and-braces for direct plan construction.\n      throw new Error(\n        `Query.toArray(): plan carries ${this.plan.joins.length} join leg(s) ` +\n          `but no JoinContext is attached. This usually means the Query was ` +\n          `constructed via the raw Query constructor with a plan that had joins ` +\n          `pre-populated. Use collection.query().join(...) instead.`,\n      )\n    }\n    return applyJoins(base, this.plan.joins, this.joinContext) as T[]\n  }\n\n  /** Return the first matching record, or null. Joins are applied. */\n  first(): T | null {\n    const arr = this.limit(1).toArray()\n    return arr[0] ?? null\n  }\n\n  /**\n   * Return the number of matching records (after where/filter,\n   * before limit). 
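For example:\n   *\n   * ```ts\n   * invoices.query().where('status', '==', 'open').limit(10).count()\n   * // → the count of ALL open invoices; the limit(10) is ignored\n   * ```\n   *\n   * 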
**Joins are NOT applied** — count() reports the\n   * left-side cardinality, because joins in v1 are projection-only\n   * (they attach an aliased field; they never filter). Running joins\n   * here just to discard the aliases would be wasteful, and in strict\n   * mode it could throw `DanglingReferenceError` for a call whose\n   * intent is purely to count.\n   */\n  count(): number {\n    // Use the same index-aware candidate machinery as toArray(); skip the\n    // index-driving clause from re-evaluation. The length BEFORE limit/offset\n    // is what `count()` documents.\n    const { candidates, remainingClauses } = candidateRecords(this.source, this.plan.clauses)\n    if (remainingClauses.length === 0) return candidates.length\n    return filterRecords(candidates, remainingClauses).length\n  }\n\n  /**\n   * Reduce the matching records through a named set of reducers —\n   * the aggregation terminal.\n   *\n   * ```ts\n   * const { total, n, avgAmount } = invoices.query()\n   *   .where('status', '==', 'open')\n   *   .aggregate({\n   *     total: sum('amount'),\n   *     n: count(),\n   *     avgAmount: avg('amount'),\n   *   })\n   *   .run()\n   * ```\n   *\n   * Returns an `Aggregation<R>` wrapper with two terminals:\n   * - `.run(): R` — synchronous one-shot reduction\n   * - `.live(): LiveAggregation<R>` — reactive primitive that\n   *   re-runs the reduction whenever the source notifies of a\n   *   change. Always call `live.stop()` when finished.\n   *\n   * The reducer spec is bound here once and reused by both\n   * terminals — this is why `.aggregate()` returns a wrapper instead\n   * of being a direct terminal. Consumers who only need the static\n   * value read `.run()`; consumers wiring a reactive UI read\n   * `.live()`.\n   *\n   * Joins are intentionally NOT applied to aggregations in v1 —\n   * the same logic as `.count()`. Joins in v1 are projection-only\n   * (they attach an aliased field and never filter), so running\n   * them just to throw the aliases away would be wasteful. If you\n   * need a reducer that reads a joined field, open an issue —\n   * aggregations-across-joins is explicitly out of scope for v1.\n   *\n   * Every reducer factory accepts an optional `{ seed }` parameter\n   * that is plumbed through the protocol but unused by the\n   * executor — that's constraint #2. When partition-aware\n   * aggregation lands, the seed will carry running state across\n   * partition boundaries without an API break.\n   */\n  aggregate<Spec extends AggregateSpec>(\n    spec: Spec,\n  ): Aggregation<AggregateResult<Spec>> {\n    // Closure over the current query. Produces the record set that\n    // the aggregation reduces — same pipeline as `count()`, skipping\n    // limit/offset because aggregation is over the full match set,\n    // not a paginated slice. (A paginated aggregation would be a\n    // different operation; see docs for rationale.)\n    const source = this.source\n    const clauses = this.plan.clauses\n    const executeRecords = (): readonly unknown[] => {\n      const { candidates, remainingClauses } = candidateRecords(source, clauses)\n      return remainingClauses.length === 0\n        ? 
candidates\n        : filterRecords(candidates, remainingClauses)\n    }\n\n    // Upstream for live mode — only the left source subscribes.\n    // Joined aggregations are out of scope for v1 (see above), so\n    // there are no right-side change streams to merge in.\n    const upstreams: AggregationUpstream[] = []\n    if (source.subscribe) {\n      const subscribe = source.subscribe.bind(source)\n      upstreams.push({ subscribe: (cb: () => void) => subscribe(cb) })\n    }\n\n    return this.aggregateStrategy.aggregate<Spec>(executeRecords, spec, upstreams)\n  }\n\n  /**\n   * Partition matching records into buckets keyed by a field, then\n   * terminate with `.aggregate(spec)` to compute per-bucket\n   * reducers.\n   *\n   * ```ts\n   * const byClient = invoices.query()\n   *   .where('status', '==', 'open')\n   *   .groupBy('clientId')\n   *   .aggregate({ total: sum('amount'), n: count() })\n   *   .run()\n   * // → [ { clientId: 'c1', total: 5250, n: 3 }, … ]\n   * ```\n   *\n   * Result rows carry the group key value under the grouping field\n   * name plus every reducer output from the spec. Buckets are\n   * emitted in first-seen order — consumers who want a specific\n   * ordering should `.sort()` downstream.\n   *\n   * **Cardinality caps:** a one-shot warning fires at 10_000\n   * distinct groups; `GroupCardinalityError` throws at 100_000.\n   * Grouping on a high-uniqueness field like `id` or `createdAt` is\n   * almost always a query mistake — the error message names the\n   * field and observed cardinality and suggests narrowing with\n   * `.where()` first.\n   *\n   * **Null / undefined keys:** records with a missing or explicitly\n   * `null` group field get their own buckets. `Map`-based\n   * partitioning distinguishes `undefined` from `null`, so the two\n   * cases do NOT merge. Consumers who want them merged should\n   * coalesce upstream with `.filter()`.\n   *\n   * **Joins are not applied** — same rationale as `.count()` and\n   * `.aggregate()`. Joined fields in v1 are projection-only, so\n   * running a join inside a grouping pipeline would be wasteful and\n   * could trigger `DanglingReferenceError` in strict mode for a\n   * call whose intent is purely to bucket-and-reduce. Grouping by\n   * a joined field is explicitly out of scope for v1 — file an\n   * issue if a real consumer needs it.\n   *\n   * **Filter clauses (`.filter(fn)`):** grouped queries still\n   * support filter clauses in the underlying plan — they run in\n   * the same candidate/filter pipeline that `.aggregate()` uses.\n   * The performance caveat is the same: filter clauses cost O(N)\n   * per record and can't be index-accelerated.\n   */\n  groupBy<F extends string>(field: F): GroupedQuery<T, F> {\n    // Same record-producing closure as .aggregate() — grouped and\n    // non-grouped aggregations execute over the same candidate set.\n    // We inline the closure here instead of sharing a helper so the\n    // builder stays allocation-friendly for the hot path.\n    const source = this.source\n    const clauses = this.plan.clauses\n    const executeRecords = (): readonly unknown[] => {\n      const { candidates, remainingClauses } = candidateRecords(source, clauses)\n      return remainingClauses.length === 0\n        ? candidates\n        : filterRecords(candidates, remainingClauses)\n    }\n\n    const upstreams: AggregationUpstream[] = []\n    if (source.subscribe) {\n      const subscribe = source.subscribe.bind(source)\n      upstreams.push({ subscribe: (cb: () => void) => subscribe(cb) })\n    }\n\n    // Wire dictKey label resolver for <field>Label projection\n    const joinCtx = this.joinContext\n    const dictLabelResolver = joinCtx?.resolveDictSource\n      ? 
(() => {\n          const dictSource = joinCtx.resolveDictSource(field)\n          if (!dictSource) return undefined\n          const snapshot = dictSource.snapshot()\n          const dictMap = new Map<string, Record<string, string>>()\n          for (const entry of snapshot) {\n            const k = (entry as Record<string, unknown>)['key']\n            const labels = (entry as Record<string, unknown>)['labels']\n            if (typeof k === 'string' && labels && typeof labels === 'object') {\n              dictMap.set(k, labels as Record<string, string>)\n            }\n          }\n          return async (\n            key: string,\n            locale: string,\n            fallback?: string | readonly string[],\n          ): Promise<string | undefined> => {\n            const labels = dictMap.get(key)\n            if (!labels) return undefined\n            if (labels[locale] !== undefined) return labels[locale]\n            const chain = Array.isArray(fallback)\n              ? (fallback as readonly string[])\n              : fallback\n                ? [fallback as string]\n                : []\n            for (const fb of chain) {\n              if (fb === 'any') {\n                const any = Object.values(labels)[0]\n                if (any !== undefined) return any\n              } else if (labels[fb] !== undefined) {\n                return labels[fb]\n              }\n            }\n            return undefined\n          }\n        })()\n      : undefined\n\n    return this.aggregateStrategy.groupBy<T, F>(executeRecords, field, upstreams, dictLabelResolver)\n  }\n\n  /**\n   * Re-run the query whenever the source notifies of changes.\n   * Returns an unsubscribe function. The callback receives the latest result.\n   * Throws if the source does not support subscriptions.\n   *\n   * **For joined queries, prefer `.live()`** — `subscribe()`\n   * only re-fires on LEFT-side changes, so joined data can be\n   * stale if the right side mutates between emissions. `.live()`\n   * merges change streams from every join target.\n   */\n  subscribe(cb: (result: T[]) => void): () => void {\n    if (!this.source.subscribe) {\n      throw new Error('Query source does not support subscriptions. Pass a source with a subscribe() method.')\n    }\n    cb(this.toArray())\n    return this.source.subscribe(() => cb(this.toArray()))\n  }\n\n  /**\n   * Reactive terminal — returns a `LiveQuery<T>` that re-runs the\n   * query and updates its `value` whenever any source feeding it\n   * mutates.\n   *\n   * For non-joined queries, `.live()` is a convenience over the\n   * existing `.subscribe()` callback shape: a hand-rolled reactive\n   * primitive with `value` / `error` fields and a `subscribe(cb)`\n   * notification channel. Framework-agnostic — Vue / React / Solid\n   * adapters wrap it in their own primitive.\n   *\n   * For joined queries, `.live()` additionally subscribes to every\n   * join target's change stream. Mutations on a right-side\n   * collection (insert / update / delete of a client referenced by\n   * an invoice) re-fire the live query and re-evaluate every\n   * dependent left row. Right-side targets are deduped by\n   * collection name, so a chain that joins the same target twice\n   * (e.g. billing client + shipping client → both 'clients') only\n   * subscribes once.\n   *\n   * **Ref-mode behavior on right-side disappearance** — matches the\n   * eager `.toArray()` contract:\n   * - `strict` → re-run throws `DanglingReferenceError`. The\n   *   LiveQuery catches the throw, stores it in `live.error`, and\n   *   notifies listeners (the throw does NOT propagate out of\n   *   the source's change handler — that would tear down the\n   *   emitter). 
Consumers check `live.error` after each\n * notification and render an error state in the UI.\n * - `warn` → joined value flips to `null`; the existing\n * warn-channel deduplication keeps repeated re-runs from\n * spamming the console.\n * - `cascade` → no special handling needed; the cascade-\n * delete mechanism propagates the right-side delete into the\n * left collection on the next tick, and the live query\n * naturally re-fires with the orphaned left rows gone.\n *\n * Always call `live.stop()` when finished — it tears down every\n * upstream subscription. The Vue layer's `onUnmounted` hook\n * should call `stop()` automatically; raw consumers must do it\n * themselves.\n *\n * **Limitations:**\n * - No granular delta updates — the whole query re-runs on\n * every change.\n * - No microtask batching — bursty changes produce one re-run\n * per change.\n * - No re-planning under live mutations — the planner picks\n * once at subscription time and reuses the same plan.\n * - Streaming live joins are deferred.\n */\n live(): LiveQuery<T> {\n const upstreams: LiveUpstream[] = []\n\n // Left-side change stream — every live query subscribes to\n // its source if the source supports subscriptions.\n if (this.source.subscribe) {\n const leftSubscribe = this.source.subscribe.bind(this.source)\n upstreams.push({\n subscribe: (cb: () => void) => leftSubscribe(cb),\n })\n }\n\n // Right-side change streams — only for joined queries. Dedup\n // by target name so a chain joining the same target twice\n // doesn't double-subscribe and double-fire on every right-side\n // mutation.\n if (this.plan.joins.length > 0 && this.joinContext) {\n const subscribed = new Set<string>()\n for (const leg of this.plan.joins) {\n if (subscribed.has(leg.target)) continue\n subscribed.add(leg.target)\n const rightSource = this.joinContext.resolveSource(leg.target)\n if (rightSource?.subscribe) {\n const rightSubscribe = rightSource.subscribe.bind(rightSource)\n upstreams.push({\n subscribe: (cb: () => void) => rightSubscribe(cb),\n })\n }\n }\n }\n\n // The recompute is just toArray bound to this query — same\n // pipeline as eager execution, including join application.\n return buildLiveQuery<T>(() => this.toArray(), upstreams)\n }\n\n /**\n * Return the plan as a JSON-friendly object. FilterClause entries are\n * stripped (their `fn` cannot be serialized) and replaced with\n * { type: 'filter', fn: '[function]' } so devtools can still see them.\n */\n toPlan(): unknown {\n return serializePlan(this.plan)\n }\n}\n\n/**\n * Index-aware execution: try the indexed fast path first, fall back to a\n * full scan otherwise. Mirrors `executePlan` for the public surface but\n * takes a `QuerySource` so it can consult `getIndexes()` and `lookupById()`.\n */\nfunction executePlanWithSource(source: InternalSource, plan: QueryPlan): unknown[] {\n const { candidates, remainingClauses } = candidateRecords(source, plan.clauses)\n // Only the clauses NOT consumed by the index need re-evaluation. This is\n // the key optimization that makes indexed queries dominate linear scans:\n // for a single-clause query against an indexed field, `remainingClauses`\n // is empty and we skip the per-record predicate evaluation entirely.\n let result = remainingClauses.length === 0\n ? 
[...candidates]\n    : filterRecords(candidates, remainingClauses)\n  if (plan.orderBy.length > 0) {\n    result = sortRecords(result, plan.orderBy)\n  }\n  if (plan.offset > 0) {\n    result = result.slice(plan.offset)\n  }\n  if (plan.limit !== undefined) {\n    result = result.slice(0, plan.limit)\n  }\n  return result\n}\n\ninterface CandidateResult {\n  /** The reduced candidate set, materialized to record objects. */\n  readonly candidates: readonly unknown[]\n  /** The clauses that the index could not satisfy and must still be evaluated. */\n  readonly remainingClauses: readonly Clause[]\n}\n\n/**\n * Pick a candidate record set using the index store when possible.\n *\n * Strategy: scan the top-level clauses for the FIRST `==` or `in` clause\n * against an indexed field. If found, use the index to materialize a\n * candidate set and return the OTHER clauses as `remainingClauses`. The\n * caller skips re-evaluating the index-driving clause because the index\n * is authoritative for that field.\n *\n * This is a deliberately simple planner. A future optimizer could pick\n * the most selective index, intersect multiple indexes, or push composite\n * keys through. For v1 the single-index fast path is good enough.\n */\nfunction candidateRecords(source: InternalSource, clauses: readonly Clause[]): CandidateResult {\n  const indexes = source.getIndexes?.()\n  if (!indexes || !source.lookupById || clauses.length === 0) {\n    return { candidates: source.snapshot(), remainingClauses: clauses }\n  }\n  // Bind the lookup method through an arrow so it doesn't drift from\n  // its `this` context — keeps the unbound-method lint rule happy.\n  const lookupById = (id: string): unknown => source.lookupById?.(id)\n\n  for (let i = 0; i < clauses.length; i++) {\n    const clause = clauses[i]!\n    if (clause.type !== 'field') continue\n    if (!indexes.has(clause.field)) continue\n\n    let ids: ReadonlySet<string> | null = null\n    if (clause.op === '==') {\n      ids = indexes.lookupEqual(clause.field, clause.value)\n    } else if (clause.op === 'in' && Array.isArray(clause.value)) {\n      ids = indexes.lookupIn(clause.field, clause.value)\n    }\n\n    if (ids !== null) {\n      // Found an index-eligible clause: materialize the candidate set and\n      // remove this clause from the remaining list.\n      const remaining: Clause[] = []\n      for (let j = 0; j < clauses.length; j++) {\n        if (j !== i) remaining.push(clauses[j]!)\n      }\n      return {\n        candidates: materializeIds(ids, lookupById),\n        remainingClauses: remaining,\n      }\n    }\n    // Not index-eligible — keep scanning in case a later clause is a\n    // better candidate.\n  }\n\n  // No clause was index-eligible — fall back to a full scan.\n  return { candidates: source.snapshot(), remainingClauses: clauses }\n}\n\nfunction materializeIds(\n  ids: ReadonlySet<string>,\n  lookupById: (id: string) => unknown,\n): unknown[] {\n  const out: unknown[] = []\n  for (const id of ids) {\n    const record = lookupById(id)\n    if (record !== undefined) out.push(record)\n  }\n  return out\n}\n\n/**\n * Execute a plan against a snapshot of records.\n * Pure function — same input, same output, no side effects.\n *\n * Records are typed as `unknown` because plans are non-parametric; callers\n * cast the return type at the API surface (see `Query.toArray()`).\n */\nexport function executePlan(records: readonly unknown[], plan: QueryPlan): unknown[] {\n  let result = filterRecords(records, plan.clauses)\n  if (plan.orderBy.length > 0) {\n    result = sortRecords(result, plan.orderBy)\n  }\n  if (plan.offset > 0) {\n    result = result.slice(plan.offset)\n  }\n  if (plan.limit 
!== undefined) {\n    result = result.slice(0, plan.limit)\n  }\n  return result\n}\n\nfunction filterRecords(records: readonly unknown[], clauses: readonly Clause[]): unknown[] {\n  if (clauses.length === 0) return [...records]\n  const out: unknown[] = []\n  for (const r of records) {\n    let matches = true\n    for (const clause of clauses) {\n      if (!evaluateClause(r, clause)) {\n        matches = false\n        break\n      }\n    }\n    if (matches) out.push(r)\n  }\n  return out\n}\n\nfunction sortRecords(records: unknown[], orderBy: readonly OrderBy[]): unknown[] {\n  // Stable sort: Array.prototype.sort is required to be stable since ES2019.\n  return [...records].sort((a, b) => {\n    for (const { field, direction } of orderBy) {\n      const av = readField(a, field)\n      const bv = readField(b, field)\n      const cmp = compareValues(av, bv)\n      if (cmp !== 0) return direction === 'asc' ? cmp : -cmp\n    }\n    return 0\n  })\n}\n\nfunction readField(record: unknown, field: string): unknown {\n  if (record === null || record === undefined) return undefined\n  if (!field.includes('.')) {\n    return (record as Record<string, unknown>)[field]\n  }\n  const segments = field.split('.')\n  let cursor: unknown = record\n  for (const segment of segments) {\n    if (cursor === null || cursor === undefined) return undefined\n    cursor = (cursor as Record<string, unknown>)[segment]\n  }\n  return cursor\n}\n\nfunction compareValues(a: unknown, b: unknown): number {\n  // Nullish goes last in asc order.\n  if (a === undefined || a === null) return b === undefined || b === null ? 0 : 1\n  if (b === undefined || b === null) return -1\n  if (typeof a === 'number' && typeof b === 'number') return a - b\n  if (typeof a === 'string' && typeof b === 'string') return a < b ? -1 : a > b ? 1 : 0\n  if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime()\n  // Mixed/unsupported types: treat as equal so the sort stays stable.\n  // (Deliberate choice — we don't try to coerce arbitrary objects to strings.)\n  return 0\n}\n\nfunction serializePlan(plan: QueryPlan): unknown {\n  return {\n    clauses: plan.clauses.map(serializeClause),\n    orderBy: plan.orderBy,\n    limit: plan.limit,\n    offset: plan.offset,\n    joins: plan.joins,\n  }\n}\n\nfunction serializeClause(clause: Clause): unknown {\n  if (clause.type === 'filter') {\n    return { type: 'filter', fn: '[function]' }\n  }\n  if (clause.type === 'group') {\n    return {\n      type: 'group',\n      op: clause.op,\n      clauses: clause.clauses.map(serializeClause),\n    }\n  }\n  return clause\n}\n","/**\n * Secondary indexes for the query DSL.\n *\n * V1 ships **in-memory hash indexes**:\n * - Built during `Collection.ensureHydrated()` from the decrypted cache\n * - Maintained incrementally on `put` and `delete`\n * - Consulted by the query executor for `==` and `in` operators on\n *   indexed fields, falling back to a linear scan otherwise\n * - Live entirely in memory — no adapter writes for the index itself\n *\n * Persistent encrypted index blobs (the spec's \"store as a separate\n * AES-256-GCM blob\" note) are deferred to a follow-up issue. The reasons\n * are documented in the PR body — short version: at the target\n * scale of 1K–50K records, building the index during hydrate is free,\n * so persistence buys nothing measurable.\n */\n\nimport { readPath } from '../query/predicate.js'\n\n/**\n * Index declaration accepted by `Collection`'s constructor.\n *\n * Accepts:\n * - `string` — a single-field hash index (`'clientId'`)\n * - `{ fields: [...] }` or `readonly string[]` — a composite index\n *   over an ordered field tuple. 
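For example (the\n *   `indexes` option name is shown for illustration):\n *\n *   ```ts\n *   indexes: ['clientId', { fields: ['status', 'clientId'] }]\n *   ```\n *\n *   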
Only lazy-mode\n * collections consume composite declarations today; eager mode\n * silently treats a composite as equivalent to declaring each\n * component field as its own single-field index.\n *\n * Additive variants (unique constraints, partial indexes) will land as\n * further union members without breaking existing declarations.\n */\nexport type IndexDef = string | { readonly fields: readonly string[] } | readonly string[]\n\n/**\n * Internal representation of a built hash index.\n *\n * Maps stringified field values to the set of record ids whose value\n * for that field matches. Stringification keeps the index simple and\n * works uniformly for primitives (`'open'`, `'42'`, `'true'`).\n *\n * Records whose indexed field is `undefined` or `null` are NOT inserted\n * — `query().where('field', '==', undefined)` falls back to a linear\n * scan, which is the conservative behavior.\n */\nexport interface HashIndex {\n readonly field: string\n readonly buckets: Map<string, Set<string>>\n}\n\n/**\n * Container for all indexes on a single collection.\n *\n * Methods are pure with respect to the in-memory `buckets` Map — they\n * never touch the adapter or the keyring. The Collection class owns\n * lifecycle (build on hydrate, maintain on put/delete).\n */\nexport class CollectionIndexes {\n private readonly indexes = new Map<string, HashIndex>()\n\n /**\n * Declare an index. Subsequent record additions are tracked under it.\n * Calling this twice for the same field is a no-op (idempotent).\n */\n declare(field: string): void {\n if (this.indexes.has(field)) return\n this.indexes.set(field, { field, buckets: new Map() })\n }\n\n /** True if the given field has a declared index. */\n has(field: string): boolean {\n return this.indexes.has(field)\n }\n\n /** All declared field names, in declaration order. */\n fields(): string[] {\n return [...this.indexes.keys()]\n }\n\n /**\n * Build all declared indexes from a snapshot of records.\n * Called once per hydration. O(N × indexes.size).\n */\n build<T>(records: ReadonlyArray<{ id: string; record: T }>): void {\n for (const idx of this.indexes.values()) {\n idx.buckets.clear()\n for (const { id, record } of records) {\n addToIndex(idx, id, record)\n }\n }\n }\n\n /**\n * Insert or update a single record across all indexes.\n * Called by `Collection.put()` after the encrypted write succeeds.\n *\n * If `previousRecord` is provided, the record is removed from any old\n * buckets first — this is the update path. Pass `null` for fresh adds.\n */\n upsert<T>(id: string, newRecord: T, previousRecord: T | null): void {\n if (this.indexes.size === 0) return\n if (previousRecord !== null) {\n this.remove(id, previousRecord)\n }\n for (const idx of this.indexes.values()) {\n addToIndex(idx, id, newRecord)\n }\n }\n\n /**\n * Remove a record from all indexes. Called by `Collection.delete()`\n * (and as the first half of `upsert` for the update path).\n */\n remove<T>(id: string, record: T): void {\n if (this.indexes.size === 0) return\n for (const idx of this.indexes.values()) {\n removeFromIndex(idx, id, record)\n }\n }\n\n /** Drop all index data. Called when the collection is invalidated. */\n clear(): void {\n for (const idx of this.indexes.values()) {\n idx.buckets.clear()\n }\n }\n\n /**\n * Equality lookup: return the set of record ids whose `field` matches\n * the given value. 
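For example, with a declared\n   * `status` index (record ids invented):\n   *\n   * ```ts\n   * indexes.lookupEqual('status', 'open') // Set { 'i1', 'i7' }\n   * indexes.lookupEqual('memo', 'x')      // null — no index on 'memo'\n   * ```\n   *\n   * 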
Returns `null` if no index covers the field — the\n   * caller should fall back to a linear scan.\n   *\n   * The returned Set is a reference to the index's internal storage —\n   * callers must NOT mutate it.\n   */\n  lookupEqual(field: string, value: unknown): ReadonlySet<string> | null {\n    const idx = this.indexes.get(field)\n    if (!idx) return null\n    const key = stringifyKey(value)\n    return idx.buckets.get(key) ?? EMPTY_SET\n  }\n\n  /**\n   * Set lookup: return the union of record ids whose `field` matches any\n   * of the given values. Returns `null` if no index covers the field.\n   */\n  lookupIn(field: string, values: readonly unknown[]): ReadonlySet<string> | null {\n    const idx = this.indexes.get(field)\n    if (!idx) return null\n    const out = new Set<string>()\n    for (const value of values) {\n      const key = stringifyKey(value)\n      const bucket = idx.buckets.get(key)\n      if (bucket) {\n        for (const id of bucket) out.add(id)\n      }\n    }\n    return out\n  }\n}\n\nconst EMPTY_SET: ReadonlySet<string> = new Set()\n\n/**\n * Stringify a value into a stable bucket key.\n *\n * `null`/`undefined` produce a sentinel that records will never match\n * (so we never index nullish values — `where('x', '==', null)` falls back\n * to a linear scan). Numbers, booleans, strings, and Date objects are\n * coerced via `String()`. Objects produce a sentinel that no real record\n * will match — querying with object values is a code smell.\n */\nfunction stringifyKey(value: unknown): string {\n  if (value === null || value === undefined) return '\\0NULL\\0'\n  if (typeof value === 'string') return value\n  if (typeof value === 'number' || typeof value === 'boolean') return String(value)\n  if (value instanceof Date) return value.toISOString()\n  return '\\0OBJECT\\0'\n}\n\nfunction addToIndex<T>(idx: HashIndex, id: string, record: T): void {\n  const value = readPath(record, idx.field)\n  if (value === null || value === undefined) return\n  const key = stringifyKey(value)\n  let bucket = idx.buckets.get(key)\n  if (!bucket) {\n    bucket = new Set()\n    idx.buckets.set(key, bucket)\n  }\n  bucket.add(id)\n}\n\nfunction removeFromIndex<T>(idx: HashIndex, id: string, record: T): void {\n  const value = readPath(record, idx.field)\n  if (value === null || value === undefined) return\n  const key = stringifyKey(value)\n  const bucket = idx.buckets.get(key)\n  if (!bucket) return\n  bucket.delete(id)\n  // Clean up empty buckets so the Map doesn't accumulate dead keys.\n  if (bucket.size === 0) idx.buckets.delete(key)\n}\n","/**\n * Aggregation reducers for the query DSL.\n *\n * V1 ships the reducer protocol plus five built-in factories\n * (`count`, `sum`, `avg`, `min`, `max`) consumed by `Query.aggregate()`\n * and, in the future, `Scan.aggregate()`. Every factory accepts\n * an optional `{ seed }` parameter that is plumbed through the\n * protocol but unused by the executor — that's the load-bearing\n * half of constraint #2. When partition-aware aggregation\n * lands, the seed carries the previous partition's running total into\n * the next partition without requiring a protocol change.\n *\n * Reducers are intentionally generic over their internal state type\n * `S` so compound reducers (avg keeps `{sum, count}`, min/max keep a\n * value bag) can model internal bookkeeping without leaking the\n * implementation through the accumulator's public shape. `finalize`\n * collapses `S` back into the user-visible `R`.\n *\n * Reducers are pure data — `init` / `step` / `finalize` / optional\n * `remove` are stateless functions that receive and return `S`. 
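A hypothetical\n * custom reducer in that shape (illustration only, not shipped):\n *\n * ```ts\n * // Counts records whose `status` field is 'open'.\n * const openCount: Reducer<number> = {\n *   init: () => 0,\n *   step: (s, r) => s + (readPath(r, 'status') === 'open' ? 1 : 0),\n *   remove: (s, r) => s - (readPath(r, 'status') === 'open' ? 1 : 0),\n *   finalize: (s) => s,\n * }\n * ```\n *\n * 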
This\n * is the shape that admits O(1) incremental maintenance in a future\n * optimization (delta-aware `LiveAggregation` applies `step` or\n * `remove` per delta), without blocking the simpler \"full re-run on\n * source change\" that ships in v1.\n */\n\nimport { readPath } from '../query/predicate.js'\n\n/**\n * A single reducer: factory-produced, ready to plug into an\n * `.aggregate()` spec.\n *\n * Type parameters:\n * - `R` — user-visible result type (what the aggregation returns\n *   for this slot, e.g. `number` for `sum()`)\n * - `S` — internal state type, defaults to `R` for simple reducers\n *   that don't need compound bookkeeping\n *\n * A reducer is stateless: every method is pure over `S`. `init()` is\n * called once per aggregation run to build the initial state; `step()`\n * folds a record into the state; `remove()` (optional) un-folds a\n * record, enabling incremental live maintenance; `finalize()` reads\n * the final answer out of the state at the end of the run.\n */\nexport interface Reducer<R, S = R> {\n  /** Build the initial state for a fresh aggregation run. */\n  init(): S\n  /** Fold a record into the state. Returns the new state. */\n  step(state: S, record: unknown): S\n  /**\n   * Un-fold a record from the state. Returns the new state.\n   *\n   * Optional — reducers without `remove` cannot be maintained\n   * incrementally and must be re-run from scratch when the underlying\n   * record set changes. `sum`, `count`, `avg` implement `remove` in\n   * O(1); `min` and `max` implement it in O(N) worst case (when the\n   * extremum itself is removed and the next extremum must be\n   * recomputed from the remaining contributing values).\n   */\n  remove?(state: S, record: unknown): S\n  /** Collapse the internal state into the user-visible result. */\n  finalize(state: S): R\n}\n\n/**\n * Common options accepted by every reducer factory.\n *\n * `seed` — optional initial value for the internal state. **Unused by\n * the executor**, plumbed through the protocol for constraint\n * #2 (partition-aware aggregation seam). In a later release,\n * partitioned aggregations will pass the previous partition's carry\n * as `seed` so a long time series can be rolled forward one partition\n * at a time without re-aggregating closed partitions.\n *\n * V1 always uses `init()` with the factory's zero value, regardless\n * of whether `seed` was passed. Do not remove the parameter — that's\n * the whole point of having it exist now.\n */\nexport interface ReducerOptions<TSeed = unknown> {\n  /** constraint #2 — seed is plumbed through but unused in v1. */\n  readonly seed?: TSeed\n}\n\n// ---------------------------------------------------------------------------\n// Factories\n// ---------------------------------------------------------------------------\n\n/**\n * Count the number of records that match the query. Ignores field\n * values entirely — the count is over the number of records, not over\n * the number of non-null field values in any column.\n */\nexport function count(opts?: ReducerOptions<number>): Reducer<number> {\n  // Seed captured on the closure but unused at execution time in v1.\n  // The reference in _seed keeps lint happy.\n  const _seed = opts?.seed\n  void _seed\n  return {\n    init: () => 0,\n    step: (state) => state + 1,\n    remove: (state) => state - 1,\n    finalize: (state) => state,\n  }\n}\n\n/**\n * Sum a numeric field across all matching records. 
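For example\n * (invented rows):\n *\n * ```ts\n * // rows: [{ amount: 10 }, { amount: '3' }, { amount: 25 }]\n * // sum('amount') → 35: the string '3' coerces to 0, not 3\n * ```\n *\n * 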
\n/**\n * Sum a numeric field across all matching records. Non-number values\n * at the field path are coerced to 0 — consumers who want a different\n * behavior (throw, skip, treat as NaN) should filter upstream via\n * `.where()` or write a custom reducer.\n */\nexport function sum(\n field: string,\n opts?: ReducerOptions<number>,\n): Reducer<number> {\n const _seed = opts?.seed\n void _seed\n return {\n init: () => 0,\n step: (state, record) => state + readNumber(record, field),\n remove: (state, record) => state - readNumber(record, field),\n finalize: (state) => state,\n }\n}\n\n/**\n * Arithmetic mean of a numeric field across all matching records.\n *\n * Returns `null` for an empty result set (zero records is not a\n * well-defined denominator — returning NaN would poison downstream\n * arithmetic, and throwing would force every consumer to wrap in\n * try/catch just to handle \"no matches\"). Consumers who want an\n * explicit zero should coalesce with `?? 0`.\n *\n * Internal state is `{sum, count}` so the running average can be\n * maintained incrementally — on each delta, both fields update in\n * O(1) and `finalize` divides. Directly storing `avg` as state would\n * not admit incremental removal without also tracking count.\n */\nexport function avg(\n field: string,\n opts?: ReducerOptions<{ sum: number; count: number }>,\n): Reducer<number | null, { sum: number; count: number }> {\n const _seed = opts?.seed\n void _seed\n return {\n init: () => ({ sum: 0, count: 0 }),\n step: (state, record) => ({\n sum: state.sum + readNumber(record, field),\n count: state.count + 1,\n }),\n remove: (state, record) => ({\n sum: state.sum - readNumber(record, field),\n count: state.count - 1,\n }),\n finalize: (state) => (state.count === 0 ? null : state.sum / state.count),\n }\n}\n\ninterface MinMaxState {\n /**\n * Multiset of contributing field values. Stored as a plain array\n * because we need to support `remove` and a plain array gives us\n * O(1) push + O(N) worst-case removal — which matches the\n * documented min/max removal complexity. A sorted structure would\n * let us drop the O(N) rescan but adds complexity we don't need\n * yet; consumers hitting the O(N) ceiling should file an issue.\n */\n readonly values: number[]\n}\n\nfunction pushValue(state: MinMaxState, value: number): MinMaxState {\n return { values: [...state.values, value] }\n}\n\nfunction removeValue(state: MinMaxState, value: number): MinMaxState {\n // Remove the first matching value — duplicates are fine, we only\n // need to drop one instance per `remove()` call so the multiset\n // count stays consistent with the record count.\n const idx = state.values.indexOf(value)\n if (idx < 0) return state\n const next = state.values.slice()\n next.splice(idx, 1)\n return { values: next }\n}\n\n/**\n * Smallest numeric value of a field across all matching records.\n * Returns `null` for an empty result set. See `avg()` for the\n * reasoning on `null` vs NaN vs throwing.\n *\n * Incremental complexity: O(1) for `step`, O(N) worst case for\n * `remove` when the current minimum is removed (the state holds the\n * full multiset of contributing values and `finalize` scans for the\n * new minimum). 
Consumers with very large result sets and frequent\n * removals of the current extremum should either accept the cost or\n * wait for a future optimization.\n */\nexport function min(\n field: string,\n opts?: ReducerOptions<number>,\n): Reducer<number | null, MinMaxState> {\n const _seed = opts?.seed\n void _seed\n return {\n init: () => ({ values: [] }),\n step: (state, record) => pushValue(state, readNumber(record, field)),\n remove: (state, record) => removeValue(state, readNumber(record, field)),\n finalize: (state) => {\n if (state.values.length === 0) return null\n let out = state.values[0]!\n for (let i = 1; i < state.values.length; i++) {\n const v = state.values[i]!\n if (v < out) out = v\n }\n return out\n },\n }\n}\n\n/**\n * Largest numeric value of a field across all matching records.\n * Mirror of `min()` — see that doc for semantics, null-on-empty\n * behavior, and the O(N) removal caveat.\n */\nexport function max(\n field: string,\n opts?: ReducerOptions<number>,\n): Reducer<number | null, MinMaxState> {\n const _seed = opts?.seed\n void _seed\n return {\n init: () => ({ values: [] }),\n step: (state, record) => pushValue(state, readNumber(record, field)),\n remove: (state, record) => removeValue(state, readNumber(record, field)),\n finalize: (state) => {\n if (state.values.length === 0) return null\n let out = state.values[0]!\n for (let i = 1; i < state.values.length; i++) {\n const v = state.values[i]!\n if (v > out) out = v\n }\n return out\n },\n }\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/**\n * Read a numeric field from a record. Non-number values (null,\n * undefined, strings, objects) coerce to 0 so sum/avg/min/max don't\n * produce NaN on one bad row. Consumers who want strict typing should\n * validate upstream with Standard Schema, which NOYDB already runs on\n * every `put()`.\n */\nfunction readNumber(record: unknown, field: string): number {\n const value = readPath(record, field)\n return typeof value === 'number' && Number.isFinite(value) ? value : 0\n}\n","/**\n * Aggregate execution — the runtime behind `Query.aggregate()`.\n *\n * This module takes an `AggregateSpec` (a record of named reducers\n * built from `reducers.ts`) and runs every reducer over the records\n * produced by the underlying query. Two terminal surfaces:\n *\n * - `.run(): R` — synchronous one-shot reduction. Matches the\n * existing `Query.toArray()` / `.first()` / `.count()` style.\n * - `.live(): LiveAggregation<R>` — reactive primitive that\n * re-runs the reduction whenever the query's source notifies of\n * a change. The current implementation uses a naive full re-run;\n * incremental delta maintenance is admitted by the reducer\n * protocol (`remove()`) but not wired to the executor yet — a\n * follow-up optimization can switch from full re-run to\n * delta-based without breaking the public API. Consumers get\n * correct, reactive values today.\n *\n * The `Aggregation<R>` wrapper is deliberately tiny — it exists so\n * `.aggregate(spec)` can be chained with either `.run()` or `.live()`\n * without the builder needing two separate terminal methods. It\n * holds the closure over the query execution (produces the current\n * matching record set) and the spec, and stitches them together in\n * either mode.\n *\n * This file depends ONLY on `reducers.ts` — it has no knowledge of\n * the `Query` class. 
Tests can therefore exercise the reduction\n * surface with plain record arrays, without spinning up a Collection.\n */\n\nimport type { Reducer } from './reducers.js'\n\n/**\n * A named set of reducers, keyed by output field name. Each key\n * becomes a field on the aggregated result.\n *\n * ```ts\n * const spec = {\n * total: sum('amount'),\n * n: count(),\n * avgAmount: avg('amount'),\n * }\n * ```\n */\nexport type AggregateSpec = Readonly<Record<string, Reducer<unknown, unknown>>>\n\n/**\n * Map an `AggregateSpec` to its reduced result shape — each key\n * carries the finalized result type from its reducer. A spec built\n * from `{ total: sum('amount'), n: count() }` yields a result of\n * `{ total: number, n: number }`.\n *\n * This uses a mapped type with a conditional to extract `R` from\n * each `Reducer<R, _>`. The `infer` captures the user-visible result\n * type, discarding the internal state type `S`.\n */\nexport type AggregateResult<Spec extends AggregateSpec> = {\n [K in keyof Spec]: Spec[K] extends Reducer<infer R, unknown> ? R : never\n}\n\n/**\n * Pure reduction over a record array. Runs every reducer's\n * `init → step* → finalize` pipeline exactly once over the records.\n *\n * Called by `Aggregation.run()` and by the live-mode refresh path.\n * Exported for tests and for future `scan().aggregate()` reuse\n * — the streaming path will call the same reducer protocol with a\n * per-page loop instead of a single array.\n */\nexport function reduceRecords<Spec extends AggregateSpec>(\n records: readonly unknown[],\n spec: Spec,\n): AggregateResult<Spec> {\n // Per-slot state, keyed by the spec's output field name.\n const state: Record<string, unknown> = {}\n for (const key of Object.keys(spec)) {\n state[key] = spec[key]!.init()\n }\n for (const record of records) {\n for (const key of Object.keys(spec)) {\n state[key] = spec[key]!.step(state[key], record)\n }\n }\n const result: Record<string, unknown> = {}\n for (const key of Object.keys(spec)) {\n result[key] = spec[key]!.finalize(state[key])\n }\n return result as AggregateResult<Spec>\n}\n
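\n/*\n * Illustrative test-style usage (hypothetical values): `reduceRecords`\n * over a plain array, no Collection or Query required.\n *\n * import { sum, count, avg } from './reducers.js'\n *\n * const rows = [{ amount: 100 }, { amount: 250 }]\n * reduceRecords(rows, { total: sum('amount'), n: count(), mean: avg('amount') })\n * // → { total: 350, n: 2, mean: 175 }\n */\n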
\n/**\n * A minimal reactive primitive for aggregation results.\n *\n * Same spirit as `LiveQuery`: framework-agnostic, a plain\n * object with `value` / `error` fields and a `subscribe(cb)`\n * notification channel that Vue / React / Solid adapters wrap in\n * their own primitive. Intentionally NOT a Promise — aggregations\n * have a well-defined \"current value\" at every instant, and the\n * reactive consumer wants to read that value synchronously.\n *\n * Error semantics mirror `LiveQuery`: if a re-run throws, the\n * previous successful `value` is preserved and the error is stored\n * in `error` so consumers can render an error state without losing\n * the last-known-good result. The throw does NOT propagate out of\n * the source's change handler (which would tear down the upstream\n * emitter).\n *\n * `stop()` tears down the upstream subscription. It is idempotent —\n * calling it multiple times is safe — and subscribe calls after\n * stop are no-ops (they immediately return a no-op unsubscribe).\n * Always call `stop()` when done; Vue's `onUnmounted` is the\n * canonical place. Raw consumers must do it themselves.\n */\nexport interface LiveAggregation<R> {\n /** Current reduced value. Undefined only if the first compute threw. */\n readonly value: R | undefined\n /** Last execution error, if any. Cleared on the next successful run. */\n readonly error: unknown\n /** Notify on every recomputation (success or error). Returns unsubscribe. */\n subscribe(cb: () => void): () => void\n /** Tear down the upstream subscription. Idempotent. */\n stop(): void\n}\n\n/**\n * Upstream change-notification hook for live aggregation.\n *\n * Matches the shape that `QuerySource.subscribe` already uses — a\n * single method that accepts a callback and returns an unsubscribe\n * function. The `Aggregation` wrapper collects upstreams from the\n * query's source and wires them into a single re-run trigger.\n */\nexport interface AggregationUpstream {\n subscribe(cb: () => void): () => void\n}\n\n/**\n * Internal implementation of `LiveAggregation`. Not exported —\n * consumers get the interface only. The class wraps a `recompute`\n * closure (which runs the full reduction and returns the new value)\n * and a list of upstreams (sources whose changes should trigger a\n * re-run).\n *\n * Error isolation: if an individual listener callback throws, the\n * other listeners still fire and the error is logged to the warn\n * channel. This matches `LiveQuery` and keeps one misbehaving\n * consumer from tearing down the whole live aggregation.\n */\nclass LiveAggregationImpl<R> implements LiveAggregation<R> {\n public value: R | undefined\n public error: unknown\n private readonly listeners = new Set<() => void>()\n private readonly unsubscribes: Array<() => void> = []\n private stopped = false\n\n constructor(\n private readonly recompute: () => R,\n upstreams: readonly AggregationUpstream[],\n ) {\n // Initial computation — surface any error through the `error`\n // field rather than letting the constructor throw, so consumers\n // can always construct a LiveAggregation and check its state\n // afterwards. Throwing from a constructor would force every\n // caller to wrap in try/catch, which is the opposite of the\n // \"reactive value with error state\" ergonomics we want.\n try {\n this.value = recompute()\n this.error = undefined\n } catch (err) {\n this.value = undefined\n this.error = err\n }\n\n // Wire up upstream subscriptions. Each one triggers a full\n // recomputation; we don't attempt incremental updates yet.\n for (const upstream of upstreams) {\n const unsub = upstream.subscribe(() => this.refresh())\n this.unsubscribes.push(unsub)\n }\n }\n\n private refresh(): void {\n if (this.stopped) return\n try {\n this.value = this.recompute()\n this.error = undefined\n } catch (err) {\n // Preserve the previous successful value — consumers render an\n // error state using `error` without losing the last-known-good\n // number. This matches LiveQuery's error-preservation contract.\n this.error = err\n }\n for (const listener of this.listeners) {\n try {\n listener()\n } catch (err) {\n // Isolate listener errors so one bad consumer can't tear\n // down every other subscriber on the same aggregation.\n console.warn('[noy-db] LiveAggregation listener threw:', err)\n }\n }\n }\n\n subscribe(cb: () => void): () => void {\n if (this.stopped) {\n // No-op after stop. Returning a harmless unsubscribe lets\n // consumers use the same teardown pattern unconditionally.\n return () => {}\n }\n this.listeners.add(cb)\n return () => {\n this.listeners.delete(cb)\n }\n }\n\n stop(): void {\n if (this.stopped) return\n this.stopped = true\n for (const unsub of this.unsubscribes) {\n try {\n unsub()\n } catch (err) {\n console.warn('[noy-db] LiveAggregation upstream unsubscribe threw:', err)\n }\n }\n this.unsubscribes.length = 0\n this.listeners.clear()\n }\n}\n
\n/**\n * Chainable wrapper returned by `Query.aggregate(spec)`. Holds the\n * execute-records closure and the spec; terminal methods (`run`,\n * `live`) stitch them together in either mode.\n *\n * Why a wrapper instead of two terminal methods on `Query` directly?\n *\n * The `.aggregate(spec)` call is where the spec is bound — both\n * `.run()` and `.live()` need the same spec, and the consumer's\n * fluent style is `query.where(...).aggregate(spec).run()` or\n * `.aggregate(spec).live()`. Wrapping lets the spec be named once\n * and reused for either terminal, and keeps the `Query` class\n * from growing a pair of near-duplicate method overloads\n * (`aggregateRun` / `aggregateLive`) that would be harder to\n * discover.\n */\nexport class Aggregation<R> {\n constructor(\n private readonly executeRecords: () => readonly unknown[],\n private readonly spec: AggregateSpec,\n private readonly upstreams: readonly AggregationUpstream[],\n ) {}\n\n /**\n * Execute the query and reduce the results synchronously.\n * Returns the reduced shape matching the spec — e.g. a spec of\n * `{ total: sum('amount'), n: count() }` returns\n * `{ total: number, n: number }`.\n */\n run(): R {\n return reduceRecords(this.executeRecords(), this.spec) as unknown as R\n }\n\n /**\n * Build a reactive `LiveAggregation<R>` that re-runs the reduction\n * whenever any upstream source notifies of a change. The initial\n * value is computed eagerly in the constructor, so consumers can\n * read `live.value` immediately after calling `.live()`.\n *\n * Always call `live.stop()` when finished — it tears down the\n * upstream subscriptions. Vue's `onUnmounted` is the canonical\n * place.\n *\n * **Implementation note:** every upstream change triggers a full\n * re-reduction. Incremental maintenance (O(1) per delta for\n * sum/count/avg via the reducer protocol's `remove()` method) is a\n * planned follow-up optimization — the protocol already supports\n * it, but the executor doesn't drive it yet. Consumers get\n * correct, reactive values today; future PRs can switch to\n * delta-based maintenance without changing this API.\n */\n live(): LiveAggregation<R> {\n const recompute = (): R =>\n reduceRecords(this.executeRecords(), this.spec) as unknown as R\n return new LiveAggregationImpl<R>(recompute, this.upstreams)\n }\n}\n\n/**\n * Build a `LiveAggregation<V>` from a recompute closure and a list\n * of upstreams. Exposed so sibling files in the query DSL\n * (currently `groupby.ts`) can reuse the reactive primitive\n * without reaching into `LiveAggregationImpl` directly. This keeps\n * the implementation class private while still allowing planned\n * composition with `.groupBy().aggregate().live()`.\n */\nexport function buildLiveAggregation<V>(\n recompute: () => V,\n upstreams: readonly AggregationUpstream[],\n): LiveAggregation<V> {\n return new LiveAggregationImpl<V>(recompute, upstreams)\n}\n
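\n/*\n * Illustrative usage sketch (assumes `invoices` is an opened collection\n * and `sum` / `count` are imported from './reducers.js'; names are for\n * this example only):\n *\n * const agg = invoices.query()\n *   .where('status', '==', 'open')\n *   .aggregate({ total: sum('amount'), n: count() })\n *\n * const once = agg.run() // synchronous one-shot: { total, n }\n *\n * const live = agg.live() // reactive: recomputes on source change\n * const unsub = live.subscribe(() => console.log(live.value, live.error))\n * // … teardown, e.g. in Vue's onUnmounted:\n * unsub()\n * live.stop()\n */\n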
","/**\n * Query DSL `.groupBy()` — grouped aggregation.\n *\n * Chains after `.where()` / `.filter()` / `.or()` / `.and()` on a\n * Query and before a reducer spec, so consumers can compute\n * per-bucket aggregates without folding in userland:\n *\n * ```ts\n * const byClient = invoices.query()\n * .where('status', '==', 'open')\n * .groupBy('clientId')\n * .aggregate({ total: sum('amount'), n: count() })\n * .run()\n * // → [ { clientId: 'c1', total: 5250, n: 3 }, … ]\n * ```\n *\n * Execution pipeline:\n *\n * 1. Run the query's where/filter clauses (same candidate /\n * filter pipeline as `.aggregate()` directly on Query).\n * 2. Partition the matching records into buckets keyed by\n * `readPath(record, field)`. JS `Map` preserves insertion\n * order, so the first-seen key for a bucket determines its\n * position in the result array — consumers who want a\n * specific ordering should `.sort()` downstream.\n * 3. Enforce cardinality: warn once per field at 10% of the cap\n * (10_000 buckets), throw `GroupCardinalityError` at 100% of\n * the cap (100_000 buckets).\n * 4. For each bucket, build a per-group reducer state and\n * step every record in the bucket through it.\n * 5. Emit one result row per bucket, shaped as\n * `{ [field]: key, ...reduced }`.\n *\n * **Null / undefined keys:** `Map` distinguishes `null` from\n * `undefined`, so records with a missing group field get their own\n * bucket, and records with an explicit `null` value get a separate\n * bucket from that. Consumers who want them merged can coalesce\n * upstream with `.filter()`.\n *\n * **Live mode:** `.groupBy().aggregate().live()` re-runs the full\n * grouping pipeline on every source change. Per-bucket incremental\n * delta maintenance is a future optimization — the reducer\n * protocol's `remove()` hook admits it, but this release ships naive\n * re-grouping for simplicity.\n *\n * **Type-level stable-key narrowing:** when\n * `dictKey` narrowing lands, `groupBy<DictField>()` will narrow the\n * group key type to the stable dictionary key rather than the\n * resolved locale label. That prevents grouping by the\n * locale-resolved label, which would produce different buckets per\n * reader. For now the key is typed as `unknown` in the result shape;\n * the dictKey narrowing layers on top without an API break.\n *\n * Partition-awareness seam: when partitioned collections land,\n * per-partition grouping will need to merge sub-results across\n * partitions. The reducer protocol's `{ seed }` parameter\n * (already plumbed through in `reducers.ts`) is the mechanism —\n * groupBy doesn't need its own seam for the moment, because it\n * delegates to the reducer protocol for all per-bucket state.\n */\n\nimport { readPath } from '../query/predicate.js'\nimport type {\n AggregateSpec,\n AggregateResult,\n AggregationUpstream,\n LiveAggregation,\n} from './aggregation.js'\nimport { buildLiveAggregation } from './aggregation.js'\nimport { GroupCardinalityError } from '../errors.js'\n\n/**\n * Cardinality thresholds for `.groupBy()`. The warn threshold gives\n * consumers a heads-up before the hard error; the cap is a fixed\n * constant (not overridable). A `{ maxGroups }` override\n * can be added later without a break if a real consumer asks.\n */\nexport const GROUPBY_WARN_CARDINALITY = 10_000\nexport const GROUPBY_MAX_CARDINALITY = 100_000\n\n/**\n * One-shot warning dedup per-field — reactive dashboards\n * re-executing the same grouped query should produce the warning\n * once, not once per re-fire. Keyed on the grouping field name\n * because \"this field has high cardinality on your current data\"\n * is a field-level property, not a per-query one.\n */\nconst warnedCardinalityFields = new Set<string>()\nfunction warnCardinalityApproaching(field: string, observed: number): void {\n if (warnedCardinalityFields.has(field)) return\n warnedCardinalityFields.add(field)\n console.warn(\n `[noy-db] .groupBy(\"${field}\") produced ${observed} distinct groups, ` +\n `${Math.round((observed / GROUPBY_MAX_CARDINALITY) * 100)}% of the ` +\n `${GROUPBY_MAX_CARDINALITY}-group ceiling. 
Narrow the query with ` +\n `.where() before grouping, or switch to a lower-cardinality field.`,\n )\n}\n\n/**\n * Test-only: clear the per-field cardinality warning dedup between\n * tests. Production code never calls this — matching the\n * `resetJoinWarnings` pattern in `join.ts`.\n */\nexport function resetGroupByWarnings(): void {\n warnedCardinalityFields.clear()\n}\n\n/**\n * Result row shape for a grouped aggregation. Each row carries the\n * group key value under the grouping field name plus every reducer\n * output from the spec.\n *\n * The group key is typed as `unknown` in the result shape — the\n * runtime read via `readPath` can return any value, and narrowing\n * to a specific type would require the caller to assert at the\n * call site. `dictKey` narrowing layers on top of this by\n * adding an overload that constrains `F` when the grouping field\n * is a `dictKey`.\n */\nexport type GroupedRow<F extends string, R> = { [K in F]: unknown } & R\n\n/**\n * Chainable wrapper returned by `Query.groupBy(field)`. Terminates\n * with `.aggregate(spec)` which returns a `GroupedAggregation`.\n *\n * Kept minimal — the only operation on a grouped query is\n * aggregation. Ordering, limiting, and further filtering belong on\n * the underlying `Query` before `.groupBy()` is called; applying\n * them post-group would be a different operation (`having` /\n * `groupOrderBy`), out of scope for now.\n */\nexport class GroupedQuery<T, F extends string> {\n constructor(\n private readonly executeRecords: () => readonly unknown[],\n private readonly field: F,\n private readonly upstreams: readonly AggregationUpstream[],\n /**\n * Optional dict label resolver attached by the query builder when\n * the grouping field is a dictKey.\n */\n private readonly dictLabelResolver?: (\n key: string,\n locale: string,\n fallback?: string | readonly string[],\n ) => Promise<string | undefined>,\n ) {\n // T is phantom on the wrapper so consumers can still see the\n // source row type on hover. Reference it to keep lint quiet.\n void undefined as T | undefined\n }\n\n /**\n * Build a grouped aggregation. Returns a `GroupedAggregation`\n * with `.run()`, `.runAsync()`, and `.live()` terminals — same shape\n * as the non-grouped `.aggregate()` wrapper, just with an array\n * result (one row per bucket) instead of a single reduced object.\n */\n aggregate<Spec extends AggregateSpec>(\n spec: Spec,\n ): GroupedAggregation<GroupedRow<F, AggregateResult<Spec>>> {\n return new GroupedAggregation<GroupedRow<F, AggregateResult<Spec>>>(\n this.executeRecords,\n this.field,\n spec,\n this.upstreams,\n this.dictLabelResolver,\n )\n }\n}\n\n/**\n * Execute the group-and-reduce pipeline. Pure function over a\n * record array and a spec — shared by `GroupedAggregation.run()`\n * and the live-mode refresh path. 
Exported for tests and for any\n * future `scan().groupBy().aggregate()` reuse.\n *\n * Enforces the cardinality cap incrementally during the partition\n * loop, so a runaway grouping throws at the moment the 100_001st\n * bucket would be created — the consumer doesn't have to wait for\n * the full partition to materialize before the error fires.\n */\nexport function groupAndReduce<R>(\n records: readonly unknown[],\n field: string,\n spec: AggregateSpec,\n): R[] {\n // Map preserves insertion order natively (ES2015), so first-seen\n // keys determine output ordering without a parallel order array.\n const buckets = new Map<unknown, unknown[]>()\n for (const record of records) {\n const key = readPath(record, field)\n let bucket = buckets.get(key)\n if (bucket === undefined) {\n if (buckets.size >= GROUPBY_MAX_CARDINALITY) {\n throw new GroupCardinalityError(\n field,\n buckets.size + 1,\n GROUPBY_MAX_CARDINALITY,\n )\n }\n bucket = []\n buckets.set(key, bucket)\n }\n bucket.push(record)\n }\n\n if (buckets.size >= GROUPBY_WARN_CARDINALITY) {\n warnCardinalityApproaching(field, buckets.size)\n }\n\n // Reduce each bucket through the spec. Same init/step/finalize\n // pipeline as `reduceRecords` in aggregate.ts, but one state per\n // bucket. Inlining the loop here keeps the per-bucket path tight\n // — calling `reduceRecords` per bucket would recompute\n // `Object.keys(spec)` once per bucket unnecessarily.\n const keys = Object.keys(spec)\n const out: R[] = []\n for (const [groupKey, bucketRecords] of buckets) {\n const state: Record<string, unknown> = {}\n for (const key of keys) {\n state[key] = spec[key]!.init()\n }\n for (const record of bucketRecords) {\n for (const key of keys) {\n state[key] = spec[key]!.step(state[key], record)\n }\n }\n const row: Record<string, unknown> = { [field]: groupKey }\n for (const key of keys) {\n row[key] = spec[key]!.finalize(state[key])\n }\n out.push(row as unknown as R)\n }\n return out\n}\n\n/**\n * Grouped aggregation wrapper — the `.groupBy(field).aggregate(spec)`\n * terminal. Shape mirrors `Aggregation<R>` from aggregate.ts: three\n * terminals (`.run()`, `.runAsync()`, `.live()`), spec bound at\n * construction time, upstreams collected for live mode.\n *\n * The generic `R` is the per-row result shape (i.e. a single\n * grouped row), and the terminals return `R[]` — one row per\n * bucket.\n */\nexport class GroupedAggregation<R> {\n constructor(\n private readonly executeRecords: () => readonly unknown[],\n private readonly field: string,\n private readonly spec: AggregateSpec,\n private readonly upstreams: readonly AggregationUpstream[],\n /**\n * Optional dict label resolver for `<field>Label` projection.\n * Present when the grouping field is a dictKey.\n */\n private readonly dictLabelResolver?: (\n key: string,\n locale: string,\n fallback?: string | readonly string[],\n ) => Promise<string | undefined>,\n ) {}\n\n /** Execute the query, group, reduce, and return an array of rows. */\n run(): R[] {\n return groupAndReduce<R>(this.executeRecords(), this.field, this.spec)\n }\n\n /**\n * Execute the query, group, reduce, and resolve `<field>Label` for\n * each result row when the grouping field is a `dictKey` and a\n * `locale` is provided. Resolves with the same rows as `.run()`\n * when no locale is specified.\n *\n * The `<field>Label` field is appended to each row. 
Rows whose group\n * key has no dictionary entry get `<field>Label: undefined`.\n */\n async runAsync(opts?: {\n locale?: string\n fallback?: string | readonly string[]\n }): Promise<R[]> {\n const rows = groupAndReduce<R>(this.executeRecords(), this.field, this.spec)\n if (!opts?.locale || !this.dictLabelResolver) return rows\n\n const resolve = this.dictLabelResolver\n const locale = opts.locale\n const fallback = opts.fallback\n const labelKey = `${this.field}Label`\n\n return Promise.all(\n rows.map(async (row) => {\n const key = (row as Record<string, unknown>)[this.field]\n if (typeof key !== 'string') return row\n const label = await resolve(key, locale, fallback)\n return { ...(row as Record<string, unknown>), [labelKey]: label } as unknown as R\n }),\n )\n }\n\n /**\n * Build a reactive `LiveAggregation<R[]>` that re-runs the full\n * group-and-reduce pipeline whenever any upstream source notifies\n * of a change. Same error-isolation and idempotent-stop contract\n * as `Aggregation.live()` — the implementation delegates to the\n * same `LiveAggregationImpl` class by threading a fresh\n * recompute closure through the existing constructor.\n *\n * This release uses naive full re-run on every change. Incremental\n * per-bucket maintenance (apply `step` on inserted records,\n * `remove` on deleted records, route by bucket key) is a future\n * optimization — the reducer protocol admits it, but wiring\n * delta-aware source subscriptions is a separate PR.\n *\n * Always call `live.stop()` when finished.\n */\n live(): LiveAggregation<R[]> {\n const recompute = (): R[] =>\n groupAndReduce<R>(this.executeRecords(), this.field, this.spec)\n return buildLiveAggregation<R[]>(recompute, this.upstreams)\n }\n}\n
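\n/*\n * Illustrative usage sketch (assumes `expenses` declares `category` as a\n * dictKey ref and `sum` / `count` are in scope; names and values are for\n * this example only):\n *\n * const rows = await expenses.query()\n *   .groupBy('category')\n *   .aggregate({ total: sum('amount'), n: count() })\n *   .runAsync({ locale: 'en' })\n * // → [ { category: 'cat_travel', categoryLabel: 'Travel', total: 820, n: 3 }, … ]\n * // Without { locale }, runAsync resolves with the same rows as .run().\n */\n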
","/**\n * Streaming scan builder with filter + aggregate support.\n *\n * `Collection.scan()` now returns a `ScanBuilder<T>` that\n * implements `AsyncIterable<T>` (for existing `for await … of`\n * consumers) AND exposes chainable `.where()` / `.filter()` clauses\n * plus a `.aggregate(spec)` async terminal that reduces the scan\n * stream through the same reducer protocol as `Query.aggregate()`.\n *\n * **Memory model:** O(reducers), not O(records). The aggregate\n * terminal initializes one state per reducer, iterates through the\n * scan one record at a time via `for await`, applies every reducer's\n * `step` per record, and never collects the stream into an array.\n * This is what makes `scan().aggregate()` suitable for collections\n * that don't fit in memory — the bound is a code-level invariant\n * visible in the function body, not a runtime assertion.\n *\n * **Paginated iteration:** the builder holds a `pageProvider`\n * closure that maps `(cursor, limit) → Promise<page>`, plumbed by\n * `Collection.scan()` to `collection.listPage(...)`. The page\n * iterator walks cursors forward until exhaustion, same as the\n * previous async-generator `scan()` did.\n *\n * **Backward compatibility:** existing `for await (const rec of\n * collection.scan()) { … }` code continues to work because\n * `ScanBuilder` implements `[Symbol.asyncIterator]`. The previous\n * signature returned an `AsyncIterableIterator<T>` (which has both\n * `[Symbol.asyncIterator]` and `.next()`). We verified at grep time\n * that no call sites use `.next()` on the scan result directly, so\n * the narrowed interface is safe.\n *\n * **Immutability:** each `.where()` / `.filter()` call returns a\n * fresh builder sharing the same page provider and page size. This\n * lets a base scan be reused for multiple parallel aggregations:\n *\n * ```ts\n * const scan = invoices.scan()\n * const [open, paid] = await Promise.all([\n * scan.where('status', '==', 'open').aggregate({ n: count() }),\n * scan.where('status', '==', 'paid').aggregate({ n: count() }),\n * ])\n * ```\n *\n * Note that each aggregation pays a full scan — there's no shared\n * iteration across the two. Multi-way aggregation in a single pass\n * is out of scope; consumers who need it should build a compound spec\n * and run a single `.aggregate({ openN, paidN })` at the DSL level.\n *\n * **Out of scope for now (tracked separately):**\n * - `scan().aggregate().live()` — unbounded scan + change-stream\n * reconciliation is a design problem, not just a code one\n * - `scan().groupBy().aggregate()` — high-cardinality grouping on\n * huge collections would re-introduce the O(groups) memory\n * problem that aggregate fixes\n * - Parallel scan across pages — race-safe page cursor contracts\n * are not in the adapter API yet\n * - `scan().join(...)` — tracked separately (streaming join)\n */\n\nimport type { Clause, FieldClause, Operator } from './predicate.js'\nimport { evaluateClause, readPath } from './predicate.js'\nimport type {\n AggregateSpec,\n AggregateResult,\n} from '../aggregate/aggregation.js'\nimport type { JoinContext, JoinLeg, JoinableSource } from './join.js'\nimport { DanglingReferenceError } from '../errors.js'\n\n/**\n * Page provider — the Collection-shaped hook the builder calls to\n * walk cursors forward. Kept as a structural interface so tests can\n * wire up a synthetic provider without pulling in the full\n * Collection class. Collection's `listPage` matches this shape\n * exactly.\n */\nexport interface ScanPageProvider<T> {\n listPage(opts: {\n cursor?: string\n limit?: number\n }): Promise<{ items: T[]; nextCursor: string | null }>\n}\n\nconst DEFAULT_SCAN_PAGE_SIZE = 100\n
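\n/*\n * Illustrative test-style sketch (hypothetical, not exported): a\n * synthetic in-memory `ScanPageProvider` of the kind the structural\n * interface above exists for — cursors are stringified offsets.\n *\n * function arrayPageProvider<T>(items: readonly T[]): ScanPageProvider<T> {\n *   return {\n *     async listPage({ cursor, limit = DEFAULT_SCAN_PAGE_SIZE }) {\n *       const start = cursor ? Number(cursor) : 0\n *       const slice = items.slice(start, start + limit)\n *       const next = start + limit < items.length ? String(start + limit) : null\n *       return { items: slice, nextCursor: next }\n *     },\n *   }\n * }\n */\n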
\n/**\n * Chainable streaming scan. Implements `AsyncIterable<T>` for\n * drop-in use with `for await … of`; adds `.where()` / `.filter()`\n * chainable clauses and a `.aggregate(spec)` async terminal.\n *\n * The builder is immutable per operation — each chained call\n * returns a fresh `ScanBuilder` sharing the same page provider and\n * page size. The original builder is never mutated, so it's safe\n * to reuse across multiple parallel consumers.\n */\nexport class ScanBuilder<T> implements AsyncIterable<T> {\n private readonly pageProvider: ScanPageProvider<T>\n private readonly pageSize: number\n private readonly clauses: readonly Clause[]\n /**\n * Zero-or-more join legs to apply per record as the stream flows.\n * Each leg attaches the resolved right-side record (or null) under\n * its alias (streaming joins).\n *\n * Joins are evaluated AFTER clauses, so a `where()` filtered-out\n * record never triggers a right-side lookup. This is the same\n * ordering as `Query.toArray()` (clauses first, joins after) and\n * keeps the streaming path from doing wasted work.\n */\n private readonly joins: readonly JoinLeg[]\n /**\n * Join resolution context. Required for `.join()` to translate a\n * field name into a target collection + ref mode and to resolve\n * the right-side `JoinableSource`. Optional because tests\n * construct ScanBuilder directly with synthetic page providers\n * that don't know about ref() — calling `.join()` without a\n * context throws with an actionable error.\n */\n private readonly joinContext: JoinContext | undefined\n\n constructor(\n pageProvider: ScanPageProvider<T>,\n pageSize: number = DEFAULT_SCAN_PAGE_SIZE,\n clauses: readonly Clause[] = [],\n joins: readonly JoinLeg[] = [],\n joinContext?: JoinContext,\n ) {\n this.pageProvider = pageProvider\n this.pageSize = pageSize\n this.clauses = clauses\n this.joins = joins\n this.joinContext = joinContext\n }\n\n /**\n * Add a field comparison. Runs per record as the scan stream\n * flows through, so non-matching records are dropped before they\n * reach `.aggregate()` or the iteration consumer. Multiple\n * `.where()` calls are AND-combined — same semantics as\n * `Query.where()`.\n *\n * Clauses cannot use the secondary-index fast path here because\n * the scan sources records from the adapter's paginator, not from\n * the in-memory cache where indexes live. Index-accelerated scans\n * are a future optimization — the current implementation\n * evaluates clauses per record in O(1) per clause.\n */\n where(field: string, op: Operator, value: unknown): ScanBuilder<T> {\n const clause: FieldClause = { type: 'field', field, op, value }\n return new ScanBuilder<T>(\n this.pageProvider,\n this.pageSize,\n [...this.clauses, clause],\n this.joins,\n this.joinContext,\n )\n }\n\n /**\n * Escape hatch: add an arbitrary predicate function. Same\n * non-serializable caveat as `Query.filter()` — filter clauses\n * don't round-trip through `toPlan()`. Prefer `.where()` when\n * possible.\n */\n filter(fn: (record: T) => boolean): ScanBuilder<T> {\n const clause: Clause = {\n type: 'filter',\n fn: fn as (record: unknown) => boolean,\n }\n return new ScanBuilder<T>(\n this.pageProvider,\n this.pageSize,\n [...this.clauses, clause],\n this.joins,\n this.joinContext,\n )\n }\n\n /**\n * Resolve a `ref()`-declared foreign key per record as the scan\n * stream flows, attaching the right-side record (or null) under\n * `opts.as` (streaming joins over `scan()`).\n *\n * ```ts\n * for await (const inv of invoices.scan().join('clientId', { as: 'client' })) {\n * await processInvoice(inv) // inv.client is attached\n * }\n *\n * // Or terminate with .aggregate() for streaming joined aggregation\n * const { total } = await invoices.scan()\n * .where('status', '==', 'open')\n * .join('clientId', { as: 'client' })\n * .aggregate({ total: sum('amount') })\n * ```\n *\n * **The key difference from eager `.join()`:** the LEFT\n * side streams page-by-page from the adapter and is never\n * materialized. Memory ceiling on the left is O(pageSize), not\n * O(rowCount). This is what makes streaming joins suitable for\n * collections that exceed the eager join's 50_000-row ceiling.\n *\n * **Right-side strategy** is auto-selected per leg:\n * - **Indexed** — right source exposes `lookupById`, so each\n * left row costs O(1). This is the common path for\n * Collection right sides, which back `lookupById` with a Map\n * lookup over the in-memory cache. The right collection must\n * be in eager mode (the same constraint as eager join's\n * `querySourceForJoin`).\n * - **Hash** — right source has only `snapshot()`. Build a\n * `Map<id, record>` once at iteration start, probe per left\n * row. 
Same correctness, same per-row cost as the indexed\n * path; the difference is the upfront cost of materializing\n * the right side once.\n *\n * Both strategies hold the right side in memory for the duration\n * of the iteration. The \"streaming\" property applies to the LEFT\n * side only — true left-and-right streaming joins (where neither\n * side fits in memory) require a sort-merge join planner that's\n * out of scope for now.\n *\n * **Ref-mode semantics** match eager `.join()` exactly:\n * - `strict` → throws `DanglingReferenceError` mid-stream\n * when a left record points at a non-existent right id.\n * The throw aborts the async iterator — consumers should\n * wrap the `for await` in try/catch if they want to recover.\n * - `warn` → attaches `null` and emits a one-shot warning\n * per unique dangling pair (deduped via the same warn\n * channel as eager join).\n * - `cascade` → attaches `null` silently. A delete-time mode;\n * dangling refs at read time are mid-flight or pre-existing\n * orphans, not a DSL error.\n *\n * Left records with null/undefined FK values attach `null`\n * regardless of mode — same \"no reference at all\" policy as\n * eager join and write-time `enforceRefsOnPut`.\n *\n * **Multi-FK chaining** is supported via repeated `.join()`\n * calls: each leg resolves an independent ref. Each leg\n * independently picks its right-side strategy and applies its\n * own ref mode.\n *\n * **Joins ARE applied before `.aggregate()`** — each leg attaches\n * a field that the spec might reference. Unlike `Query.aggregate()`\n * (which skips joins entirely as a projection-only short-circuit),\n * the streaming aggregation can't know whether the spec touches a\n * joined field, so it always applies joins. Consumers who want\n * unjoined streaming aggregation should leave `.join()` off the\n * chain — the chain is composable for a reason.\n *\n * constraint #1 — every JoinLeg carries `partitionScope:\n * 'all'` plumbed through but never read for now. Same seam as\n * eager join.\n */\n join<As extends string, R = unknown>(\n field: string,\n opts: { as: As },\n ): ScanBuilder<T & Record<As, R | null>> {\n if (!this.joinContext) {\n throw new Error(\n `ScanBuilder.join() requires a join context. Use ` +\n `collection.scan() to construct a join-capable scan instead ` +\n `of the ScanBuilder constructor directly (the direct ` +\n `constructor is only used for tests with synthetic page ` +\n `providers).`,\n )\n }\n const descriptor = this.joinContext.resolveRef(field)\n if (!descriptor) {\n throw new Error(\n `ScanBuilder.join(): no ref() declared for field \"${field}\" on ` +\n `collection \"${this.joinContext.leftCollection}\". Add ` +\n `refs: { ${field}: ref('<target-collection>') } to the ` +\n `collection options, then retry.`,\n )\n }\n const leg: JoinLeg = {\n field,\n as: opts.as,\n target: descriptor.target,\n mode: descriptor.mode,\n strategy: undefined,\n maxRows: undefined,\n // constraint #1 — always 'all' for now, never read by\n // the streaming executor. 
Partition-aware scan joins\n // will populate this from where() predicates without\n // changing the planner shape.\n partitionScope: 'all',\n }\n return new ScanBuilder<T & Record<As, R | null>>(\n this.pageProvider as unknown as ScanPageProvider<T & Record<As, R | null>>,\n this.pageSize,\n this.clauses,\n [...this.joins, leg],\n this.joinContext,\n )\n }\n\n /**\n * Iterate the scan as an async iterable. Walks the page\n * provider's cursors forward until exhaustion, applying every\n * clause per record — only matching records are yielded.\n *\n * Backward-compatible with the previous async-generator `scan()`\n * return type for `for await … of` consumers.\n */\n async *[Symbol.asyncIterator](): AsyncIterator<T> {\n // One-time setup: resolve every join leg's right-side source\n // and pick its strategy (lookupById per row vs hash from\n // snapshot once). Both cost O(1) per left record after setup;\n // the difference is the upfront cost of hashing the right side\n // when there's no lookupById.\n //\n // Hash maps live for the lifetime of the iteration, so memory\n // for the right side is O(rightRowCount) per leg. Memory for\n // the left side stays O(pageSize) regardless — that's the\n // streaming property we're after.\n const joinResolvers = this.joins.length === 0 ? null : this.buildJoinResolvers()\n\n let page = await this.pageProvider.listPage({ limit: this.pageSize })\n while (true) {\n for (const record of page.items) {\n if (!this.recordMatches(record)) continue\n if (joinResolvers === null) {\n yield record\n } else {\n // Apply every join leg in declaration order. Each\n // leg attaches a field — the result of one leg becomes\n // the input to the next. Multi-FK chaining is\n // supported by construction.\n let attached: unknown = record\n for (const resolver of joinResolvers) {\n attached = this.applyOneJoinStreaming(attached, resolver)\n }\n yield attached as T\n }\n }\n if (page.nextCursor === null) return\n page = await this.pageProvider.listPage({\n cursor: page.nextCursor,\n limit: this.pageSize,\n })\n }\n }\n\n /**\n * Per-leg right-side resolution state. Built once at iteration\n * start and reused for every left record. Two strategies:\n *\n * - `lookupById`: present when the right source exposes the\n * hook directly (typical Collection right side). Per-row\n * cost is O(1).\n * - `hashByPrimaryKey`: built from `snapshot()` when no\n * lookupById. Per-row cost is O(1) after the upfront O(N)\n * materialization. Same as eager join's hash strategy.\n *\n * `warnedKeys` is the per-leg dedup set for ref-mode 'warn'. We\n * key on `field→target:refId` so the same dangling pair only\n * warns once per iteration. The dedup is per-iteration, not\n * per-process — a long-running scan that re-iterates would warn\n * again, which is the desired behavior (the data may have\n * changed between iterations).\n */\n private buildJoinResolvers(): Array<{\n leg: JoinLeg\n source: JoinableSource\n lookupById: ((id: string) => unknown) | null\n hashByPrimaryKey: ReadonlyMap<string, unknown> | null\n warnedKeys: Set<string>\n }> {\n if (!this.joinContext) {\n // Unreachable — .join() throws if joinContext is missing.\n // Belt-and-braces because the iterator is invoked via\n // Symbol.asyncIterator on a builder that may have been\n // constructed via the direct constructor with pre-populated\n // joins.\n throw new Error(\n `ScanBuilder iterator: ${this.joins.length} join leg(s) ` +\n `present but no JoinContext attached. 
Use collection.scan() ` +\n `to construct a join-capable scan.`,\n )\n }\n const resolvers: Array<{\n leg: JoinLeg\n source: JoinableSource\n lookupById: ((id: string) => unknown) | null\n hashByPrimaryKey: ReadonlyMap<string, unknown> | null\n warnedKeys: Set<string>\n }> = []\n for (const leg of this.joins) {\n const source = this.joinContext.resolveSource(leg.target)\n if (!source) {\n throw new Error(\n `ScanBuilder.join() cannot resolve target collection ` +\n `\"${leg.target}\" (referenced from field \"${leg.field}\" on ` +\n `\"${this.joinContext.leftCollection}\"). Make sure the target ` +\n `collection has been opened via vault.collection() ` +\n `at least once before iterating the scan.`,\n )\n }\n // Strategy selection: prefer lookupById when available\n // (O(1) per row, no upfront cost), fall back to hashing\n // snapshot() once otherwise.\n let lookupById: ((id: string) => unknown) | null = null\n let hashByPrimaryKey: ReadonlyMap<string, unknown> | null = null\n if (source.lookupById) {\n // Bind through an arrow so the lookupById's `this`\n // doesn't drift — same pattern as the eager join's\n // strategy resolver.\n const fn = source.lookupById.bind(source)\n lookupById = (id: string): unknown => fn(id)\n } else {\n const map = new Map<string, unknown>()\n for (const record of source.snapshot()) {\n const rawId = readPath(record, 'id')\n const key = coerceRefKey(rawId)\n if (key !== null) map.set(key, record)\n }\n hashByPrimaryKey = map\n }\n resolvers.push({\n leg,\n source,\n lookupById,\n hashByPrimaryKey,\n warnedKeys: new Set<string>(),\n })\n }\n return resolvers\n }\n\n /**\n * Resolve a single join leg for one left record and return the\n * left record with the joined field attached under\n * `leg.as`. Pure function over `(left, resolver)`; never\n * mutates the input.\n *\n * Ref-mode dispatch matches eager `applyJoins`:\n * - null/undefined FK → attach null silently (always allowed)\n * - dangling FK + strict → throw `DanglingReferenceError`\n * - dangling FK + warn → attach null, warn-once per pair\n * - dangling FK + cascade → attach null silently\n */\n private applyOneJoinStreaming(\n left: unknown,\n resolver: {\n leg: JoinLeg\n source: JoinableSource\n lookupById: ((id: string) => unknown) | null\n hashByPrimaryKey: ReadonlyMap<string, unknown> | null\n warnedKeys: Set<string>\n },\n ): unknown {\n if (left === null || typeof left !== 'object') {\n // Pathological input; matches eager join's defensive return.\n return left\n }\n const { leg } = resolver\n const rawId = readPath(left, leg.field)\n const refKey = coerceRefKey(rawId)\n let right: unknown = undefined\n if (refKey !== null) {\n if (resolver.lookupById !== null) {\n right = resolver.lookupById(refKey)\n } else if (resolver.hashByPrimaryKey !== null) {\n right = resolver.hashByPrimaryKey.get(refKey)\n }\n }\n\n const merged: Record<string, unknown> = {\n ...(left as Record<string, unknown>),\n }\n if (right === undefined) {\n // No matching record. Distinguish \"no ref at all\" (null FK)\n // from \"dangling ref\" (FK pointed at nothing).\n if (refKey !== null && leg.mode === 'strict') {\n throw new DanglingReferenceError({\n field: leg.field,\n target: leg.target,\n refId: refKey,\n message:\n `ScanBuilder.join() strict dangling: record references ` +\n `\"${leg.target}:${refKey}\" via field \"${leg.field}\", but no ` +\n `such record exists. 
Use ref() mode 'warn' or 'cascade' if ` +\n `dangling refs are acceptable, or run ` +\n `vault.checkIntegrity() to find and fix the orphans.`,\n })\n }\n if (refKey !== null && leg.mode === 'warn') {\n const dedupKey = `${leg.field}→${leg.target}:${refKey}`\n if (!resolver.warnedKeys.has(dedupKey)) {\n resolver.warnedKeys.add(dedupKey)\n console.warn(\n `[noy-db] ScanBuilder.join() encountered dangling ref in ` +\n `'warn' mode: field \"${leg.field}\" → \"${leg.target}:` +\n `${refKey}\" not found. Attaching null.`,\n )\n }\n }\n // strict already threw above; warn falls through here; cascade\n // hits this path silently.\n merged[leg.as] = null\n } else {\n merged[leg.as] = right\n }\n return merged\n }\n\n /**\n * Reduce the scan stream through a named set of reducers and\n * return the final aggregated shape.\n *\n * Memory is O(reducers): one mutable state slot per spec key.\n * Records flow through the pipeline one at a time via\n * `for await` and are discarded after their `step()` is applied\n * — never collected into an array. This is the distinguishing\n * property from `Query.aggregate()`, which materializes the full\n * match set first.\n *\n * Reuses the same reducer protocol as `Query.aggregate()`,\n * so `count()`, `sum(field)`, `avg(field)`, `min(field)`,\n * `max(field)` all work unchanged. The `{ seed }` parameter\n * plumbing from constraint #2 is honored transparently — the\n * factories ignore it for now and the scan executor never\n * touches the per-reducer state construction.\n *\n * **Returns a Promise**, unlike `Query.aggregate().run()` which\n * is synchronous. The scan is inherently async because it walks\n * adapter pages, so the terminal has to be too. Consumers\n * destructure with await:\n *\n * ```ts\n * const { total, n } = await invoices.scan()\n * .where('year', '==', 2025)\n * .aggregate({ total: sum('amount'), n: count() })\n * ```\n *\n * **No `.live()` yet.** `scan().aggregate().live()` would\n * require reconciling an unbounded streaming iteration with a\n * change-stream subscription — a design problem, not just a code\n * one. Consumers with huge collections and live needs should\n * narrow with `.where()` enough to fit in the 50k `query()`\n * limit and use `query().aggregate().live()` instead.\n */\n async aggregate<Spec extends AggregateSpec>(\n spec: Spec,\n ): Promise<AggregateResult<Spec>> {\n const keys = Object.keys(spec)\n // Per-reducer state. Exactly |keys| entries, never grows with\n // the record count — that's the O(reducers) memory guarantee.\n const state: Record<string, unknown> = {}\n for (const key of keys) {\n state[key] = spec[key]!.init()\n }\n\n // Record-by-record streaming step. `for await (… of this)`\n // invokes the Symbol.asyncIterator above, which honors the\n // clause list, so filtered-out records never reach the step\n // loop — they're dropped at the iterator boundary.\n for await (const record of this) {\n for (const key of keys) {\n state[key] = spec[key]!.step(state[key], record)\n }\n }\n\n const result: Record<string, unknown> = {}\n for (const key of keys) {\n result[key] = spec[key]!.finalize(state[key])\n }\n return result as AggregateResult<Spec>\n }\n\n /**\n * Evaluate the clause list against a single record. Linear in\n * the clause count; short-circuits on first false. 
Clauses on a\n * scan are always re-evaluated per record — no index-accelerated\n * path, because the stream sources records from the adapter\n * paginator, not from the in-memory cache where indexes live.\n */\n private recordMatches(record: T): boolean {\n if (this.clauses.length === 0) return true\n for (const clause of this.clauses) {\n if (!evaluateClause(record, clause)) return false\n }\n return true\n }\n}\n\n/**\n * Coerce an unknown FK value into a lookup key string.\n *\n * Mirror of the same helper in `query/join.ts` — kept local to\n * `scan-builder.ts` to avoid pulling the eager join executor's\n * surface area into this file. Strings and numbers convert to\n * string keys; everything else (objects, arrays, booleans, null,\n * undefined) returns null and is treated as \"no ref at all\".\n *\n * Matches the write-time `enforceRefsOnPut` policy: nullish ref\n * values are never dangling, regardless of mode.\n */\nfunction coerceRefKey(value: unknown): string | null {\n if (value === null || value === undefined) return null\n if (typeof value === 'string') return value\n if (typeof value === 'number' || typeof value === 'bigint') return String(value)\n return null\n}\n","/**\n * Persistent, encrypted secondary indexes for lazy-mode collections.\n *\n * Parallel to the in-memory `CollectionIndexes` used by eager mode (see\n * `packages/hub/src/query/indexes.ts`): same logical surface, but entries\n * are materialised as encrypted side-car records (`_idx/<field>/<recordId>`)\n * and bulk-loaded into an in-memory mirror on first query.\n *\n * This module only owns the id-namespace convention, the in-memory mirror,\n * and the typed errors. Write-path integration (PR 2), query-planner\n * dispatch (PR 3, PR 4), and the rebuild/reconcile utilities\n * (PR 5) live in other files.\n *\n * See the design spec for the full architecture + threat model.\n */\n\n/**\n * Reserved id prefix for encrypted index side-car records.\n * Matches the existing `_keyring`, `_ledger_deltas/…`, `_meta/handle`\n * conventions inside a collection's id namespace.\n */\nexport const IDX_PREFIX = '_idx/' as const\n\n/**\n * Encode the side-car record id for a (field, recordId) pair.\n *\n * Format: `_idx/<field>/<recordId>` — no escaping. Field names may contain\n * dots (for dotted-path access consistent with eager-mode `readPath`);\n * record ids may contain slashes. The first two slash-separated segments\n * are `_idx` and the field; everything after the *second* slash is the\n * record id verbatim.\n */\nexport function encodeIdxId(field: string, recordId: string): string {\n return `${IDX_PREFIX}${field}/${recordId}`\n}\n\n/**\n * Decode a side-car id back into `{ field, recordId }`, or `null` if the\n * input is not a well-formed idx id. A well-formed id is:\n * - prefixed with `_idx/`\n * - contains a field segment (non-empty, no slashes)\n * - contains a record-id segment (non-empty, may contain slashes)\n */\nexport function decodeIdxId(id: string): { field: string; recordId: string } | null {\n if (!id.startsWith(IDX_PREFIX)) return null\n const rest = id.slice(IDX_PREFIX.length)\n const firstSlash = rest.indexOf('/')\n if (firstSlash <= 0) return null\n const field = rest.slice(0, firstSlash)\n const recordId = rest.slice(firstSlash + 1)\n if (recordId.length === 0) return null\n return { field, recordId }\n}\n
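\n/*\n * Illustrative round trip (values are for this example only):\n *\n * encodeIdxId('clientId', 'inv/2025/001')\n * // → '_idx/clientId/inv/2025/001'\n * decodeIdxId('_idx/clientId/inv/2025/001')\n * // → { field: 'clientId', recordId: 'inv/2025/001' }\n * // (record ids may contain slashes — only the segment before the\n * // first slash after the prefix is the field)\n * decodeIdxId('_keyring')\n * // → null — not in the _idx/ namespace\n */\n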
\n/**\n * Fast-path predicate for discriminating side-car ids from regular record\n * ids and other reserved namespaces. Used by the hub to filter `list()`\n * results during bulk-load of the in-memory mirror.\n */\nexport function isIdxId(id: string): boolean {\n return decodeIdxId(id) !== null\n}\n\n/**\n * Sorted-value entry returned by `orderedBy()`. Mirrors the body shape\n * used by the write path — but `orderedBy` emits them already sorted by\n * `value` in the requested direction. Consumers (PR 4) treat the\n * array as immutable and paginate via a numeric offset.\n *\n * **Note on `value`:** as of this release, this is the ORIGINAL TYPED\n * value (number, Date, boolean, etc.), not the stringified bucket key.\n * That's what lets range predicates and `orderedBy` compare numerically\n * instead of stumbling into `'10' < '2'` on `String(n)`.\n */\nexport interface OrderedEntry {\n readonly recordId: string\n readonly value: unknown\n}\n\n/**\n * Bulk-load row shape accepted by `ingest()`. The `value` field is the\n * decrypted index body's `value` field verbatim.\n */\nexport interface IngestRow {\n readonly recordId: string\n readonly value: unknown\n}\n\n/**\n * Per-field storage: the equality bucket map AND a parallel table of typed\n * values keyed by recordId. The typed table exists so range predicates\n * and `orderedBy` can compare on the original typed value rather\n * than the stringified bucket key — String(10) < String(2) is the classic\n * landmine `stringifyKey` introduces for numeric fields.\n */\ninterface PersistedFieldState {\n readonly buckets: Map<string, Set<string>>\n readonly values: Map<string, unknown>\n}\n\n/**\n * Structured index definition. Single-field indexes carry just a field\n * name; composite indexes carry the ordered list of fields and\n * the synthetic `key` (= fields joined by `COMPOSITE_DELIMITER`) used\n * as the bucket-map key and side-car envelope id segment.\n */\nexport type PersistedIndexDef =\n | { readonly kind: 'single'; readonly field: string; readonly key: string }\n | { readonly kind: 'composite'; readonly fields: readonly string[]; readonly key: string }\n
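\n/*\n * Illustrative definitions (field names are for this example only):\n *\n * { kind: 'single', field: 'status', key: 'status' }\n * { kind: 'composite', fields: ['status', 'year'], key: 'status|year' }\n *\n * compositeKey(['status', 'year']) === 'status|year' — the synthetic key\n * doubles as the bucket-map key and the side-car field segment.\n */\n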
Composite declarations whose field\n * names contain `|` are rejected at declare-time with an explicit\n * error.\n */\nexport const COMPOSITE_DELIMITER = '|'\n\nexport function compositeKey(fields: readonly string[]): string {\n return fields.join(COMPOSITE_DELIMITER)\n}\n\nexport class PersistedCollectionIndex {\n private readonly indexes = new Map<string, PersistedFieldState>()\n private readonly defs = new Map<string, PersistedIndexDef>()\n\n /**\n * Declare a single-field index. Subsequent `upsert` / `ingest` calls\n * populate the in-memory mirror; calls before `declare` are no-ops\n * (tolerant bulk-load ordering). Idempotent.\n */\n declare(field: string): void {\n if (this.indexes.has(field)) return\n this.indexes.set(field, { buckets: new Map(), values: new Map() })\n this.defs.set(field, { kind: 'single', field, key: field })\n }\n\n /**\n * Declare a composite (multi-field) index. The synthetic\n * key is `fields.join('|')`; it doubles as the in-memory map key and\n * the `_idx/<key>/<recordId>` side-car field segment. Callers upsert\n * and lookup via the same `key` as single-field indexes, just with a\n * tuple value (JSON-stringified for bucketing).\n */\n declareComposite(fields: readonly string[]): void {\n if (fields.length === 0) {\n throw new Error('declareComposite: fields array must be non-empty')\n }\n for (const f of fields) {\n if (f.includes(COMPOSITE_DELIMITER)) {\n throw new Error(\n `declareComposite: field \"${f}\" contains the composite delimiter ` +\n `\"${COMPOSITE_DELIMITER}\" — pick a different field name or open an ` +\n `issue to add hash-based composite keys.`,\n )\n }\n }\n const key = compositeKey(fields)\n if (this.indexes.has(key)) return\n this.indexes.set(key, { buckets: new Map(), values: new Map() })\n this.defs.set(key, { kind: 'composite', fields: [...fields], key })\n }\n\n /**\n * Every declared index's structured definition. Collection walks this\n * when materialising side-cars on put/delete so it can extract a\n * single-field value or a composite tuple appropriately.\n */\n definitions(): PersistedIndexDef[] {\n return [...this.defs.values()]\n }\n\n /** True if `field` has been declared as indexable on this mirror. */\n has(field: string): boolean {\n return this.indexes.has(field)\n }\n\n /** All declared field names, in declaration order. */\n fields(): string[] {\n return [...this.indexes.keys()]\n }\n\n /**\n * Bulk-load the mirror from decrypted index bodies. Intended to be\n * called once per field after reading the collection's `_idx/<field>/*`\n * side-cars. Safe to call twice with the same rows — bucket Sets\n * deduplicate recordIds. If `field` is not declared, this is a no-op\n * (tolerates the case where bulk-load runs before `declare()` lands).\n */\n ingest(field: string, rows: readonly IngestRow[]): void {\n const state = this.indexes.get(field)\n if (!state) return\n for (const row of rows) {\n addToState(state, row.recordId, row.value)\n }\n }\n\n /**\n * Incrementally update a record's index entry for one field. Called by\n * `Collection.put()` after the main write succeeds. If\n * `previousValue` is non-null, the record is removed from the old\n * bucket first — this is the update path. 
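How a composite declaration and its tuple values behave in practice, as a usage sketch against the surface defined here (the import path is assumed, and `lookupEqual` is the equality read defined just below):

```ts
import { PersistedCollectionIndex, compositeKey } from './persisted-indexes.js'

const idx = new PersistedCollectionIndex()
idx.declareComposite(['customerId', 'quarter'])

const key = compositeKey(['customerId', 'quarter']) // 'customerId|quarter'

// Upserts use the synthetic key with a tuple value; the tuple is
// stringified element-wise and JSON-encoded for bucketing, so
// ['c-A', '2026-Q1'] and ['c-A', '2026-Q2'] occupy distinct buckets.
idx.upsert('inv-1', key, ['c-A', '2026-Q1'], null)
idx.upsert('inv-2', key, ['c-A', '2026-Q2'], null)

idx.lookupEqual(key, ['c-A', '2026-Q1']) // Set { 'inv-1' }
idx.definitions()
// [{ kind: 'composite', fields: ['customerId', 'quarter'], key: 'customerId|quarter' }]
```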
Pass `null` for fresh adds.\n * No-op if the field is not declared.\n */\n upsert(recordId: string, field: string, newValue: unknown, previousValue: unknown): void {\n const state = this.indexes.get(field)\n if (!state) return\n if (previousValue !== null && previousValue !== undefined) {\n removeFromState(state, recordId, previousValue)\n }\n addToState(state, recordId, newValue)\n }\n\n /**\n * Remove a record from the index for one field. Called by\n * `Collection.delete()`. No-op if the field is not declared or\n * the record isn't in the bucket. Empty buckets are dropped to keep\n * the Map clean.\n */\n remove(recordId: string, field: string, value: unknown): void {\n const state = this.indexes.get(field)\n if (!state) return\n removeFromState(state, recordId, value)\n }\n\n /**\n * Drop all bucket data while preserving field declarations. Called on\n * invalidation (incoming sync changes, keyring rotation) — the next\n * query re-populates via `ingest`.\n */\n clear(): void {\n for (const state of this.indexes.values()) {\n state.buckets.clear()\n state.values.clear()\n }\n }\n\n /**\n * Equality lookup — return the set of record ids whose `field` matches\n * `value`. Returns `null` if the field is not declared (caller falls\n * back to scan or throws `IndexRequiredError`). Returns a shared empty\n * set if the field is declared but no record matches — that set MUST\n * NOT be mutated by the caller.\n */\n lookupEqual(field: string, value: unknown): ReadonlySet<string> | null {\n const state = this.indexes.get(field)\n if (!state) return null\n const key = stringifyKey(value)\n return state.buckets.get(key) ?? EMPTY_SET\n }\n\n /**\n * Set lookup — return the union of record ids whose `field` matches any\n * of `values`. Returns `null` if the field is not declared. Returns a\n * fresh (non-shared) Set — safe for the caller to mutate.\n */\n lookupIn(field: string, values: readonly unknown[]): ReadonlySet<string> | null {\n const state = this.indexes.get(field)\n if (!state) return null\n const out = new Set<string>()\n for (const value of values) {\n const bucket = state.buckets.get(stringifyKey(value))\n if (bucket) for (const id of bucket) out.add(id)\n }\n return out\n }\n\n /**\n * Range lookup. Return record ids whose indexed value\n * satisfies the predicate. Comparison happens on the ORIGINAL TYPED\n * value carried in `state.values` — so numeric `<` sorts numerically,\n * not lexicographically on `String(n)`. Returns `null` if the field\n * is not declared.\n *\n * Supported ops: `'<'`, `'<='`, `'>'`, `'>='`, `'between'`. For\n * `'between'`, `value` is `[lo, hi]` and both bounds are inclusive\n * (matches the eager-mode operator contract in `predicate.ts`).\n */\n lookupRange(\n field: string,\n op: '<' | '<=' | '>' | '>=' | 'between',\n value: unknown,\n ): ReadonlySet<string> | null {\n const state = this.indexes.get(field)\n if (!state) return null\n const out = new Set<string>()\n for (const [recordId, live] of state.values) {\n if (live === undefined || live === null) continue\n if (matchesRange(live, op, value)) out.add(recordId)\n }\n return out\n }\n\n /**\n * Sorted iteration — return every entry on `field` as an\n * `OrderedEntry[]`, sorted by the ORIGINAL TYPED value (#275: no more\n * `'10' < '2'` surprises on numeric fields). Consumers paginate with\n * a numeric offset. 
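A compact lifecycle pass over the single-field surface: declare, bulk-load, incremental update, then the three read paths. A usage sketch with an assumed import path:

```ts
import { PersistedCollectionIndex } from './persisted-indexes.js'

const idx = new PersistedCollectionIndex()
idx.declare('status')
idx.declare('amount')

// Bulk-load from decrypted `_idx/<field>/*` side-car bodies.
idx.ingest('status', [
  { recordId: 'inv-1', value: 'open' },
  { recordId: 'inv-2', value: 'paid' },
])
idx.ingest('amount', [
  { recordId: 'inv-1', value: 120 },
  { recordId: 'inv-2', value: 80 },
])

// Update path: the old bucket is vacated before the new one is joined.
idx.upsert('inv-1', 'status', 'paid', 'open')

idx.lookupEqual('status', 'paid')               // Set { 'inv-2', 'inv-1' }
idx.lookupIn('status', ['open', 'void'])        // Set {}: fresh, caller may mutate
idx.lookupRange('amount', 'between', [50, 100]) // Set { 'inv-2' }: typed compare
idx.lookupEqual('nope', 'x')                    // null: field never declared
```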
`OrderedEntry.value` is the typed value.\n */\n orderedBy(field: string, dir: 'asc' | 'desc'): readonly OrderedEntry[] | null {\n const state = this.indexes.get(field)\n if (!state) return null\n const entries: OrderedEntry[] = []\n for (const [recordId, value] of state.values) {\n entries.push({ recordId, value })\n }\n entries.sort((a, b) => compareTyped(a.value, b.value))\n if (dir === 'desc') entries.reverse()\n return entries\n }\n}\n\nconst EMPTY_SET: ReadonlySet<string> = new Set()\n\n/**\n * Canonicalize a value into a bucket key. Deliberately identical to the\n * eager-mode `stringifyKey` in `query/indexes.ts` so semantics match. When\n * `query/indexes.ts` changes its coercion rules, update this in lockstep.\n *\n * null / undefined values are NOT indexed — callers who pass them to\n * `upsert` / `remove` short-circuit before reaching this function; the\n * sentinel here exists only to make `lookupEqual(field, null)` return\n * an empty bucket (rather than matching some arbitrary record).\n */\nfunction stringifyKey(value: unknown): string {\n if (value === null || value === undefined) return '\\0NULL\\0'\n if (typeof value === 'string') return value\n if (typeof value === 'number' || typeof value === 'boolean') return String(value)\n if (value instanceof Date) return value.toISOString()\n // composite index values are tuple arrays. JSON.stringify\n // gives a delimiter-safe, order-preserving canonical form so buckets\n // for `['c-A', '2026-Q1']` and `['c-A', '2026-Q2']` never collide.\n if (Array.isArray(value)) {\n const parts: string[] = []\n for (const el of value) parts.push(stringifyKey(el))\n return JSON.stringify(parts)\n }\n return '\\0OBJECT\\0'\n}\n\nfunction addToState(state: PersistedFieldState, recordId: string, value: unknown): void {\n if (value === null || value === undefined) return\n const key = stringifyKey(value)\n let bucket = state.buckets.get(key)\n if (!bucket) {\n bucket = new Set()\n state.buckets.set(key, bucket)\n }\n bucket.add(recordId)\n state.values.set(recordId, value)\n}\n\nfunction removeFromState(state: PersistedFieldState, recordId: string, value: unknown): void {\n if (value === null || value === undefined) return\n const key = stringifyKey(value)\n const bucket = state.buckets.get(key)\n if (bucket) {\n bucket.delete(recordId)\n if (bucket.size === 0) state.buckets.delete(key)\n }\n state.values.delete(recordId)\n}\n\n/**\n * Range-predicate comparator. Runs on the ORIGINAL TYPED value so numeric\n * fields sort numerically (not lexicographically on `String(n)`). ISO-8601\n * date strings already sort correctly lexicographically; Date instances\n * compare via `getTime()` before the string branch to keep the contract\n * honest regardless of which form survived serialization.\n */\nfunction matchesRange(\n live: unknown,\n op: '<' | '<=' | '>' | '>=' | 'between',\n bound: unknown,\n): boolean {\n if (op === 'between') {\n if (!Array.isArray(bound) || bound.length !== 2) return false\n return compareTyped(live, bound[0]) >= 0 && compareTyped(live, bound[1]) <= 0\n }\n const cmp = compareTyped(live, bound)\n switch (op) {\n case '<': return cmp < 0\n case '<=': return cmp <= 0\n case '>': return cmp > 0\n case '>=': return cmp >= 0\n }\n}\n\nfunction compareTyped(a: unknown, b: unknown): number {\n if (a === undefined || a === null) return b === undefined || b === null ? 
0 : 1\n if (b === undefined || b === null) return -1\n if (typeof a === 'number' && typeof b === 'number') return a - b\n if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime()\n if (typeof a === 'string' && typeof b === 'string') return a < b ? -1 : a > b ? 1 : 0\n if (typeof a === 'boolean' && typeof b === 'boolean') {\n return a === b ? 0 : a ? 1 : -1\n }\n // Mixed/unsupported types: deliberately treat as equal so sort stays\n // stable. Matches the eager-mode `compareValues` contract in\n // builder.ts — we don't silently coerce arbitrary objects to strings\n // (which would be meaningless) nor throw (which would be hostile).\n return 0\n}\n","/**\n * Lazy-mode query builder.\n *\n * Companion to `Query<T>` in `builder.ts`, but built for collections in lazy\n * mode where `snapshot()` is unavailable — records live in the adapter and\n * are pulled on demand. Dispatches through `PersistedCollectionIndex` to\n * resolve a candidate record-id set, then decrypts only those records.\n *\n * Scope:\n * - `.where(field, '==' | 'in', value)` — dispatched through the index\n * - `.where(field, other-op, value)` — evaluated against the decrypted\n * candidate set (non-indexed ops still require the field to be indexed\n * — we need SOMETHING to scope the candidate set)\n * - `.orderBy(field, dir?)` — dispatched through `orderedBy` when no\n * `==`/`in` clause is present; otherwise applied as an in-memory sort\n * over the candidate set\n * - `.limit(n)` / `.offset(n)` — page slice after filtering\n * - `.toArray()` / `.first()` / `.count()` — terminals\n *\n * Every field referenced by a where or orderBy clause MUST be indexed;\n * otherwise `toArray()` throws `IndexRequiredError`. This is deliberate:\n * silent scan-fallback would hide the very performance cliff that lazy-mode\n * indexes exist to prevent (see `docs/architecture.md` §indexes).\n */\n\nimport type { Clause, FieldClause, Operator } from '../query/predicate.js'\nimport { evaluateClause, readPath } from '../query/predicate.js'\nimport type { PersistedCollectionIndex } from './persisted-indexes.js'\nimport { IndexRequiredError } from '../errors.js'\n\nexport interface LazyOrderBy {\n readonly field: string\n readonly direction: 'asc' | 'desc'\n}\n\n/**\n * Source abstraction the LazyQuery runs against. Collection implements it.\n * Kept minimal so the builder stays test-friendly.\n */\nexport interface LazyQuerySource<T> {\n readonly collectionName: string\n readonly persistedIndexes: PersistedCollectionIndex\n /** Ensure `_idx/<field>/*` side-cars have been bulk-loaded into the mirror. */\n ensurePersistedIndexesLoaded(): Promise<void>\n /** Decrypt one record by id, or return null if it's gone. 
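Since `LazyQuerySource` is deliberately minimal, a test double is only a few lines. A hedged sketch of an in-memory source plus a query against it; the module path `./lazy-query.js` and the pre-populated mirror are assumptions of the sketch, not package fixtures:

```ts
import { LazyQuery, type LazyQuerySource } from './lazy-query.js'
import { PersistedCollectionIndex } from './persisted-indexes.js'

function inMemorySource<T>(
  records: Map<string, T>,
  idx: PersistedCollectionIndex,
): LazyQuerySource<T> {
  return {
    collectionName: 'test',
    persistedIndexes: idx,
    // The mirror is populated up front in tests, so this is a no-op.
    async ensurePersistedIndexesLoaded() {},
    async getRecord(id) { return records.get(id) ?? null },
  }
}

interface Invoice { id: string; status: string }

const idx = new PersistedCollectionIndex()
idx.declare('status')
idx.ingest('status', [{ recordId: 'i1', value: 'open' }])

const records = new Map<string, Invoice>([['i1', { id: 'i1', status: 'open' }]])
const open = await new LazyQuery(inMemorySource(records, idx))
  .where('status', '==', 'open')
  .toArray() // [{ id: 'i1', status: 'open' }]
```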
*/\n getRecord(id: string): Promise<T | null>\n}\n\ninterface LazyPlan {\n readonly clauses: readonly FieldClause[]\n readonly orderBy: readonly LazyOrderBy[]\n readonly limit: number | undefined\n readonly offset: number\n}\n\nconst EMPTY_PLAN: LazyPlan = {\n clauses: [],\n orderBy: [],\n limit: undefined,\n offset: 0,\n}\n\nexport class LazyQuery<T> {\n private readonly source: LazyQuerySource<T>\n private readonly plan: LazyPlan\n\n constructor(source: LazyQuerySource<T>, plan: LazyPlan = EMPTY_PLAN) {\n this.source = source\n this.plan = plan\n }\n\n where<V>(field: string, op: Operator, value: V): LazyQuery<T> {\n const clause: FieldClause = { type: 'field', field, op, value }\n return new LazyQuery<T>(this.source, {\n ...this.plan,\n clauses: [...this.plan.clauses, clause],\n })\n }\n\n orderBy(field: string, direction: 'asc' | 'desc' = 'asc'): LazyQuery<T> {\n return new LazyQuery<T>(this.source, {\n ...this.plan,\n orderBy: [...this.plan.orderBy, { field, direction }],\n })\n }\n\n limit(n: number): LazyQuery<T> {\n return new LazyQuery<T>(this.source, { ...this.plan, limit: n })\n }\n\n offset(n: number): LazyQuery<T> {\n return new LazyQuery<T>(this.source, { ...this.plan, offset: n })\n }\n\n async toArray(): Promise<T[]> {\n await this.source.ensurePersistedIndexesLoaded()\n\n const touchedFields = collectTouchedFields(this.plan)\n const missingFields = touchedFields.filter(f => !isFieldIndexed(f, this.source.persistedIndexes))\n if (missingFields.length > 0) {\n throw new IndexRequiredError({\n collection: this.source.collectionName,\n touchedFields,\n missingFields,\n })\n }\n\n const candidateIds = this.resolveCandidateIds()\n if (candidateIds === null) {\n // No usable driver — every touched field is indexed but no clause\n // pins the candidate set. This happens when a query only uses\n // operators other than `==`/`in` and no `orderBy` clause is\n // present — we refuse to enumerate the whole index, because that\n // defeats the purpose of lazy mode.\n throw new IndexRequiredError({\n collection: this.source.collectionName,\n touchedFields,\n missingFields: touchedFields,\n })\n }\n\n const records: T[] = []\n for (const id of candidateIds) {\n const record = await this.source.getRecord(id)\n if (record === null) continue\n if (!matchesAll(record, this.plan.clauses)) continue\n records.push(record)\n }\n\n const sorted = this.plan.orderBy.length > 0\n ? sortRecords(records, this.plan.orderBy)\n : records\n\n const offset = this.plan.offset > 0 ? this.plan.offset : 0\n const limited = this.plan.limit === undefined\n ? sorted.slice(offset)\n : sorted.slice(offset, offset + this.plan.limit)\n\n return limited\n }\n\n async first(): Promise<T | null> {\n const out = await this.limit(1).toArray()\n return out.length > 0 ? out[0]! : null\n }\n\n async count(): Promise<number> {\n const out = await this.toArray()\n return out.length\n }\n\n /**\n * Resolve the candidate record-id set to decrypt. Returns null when the\n * query has no usable driver — no `==`/`in` clause and no `orderBy`\n * clause that can scope the scan. Callers interpret null as\n * IndexRequiredError (see `toArray`).\n */\n private resolveCandidateIds(): readonly string[] | null {\n const idx = this.source.persistedIndexes\n\n // prefer a composite index when the query's `==`\n // clauses cover every field of one declared composite. 
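Which query shapes resolve a candidate set and which throw, spelled out. Illustrative only: `q` is assumed to be a `LazyQuery` whose mirror declares `status` and `date`:

```ts
// Drivers that pin a candidate set:
await q.where('status', '==', 'open').toArray()           // lookupEqual
await q.where('status', 'in', ['open', 'sent']).toArray() // lookupIn
await q.where('date', '>=', '2026-01-01').toArray()       // lookupRange
await q.orderBy('date', 'desc').limit(20).toArray()       // orderedBy enumeration

// Throws IndexRequiredError: 'note' was never declared as indexable.
await q.where('note', '==', 'x').toArray()
```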
The\n // composite mirror lookup is O(matches) vs single-field +\n // post-filter on the decrypted candidate set.\n const eqMap = new Map<string, unknown>()\n for (const clause of this.plan.clauses) {\n if (clause.op === '==') eqMap.set(clause.field, clause.value)\n }\n if (eqMap.size >= 2) {\n for (const def of idx.definitions()) {\n if (def.kind !== 'composite') continue\n if (def.fields.every(f => eqMap.has(f))) {\n const tuple = def.fields.map(f => eqMap.get(f))\n const ids = idx.lookupEqual(def.key, tuple)\n if (ids) return [...ids]\n }\n }\n }\n\n for (const clause of this.plan.clauses) {\n if (clause.op === '==') {\n const ids = idx.lookupEqual(clause.field, clause.value)\n if (ids) return [...ids]\n } else if (clause.op === 'in' && Array.isArray(clause.value)) {\n const ids = idx.lookupIn(clause.field, clause.value as readonly unknown[])\n if (ids) return [...ids]\n } else if (isRangeOp(clause.op)) {\n // range predicates on an indexed field dispatch\n // through `lookupRange`, which compares on the original typed\n // value (no numeric-lexicographic landmines).\n const ids = idx.lookupRange(clause.field, clause.op, clause.value)\n if (ids) return [...ids]\n }\n }\n\n // No equality/range driver — try to scope via orderBy.\n if (this.plan.orderBy.length > 0) {\n const primary = this.plan.orderBy[0]!\n const entries = idx.orderedBy(primary.field, primary.direction)\n if (entries) return entries.map(e => e.recordId)\n }\n\n return null\n }\n}\n\n/**\n * True if the given field name is covered by either a single-field\n * index or appears as a component of a declared composite index.\n * Composite coverage is sufficient for the missing-field check because\n * composite writes also maintain the in-memory mirror — the range /\n * orderBy / single-equality lookup paths fall through to decrypted\n * candidates that still get post-filtered by the composite clause.\n */\nfunction isFieldIndexed(field: string, idx: PersistedCollectionIndex): boolean {\n if (idx.has(field)) return true\n for (const def of idx.definitions()) {\n if (def.kind === 'composite' && def.fields.includes(field)) return true\n }\n return false\n}\n\nfunction isRangeOp(op: Operator): op is '<' | '<=' | '>' | '>=' | 'between' {\n return op === '<' || op === '<=' || op === '>' || op === '>=' || op === 'between'\n}\n\nfunction collectTouchedFields(plan: LazyPlan): string[] {\n const seen = new Set<string>()\n for (const c of plan.clauses) seen.add(c.field)\n for (const o of plan.orderBy) seen.add(o.field)\n return [...seen]\n}\n\nfunction matchesAll(record: unknown, clauses: readonly Clause[]): boolean {\n for (const c of clauses) {\n if (!evaluateClause(record, c)) return false\n }\n return true\n}\n\nfunction sortRecords<T>(records: T[], orderBy: readonly LazyOrderBy[]): T[] {\n return [...records].sort((a, b) => {\n for (const { field, direction } of orderBy) {\n const av = readPath(a, field)\n const bv = readPath(b, field)\n const cmp = compareValues(av, bv)\n if (cmp !== 0) return direction === 'asc' ? cmp : -cmp\n }\n return 0\n })\n}\n\nfunction compareValues(a: unknown, b: unknown): number {\n if (a === undefined || a === null) return b === undefined || b === null ? 0 : 1\n if (b === undefined || b === null) return -1\n if (typeof a === 'number' && typeof b === 'number') return a - b\n if (typeof a === 'string' && typeof b === 'string') return a < b ? -1 : a > b ? 
1 : 0\n if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime()\n return 0\n}\n","/**\n * Strategy seam between core Collection and the optional indexing\n * subsystem. Core imports `IndexStrategy` and `IndexState` as\n * TYPE-ONLY symbols and `NO_INDEXING` as a tiny runtime stub.\n *\n * The heavy classes — `CollectionIndexes`, `PersistedCollectionIndex`,\n * `LazyQuery` — are only instantiated inside the `withIndexing()`\n * factory under `./active.ts`, which in turn is only reachable through\n * the `@noy-db/hub/indexing` subpath export. A consumer that never\n * imports the subpath ships none of those classes in their bundle\n * (ESM tree-shaking + hub's `\"sideEffects\": false`).\n *\n * @internal\n */\n\nimport type { CollectionIndexes, IndexDef } from './eager-indexes.js'\nimport type { PersistedCollectionIndex } from './persisted-indexes.js'\n\n/**\n * Per-collection container for whatever mirrors the active strategy\n * decided to materialize. Both accessors may return `null` — they do\n * for `NO_INDEXING`, and `getEagerIndexes` returns null in a\n * lazy-mode collection even when indexing is active (lazy uses the\n * persisted mirror instead).\n *\n * `isEnabled` is a cheap guard so collection code can short-circuit\n * the full indexing path without inspecting either mirror.\n *\n * @internal\n */\nexport interface IndexState {\n readonly isEnabled: boolean\n getEagerIndexes(): CollectionIndexes | null\n getPersistedIndexes(): PersistedCollectionIndex | null\n}\n\n/**\n * Factory that builds one `IndexState` per Collection. Called exactly\n * once inside each Collection constructor with the declared\n * `IndexDef[]` and the lazy-mode flag (so lazy collections get the\n * persisted mirror and eager collections get the in-memory one).\n *\n * @internal\n */\nexport interface IndexStrategy {\n createState(args: {\n readonly defs: readonly IndexDef[]\n readonly lazy: boolean\n }): IndexState\n}\n\n/**\n * No-indexing stub. Every Collection defaults to this; it returns a\n * cheap `IndexState` whose mirrors are both `null`. Collection code\n * null-checks both accessors and short-circuits, so no indexing code\n * path runs and the heavy classes never arrive in the bundle.\n *\n * @internal\n */\nexport const NO_INDEXING: IndexStrategy = {\n createState() {\n return DISABLED_STATE\n },\n}\n\nconst DISABLED_STATE: IndexState = {\n isEnabled: false,\n getEagerIndexes: () => null,\n getPersistedIndexes: () => null,\n}\n","/**\n * Generic LRU cache for `Collection`'s lazy hydration mode.\n *\n * Backed by a JavaScript `Map`, which preserves insertion order. Promotion\n * is implemented as `delete()` + `set()` — O(1) on `Map` since both\n * operations are constant-time. Eviction walks the iterator from the front\n * (least recently used) until both budgets are satisfied.\n *\n * ships in-memory only. The cache is never persisted; on collection\n * close every entry is dropped. Persisting cache state is a follow-up\n * once the access patterns from real consumers tell us whether it would\n * pay back the complexity.\n */\n\nexport interface LruEntry<V> {\n /** The cached value. */\n readonly value: V\n /**\n * Approximate decrypted byte size of the entry. Used by the byte-budget\n * eviction path. Callers compute this once at insert time and pass it\n * in — recomputing on every access would dominate the per-record cost.\n */\n readonly size: number\n}\n\nexport interface LruOptions {\n /** Maximum number of entries before eviction. Required if `maxBytes` is unset. 
*/\n maxRecords?: number\n /** Maximum total bytes before eviction. Computed from per-entry `size`. */\n maxBytes?: number\n}\n\nexport interface LruStats {\n /** Total cache hits since construction (or `resetStats()`). */\n hits: number\n /** Total cache misses since construction (or `resetStats()`). */\n misses: number\n /** Total entries evicted since construction (or `resetStats()`). */\n evictions: number\n /** Current number of cached entries. */\n size: number\n /** Current sum of cached entry sizes (in bytes, approximate). */\n bytes: number\n}\n\n/**\n * O(1) LRU cache. Both `get()` and `set()` promote the touched entry to\n * the most-recently-used end. Eviction happens after every insert and\n * walks the front of the Map iterator dropping entries until both\n * budgets are satisfied.\n */\nexport class Lru<K, V> {\n private readonly entries = new Map<K, LruEntry<V>>()\n private readonly maxRecords: number | undefined\n private readonly maxBytes: number | undefined\n private currentBytes = 0\n private hits = 0\n private misses = 0\n private evictions = 0\n\n constructor(options: LruOptions) {\n if (options.maxRecords === undefined && options.maxBytes === undefined) {\n throw new Error('Lru: must specify maxRecords, maxBytes, or both')\n }\n this.maxRecords = options.maxRecords\n this.maxBytes = options.maxBytes\n }\n\n /**\n * Look up a key. Hits promote the entry to most-recently-used; misses\n * return undefined. Both update the running stats counters.\n */\n get(key: K): V | undefined {\n const entry = this.entries.get(key)\n if (!entry) {\n this.misses++\n return undefined\n }\n // Promote: re-insert moves the entry to the end of the iteration order.\n this.entries.delete(key)\n this.entries.set(key, entry)\n this.hits++\n return entry.value\n }\n\n /**\n * Insert or update a key. If the key already exists, its size is\n * accounted for and the entry is promoted to MRU. After insertion,\n * eviction runs to maintain both budgets.\n */\n set(key: K, value: V, size: number): void {\n const existing = this.entries.get(key)\n if (existing) {\n // Update path: subtract the old size before adding the new one.\n this.currentBytes -= existing.size\n this.entries.delete(key)\n }\n this.entries.set(key, { value, size })\n this.currentBytes += size\n this.evictUntilUnderBudget()\n }\n\n /**\n * Remove a key without affecting hit/miss stats. Used by `Collection.delete()`.\n * Returns true if the key was present.\n */\n remove(key: K): boolean {\n const existing = this.entries.get(key)\n if (!existing) return false\n this.currentBytes -= existing.size\n this.entries.delete(key)\n return true\n }\n\n /** True if the cache currently holds an entry for the given key. */\n has(key: K): boolean {\n return this.entries.has(key)\n }\n\n /**\n * Drop every entry. Stats counters survive — call `resetStats()` if you\n * want a clean slate. Used by `Collection.invalidate()` on key rotation.\n */\n clear(): void {\n this.entries.clear()\n this.currentBytes = 0\n }\n\n /** Reset hit/miss/eviction counters to zero. Does NOT touch entries. */\n resetStats(): void {\n this.hits = 0\n this.misses = 0\n this.evictions = 0\n }\n\n /** Snapshot of current cache statistics. Cheap — no copying. */\n stats(): LruStats {\n return {\n hits: this.hits,\n misses: this.misses,\n evictions: this.evictions,\n size: this.entries.size,\n bytes: this.currentBytes,\n }\n }\n\n /**\n * Iterate over all currently-cached values. Order is least-recently-used\n * first. 
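Promotion and eviction in action, following the contract above (a record-count budget for brevity; the byte budget behaves the same way):

```ts
const cache = new Lru<string, string>({ maxRecords: 2 })

cache.set('a', 'A', 1)
cache.set('b', 'B', 1)
cache.get('a')         // hit: promotes 'a' to the MRU end
cache.set('c', 'C', 1) // over budget: evicts 'b', the current LRU

cache.has('b') // false
cache.stats()  // { hits: 1, misses: 0, evictions: 1, size: 2, bytes: 2 }
```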
Used by tests and devtools — production callers should use\n * `Collection.scan()` instead.\n */\n *values(): IterableIterator<V> {\n for (const entry of this.entries.values()) yield entry.value\n }\n\n /**\n * Walk the cache from the LRU end and drop entries until both budgets\n * are satisfied. Called after every `set()`. Single pass — entries are\n * never re-promoted during eviction.\n */\n private evictUntilUnderBudget(): void {\n while (this.overBudget()) {\n const oldest = this.entries.keys().next()\n if (oldest.done) return // empty cache; nothing more to evict\n const key = oldest.value\n const entry = this.entries.get(key)\n if (entry) this.currentBytes -= entry.size\n this.entries.delete(key)\n this.evictions++\n }\n }\n\n private overBudget(): boolean {\n if (this.maxRecords !== undefined && this.entries.size > this.maxRecords) return true\n if (this.maxBytes !== undefined && this.currentBytes > this.maxBytes) return true\n return false\n }\n}\n","/**\n * Cache policy helpers — parse human-friendly byte budgets into raw numbers.\n *\n * Accepted shapes (case-insensitive on suffix):\n * number — interpreted as raw bytes\n * '1024' — string of digits, raw bytes\n * '50KB' — kilobytes (×1024)\n * '50MB' — megabytes (×1024²)\n * '1GB' — gigabytes (×1024³)\n *\n * Decimals are accepted (`'1.5GB'` → 1610612736 bytes).\n *\n * Anything else throws — better to fail loud at construction time than\n * to silently treat a typo as 0 bytes (which would evict everything).\n */\n\nconst UNITS: Record<string, number> = {\n '': 1,\n 'B': 1,\n 'KB': 1024,\n 'MB': 1024 * 1024,\n 'GB': 1024 * 1024 * 1024,\n // 'TB' deliberately not supported — if you need it, NOYDB is the wrong tool.\n}\n\n/** Parse a byte budget into a positive integer number of bytes. */\nexport function parseBytes(input: number | string): number {\n if (typeof input === 'number') {\n if (!Number.isFinite(input) || input <= 0) {\n throw new Error(`parseBytes: numeric input must be a positive finite number, got ${String(input)}`)\n }\n return Math.floor(input)\n }\n\n const trimmed = input.trim()\n if (trimmed === '') {\n throw new Error('parseBytes: empty string is not a valid byte budget')\n }\n\n // Accept either a bare number or a number followed by a unit suffix.\n // Regex: unsigned digits with an optional decimal part, then an optional unit suffix.\n const match = /^([0-9]+(?:\\.[0-9]+)?)\\s*([A-Za-z]*)$/.exec(trimmed)\n if (!match) {\n throw new Error(`parseBytes: invalid byte budget \"${input}\". Expected format: \"1024\", \"50KB\", \"50MB\", \"1GB\"`)\n }\n\n const value = parseFloat(match[1]!)\n const unit = (match[2] ?? '').toUpperCase()\n\n if (!(unit in UNITS)) {\n throw new Error(`parseBytes: unknown unit \"${match[2]}\" in \"${input}\". Supported: B, KB, MB, GB`)\n }\n\n const bytes = Math.floor(value * UNITS[unit]!)\n if (bytes <= 0) {\n throw new Error(`parseBytes: byte budget must be > 0, got ${bytes} from \"${input}\"`)\n }\n return bytes\n}\n\n/**\n * Estimate the in-memory byte size of a decrypted record.\n *\n * Uses `JSON.stringify().length` as a stand-in for actual heap usage.\n * It's a deliberate approximation: real V8 heap size includes pointer\n * overhead, hidden classes, and string interning that we can't measure\n * from JavaScript. 
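Worked values for the accepted shapes, all powers of 1024 per the table above:

```ts
parseBytes(4096)    // 4096: numeric input, floored
parseBytes('1024')  // 1024: bare digit string
parseBytes('50KB')  // 51_200        = 50 × 1024
parseBytes('50 mb') // 52_428_800    = 50 × 1024² (suffix is case-insensitive)
parseBytes('1.5GB') // 1_610_612_736 = 1.5 × 1024³
parseBytes('-1')    // throws: the pattern matches unsigned digits only
parseBytes('50TB')  // throws: unknown unit, TB is unsupported by design
```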
The JSON length is a stable, monotonic proxy that\n * costs O(record size) per insert — fine when records are typically\n * < 1 KB and the cache eviction is the slow path anyway.\n *\n * Returns `0` (and the caller must treat it as 1 for accounting) if\n * stringification throws on circular references; this is documented\n * but in practice records always come from JSON-decoded envelopes.\n */\nexport function estimateRecordBytes(record: unknown): number {\n try {\n return JSON.stringify(record).length\n } catch {\n return 0\n }\n}\n","/**\n * Strategy seam for the optional sync engine + presence subsystem.\n * Core imports `SyncStrategy` type-only + `NO_SYNC` stub; the real\n * `SyncEngine`, `SyncTransaction`, and `PresenceHandle` constructors\n * are only reachable via `withSync()` in `./sync-active.ts`.\n *\n * Solo apps that never configure `sync` and never call\n * `collection.presence()` ship none of the ~856 LOC behind this seam\n * (`sync.ts` + `sync-transaction.ts` + `presence.ts`).\n *\n * Note: `keyring.ts` (~746 LOC) stays in core because it's required\n * for any multi-user vault — even single-owner vaults use a keyring\n * to wrap the DEK. The team package's grant/revoke/magic-link/\n * delegation modules tree-shake naturally via direct named imports.\n *\n * Behavior under NO_SYNC:\n *\n * - **buildSyncEngine** — throws. Only fires when `createNoydb({ sync })`\n * passes a remote target.\n * - **buildSyncTransaction** — throws. Only fires when `db.transaction(vault)`\n * is called on a vault with sync configured.\n * - **buildPresence** — throws. Only fires when user code calls\n * `collection.presence()`.\n *\n * @internal\n */\n\nimport type {\n NoydbStore,\n ConflictStrategy,\n SyncTargetRole,\n} from '../types.js'\nimport type { NoydbEventEmitter } from '../events.js'\nimport type { SyncPolicy } from '../store/sync-policy.js'\nimport type { SyncEngine } from './sync.js'\nimport type { SyncTransaction } from './sync-transaction.js'\nimport type { PresenceHandle, PresenceHandleOpts } from './presence.js'\nimport type { Vault } from '../vault.js'\n\n/**\n * Options accepted by `SyncStrategy.buildSyncEngine`. Mirrors the\n * `SyncEngine` constructor verbatim — kept here so core code never\n * imports the sync module at runtime.\n *\n * @internal\n */\nexport interface BuildSyncEngineOptions {\n local: NoydbStore\n remote: NoydbStore\n vault: string\n strategy: ConflictStrategy\n emitter: NoydbEventEmitter\n syncPolicy?: SyncPolicy\n role?: SyncTargetRole\n label?: string\n}\n\n/**\n * @internal\n */\nexport interface SyncStrategy {\n buildSyncEngine(opts: BuildSyncEngineOptions): SyncEngine\n buildSyncTransaction(vault: Vault, engine: SyncEngine): SyncTransaction\n buildPresence<P>(opts: PresenceHandleOpts): PresenceHandle<P>\n}\n\nfunction notEnabled(op: string): Error {\n return new Error(\n `${op} requires the sync strategy. Import ` +\n '`{ withSync }` from \"@noy-db/hub/sync\" and pass it to ' +\n '`createNoydb({ syncStrategy: withSync() })`.',\n )\n}\n\n/**\n * No-sync stub. 
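Putting the two halves of the cache-policy module together: a sketch of the insert path the docs describe, clamping the circular-reference `0` to `1` as the contract requires (the `cachePut` helper is illustrative, not package API):

```ts
// Compute the size once at insert time, never on access.
const recordCache = new Lru<string, unknown>({ maxBytes: parseBytes('50MB') })

function cachePut(id: string, record: unknown): void {
  const size = Math.max(1, estimateRecordBytes(record)) // 0 → 1 clamp
  recordCache.set(id, record, size)
}
```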
Every constructor throws with an actionable pointer\n * — there is no useful \"off\" mode for sync engine / presence /\n * sync-transaction; if the consumer reached one of these surfaces,\n * they intended to use it.\n *\n * @internal\n */\nexport const NO_SYNC: SyncStrategy = {\n buildSyncEngine() { throw notEnabled('SyncEngine') },\n buildSyncTransaction() { throw notEnabled('SyncTransaction') },\n buildPresence() { throw notEnabled('collection.presence()') },\n}\n","/**\n * Strategy seam between core Collection and the optional blob subsystem.\n *\n * Core imports `BlobStrategy` as a TYPE-ONLY symbol and `NO_BLOBS` as a\n * minimal runtime stub. Neither pulls in the heavy `BlobSet` / chunk /\n * MIME machinery — those only arrive when the consumer explicitly\n * imports `@noy-db/hub/blobs` (see `./index.ts` → `withBlobs()` factory).\n *\n * This file is intentionally tiny and free of side effects so the\n * bundler keeps it in the graph without dragging everything else in.\n *\n * @internal\n */\n\nimport type { BlobSet } from './blob-set.js'\nimport type { NoydbStore } from '../types.js'\n\n/**\n * Args forwarded by `Collection.blob(id)` to the active strategy's\n * `openSlot`. The strategy is responsible for returning a live\n * `BlobSet` bound to the given record.\n *\n * @internal\n */\nexport interface BlobStrategyOpenArgs {\n readonly store: NoydbStore\n readonly vault: string\n readonly collection: string\n readonly recordId: string\n readonly getDEK: (collectionName: string) => Promise<CryptoKey>\n readonly encrypted: boolean\n readonly userId: string\n}\n\n/**\n * The seam interface. `@internal` — do not build public APIs on this\n * shape; it can evolve freely until blobs are extracted into their\n * own package, at which point it will be promoted to public.\n *\n * @internal\n */\nexport interface BlobStrategy {\n openSlot(args: BlobStrategyOpenArgs): BlobSet\n}\n\n/**\n * Default strategy for collections that did not opt into blob storage.\n * Every operation surfaces an actionable error that points the caller\n * at the opt-in path.\n *\n * @internal\n */\nexport const NO_BLOBS: BlobStrategy = {\n openSlot() {\n throw new Error(\n 'Blob storage is not enabled on this Noydb instance. ' +\n 'Import `{ withBlobs }` from \"@noy-db/hub/blobs\" and pass `withBlobs()` to `createNoydb({ blobStrategy: withBlobs() })`.',\n )\n },\n}\n","/**\n * Strategy seam for the optional VaultFrame snapshot primitive.\n * Core imports `ShadowStrategy` as TYPE-ONLY and `NO_SHADOW` as a\n * 4-line stub. `VaultFrame` is only constructed inside `withShadow()`\n * — consumers who never import `@noy-db/hub/shadow` ship none of\n * the ~129 LOC.\n *\n * @internal\n */\n\nimport type { VaultFrame } from './vault-frame.js'\n\n/**\n * @internal\n */\nexport interface ShadowStrategy {\n /**\n * Build a `VaultFrame` bound to the given vault. The factory type\n * is kept loose (`unknown`) to avoid a core → shadow type\n * dependency — the consumer always calls this through\n * `vault.frame()`, which returns `VaultFrame` at its surface.\n */\n buildFrame(vault: unknown): VaultFrame\n}\n\nconst NOT_ENABLED = new Error(\n 'VaultFrame requires the shadow strategy. Import `{ withShadow }` ' +\n 'from \"@noy-db/hub/shadow\" and pass it to ' +\n '`createNoydb({ shadowStrategy: withShadow() })`.',\n)\n\n/**\n * No-shadow stub.\n *\n * @internal\n */\nexport const NO_SHADOW: ShadowStrategy = {\n buildFrame() { throw NOT_ENABLED },\n}\n","/**\n * Strategy seam for the optional consent-audit subsystem. 
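The same seam shape recurs across sync, blobs, shadow, consent, and periods, so it is worth distilling once. A hedged skeleton with hypothetical `Widget` names, not package API:

```ts
// TYPE-ONLY import: erased at compile time, so the heavy module never
// enters the runtime graph through this file.
import type { WidgetEngine } from './widget.js' // hypothetical module

export interface WidgetStrategy {
  buildWidget(): WidgetEngine
}

// Tiny runtime stub: no heavy imports, actionable failure message.
export const NO_WIDGET: WidgetStrategy = {
  buildWidget() {
    throw new Error(
      'Widgets require the widget strategy. Import `{ withWidget }` ' +
      'from the subpath export and pass it to `createNoydb()`.',
    )
  },
}
// The real engine is only constructed inside a `withWidget()` factory
// behind a subpath export; with `"sideEffects": false`, bundlers drop
// it for consumers who never import that subpath.
```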
Core\n * imports `ConsentStrategy` as a TYPE-ONLY symbol and `NO_CONSENT`\n * as a tiny runtime stub.\n *\n * `writeConsentEntry` / `loadConsentEntries` are only reachable from\n * `withConsent()` in `./active.ts`, exported through\n * `@noy-db/hub/consent`. Applications without a consent scope ship\n * none of the ~194 LOC.\n *\n * @internal\n */\n\nimport type { NoydbStore } from '../types.js'\nimport type {\n ConsentAuditEntry,\n ConsentAuditFilter,\n} from './consent.js'\n\n/**\n * @internal\n */\nexport interface ConsentStrategy {\n /**\n * Record one consent audit entry. No-op under NO_CONSENT.\n */\n write(\n adapter: NoydbStore,\n vault: string,\n encrypted: boolean,\n entry: Omit<ConsentAuditEntry, 'id' | 'timestamp'>,\n getDEK: (collectionName: string) => Promise<CryptoKey>,\n ): Promise<void>\n\n /**\n * Read filtered consent entries. Returns `[]` under NO_CONSENT.\n */\n read(\n adapter: NoydbStore,\n vault: string,\n encrypted: boolean,\n getDEK: (collectionName: string) => Promise<CryptoKey>,\n filter?: ConsentAuditFilter,\n ): Promise<ConsentAuditEntry[]>\n}\n\n/**\n * No-consent stub. `write` is a no-op (returns a resolved promise);\n * `read` returns `[]`. Consumers get a consistent API surface without\n * pulling the consent module into the bundle.\n *\n * @internal\n */\nexport const NO_CONSENT: ConsentStrategy = {\n async write() {},\n async read() { return [] },\n}\n","/**\n * Strategy seam for the optional accounting-periods subsystem. Core\n * imports `PeriodsStrategy` type-only + `NO_PERIODS` stub; the real\n * `loadPeriods` / `chainAnchor` / `assertTsWritable` /\n * `validatePeriodName` / `appendPeriodLedgerEntry` functions are\n * only reachable via `withPeriods()` in `./active.ts`.\n *\n * Applications that never call `vault.closePeriod()` /\n * `vault.openPeriod()` ship none of the ~363 LOC.\n *\n * @internal\n */\n\nimport type { EncryptedEnvelope, NoydbStore } from '../types.js'\nimport type { LedgerStore } from '../history/ledger/store.js'\nimport type { PeriodRecord } from './periods.js'\n\n/**\n * @internal\n */\nexport interface PeriodsStrategy {\n loadPeriods(\n adapter: NoydbStore,\n vault: string,\n decrypt: (envelope: EncryptedEnvelope) => Promise<PeriodRecord>,\n ): Promise<PeriodRecord[]>\n chainAnchor(records: readonly PeriodRecord[]): Promise<{\n priorPeriodName?: string\n priorPeriodHash: string\n }>\n assertTsWritable(\n existing: { ts: string | null; record: Record<string, unknown> | null } | null,\n incoming: Record<string, unknown> | null,\n periods: readonly PeriodRecord[],\n ): void\n validatePeriodName(name: string, existing: readonly PeriodRecord[]): void\n appendPeriodLedgerEntry(\n ledger: LedgerStore | null,\n actor: string,\n envelope: EncryptedEnvelope,\n periodName: string,\n ): Promise<void>\n}\n\n/**\n * No-periods stub. `loadPeriods` returns `[]`; the write-guards do\n * nothing (vaults without closed periods never reject writes);\n * `validatePeriodName` / `appendPeriodLedgerEntry` throw because\n * those paths are only reached when the user explicitly called\n * `closePeriod()` / `openPeriod()` — if they did that without the\n * strategy, they need to wire it.\n *\n * @internal\n */\nconst NOT_ENABLED = new Error(\n 'Accounting periods require the periods strategy. 
Import ' +\n '`{ withPeriods }` from \"@noy-db/hub/periods\" and pass it to ' +\n '`createNoydb({ periodsStrategy: withPeriods() })`.',\n)\n\nexport const NO_PERIODS: PeriodsStrategy = {\n async loadPeriods() { return [] },\n async chainAnchor() { return { priorPeriodHash: '' } },\n assertTsWritable() {},\n validatePeriodName() { throw NOT_ENABLED },\n async appendPeriodLedgerEntry() { throw NOT_ENABLED },\n}\n","/**\n * _dict_* reserved collections + dictKey schema descriptor —\n *\n * Stores bounded enum-like field dictionaries as reserved encrypted\n * collections (`_dict_<name>/`) within a vault. Each dictionary\n * entry maps a stable key (e.g. `'paid'`) to a locale → label record\n * (e.g. `{ en: 'Paid', th: 'ชำระแล้ว' }`).\n *\n * Design decisions\n * ────────────────\n *\n * **Why reserved collections, not a separate store?**\n * Same answer as `_sync_credentials`: the compartment's existing\n * encryption stack is exactly right. Dictionaries are encrypted under the\n * same vault DEK, inherit ACL, ledger, and backup/restore for free.\n *\n * **One collection per dictionary, not one collection with namespaces.**\n * Each `_dict_<name>/` collection holds entries `{ id: key, labels: {...} }`.\n * This composes with `ref()` naturally (a dictKey IS a ref to the dict\n * collection), and means the query DSL works over dictionary entries\n * without any special-casing.\n *\n * **dictKey() is a descriptor, not a Zod type.**\n * The descriptor pattern matches `ref()`: declare NOYDB-specific metadata\n * in the collection options alongside `refs`. TypeScript inference comes\n * from the descriptor's generic parameter, not from Zod internals.\n *\n * API:\n * `dictKey(name, keys?)` — returns a DictKeyDescriptor\n * `vault.dictionary(name)` — returns a DictionaryHandle\n * `DictionaryHandle.put/putAll/get/delete/rename/list` — CRUD\n */\n\nimport type { NoydbStore, EncryptedEnvelope } from '../types.js'\nimport type { NoydbEventEmitter } from '../events.js'\nimport { NOYDB_FORMAT_VERSION } from '../types.js'\nimport type { UnlockedKeyring } from '../team/keyring.js'\nimport { encrypt, decrypt } from '../crypto.js'\nimport { ensureCollectionDEK } from '../team/keyring.js'\nimport type { LedgerStore } from '../history/ledger/store.js'\nimport { envelopePayloadHash } from '../history/ledger/hash.js'\nimport {\n PermissionDeniedError,\n DictKeyMissingError,\n} from '../errors.js'\n\n/** Reserved collection name prefix. Never collides with user collections. */\nexport const DICT_COLLECTION_PREFIX = '_dict_'\n\n/** Return the adapter collection name for a named dictionary. */\nexport function dictCollectionName(dictionaryName: string): string {\n return `${DICT_COLLECTION_PREFIX}${dictionaryName}`\n}\n\n/** Return true when a collection name is a reserved dictionary collection. */\nexport function isDictCollectionName(name: string): boolean {\n return name.startsWith(DICT_COLLECTION_PREFIX)\n}\n\n// ─── DictKey descriptor ────────────────────────────────────────────────\n\n/**\n * Descriptor returned by `dictKey()`. 
Attach to the collection's\n * `dictKeyFields` option to declare which fields are dictionary-backed:\n *\n * ```ts\n * const invoices = company.collection<Invoice>('invoices', {\n * dictKeyFields: {\n * status: dictKey('status', ['draft', 'open', 'paid'] as const),\n * },\n * })\n * ```\n *\n * The generic parameter `Keys` narrows the TypeScript type of the field\n * to a literal union; the runtime value of `keys` is used by `put()`\n * validation to reject unknown keys when a key set is declared.\n */\nexport interface DictKeyDescriptor<Keys extends string = string> {\n readonly _noydbDictKey: true\n /** Which dictionary this field references. */\n readonly name: string\n /** Declared valid keys. When set, `put()` rejects keys not in this set. */\n readonly keys: readonly Keys[] | undefined\n}\n\n/**\n * Create a `DictKeyDescriptor` for a dictionary-backed enum field.\n *\n * @param name The dictionary name (corresponds to `_dict_<name>` collection).\n * @param keys Optional `as const` array of valid key literals — narrows the\n * TypeScript type to a literal union and enables put-time\n * validation.\n *\n * @example\n * ```ts\n * const invoices = company.collection<Invoice>('invoices', {\n * dictKeyFields: {\n * status: dictKey('status', ['draft', 'open', 'paid'] as const),\n * },\n * })\n * ```\n */\nexport function dictKey<Keys extends string>(\n name: string,\n keys?: readonly Keys[],\n): DictKeyDescriptor<Keys> {\n return { _noydbDictKey: true, name, keys }\n}\n\n/** Runtime predicate for detecting a DictKeyDescriptor. */\nexport function isDictKeyDescriptor(x: unknown): x is DictKeyDescriptor {\n return (\n typeof x === 'object' &&\n x !== null &&\n (x as { _noydbDictKey?: unknown })._noydbDictKey === true\n )\n}\n\n// ─── Dictionary entry shape ────────────────────────────────────────────\n\n/**\n * One entry in a `_dict_*` collection. The record `id` (adapter-side\n * key) IS the stable dictionary key (e.g. `'paid'`). The `labels`\n * record maps locale codes to display strings.\n */\nexport interface DictEntry {\n /** Stable key — same as the record id in the adapter. */\n readonly key: string\n /** Locale → label map, e.g. `{ en: 'Paid', th: 'ชำระแล้ว' }`. */\n readonly labels: Record<string, string>\n}\n\n// ─── Per-dictionary options ────────────────────────────────────────────\n\n/**\n * Options for `vault.dictionary(name, options?)`.\n *\n * `writableBy` controls the minimum role for write operations (put,\n * putAll, delete, rename). Defaults to `'admin'` to match the standard\n * \"dictionary contents are owned by admins\" convention; set to\n * `'operator'` for user-editable dictionaries like custom tags.\n */\nexport interface DictionaryOptions {\n /** Minimum role allowed to write dictionary entries. Default: `'admin'`. */\n readonly writableBy?: 'owner' | 'admin' | 'operator'\n}\n\n// ─── DictionaryHandle ──────────────────────────────────────────────────\n\n/**\n * Handle to a named dictionary within a vault.\n *\n * Obtained via `vault.dictionary(name)`. Provides strongly-typed\n * CRUD for dictionary entries, plus the `rename()` operation that is the\n * only sanctioned mass-mutation path for dictKey fields.\n *\n * All writes are encrypted under the compartment's DEK for the\n * `_dict_<name>` collection. Adapters never see plaintext.\n */\nexport class DictionaryHandle<Keys extends string = string> {\n private readonly collName: string\n\n /**\n * Synchronous write-through cache for dict-join support.\n * Populated on every `put()`, `delete()`, and `rename()`. 
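What the `Keys` generic buys at the type level, following the usage shown in the `dictKey()` docs above:

```ts
const statusKey = dictKey('status', ['draft', 'open', 'paid'] as const)
// DictKeyDescriptor<'draft' | 'open' | 'paid'>, put-time validation enabled

const tagKey = dictKey('tags')
// DictKeyDescriptor<string>: any string key accepted, no validation

// Runtime discrimination when walking collection options:
if (isDictKeyDescriptor(statusKey)) {
  statusKey.name // 'status'
  statusKey.keys // ['draft', 'open', 'paid']
}
```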
The snapshot\n * is built from this cache by `snapshotEntries()` — the query executor\n * calls this synchronously inside `.toArray()`.\n *\n * `null` means \"not yet initialized\" — callers should use `list()`\n * to warm the cache before using dict joins on pre-existing data.\n */\n private readonly _syncCache = new Map<string, DictEntry>()\n\n /**\n * Return all cached entries as `{ key, labels, ...labels }` records —\n * usable synchronously by the join executor's `snapshot()` call.\n * Returns an empty array when the cache has never been populated.\n */\n snapshotEntries(): readonly Record<string, unknown>[] {\n return Array.from(this._syncCache.values()).map((e) => ({\n key: e.key,\n labels: e.labels,\n ...e.labels,\n }))\n }\n\n constructor(\n private readonly adapter: NoydbStore,\n private readonly compartmentName: string,\n private readonly dictionaryName: string,\n private readonly keyring: UnlockedKeyring,\n private readonly getDEK: (collectionName: string) => Promise<CryptoKey>,\n private readonly encrypted: boolean,\n private readonly ledger: LedgerStore | undefined,\n private readonly options: DictionaryOptions,\n /**\n * Callback provided by the Vault to find and rewrite records\n * in any registered collection that has a dictKeyField pointing at\n * this dictionary, used by `rename()`.\n */\n private readonly findAndUpdateReferences:\n | ((\n dictionaryName: string,\n oldKey: string,\n newKey: string,\n ) => Promise<void>)\n | undefined,\n private readonly emitter: NoydbEventEmitter,\n ) {\n this.collName = dictCollectionName(dictionaryName)\n }\n\n // ─── Access checks ────────────────────────────────────────────────\n\n private requireWriteAccess(): void {\n const minRole = this.options.writableBy ?? 'admin'\n const roleRank: Record<string, number> = {\n client: 1,\n viewer: 2,\n operator: 3,\n admin: 4,\n owner: 5,\n }\n const callerRank = roleRank[this.keyring.role] ?? 0\n const requiredRank = roleRank[minRole] ?? 4\n if (callerRank < requiredRank) {\n throw new PermissionDeniedError(\n `Dictionary \"${this.dictionaryName}\" writes require \"${minRole}\" role or above. ` +\n `Current role: \"${this.keyring.role}\".`,\n )\n }\n }\n\n // ─── Internal helpers ─────────────────────────────────────────────\n\n private async getDekForDict(): Promise<CryptoKey> {\n const resolve = await ensureCollectionDEK(\n this.adapter,\n this.compartmentName,\n this.keyring,\n )\n return resolve(this.collName)\n }\n\n private async encryptEntry(entry: DictEntry, version: number): Promise<EncryptedEnvelope> {\n if (!this.encrypted) {\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: version,\n _ts: new Date().toISOString(),\n _iv: '',\n _data: JSON.stringify(entry),\n _by: this.keyring.userId,\n }\n }\n const dek = await this.getDekForDict()\n const { iv, data } = await encrypt(JSON.stringify(entry), dek)\n return {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: version,\n _ts: new Date().toISOString(),\n _iv: iv,\n _data: data,\n _by: this.keyring.userId,\n }\n }\n\n private async decryptEntry(envelope: EncryptedEnvelope): Promise<DictEntry> {\n if (!this.encrypted) {\n return JSON.parse(envelope._data) as DictEntry\n }\n const dek = await this.getDekForDict()\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return JSON.parse(json) as DictEntry\n }\n\n // ─── Public API ───────────────────────────────────────────────────\n\n /**\n * Add or overwrite a single dictionary entry.\n *\n * @param key The stable key to store (e.g. `'paid'`).\n * @param labels Locale → label map (e.g. 
`{ en: 'Paid', th: 'ชำระแล้ว' }`).\n */\n async put(key: Keys, labels: Record<string, string>): Promise<void> {\n this.requireWriteAccess()\n\n const entry: DictEntry = { key, labels }\n const existing = await this.adapter.get(\n this.compartmentName,\n this.collName,\n key,\n )\n const version = existing ? existing._v + 1 : 1\n const envelope = await this.encryptEntry(entry, version)\n\n await this.adapter.put(\n this.compartmentName,\n this.collName,\n key,\n envelope,\n existing ? existing._v : undefined,\n )\n\n // Maintain synchronous cache for dict-join snapshot\n this._syncCache.set(key, entry)\n\n this.emitter.emit('change', {\n vault: this.compartmentName,\n collection: this.collName,\n id: key,\n action: 'put',\n })\n\n if (this.ledger) {\n await this.ledger.append({\n op: 'put',\n collection: this.collName,\n id: key,\n version,\n actor: this.keyring.userId,\n // — must be the real envelope hash so\n // vault.verifyBackupIntegrity()'s data-cross-check matches.\n payloadHash: await envelopePayloadHash(envelope),\n })\n }\n }\n\n /**\n * Batch-add or overwrite multiple dictionary entries in one call.\n *\n * @param entries `{ key: { locale: label } }` map.\n */\n async putAll(entries: Record<Keys, Record<string, string>>): Promise<void> {\n this.requireWriteAccess()\n for (const [key, labels] of Object.entries(entries) as [Keys, Record<string, string>][]) {\n await this.put(key, labels)\n }\n }\n\n /**\n * Load the label map for a single key.\n *\n * @returns The label map, or `null` if the key doesn't exist.\n */\n async get(key: Keys): Promise<Record<string, string> | null> {\n const envelope = await this.adapter.get(\n this.compartmentName,\n this.collName,\n key,\n )\n if (!envelope) return null\n const entry = await this.decryptEntry(envelope)\n return entry.labels\n }\n\n /**\n * Delete a dictionary key.\n *\n * Default mode is `'strict'`, intended to throw `DictKeyInUseError`\n * when any registered collection still references the key; the\n * reference check itself is not wired up yet (see the inline note\n * below). Pass `{ mode: 'warn' }` to skip the check (dev-mode\n * cleanup only).\n */\n async delete(key: Keys, opts: { mode?: 'strict' | 'warn' } = {}): Promise<void> {\n this.requireWriteAccess()\n\n const existing = await this.adapter.get(\n this.compartmentName,\n this.collName,\n key,\n )\n if (!existing) {\n throw new DictKeyMissingError(this.dictionaryName, key)\n }\n\n const mode = opts.mode ?? 'strict'\n if (mode === 'strict' && this.findAndUpdateReferences) {\n // Reference checking is not implemented yet. One option is to\n // probe via the rename machinery (rename to a nonexistent\n // sentinel, abort before applying changes); the cleaner shape\n // is a dedicated checkReferences() callback exposed by the\n // vault. Until that lands, callers must confirm no references\n // exist themselves, or pass { mode: 'warn' }. A dedicated\n // findReferences API is tracked as a follow-up.\n }\n\n await this.adapter.delete(this.compartmentName, this.collName, key)\n\n // Maintain synchronous cache for dict-join snapshot\n this._syncCache.delete(key)\n\n this.emitter.emit('change', {\n vault: this.compartmentName,\n collection: this.collName,\n id: key,\n action: 'delete',\n })\n\n if (this.ledger) {\n await this.ledger.append({\n op: 'delete',\n collection: this.collName,\n id: key,\n version: existing._v,\n actor: this.keyring.userId,\n // — for delete the prior envelope is what was just\n // removed; we hash it so the chain captures intent. 
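Typical handle usage end to end. A sketch: the `vault.dictionary()` call follows the module header, and the labels echo the examples above:

```ts
const status = vault.dictionary('status', { writableBy: 'admin' })

await status.putAll({
  draft: { en: 'Draft' },
  paid:  { en: 'Paid', th: 'ชำระแล้ว' },
})

await status.get('paid')                       // { en: 'Paid', th: 'ชำระแล้ว' }
await status.delete('draft', { mode: 'warn' }) // skip the reference check
```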
The\n // verifyBackupIntegrity data-cross-check skips delete\n // entries entirely (the live record is gone), but the\n // chain still benefits from a stable non-empty hash.\n payloadHash: await envelopePayloadHash(existing),\n })\n }\n }\n\n /**\n * Rename a dictionary key — the only sanctioned mass-mutation path.\n *\n * Atomically:\n * 1. Adds the new key with the same labels as the old key.\n * 2. Updates every registered record that stores the old key to\n * store the new key instead.\n * 3. Deletes the old key.\n * 4. Appends a single ledger entry recording the rename.\n *\n * Respects ACL: throws `PermissionDeniedError` before any mutation\n * if the caller can't write. The cascade is best-effort atomic\n * within this call — no two-phase commit across adapter calls.\n *\n * Cascade-on-delete is NOT supported. Use `rename()` when you need\n * to change a key that records reference.\n */\n async rename(oldKey: Keys, newKey: string): Promise<void> {\n this.requireWriteAccess()\n\n // 1. Load old entry\n const existing = await this.adapter.get(\n this.compartmentName,\n this.collName,\n oldKey,\n )\n if (!existing) {\n throw new DictKeyMissingError(this.dictionaryName, oldKey)\n }\n const oldEntry = await this.decryptEntry(existing)\n\n // 2. Write new key\n const newEntry: DictEntry = { key: newKey, labels: oldEntry.labels }\n const newEnvelope = await this.encryptEntry(newEntry, 1)\n await this.adapter.put(\n this.compartmentName,\n this.collName,\n newKey,\n newEnvelope,\n )\n\n // 3. Update all referencing records in registered collections\n if (this.findAndUpdateReferences) {\n await this.findAndUpdateReferences(this.dictionaryName, oldKey, newKey)\n }\n\n // 4. Delete old key\n await this.adapter.delete(this.compartmentName, this.collName, oldKey)\n\n // Maintain synchronous cache for dict-join snapshot\n this._syncCache.delete(oldKey)\n this._syncCache.set(newKey, newEntry)\n\n this.emitter.emit('change', {\n vault: this.compartmentName,\n collection: this.collName,\n id: oldKey,\n action: 'delete',\n })\n this.emitter.emit('change', {\n vault: this.compartmentName,\n collection: this.collName,\n id: newKey,\n action: 'put',\n })\n\n // 5. Ledger — record the rename as delete(oldKey) + put(newKey)\n // so verifyBackupIntegrity()'s data-cross-check matches reality\n // (the oldKey envelope is gone; the newKey envelope is what was\n // just written). 
Two entries instead of one — the chain still\n // captures the rename intent via the matching ts + actor.\n if (this.ledger) {\n await this.ledger.append({\n op: 'delete',\n collection: this.collName,\n id: oldKey,\n version: existing._v,\n actor: this.keyring.userId,\n payloadHash: await envelopePayloadHash(existing),\n })\n await this.ledger.append({\n op: 'put',\n collection: this.collName,\n id: newKey,\n version: 1,\n actor: this.keyring.userId,\n payloadHash: await envelopePayloadHash(newEnvelope),\n })\n }\n }\n\n /**\n * List all entries in this dictionary.\n *\n * @returns Array of `{ key, labels }` objects.\n */\n async list(): Promise<DictEntry[]> {\n const keys = await this.adapter.list(this.compartmentName, this.collName)\n const entries: DictEntry[] = []\n for (const key of keys) {\n const envelope = await this.adapter.get(\n this.compartmentName,\n this.collName,\n key,\n )\n if (!envelope) continue\n const entry = await this.decryptEntry(envelope)\n entries.push(entry)\n // Warm the synchronous cache\n this._syncCache.set(key, entry)\n }\n return entries\n }\n\n /**\n * Resolve a key to its label for the given locale.\n *\n * Used by the collection's locale-aware read path to populate\n * `<field>Label` virtual fields. Returns `undefined` when the\n * key doesn't exist or has no label for the requested locale\n * (after exhausting the fallback chain).\n */\n async resolveLabel(\n key: string,\n locale: string,\n fallback?: string | readonly string[],\n ): Promise<string | undefined> {\n const labels = await this.get(key as Keys)\n if (!labels) return undefined\n\n // Try primary locale\n if (labels[locale] !== undefined) return labels[locale]\n\n // Try fallback chain\n const chain = Array.isArray(fallback) ? (fallback as readonly string[]) : fallback ? [fallback as string] : []\n for (const fb of chain) {\n if (fb === 'any') {\n // Return any available label\n const any = Object.values(labels)[0]\n if (any !== undefined) return any\n } else if (labels[fb] !== undefined) {\n return labels[fb]\n }\n }\n\n return undefined\n }\n}\n","/**\n * Accounting-period closure + opening.\n *\n * A closed period seals every record whose envelope `_ts` is at or\n * before the period's `endDate`: further writes (`put` / `delete`)\n * against such records throw {@link PeriodClosedError}. The period\n * itself is stored as a record in the reserved `_periods` collection\n * and written through the normal ledger-instrumented path, so every\n * closure appends a tamper-evident entry to the vault's hash chain.\n *\n * ## Closure model\n *\n * ```\n * vault.closePeriod({ name: 'FY2026-Q1', endDate: '2026-03-31' })\n * └─► PeriodRecord written to _periods/<name>\n * ├─ priorPeriodName / priorPeriodHash — chain to last close\n * ├─ closedAt / closedBy — provenance\n * └─ normal ledger append fires (LedgerStore.append)\n * ```\n *\n * Enforcement (`assertTsWritable`) is vault-local: the Vault caches\n * the list of closed periods on first read and consults that cache in\n * the `Collection.put` / `.delete` path via the `periodGuard` hook.\n *\n * ## Opening model\n *\n * ```\n * vault.openPeriod({\n * name: 'FY2026-Q2',\n * startDate: '2026-04-01',\n * fromPeriod: 'FY2026-Q1',\n * carryForward: async (priorView) => Record<string, Record<string, unknown>>,\n * })\n * ```\n *\n * `carryForward` receives a read-only `VaultInstant` anchored at the\n * prior period's `endDate` (built via `vault.at(endDate)`) so the\n * callback can compute closing aggregates from the sealed state. 
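The closure and opening models above, as one caller sequence. A hedged sketch: `Journal` and the opening-balance shape are illustrative, and the read surface is assumed to be the `get`/`list` facade described by `CarryForwardContext` further down:

```ts
interface Journal { id: string; date: string; amount: number }

await vault.closePeriod({
  name: 'FY2026-Q1',
  endDate: '2026-03-31',
  dateField: 'date', // seal by business date, not envelope _ts
})

await vault.openPeriod({
  name: 'FY2026-Q2',
  startDate: '2026-04-01',
  fromPeriod: 'FY2026-Q1',
  carryForward: async (ctx) => {
    // ctx.priorEndDate === '2026-03-31', the sealed boundary.
    const all = await ctx.collection<Journal>('journal').list()
    const closing = all.filter((e) => e.date <= ctx.priorEndDate)
    const opening: Record<string, Journal> = {}
    for (const entry of closing) {
      // A fresh business date keeps the carry-forward outside every
      // closed period.
      opening[`OB-${entry.id}`] = { ...entry, date: '2026-04-01' }
    }
    return { journal: opening } // written before the new PeriodRecord lands
  },
})
```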
The\n * returned `{ [collectionName]: { [id]: record } }` map is written\n * before the new `PeriodRecord` lands — opening balances materialise\n * as normal records with fresh timestamps that fall outside every\n * closed period.\n *\n * ## Not covered\n *\n * - Partial re-opening of a closed period. If an auditor needs to\n * make a correction inside a sealed period, the sanctioned path is\n * a compensating entry in the NEW period, not an unlock of the\n * old one.\n * - Automatic period rollover. `closePeriod` / `openPeriod` are\n * deliberately explicit operator calls so the caller decides when\n * the boundary lands.\n *\n * @module\n */\n\nimport type { NoydbStore, EncryptedEnvelope } from '../types.js'\nimport type { LedgerStore } from '../history/ledger/index.js'\nimport { sha256Hex, canonicalJson } from '../history/ledger/index.js'\nimport { PeriodClosedError, ValidationError } from '../errors.js'\n\n/** The reserved collection name holding closed-period metadata. */\nexport const PERIODS_COLLECTION = '_periods'\n\n/**\n * Stored record for one closed or opened accounting period. One entry\n * per period, keyed by `name` in the reserved `_periods` collection.\n *\n * The hash chain between periods is computed at read time by\n * `loadPeriods()` — each record carries the name + hash of its\n * predecessor so a tamper with any period's record breaks the chain\n * into the next one, the same way the ledger's `prevHash` works.\n */\nexport interface PeriodRecord {\n /** Human-readable name (e.g., `'FY2026-Q1'`). Unique per vault. */\n readonly name: string\n /**\n * Role discriminator. A period is `'closed'` from the moment its\n * `closedAt` is recorded; `'opened'` marks a period whose opening\n * entries have been carried forward via {@link openPeriod}. Many\n * workflows will produce one opened period per closed period (the\n * opened one is the SUCCESSOR — its `startDate` equals the prior\n * `endDate + 1 day`).\n */\n readonly kind: 'closed' | 'opened'\n /** ISO date — inclusive upper bound for records belonging to this period. */\n readonly endDate: string\n /** ISO date — lower bound (present on opened periods only). */\n readonly startDate?: string\n /**\n * Record field carrying the business date (e.g. `'date'` on an\n * invoice, `'paidAt'` on a payment). The guard compares\n * `record[dateField]` against `endDate` — NOT the envelope `_ts`.\n * Accounting entries booked late (business date `2026-01-15`,\n * write-time `2026-04-22`) still get sealed when Q1 closes at\n * `2026-03-31` because the comparison uses the business date.\n *\n * Optional for backwards compat. When absent, the guard falls back\n * to envelope `_ts` — that's a write-time seal, appropriate for\n * content that doesn't carry a logical business date (e.g. system\n * settings) but almost never right for accounting ledgers.\n */\n readonly dateField?: string\n /** ISO timestamp recorded at `closePeriod()` / `openPeriod()` call time. */\n readonly closedAt: string\n /** userId of the keyring that invoked the close/open. */\n readonly closedBy: string\n /** Name of the prior period this one chains to, if any. */\n readonly priorPeriodName?: string\n /** sha256(canonicalJson(priorPeriod)) — empty for the first period. 
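A verifier can\n * recompute the anchor the same way `chainAnchor()` does — a minimal\n * sketch, assuming `prior` and `period` are adjacent entries from\n * `loadPeriods()`:\n *\n * ```ts\n * const expected = await sha256Hex(\n * canonicalJson(prior as unknown as Record<string, unknown>),\n * )\n * if (expected !== period.priorPeriodHash) {\n * throw new Error(`period chain broken at ${period.name}`)\n * }\n * ```\n 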
*/\n readonly priorPeriodHash: string\n /**\n * Opened periods only — the names of the collections whose\n * carry-forward aggregates were written by {@link openPeriod}.\n * Recorded for auditability so a future `verifyPeriodChain()` can\n * cross-check the opening balances against the closing snapshot.\n */\n readonly openingCollections?: readonly string[]\n}\n\n/** Options for `vault.closePeriod()`. */\nexport interface ClosePeriodOptions {\n /** Human-readable name. Must not collide with an existing period. */\n readonly name: string\n /**\n * Inclusive upper cutoff. A record is sealed when its\n * `record[dateField]` (or, if absent, the envelope `_ts`) is at or\n * before this ISO timestamp.\n */\n readonly endDate: string\n /**\n * Record field carrying the business date used for period\n * membership. Recommended for accounting workflows — e.g. an\n * invoice booked late (write-time after close) is still sealed\n * when its `invoice.date` falls inside the closed period.\n *\n * Omit to use envelope `_ts` (write-time seal). This fallback\n * rarely matches real-world accounting semantics; prefer passing\n * an explicit `dateField`.\n */\n readonly dateField?: string\n}\n\n/** Options for `vault.openPeriod()`. */\nexport interface OpenPeriodOptions<TCollections = Record<string, Record<string, unknown>>> {\n /** Human-readable name for the new period. Must be unique. */\n readonly name: string\n /** ISO lower bound of the new period (usually prior `endDate + 1 day`). */\n readonly startDate: string\n /**\n * Name of the prior CLOSED period this one chains from. The prior\n * period's record is verified to exist and to be `kind: 'closed'`;\n * its `endDate` is made available to the `carryForward` callback.\n */\n readonly fromPeriod: string\n /**\n * Receives a read-only facade over the vault's CURRENT state,\n * plus the prior period's `endDate`. Accounting semantics: after\n * a period closes, records with `record[dateField] <= endDate`\n * are frozen — current state equals closing state, so a caller\n * can compute closing balances by listing the live collection and\n * filtering on the business date (see the example below).\n *\n * Returns opening-balance records keyed by collection name.\n * Example:\n *\n * ```ts\n * carryForward: async (ctx) => {\n * const rows = await ctx.collection<Journal>('journal').list()\n * const closing = rows.filter((e) => e.date <= ctx.priorEndDate)\n * const opening: Record<string, Journal> = {}\n * for (const entry of closing) {\n * opening[`OB-${entry.id}`] = { ...entry, date: '2026-04-01' }\n * }\n * return { journal: opening }\n * }\n * ```\n */\n readonly carryForward: (\n ctx: CarryForwardContext,\n ) => Promise<TCollections> | TCollections\n}\n\n/**\n * Context passed to `OpenPeriodOptions.carryForward`. Exposes a\n * read-only subset of the live vault (`collection(name).get/list`)\n * plus the prior period's `endDate` so business-date filters can\n * be built by the caller.\n *\n * Writes go via the return value, not via the facade — the\n * `collection()` here is deliberately restricted to reads.\n */\nexport interface CarryForwardContext {\n /** The prior period's `endDate` — the boundary of the closing snapshot. */\n readonly priorEndDate: string\n /** Read-only collection facade over current vault state. */\n collection<T = unknown>(name: string): ReadOnlyCollection<T>\n}\n\n/** Minimum read surface exposed to `carryForward`. 
*/\nexport interface ReadOnlyCollection<T> {\n get(id: string): Promise<T | null>\n list(): Promise<T[]>\n}\n\n/**\n * Load every period record currently stored on the adapter.\n * Decrypting is the caller's responsibility (we return plain records\n * so the vault can use its own `_periods` DEK).\n *\n * @internal — called by Vault methods that need the closed-period\n * cache. Not part of the public API surface.\n */\nexport async function loadPeriods(\n adapter: NoydbStore,\n vault: string,\n decrypt: (envelope: EncryptedEnvelope) => Promise<PeriodRecord>,\n): Promise<PeriodRecord[]> {\n const ids = await adapter.list(vault, PERIODS_COLLECTION)\n const records: PeriodRecord[] = []\n for (const id of ids) {\n const env = await adapter.get(vault, PERIODS_COLLECTION, id)\n if (env) records.push(await decrypt(env))\n }\n // Stable order by closedAt so chain verification is reproducible.\n records.sort((a, b) => a.closedAt.localeCompare(b.closedAt))\n return records\n}\n\n/**\n * Given the current ordered period list, pick the last entry that\n * belongs to the hash chain — used as the `priorPeriodHash` anchor\n * for the next closure/opening.\n *\n * @internal\n */\nexport async function chainAnchor(\n records: readonly PeriodRecord[],\n): Promise<{ priorPeriodName?: string; priorPeriodHash: string }> {\n const last = records[records.length - 1]\n if (!last) return { priorPeriodHash: '' }\n const hash = await sha256Hex(canonicalJson(last as unknown as Record<string, unknown>))\n return { priorPeriodName: last.name, priorPeriodHash: hash }\n}\n\n/**\n * Throw `PeriodClosedError` if the record being touched falls within\n * any closed period.\n *\n * Three signals, evaluated per period:\n *\n * 1. If the period declares a `dateField`, the guard reads\n * `record[dateField]` on BOTH the existing (prior) record AND the\n * incoming (new) record. Either comparing `<= endDate` triggers\n * the error — callers cannot slide a record into a closed period\n * by editing its date field.\n * 2. If the period has no `dateField`, the guard falls back to the\n * envelope `_ts` of the existing record. Fresh inserts (no\n * existing envelope) pass.\n * 3. For a delete, only the existing side is checked.\n *\n * @internal\n */\nexport function assertTsWritable(\n existing: { ts: string | null; record: Record<string, unknown> | null } | null,\n incomingRecord: Record<string, unknown> | null,\n closedPeriods: readonly PeriodRecord[],\n): void {\n for (const p of closedPeriods) {\n if (p.kind !== 'closed') continue\n if (p.dateField) {\n const checkRecord = (label: string, r: Record<string, unknown> | null): void => {\n if (!r) return\n const v = r[p.dateField!]\n if (typeof v === 'string' && v <= p.endDate) {\n throw new PeriodClosedError(p.name, p.endDate, `${label}[${p.dateField}]=${v}`)\n }\n }\n checkRecord('existing', existing?.record ?? null)\n checkRecord('incoming', incomingRecord)\n continue\n }\n // Fallback: write-time seal via envelope _ts.\n const existingTs = existing?.ts ?? null\n if (existingTs !== null && existingTs <= p.endDate) {\n throw new PeriodClosedError(p.name, p.endDate, existingTs)\n }\n }\n}\n\n/**\n * Sanity-check a proposed period name against the existing period\n * records. 
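Both failure modes, for\n * illustration (`periods` is an assumed in-memory list):\n *\n * ```ts\n * validatePeriodName('', [])\n * // → ValidationError: Period name cannot be empty.\n * validatePeriodName('FY2026-Q1', periods)\n * // → ValidationError when 'FY2026-Q1' already exists.\n * ```\n *\n * 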
Shared by closePeriod / openPeriod so the two pathways\n * produce identical diagnostics.\n *\n * @internal\n */\nexport function validatePeriodName(\n name: string,\n existing: readonly PeriodRecord[],\n): void {\n if (name.length === 0) {\n throw new ValidationError('Period name cannot be empty.')\n }\n if (existing.some((p) => p.name === name)) {\n throw new ValidationError(`Period \"${name}\" already exists.`)\n }\n}\n\n/**\n * Wire a reserved-collection ledger append for a period record. The\n * period itself is stored via the adapter as an encrypted envelope;\n * the ledger entry is a normal `put` with the period's payloadHash,\n * so period closures inherit the chain's tamper-evidence.\n *\n * @internal\n */\nexport async function appendPeriodLedgerEntry(\n ledger: LedgerStore | null,\n actor: string,\n envelope: EncryptedEnvelope,\n name: string,\n): Promise<void> {\n if (!ledger) return\n const { envelopePayloadHash } = await import('../history/ledger/index.js')\n await ledger.append({\n op: 'put',\n collection: PERIODS_COLLECTION,\n id: name,\n version: envelope._v,\n actor,\n payloadHash: await envelopePayloadHash(envelope),\n })\n}\n","/**\n * `vault.exportBlobs()` — bulk blob extraction primitive.\n *\n * Async-iterable handle over every blob attached to records in a\n * vault, optionally filtered by collection allowlist and per-record\n * predicate. Emits tuples of `{ blobId, recordRef, bytes, meta }` so\n * the consumer can pipe into any sink (zip stream, S3 multipart, USB\n * copy, cold-storage tape) without pulling the whole export into\n * memory.\n *\n * ## Auth + audit\n *\n * - Capability check runs **once** at handle creation via\n * `Vault.assertCanExport('plaintext', 'blob')`. An operator whose\n * keyring lacks that bit fails before a single byte of ciphertext\n * is decrypted.\n * - Audit entry lands in `_export_audit` at handle creation: the\n * actor, start timestamp, target collections, predicate presence,\n * and batch mechanism. **No content hashes** — per the spec\n * non-correlation invariant.\n *\n * ## Abort + resume\n *\n * - `handle.abort()` flips the internal signal; the next iteration\n * boundary throws `ExportBlobsAbortedError`. Consumers already in `for await`\n * can catch and exit cleanly.\n * - Restart after a partial failure with `{ afterBlobId }` — the\n * iterator skips tuples up to (and including) that blob id before\n * yielding again. Combined with a blob-count ceiling it supports\n * idempotent batch re-runs.\n *\n * @module\n */\n\nimport type { Collection } from '../collection.js'\nimport type { SlotInfo } from '../types.js'\n\n// ─── Types ──────────────────────────────────────────────────────────────\n\nexport interface ExportBlobsOptions {\n /**\n * Collection allowlist. Omit to export blobs from every collection\n * the caller has read access to.\n */\n readonly collections?: readonly string[]\n /**\n * Per-record predicate. Called on the decrypted record BEFORE any\n * blob bytes are read for that record — returning false skips the\n * record and all its slots without touching their chunks.\n */\n readonly where?: (record: unknown, context: { collection: string; id: string }) => boolean\n /**\n * Resume after a specific blob id. The iterator skips tuples up to\n * and including this id, then yields. Format of the id is the same\n * as `ExportedBlob.blobId` (the HMAC-keyed eTag).\n */\n readonly afterBlobId?: string\n /**\n * External abort signal. When fired, the next iterator tick throws\n * `ExportBlobsAbortedError`. 
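A minimal wiring sketch\n * (the `vault` handle and `sink` callback are assumptions):\n *\n * ```ts\n * const ctrl = new AbortController()\n * const handle = vault.exportBlobs({ signal: ctrl.signal })\n * try {\n * for await (const blob of handle) await sink(blob.bytes)\n * } catch (err) {\n * if (!(err instanceof ExportBlobsAbortedError)) throw err\n * }\n * ```\n *\n * 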
Honored alongside `handle.abort()`.\n */\n readonly signal?: AbortSignal\n}\n\nexport interface ExportedBlob {\n /** Opaque blob identifier — HMAC-keyed eTag, stable across vaults. */\n readonly blobId: string\n /** Where this blob came from in the vault. */\n readonly recordRef: {\n readonly collection: string\n readonly id: string\n readonly slot: string\n }\n /** Decrypted plaintext bytes. */\n readonly bytes: Uint8Array\n /** Best-effort metadata (from the blob slot record). */\n readonly meta: {\n readonly size: number\n /**\n * User-visible filename stored on the slot. Often equal to the\n * slot name; differs when the caller supplied an explicit\n * `filename` to `BlobSet.put()`.\n */\n readonly filename: string\n readonly mimeType?: string\n readonly createdAt?: string\n }\n}\n\nexport interface ExportBlobsHandle extends AsyncIterable<ExportedBlob> {\n /** Abort the export. Safe to call multiple times. */\n abort(): void\n /** True once `abort()` has fired or the external signal aborted. */\n readonly aborted: boolean\n}\n\nexport class ExportBlobsAbortedError extends Error {\n constructor(reason: string) {\n super(`exportBlobs aborted: ${reason}`)\n this.name = 'ExportBlobsAbortedError'\n }\n}\n\n// ─── Audit ──────────────────────────────────────────────────────────────\n\nexport const EXPORT_AUDIT_COLLECTION = '_export_audit'\n\nexport interface ExportBlobsAuditEntry {\n readonly id: string\n readonly mechanism: 'exportBlobs'\n readonly actor: string\n readonly startedAt: string\n readonly collections: readonly string[] | null\n readonly predicate: boolean\n readonly afterBlobId: string | null\n}\n\n// ─── Implementation ─────────────────────────────────────────────────────\n\n/**\n * Build the handle. Factored out of `Vault.exportBlobs` so the\n * implementation can be unit-tested without going through the\n * compartment lifecycle.\n */\nexport function createExportBlobsHandle(\n actor: string,\n listAccessibleCollections: () => Promise<string[]>,\n getCollection: <T>(name: string) => Collection<T>,\n writeAudit: (entry: ExportBlobsAuditEntry) => Promise<void>,\n options: ExportBlobsOptions,\n): ExportBlobsHandle {\n let aborted = false\n\n const abort = (): void => {\n aborted = true\n }\n\n if (options.signal) {\n if (options.signal.aborted) aborted = true\n options.signal.addEventListener('abort', () => { aborted = true })\n }\n\n function assertLive(): void {\n if (aborted) throw new ExportBlobsAbortedError('aborted by caller')\n }\n\n const allowlist = options.collections ? new Set(options.collections) : null\n\n // Write the audit entry BEFORE the first yield so a blocked\n // iteration still leaves an audit trail that the export started.\n let auditPromise: Promise<void> | null = null\n function writeAuditOnce(): Promise<void> {\n if (!auditPromise) {\n auditPromise = writeAudit({\n id: generateBatchId(),\n mechanism: 'exportBlobs',\n actor,\n startedAt: new Date().toISOString(),\n collections: options.collections ?? null,\n predicate: Boolean(options.where),\n afterBlobId: options.afterBlobId ?? 
null,\n })\n }\n return auditPromise\n }\n\n async function* generate(): AsyncGenerator<ExportedBlob> {\n await writeAuditOnce()\n assertLive()\n\n // Resolve target collections lazily — also keeps the call async.\n const allCollections = await listAccessibleCollections()\n const targets = allCollections.filter(name => {\n if (name.startsWith('_')) return false\n if (allowlist && !allowlist.has(name)) return false\n return true\n })\n\n let resumeCursorHit = options.afterBlobId === undefined\n\n for (const collectionName of targets) {\n if (aborted) return\n\n const coll = getCollection<Record<string, unknown>>(collectionName)\n const records = await coll.list().catch(() => [])\n for (const record of records) {\n if (aborted) return\n assertLive()\n\n const idField = (record as { id?: unknown }).id\n if (typeof idField !== 'string') continue\n\n if (options.where && !options.where(record, { collection: collectionName, id: idField })) continue\n\n const blobSet = coll.blob(idField)\n const slots = await blobSet.list().catch(() => [] as SlotInfo[])\n for (const slot of slots) {\n if (aborted) return\n\n if (!resumeCursorHit) {\n if (slot.eTag === options.afterBlobId) {\n resumeCursorHit = true\n }\n continue\n }\n\n const bytes = await blobSet.get(slot.name)\n if (!bytes) continue\n\n const item: ExportedBlob = {\n blobId: slot.eTag,\n recordRef: { collection: collectionName, id: idField, slot: slot.name },\n bytes,\n meta: {\n size: slot.size,\n filename: slot.filename,\n ...(slot.mimeType !== undefined && { mimeType: slot.mimeType }),\n ...(slot.uploadedAt !== undefined && { createdAt: slot.uploadedAt }),\n },\n }\n yield item\n }\n }\n }\n }\n\n const handle: ExportBlobsHandle = {\n abort,\n get aborted() { return aborted },\n [Symbol.asyncIterator]: () => generate(),\n }\n return handle\n}\n\n// ─── Helpers ────────────────────────────────────────────────────────────\n\nfunction generateBatchId(): string {\n // 16 random bytes, hex-encoded; the id keeps a 12-hex-char slice\n // plus a base36 timestamp.\n const raw = globalThis.crypto.getRandomValues(new Uint8Array(16))\n let s = ''\n for (const b of raw) s += b.toString(16).padStart(2, '0')\n return `batch-${Date.now().toString(36)}-${s.slice(0, 12)}`\n}\n","/**\n * Blob retention + compaction.\n *\n * Declarative per-collection / per-slot eviction policy. Two\n * triggers:\n *\n * - **`retainDays`** — age-based TTL. A slot uploaded more than N\n * days ago is evicted.\n * - **`evictWhen(record)`** — predicate over the **decrypted**\n * record. Lets consumers express \"the image is safe to drop once\n * the structured invoice has been reviewed and confirmed.\"\n *\n * Either trigger (or both) causes the slot to evict. Eviction removes\n * the slot entry from `_blob_slots_{collection}`, decrements the\n * blob's refCount (so unreferenced chunks can be GC'd by the next\n * sweep), and writes one entry to the `_blob_eviction_audit`\n * collection for tamper-evident record-keeping.\n *\n * The audit entry carries the eTag of the evicted blob (opaque HMAC\n * of plaintext under the vault's `_blob` DEK) — no plaintext leakage,\n * per the SPEC non-correlation invariant. Consumers reconstructing\n * \"what used to be attached\" can look up the audit entry by record\n * id.\n *\n * Compaction is **consumer-scheduled** — noy-db never runs a\n * background daemon. 
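A hypothetical policy —\n * `Invoice`, its `reviewed` flag, and the `scan` slot name are all\n * assumed names:\n *\n * ```ts\n * const blobFields: BlobFieldsConfig<Invoice> = {\n * scan: { retainDays: 90, evictWhen: (inv) => inv.reviewed },\n * }\n * ```\n *\n * 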
Call `vault.compact()` whenever your workflow\n * allows (cron, manual \"tidy\" button, cold-storage export prep, …).\n *\n * @module\n */\n\nimport type { NoydbStore, EncryptedEnvelope, SlotInfo } from '../types.js'\nimport { NOYDB_FORMAT_VERSION } from '../types.js'\nimport { encrypt } from '../crypto.js'\n\n// ─── Config types ───────────────────────────────────────────────────────\n\nexport interface BlobFieldPolicy<T = unknown> {\n /**\n * Age-based TTL in days. A slot whose `uploadedAt` is older than\n * `now - retainDays × 86400s` evicts on the next `vault.compact()`.\n * Omit to disable age-based eviction.\n */\n readonly retainDays?: number\n /**\n * Predicate evaluated against the decrypted record. When it returns\n * `true`, every matching slot on that record evicts. Omit to\n * disable predicate-based eviction.\n */\n readonly evictWhen?: (record: T) => boolean\n}\n\nexport type BlobFieldsConfig<T = unknown> = Record<string, BlobFieldPolicy<T>>\n\n// ─── Audit collection ──────────────────────────────────────────────────\n\nexport const BLOB_EVICTION_AUDIT_COLLECTION = '_blob_eviction_audit'\n\nexport interface BlobEvictionEntry {\n readonly id: string\n readonly collection: string\n readonly recordId: string\n readonly slotName: string\n readonly blobHash: string\n readonly reason: 'ttl' | 'predicate' | 'both'\n readonly evictedAt: string\n readonly actor: string\n}\n\n// ─── Compaction result ──────────────────────────────────────────────────\n\nexport interface CompactionResult {\n /** Number of blob slots evicted across all collections. */\n readonly evicted: number\n /** Number of records touched (iterated + policy checked). */\n readonly records: number\n /** Number of collections with `blobFields` configured. */\n readonly collections: number\n /** Number of audit entries written. Equal to `evicted`. */\n readonly auditEntries: number\n /** Per-collection breakdown for diagnostics. */\n readonly byCollection: Record<string, { records: number; evicted: number }>\n}\n\n// ─── Core ──────────────────────────────────────────────────────────────\n\nexport interface CompactRunOptions {\n /** Override \"now\" for deterministic testing. */\n readonly now?: Date\n /**\n * Stop after this many evictions. Useful for capped batches / cron\n * jobs that need to fit in a time window. `undefined` = unbounded.\n */\n readonly maxEvictions?: number\n /**\n * Dry-run — evaluate policies and return the counts, but do NOT\n * delete slots or write audit entries. Lets a consumer preview\n * what would happen.\n */\n readonly dryRun?: boolean\n}\n\nexport interface CompactionContext {\n readonly adapter: NoydbStore\n readonly vault: string\n readonly actor: string\n readonly encrypted: boolean\n readonly getDEK: (collection: string) => Promise<CryptoKey>\n /**\n * Resolve a collection's declared `blobFields` config. Returns an\n * empty map for collections without the config — the walk skips\n * those.\n */\n readonly getBlobFields: <T>(collection: string) => BlobFieldsConfig<T> | null\n /** List collection names in the vault. */\n readonly listCollections: () => Promise<string[]>\n /** List record ids in a collection. */\n readonly listRecords: (collection: string) => Promise<string[]>\n /** Decrypt and return the record. Null when absent. */\n readonly getRecord: <T>(collection: string, id: string) => Promise<T | null>\n /** Return the BlobSet-like handle for a record's slots. 
*/\n readonly listSlots: (collection: string, id: string) => Promise<SlotInfo[]>\n /** Delete a slot and decrement its blob's refCount. */\n readonly deleteSlot: (collection: string, id: string, slotName: string) => Promise<void>\n}\n\nexport async function runCompaction(\n ctx: CompactionContext,\n options: CompactRunOptions = {},\n): Promise<CompactionResult> {\n const now = options.now ?? new Date()\n const maxEvictions = options.maxEvictions ?? Infinity\n const dryRun = options.dryRun === true\n\n const allCollections = await ctx.listCollections()\n const byCollection: Record<string, { records: number; evicted: number }> = {}\n let evicted = 0\n let records = 0\n let auditEntries = 0\n let collectionsWithPolicy = 0\n\n outer: for (const collectionName of allCollections) {\n if (collectionName.startsWith('_')) continue\n const config = ctx.getBlobFields(collectionName)\n if (!config) continue\n const configuredSlots = Object.keys(config)\n if (configuredSlots.length === 0) continue\n collectionsWithPolicy += 1\n byCollection[collectionName] = { records: 0, evicted: 0 }\n\n const ids = await ctx.listRecords(collectionName)\n for (const recordId of ids) {\n if (evicted >= maxEvictions) break outer\n\n const record = await ctx.getRecord(collectionName, recordId).catch(() => null)\n if (record === null) continue\n records += 1\n byCollection[collectionName].records += 1\n\n const slots = await ctx.listSlots(collectionName, recordId).catch(() => [])\n for (const slot of slots) {\n if (evicted >= maxEvictions) break outer\n const policy = config[slot.name]\n if (!policy) continue\n\n const reason = evaluatePolicy(policy, record, slot, now)\n if (!reason) continue\n\n if (!dryRun) {\n await ctx.deleteSlot(collectionName, recordId, slot.name)\n await writeAuditEntry(ctx, {\n id: generateEvictionId(collectionName, recordId, slot.name),\n collection: collectionName,\n recordId,\n slotName: slot.name,\n blobHash: slot.eTag,\n reason,\n evictedAt: now.toISOString(),\n actor: ctx.actor,\n })\n auditEntries += 1\n }\n evicted += 1\n byCollection[collectionName].evicted += 1\n }\n }\n }\n\n return {\n evicted,\n records,\n collections: collectionsWithPolicy,\n auditEntries,\n byCollection,\n }\n}\n\nfunction evaluatePolicy<T>(\n policy: BlobFieldPolicy<T>,\n record: T,\n slot: SlotInfo,\n now: Date,\n): 'ttl' | 'predicate' | 'both' | null {\n let ttlTriggered = false\n let predicateTriggered = false\n\n if (policy.retainDays !== undefined && policy.retainDays > 0) {\n const uploadedAt = Date.parse(slot.uploadedAt)\n if (Number.isFinite(uploadedAt)) {\n const ageMs = now.getTime() - uploadedAt\n const limitMs = policy.retainDays * 86_400_000\n if (ageMs > limitMs) ttlTriggered = true\n }\n }\n\n if (policy.evictWhen) {\n try {\n if (policy.evictWhen(record)) predicateTriggered = true\n } catch {\n // Predicate error → do NOT evict. 
Fail closed.\n }\n }\n\n if (ttlTriggered && predicateTriggered) return 'both'\n if (ttlTriggered) return 'ttl'\n if (predicateTriggered) return 'predicate'\n return null\n}\n\nfunction generateEvictionId(collection: string, recordId: string, slotName: string): string {\n const rand = globalThis.crypto.getRandomValues(new Uint8Array(8))\n let suffix = ''\n for (const b of rand) suffix += b.toString(16).padStart(2, '0')\n return `${collection}__${recordId}__${slotName}__${suffix}`\n}\n\nasync function writeAuditEntry(ctx: CompactionContext, entry: BlobEvictionEntry): Promise<void> {\n const json = JSON.stringify(entry)\n let envelope: EncryptedEnvelope\n if (ctx.encrypted) {\n const dek = await ctx.getDEK(BLOB_EVICTION_AUDIT_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: entry.evictedAt,\n _iv: iv,\n _data: data,\n _by: entry.actor,\n }\n } else {\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: entry.evictedAt,\n _iv: '',\n _data: json,\n _by: entry.actor,\n }\n }\n await ctx.adapter.put(ctx.vault, BLOB_EVICTION_AUDIT_COLLECTION, entry.id, envelope)\n}\n","/**\n * Magic-link-bound cross-user delegation grants.\n *\n * This module is the **core storage + encryption layer** that lets a\n * grantor issue a tier-DEK to a user whose KEK they do not know. The\n * trust bridge is provided by the `@noy-db/on-magic-link` package:\n *\n * 1. Grantor picks a grantee identity (user id + email handle).\n * 2. Grantor mints a magic-link token (ULID) via `createMagicLinkToken`.\n * 3. Grantor derives a **content key** + a **KEK** from\n * `(serverSecret, token, vault)` using HKDF-SHA256 with separate\n * `info` tags — both callers (grantor and grantee) can derive the\n * same keys given the same inputs.\n * 4. Grantor persists a record in `_magic_link_grants/<token>`:\n * - envelope `_data` is AES-GCM encrypted under the content key\n * - the inner `wrappedDek` is AES-KW wrapped under the KEK\n * 5. Grantee receives the URL, derives the same content key + KEK,\n * loads the grant, decrypts the envelope, unwraps the tier DEK.\n *\n * ## Why a separate collection from `_delegations`\n *\n * `_delegations` envelopes are encrypted under a DEK shared across\n * every vault user (audit-visibility). External auditors / client\n * portal users have NO pre-existing keyring, so they cannot read that\n * DEK. Magic-link grants live in their own collection whose envelope\n * encryption is derived purely from the magic-link URL + server secret\n * — nothing else is required to decrypt.\n *\n * ## Batch grants\n *\n * One magic-link token may point to MULTIPLE grants (e.g. the client\n * portal case: invoices + payments + etax all share one link). 
Each\n * grant is persisted under a distinct record id:\n *\n * `<token>` for the single-grant / primary entry\n * `<token>:<index>` for subsequent entries\n *\n * `listMagicLinkGrants(store, vault, token)` enumerates every record\n * whose id begins with `<token>` so the claimant can materialize all\n * DEKs in one pass.\n *\n * ## Revocation\n *\n * `store.delete(vault, _magic_link_grants, <token>)` immediately\n * invalidates the link — even if the URL was captured and the server\n * secret leaked, no payload remains to decrypt.\n *\n * @module\n */\n\nimport type { NoydbStore, EncryptedEnvelope } from '../types.js'\nimport type { UnlockedKeyring } from './keyring.js'\nimport { encrypt, decrypt, wrapKey, unwrapKey } from '../crypto.js'\nimport { dekKey } from './tiers.js'\nimport { DelegationTargetMissingError } from '../errors.js'\n\n/** Reserved collection holding magic-link grant envelopes. */\nexport const MAGIC_LINK_GRANTS_COLLECTION = '_magic_link_grants'\n\n/** HKDF `info` for the AES-GCM content key. Version-namespaced. */\nexport const MAGIC_LINK_CONTENT_INFO_PREFIX = 'noydb-magic-link-content-v1:'\n\n/** HKDF `info` for the AES-KW KEK. Matches `@noy-db/on-magic-link`. */\nexport const MAGIC_LINK_KEK_INFO_PREFIX = 'noydb-magic-link-v1:'\n\n// ─── Types ──────────────────────────────────────────────────────────────\n\n/**\n * Decrypted payload of a magic-link grant record. Mirrors\n * `DelegationToken` in `team/delegation.ts` but tracked separately\n * because the two flows persist under different collections + envelope\n * encryption schemes.\n */\nexport interface MagicLinkGrantPayload {\n readonly id: string\n readonly toUser: string\n readonly fromUser: string\n readonly tier: number\n /** Collection name or `null` for the vault-wide tier DEK. */\n readonly collection: string | null\n /** Optional specific record id scope. */\n readonly record?: string\n /** ISO timestamp — grant expires at this instant. */\n readonly until: string\n /** AES-KW-wrapped tier DEK, unwrap with the magic-link KEK. */\n readonly wrappedDek: string\n /** ISO timestamp the grant was issued. */\n readonly createdAt: string\n /** Optional caller-provided label (surfaced in audit UIs). */\n readonly note?: string\n}\n\nexport interface IssueMagicLinkGrantOptions {\n readonly toUser: string\n readonly tier: number\n readonly collection?: string\n readonly record?: string\n readonly until: Date | string\n readonly note?: string\n}\n\nexport interface MagicLinkGrantRecord {\n /** Store record id — `<token>` or `<token>:<index>` for batch entries. */\n readonly recordId: string\n readonly payload: MagicLinkGrantPayload\n}\n\n// ─── Key derivation ─────────────────────────────────────────────────────\n\n/**\n * Derive the AES-GCM content key from the same HKDF inputs used for\n * the magic-link KEK. Different `info` suffix → domain-separated key.\n *\n * Exported so the `@noy-db/on-magic-link` package can share the exact\n * derivation path without cross-dependency between the two modules.\n */\nexport async function deriveMagicLinkContentKey(\n serverSecret: string | Uint8Array<ArrayBuffer>,\n token: string,\n vault: string,\n): Promise<CryptoKey> {\n const subtle = globalThis.crypto.subtle\n const ikmBytes =\n serverSecret instanceof Uint8Array\n ? 
serverSecret\n : new TextEncoder().encode(serverSecret)\n const tokenBytes = new TextEncoder().encode(token)\n const saltBuffer = await subtle.digest('SHA-256', tokenBytes)\n const info = new TextEncoder().encode(MAGIC_LINK_CONTENT_INFO_PREFIX + vault)\n const ikm = await subtle.importKey('raw', ikmBytes, 'HKDF', false, ['deriveKey'])\n return subtle.deriveKey(\n { name: 'HKDF', hash: 'SHA-256', salt: saltBuffer, info },\n ikm,\n { name: 'AES-GCM', length: 256 },\n false,\n ['encrypt', 'decrypt'],\n )\n}\n\n// ─── Issue ──────────────────────────────────────────────────────────────\n\n/**\n * Persist a magic-link grant record. Caller derives + provides both\n * the content key and the KEK; this function performs the wrap/encrypt\n * and writes the envelope.\n *\n * `recordId` lets the caller use either the bare token (primary grant)\n * or a suffixed id (batch entry). The writer is responsible for\n * collision-avoidance across batch entries.\n */\nexport async function writeMagicLinkGrant(\n store: NoydbStore,\n vault: string,\n grantor: UnlockedKeyring,\n contentKey: CryptoKey,\n grantKek: CryptoKey,\n recordId: string,\n opts: IssueMagicLinkGrantOptions,\n): Promise<MagicLinkGrantRecord> {\n const collectionName = opts.collection ?? null\n const sourceKey = collectionName\n ? dekKey(collectionName, opts.tier)\n : `__any#${opts.tier}`\n const sourceDek = grantor.deks.get(sourceKey)\n if (!sourceDek) {\n throw new DelegationTargetMissingError(\n `grantor cannot find tier ${opts.tier} DEK for ${collectionName ?? '(any)'}`,\n )\n }\n const wrappedDek = await wrapKey(sourceDek, grantKek)\n\n const until = typeof opts.until === 'string' ? opts.until : opts.until.toISOString()\n const createdAt = new Date().toISOString()\n const payload: MagicLinkGrantPayload = {\n id: recordId,\n toUser: opts.toUser,\n fromUser: grantor.userId,\n tier: opts.tier,\n collection: collectionName,\n ...(opts.record && { record: opts.record }),\n until,\n wrappedDek,\n createdAt,\n ...(opts.note && { note: opts.note }),\n }\n\n const { iv, data } = await encrypt(JSON.stringify(payload), contentKey)\n const envelope: EncryptedEnvelope = {\n _noydb: 1,\n _v: 1,\n _ts: createdAt,\n _iv: iv,\n _data: data,\n _by: grantor.userId,\n }\n await store.put(vault, MAGIC_LINK_GRANTS_COLLECTION, recordId, envelope)\n return { recordId, payload }\n}\n\n// ─── Claim ──────────────────────────────────────────────────────────────\n\n/**\n * Fetch + decrypt a single magic-link grant record by id. Returns null\n * when the record is absent OR when decryption fails (wrong server\n * secret, wrong vault, tampered envelope) — callers treat a null as\n * \"this URL is not valid for this server\".\n *\n * The returned payload's `wrappedDek` is still AES-KW-wrapped; the\n * caller unwraps it with the magic-link KEK to obtain the tier DEK.\n */\nexport async function readMagicLinkGrantRecord(\n store: NoydbStore,\n vault: string,\n contentKey: CryptoKey,\n recordId: string,\n): Promise<MagicLinkGrantPayload | null> {\n const env = await store.get(vault, MAGIC_LINK_GRANTS_COLLECTION, recordId)\n if (!env) return null\n try {\n const json = await decrypt(env._iv, env._data, contentKey)\n return JSON.parse(json) as MagicLinkGrantPayload\n } catch {\n return null\n }\n}\n\n/**\n * Enumerate every grant record sharing the magic-link `token` prefix\n * (i.e. 
the primary `<token>` entry plus any `<token>:*` batch entries).\n * Expired grants are still returned — the caller filters on `until`.\n */\nexport async function listMagicLinkGrants(\n store: NoydbStore,\n vault: string,\n contentKey: CryptoKey,\n token: string,\n): Promise<MagicLinkGrantPayload[]> {\n const ids = await store.list(vault, MAGIC_LINK_GRANTS_COLLECTION)\n const matching = ids.filter(id => id === token || id.startsWith(`${token}:`))\n const out: MagicLinkGrantPayload[] = []\n for (const id of matching) {\n const payload = await readMagicLinkGrantRecord(store, vault, contentKey, id)\n if (payload) out.push(payload)\n }\n return out\n}\n\n/**\n * Unwrap the tier DEK from a grant payload using the magic-link KEK.\n * Thin wrapper around `unwrapKey` — provided so the claimant can avoid\n * importing `crypto.js` directly.\n */\nexport async function unwrapMagicLinkGrant(\n payload: MagicLinkGrantPayload,\n grantKek: CryptoKey,\n): Promise<CryptoKey> {\n return unwrapKey(payload.wrappedDek, grantKek)\n}\n\n/**\n * Delete a magic-link grant (primary + every batch entry sharing the\n * token). Safe to call when nothing exists.\n */\nexport async function revokeMagicLinkGrant(\n store: NoydbStore,\n vault: string,\n token: string,\n): Promise<number> {\n const ids = await store.list(vault, MAGIC_LINK_GRANTS_COLLECTION)\n const matching = ids.filter(id => id === token || id.startsWith(`${token}:`))\n for (const id of matching) {\n await store.delete(vault, MAGIC_LINK_GRANTS_COLLECTION, id)\n }\n return matching.length\n}\n\n// ─── Helpers ────────────────────────────────────────────────────────────\n\n/**\n * Compose the batch-entry record id. `index === 0` → bare token.\n * Subsequent entries use `<token>:<index>` so `store.list()` can\n * enumerate them all by common prefix.\n */\nexport function magicLinkGrantRecordId(token: string, index: number): string {\n return index === 0 ? token : `${token}:${index}`\n}\n\n/**\n * True when the payload's `until` is in the past relative to `now`.\n * Kept here (rather than inlined) so the semantics stay aligned with\n * the canonical `DelegationToken` expiry check.\n */\nexport function isMagicLinkGrantExpired(\n payload: MagicLinkGrantPayload,\n now: Date = new Date(),\n): boolean {\n return payload.until <= now.toISOString()\n}\n","import type { NoydbEventMap } from './types.js'\n\ntype EventHandler<T> = (data: T) => void\n\n/** Typed event emitter for NOYDB events. */\nexport class NoydbEventEmitter {\n private readonly listeners = new Map<string, Set<EventHandler<unknown>>>()\n\n on<K extends keyof NoydbEventMap>(\n event: K,\n handler: EventHandler<NoydbEventMap[K]>,\n ): void {\n let set = this.listeners.get(event as string)\n if (!set) {\n set = new Set()\n this.listeners.set(event as string, set)\n }\n set.add(handler as EventHandler<unknown>)\n }\n\n off<K extends keyof NoydbEventMap>(\n event: K,\n handler: EventHandler<NoydbEventMap[K]>,\n ): void {\n this.listeners.get(event as string)?.delete(handler as EventHandler<unknown>)\n }\n\n emit<K extends keyof NoydbEventMap>(event: K, data: NoydbEventMap[K]): void {\n const set = this.listeners.get(event as string)\n if (set) {\n for (const handler of set) {\n handler(data)\n }\n }\n }\n\n removeAllListeners(): void {\n this.listeners.clear()\n }\n}\n","/**\n * Strategy seam for the optional multi-record transaction subsystem.\n * `runTransaction` is only reachable through `withTransactions()`\n * exported from `@noy-db/hub/tx`. 
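Enabling it mirrors the\n * `NOT_ENABLED` message below (the `createNoydb` import path is an\n * assumption):\n *\n * ```ts\n * import { withTransactions } from '@noy-db/hub/tx'\n *\n * const db = createNoydb({ txStrategy: withTransactions() })\n * await db.transaction(async (tx) => {\n * // multi-record writes against tx\n * })\n * ```\n *\n * 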
Consumers who don't use\n * `db.transaction(fn)` ship none of the ~288 LOC.\n *\n * @internal\n */\n\nimport type { Noydb } from '../noydb.js'\nimport type { TxContext } from './transaction.js'\n\n/**\n * @internal\n */\nexport interface TxStrategy {\n runTransaction<T>(\n db: Noydb,\n fn: (tx: TxContext) => Promise<T> | T,\n ): Promise<T>\n}\n\nconst NOT_ENABLED = new Error(\n 'Multi-record transactions require the tx strategy. Import ' +\n '`{ withTransactions }` from \"@noy-db/hub/tx\" and pass it to ' +\n '`createNoydb({ txStrategy: withTransactions() })`.',\n)\n\n/**\n * @internal\n */\nexport const NO_TX: TxStrategy = {\n async runTransaction() { throw NOT_ENABLED },\n}\n","/**\n * Strategy seam for the optional session-policy subsystem. Core\n * imports `SessionStrategy` type-only + `NO_SESSION` stub; real\n * `validateSessionPolicy`, `createEnforcer`, and `revokeAllSessions`\n * are only reachable via `withSession()` in `./active.ts`.\n *\n * Solo apps that never set `sessionPolicy` and never issue a session\n * token ship none of the ~495 LOC of policy + token machinery\n * (session-policy.ts + session.ts). Dev-unlock (~299 LOC) is a\n * separate import already tree-shake-friendly via direct named\n * imports.\n *\n * Behavior under NO_SESSION:\n *\n * - **validateSessionPolicy** — throws when called. Only fires if\n * `createNoydb({ sessionPolicy })` was passed; if you set a policy\n * you must opt into the strategy.\n * - **createEnforcer** — throws. Same gate.\n * - **revokeAllSessions** — silent no-op. Called unconditionally on\n * `db.close()`; without the strategy the global session registry\n * never recorded anything, so the no-op is correct.\n *\n * @internal\n */\n\nimport type { SessionPolicy } from '../types.js'\nimport type { PolicyEnforcer, PolicyEnforcerOptions } from './session-policy.js'\n\n/**\n * @internal\n */\nexport interface SessionStrategy {\n validateSessionPolicy(policy: SessionPolicy): void\n createEnforcer(opts: PolicyEnforcerOptions): PolicyEnforcer\n revokeAllSessions(): void\n}\n\nfunction notEnabled(op: string): Error {\n return new Error(\n `${op} requires the session strategy. Import ` +\n '`{ withSession }` from \"@noy-db/hub/session\" and pass it to ' +\n '`createNoydb({ sessionStrategy: withSession() })`.',\n )\n}\n\n/**\n * No-session stub. Policy validation + enforcer construction throw\n * with an actionable pointer; global session revocation is a silent\n * no-op (the registry was never populated).\n *\n * @internal\n */\nexport const NO_SESSION: SessionStrategy = {\n validateSessionPolicy() { throw notEnabled('sessionPolicy') },\n createEnforcer() { throw notEnabled('session policy enforcement') },\n revokeAllSessions() {},\n}\n","/**\n * CRDT state types, merge logic, and build helpers.\n * per-collection CRDT mode: 'lww-map' | 'rga' | 'yjs'\n *\n * The encrypted envelope wraps the CRDT state (not the resolved snapshot).\n * Adapters only ever see ciphertext. `collection.get(id)` returns the\n * resolved snapshot; `collection.getRaw(id)` returns the full CRDT state.\n */\n\n// ─── Mode ─────────────────────────────────────────────────────────────\n\n/** Per-collection CRDT mode. 
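Whatever the mode,\n * `collection.get(id)` resolves the stored state to a snapshot — e.g.\n * for `lww-map` (values assumed):\n *\n * ```ts\n * const state: LwwMapState = {\n * _crdt: 'lww-map',\n * fields: { title: { v: 'Q1 report', ts: '2026-01-02T00:00:00Z' } },\n * }\n * resolveCrdtSnapshot(state) // → { title: 'Q1 report' }\n * ```\n 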
*/\nexport type CrdtMode = 'lww-map' | 'rga' | 'yjs'\n\n// ─── State shapes ─────────────────────────────────────────────────────\n\n/**\n * Per-field last-write-wins registers.\n * Each field carries its latest value and the ISO timestamp of the last write.\n * Merge: for each field, keep the entry with the lexicographically higher `ts`.\n */\nexport interface LwwMapState {\n readonly _crdt: 'lww-map'\n readonly fields: Record<string, { readonly v: unknown; readonly ts: string }>\n}\n\n/**\n * Simplified Replicated Growable Array.\n * Items are assigned stable NID (noy-db id) strings on first insertion.\n * Deleted items are tracked as tombstones so concurrent removals commute.\n *\n * The resolved snapshot is the ordered list of non-tombstoned `v` values.\n */\nexport interface RgaState {\n readonly _crdt: 'rga'\n readonly items: ReadonlyArray<{ readonly nid: string; readonly v: unknown }>\n readonly tombstones: readonly string[]\n}\n\n/**\n * Yjs binary state marker. `update` is base64(Y.encodeStateAsUpdate()).\n * Core stores and retrieves the blob opaquely. `@noy-db/yjs` is responsible\n * for encoding, decoding, and merging via `Y.mergeUpdates`.\n * Core falls back to last-write-wins (higher `_v`) for conflict resolution.\n */\nexport interface YjsState {\n readonly _crdt: 'yjs'\n /** base64-encoded Y.encodeStateAsUpdate() bytes. */\n readonly update: string\n}\n\nexport type CrdtState = LwwMapState | RgaState | YjsState\n\n// ─── Snapshot resolution ──────────────────────────────────────────────\n\n/**\n * Resolve a CRDT state into the end-user record snapshot.\n *\n * - `lww-map` → `Record<string, unknown>` (field values extracted from registers)\n * - `rga` → `unknown[]` (non-tombstoned items in insertion order)\n * - `yjs` → `string` (base64 update blob; use @noy-db/yjs for a Y.Doc)\n */\nexport function resolveCrdtSnapshot(state: CrdtState): unknown {\n switch (state._crdt) {\n case 'lww-map': {\n const result: Record<string, unknown> = {}\n for (const [field, reg] of Object.entries(state.fields)) {\n result[field] = reg.v\n }\n return result\n }\n case 'rga': {\n const dead = new Set(state.tombstones)\n return state.items.filter(i => !dead.has(i.nid)).map(i => i.v)\n }\n case 'yjs':\n return state.update\n }\n}\n\n// ─── CRDT merge ───────────────────────────────────────────────────────\n\n/**\n * Merge two CRDT states produced by concurrent writes.\n * Called by the collection-level conflict resolver registered with SyncEngine.\n *\n * For `yjs`: core cannot merge Yjs without importing the `yjs` package.\n * The caller must handle that case by falling back to the higher-`_v` envelope.\n */\nexport function mergeCrdtStates(a: CrdtState, b: CrdtState): CrdtState {\n // Mismatched modes shouldn't happen in practice — same collection, same schema.\n if (a._crdt !== b._crdt) return a\n\n switch (a._crdt) {\n case 'lww-map':\n return mergeLwwMap(a, b as LwwMapState)\n case 'rga':\n return mergeRga(a, b as RgaState)\n case 'yjs':\n // Signal to caller that Yjs merge is needed externally\n return a\n }\n}\n\nfunction mergeLwwMap(a: LwwMapState, b: LwwMapState): LwwMapState {\n const merged: Record<string, { v: unknown; ts: string }> = {}\n const allFields = new Set([...Object.keys(a.fields), ...Object.keys(b.fields)])\n for (const field of allFields) {\n const fa = a.fields[field]\n const fb = b.fields[field]\n if (!fa) { merged[field] = fb! }\n else if (!fb) { merged[field] = fa }\n else { merged[field] = fa.ts >= fb.ts ? 
fa : fb }\n }\n return { _crdt: 'lww-map', fields: merged }\n}\n\nfunction mergeRga(a: RgaState, b: RgaState): RgaState {\n // Union tombstones from both sides\n const allTombstones = new Set([...a.tombstones, ...b.tombstones])\n // Union items by nid: start with a's ordering, append b-only items\n const seenNids = new Set(a.items.map(i => i.nid))\n const merged: Array<{ nid: string; v: unknown }> = [\n ...a.items,\n ...b.items.filter(i => !seenNids.has(i.nid)),\n ]\n return { _crdt: 'rga', items: merged, tombstones: [...allTombstones] }\n}\n\n// ─── Build helpers ────────────────────────────────────────────────────\n\n/**\n * Build (or update) an lww-map state from a new record.\n *\n * All fields in the new record win at timestamp `now`.\n * Fields present in the existing state but absent from the new record\n * are preserved (they were written by another device).\n */\nexport function buildLwwMapState(\n record: Record<string, unknown>,\n existing: LwwMapState | undefined,\n now: string,\n): LwwMapState {\n const fields: Record<string, { v: unknown; ts: string }> = {}\n\n // New record fields all get the current timestamp — this device wins for these\n for (const [field, value] of Object.entries(record)) {\n fields[field] = { v: value, ts: now }\n }\n\n // Preserve fields from the existing state that aren't in the new record\n if (existing) {\n for (const [field, reg] of Object.entries(existing.fields)) {\n if (!(field in fields)) {\n fields[field] = reg\n }\n }\n }\n\n return { _crdt: 'lww-map', fields }\n}\n\n/**\n * Build (or update) an RGA state from a new array.\n *\n * Existing items are matched to new elements by deep-equality of their `v`.\n * Unmatched existing items are tombstoned. New elements that have no existing\n * match get a fresh NID via `generateNid()`.\n */\nexport function buildRgaState(\n arr: unknown[],\n existing: RgaState | undefined,\n generateNid: () => string,\n): RgaState {\n // Build an index from JSON(v) → existing item so we can match by value\n const existingByValue = new Map<string, { nid: string; v: unknown }>()\n if (existing) {\n for (const item of existing.items) {\n // Only add first occurrence per value to avoid double-matching\n const key = JSON.stringify(item.v)\n if (!existingByValue.has(key)) existingByValue.set(key, item)\n }\n }\n\n const usedNids = new Set<string>()\n const newItems: Array<{ nid: string; v: unknown }> = []\n\n for (const el of arr) {\n const key = JSON.stringify(el)\n const match = existingByValue.get(key)\n if (match && !usedNids.has(match.nid)) {\n // Reuse existing NID to preserve cross-device identity\n newItems.push(match)\n usedNids.add(match.nid)\n } else {\n // New element — assign a fresh NID\n const nid = generateNid()\n newItems.push({ nid, v: el })\n usedNids.add(nid)\n }\n }\n\n // Elements in the existing state that aren't in the new array → tombstone.\n // Tombstoned items are kept in the items array to preserve ordering for\n // cross-device merge — the resolved snapshot filters them out.\n const tombstones: string[] = existing ? 
[...existing.tombstones] : []\n const extraItems: Array<{ nid: string; v: unknown }> = []\n if (existing) {\n for (const item of existing.items) {\n if (!usedNids.has(item.nid)) {\n if (!tombstones.includes(item.nid)) tombstones.push(item.nid)\n extraItems.push(item) // retain in items for ordering\n }\n }\n }\n\n // Final items: live items in new order, then tombstoned extras at the end\n const items = [...newItems, ...extraItems]\n\n return { _crdt: 'rga', items, tombstones }\n}\n","/**\n * Presence handle — real-time awareness of who is viewing/editing a collection.\n * encrypted ephemeral channel keyed by collection DEK via HKDF.\n *\n * The presence key is derived from the collection DEK so:\n * - The adapter never learns user identities from presence payloads.\n * - Presence rotates automatically when the DEK rotates (revoked users\n * can no longer participate after a DEK rotation).\n *\n * Two transport strategies:\n * 1. **Pub/sub** (real-time): used when the adapter implements\n * `presencePublish` and `presenceSubscribe`.\n * 2. **Storage-poll** (fallback): presence records are written to a\n * reserved `_presence_<collection>` collection on the sync adapter\n * (if available) or local adapter, and polled periodically.\n */\n\nimport type { NoydbStore, PresencePeer } from '../types.js'\nimport { encrypt, decrypt, generateIV, bufferToBase64, derivePresenceKey } from '../crypto.js'\n\n/** Options for constructing a PresenceHandle. @internal */\nexport interface PresenceHandleOpts {\n /** Local adapter for storage-poll fallback. */\n adapter: NoydbStore\n /** Remote (sync) adapter — preferred for broadcasting presence if available. */\n syncAdapter?: NoydbStore\n /** Vault name — used as part of the channel and storage key. */\n vault: string\n /** Collection name — used as HKDF `info` and channel suffix. */\n collectionName: string\n /** Calling user's ID, embedded unencrypted in storage records. */\n userId: string\n /** Whether encryption is active. When false, presence payloads are stored as JSON. */\n encrypted: boolean\n /** Callback that resolves the collection DEK (used to derive the presence key). */\n getDEK: (collectionName: string) => Promise<CryptoKey>\n /** How long (ms) before a peer's presence is considered stale. Default: 30_000. */\n staleMs?: number\n /** Poll interval (ms) for the storage-poll fallback. Default: 5_000. */\n pollIntervalMs?: number\n}\n\n/**\n * Internal storage envelope for the storage-poll fallback.\n * Written to `_presence_<collection>` as `{ userId, lastSeen, iv, data }`.\n */\ninterface StoragePresenceRecord {\n userId: string\n lastSeen: string\n iv: string // base64 AES-GCM IV (empty when not encrypted)\n data: string // base64 ciphertext or JSON string when not encrypted\n}\n\n/** Presence handle for a single collection. 
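A usage sketch — the\n * `handle` construction is assumed (it is built internally from\n * `PresenceHandleOpts`), as is `render()`:\n *\n * ```ts\n * const off = handle.subscribe((peers) => render(peers))\n * await handle.update({ cursor: 'B7' })\n * // …later:\n * off()\n * handle.stop()\n * ```\n 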
*/\nexport class PresenceHandle<P> {\n private readonly adapter: NoydbStore\n private readonly syncAdapter: NoydbStore | undefined\n private readonly vault: string\n private readonly collectionName: string\n private readonly userId: string\n private readonly encrypted: boolean\n private readonly getDEK: (collectionName: string) => Promise<CryptoKey>\n private readonly staleMs: number\n private readonly pollIntervalMs: number\n private readonly channel: string\n private readonly storageCollection: string\n\n private presenceKey: CryptoKey | null = null\n private subscribers: Array<(peers: PresencePeer<P>[]) => void> = []\n private unsubscribePubSub: (() => void) | null = null\n private pollTimer: ReturnType<typeof setInterval> | null = null\n private stopped = false\n\n constructor(opts: PresenceHandleOpts) {\n this.adapter = opts.adapter\n this.syncAdapter = opts.syncAdapter\n this.vault = opts.vault\n this.collectionName = opts.collectionName\n this.userId = opts.userId\n this.encrypted = opts.encrypted\n this.getDEK = opts.getDEK\n this.staleMs = opts.staleMs ?? 30_000\n this.pollIntervalMs = opts.pollIntervalMs ?? 5_000\n // Channel used by pub/sub adapters — vault-scoped so two collections\n // in the same vault don't bleed into each other's presence channels.\n this.channel = `${opts.vault}:${opts.collectionName}:presence`\n // Reserved collection name for the storage-poll fallback.\n this.storageCollection = `_presence_${opts.collectionName}`\n }\n\n /**\n * Announce yourself (or update your cursor/status).\n * Encrypts `payload` with the presence key and publishes it.\n */\n async update(payload: P): Promise<void> {\n if (this.stopped) return\n\n const key = await this.getPresenceKey()\n const now = new Date().toISOString()\n const plaintext = JSON.stringify({ userId: this.userId, lastSeen: now, payload })\n let encryptedPayload: string\n\n if (this.encrypted && key) {\n // Use the IV minted by encrypt() — pairing the ciphertext with a\n // separately generated IV would make every decrypt fail.\n const { iv, data } = await encrypt(plaintext, key)\n encryptedPayload = JSON.stringify({ iv, data })\n } else {\n encryptedPayload = plaintext\n }\n\n // Pub/sub path — publish to any adapter that supports it\n const pubAdapter = this.getPubSubAdapter()\n if (pubAdapter?.presencePublish) {\n await pubAdapter.presencePublish(this.channel, encryptedPayload)\n }\n\n // Storage-poll path — write a record to the storage adapter\n await this.writeStorageRecord(payload, now)\n }\n\n /**\n * Subscribe to presence updates. The callback receives a filtered, decrypted\n * list of all currently-active peers (excluding yourself, excluding stale).\n *\n * Returns an unsubscribe function. Also call `stop()` to release all resources.\n */\n subscribe(cb: (peers: PresencePeer<P>[]) => void): () => void {\n if (this.stopped) return () => {}\n\n this.subscribers.push(cb)\n\n // Start pub/sub listener on first subscriber\n if (this.subscribers.length === 1) {\n this.startListening()\n }\n\n return () => {\n this.subscribers = this.subscribers.filter(s => s !== cb)\n if (this.subscribers.length === 0) this.stopListening()\n }\n }\n\n /** Stop all listening and clear resources. 
*/\n stop(): void {\n this.stopped = true\n this.stopListening()\n this.subscribers = []\n }\n\n // ─── Private ────────────────────────────────────────────────────────\n\n private async getPresenceKey(): Promise<CryptoKey | null> {\n if (!this.encrypted) return null\n if (!this.presenceKey) {\n try {\n const dek = await this.getDEK(this.collectionName)\n this.presenceKey = await derivePresenceKey(dek, this.collectionName)\n } catch {\n // no-op — presence degrades gracefully if crypto fails\n }\n }\n return this.presenceKey\n }\n\n private getPubSubAdapter(): NoydbStore | undefined {\n // Prefer the sync adapter (it broadcasts to other devices)\n if (this.syncAdapter?.presencePublish) return this.syncAdapter\n if (this.adapter.presencePublish) return this.adapter\n return undefined\n }\n\n private startListening(): void {\n const pubAdapter = this.getPubSubAdapter()\n\n if (pubAdapter?.presenceSubscribe) {\n // Real-time pub/sub path\n this.unsubscribePubSub = pubAdapter.presenceSubscribe(\n this.channel,\n (encryptedPayload) => { void this.handlePubSubMessage(encryptedPayload) },\n )\n } else {\n // Storage-poll fallback\n this.pollTimer = setInterval(\n () => { void this.pollStoragePresence() },\n this.pollIntervalMs,\n )\n // Kick off an immediate poll\n void this.pollStoragePresence()\n }\n }\n\n private stopListening(): void {\n if (this.unsubscribePubSub) {\n this.unsubscribePubSub()\n this.unsubscribePubSub = null\n }\n if (this.pollTimer) {\n clearInterval(this.pollTimer)\n this.pollTimer = null\n }\n }\n\n private async handlePubSubMessage(encryptedPayload: string): Promise<void> {\n try {\n const peer = await this.decryptPresencePayload(encryptedPayload)\n if (!peer || peer.userId === this.userId) return\n\n const cutoff = new Date(Date.now() - this.staleMs).toISOString()\n if (peer.lastSeen < cutoff) return\n\n // We could maintain a map of active peers and deliver just this\n // one — but for simplicity, emit a full snapshot read from storage\n // whenever any peer message arrives.\n await this.pollStoragePresence()\n } catch {\n // Decrypt failure — stale key or tampered payload, ignore\n }\n }\n\n private async decryptPresencePayload(\n encryptedPayload: string,\n ): Promise<{ userId: string; lastSeen: string; payload: P } | null> {\n const key = await this.getPresenceKey()\n\n if (!this.encrypted || !key) {\n return JSON.parse(encryptedPayload) as { userId: string; lastSeen: string; payload: P }\n }\n\n const { iv: ivB64, data } = JSON.parse(encryptedPayload) as { iv: string; data: string }\n const plaintext = await decrypt(ivB64, data, key)\n return JSON.parse(plaintext) as { userId: string; lastSeen: string; payload: P }\n }\n\n private async writeStorageRecord(payload: P, now: string): Promise<void> {\n const key = await this.getPresenceKey()\n const plaintext = JSON.stringify(payload)\n let iv = ''\n let data: string\n\n if (this.encrypted && key) {\n // Store the IV produced by encrypt() alongside its ciphertext —\n // a separately generated IV would not decrypt this data.\n const result = await encrypt(plaintext, key)\n iv = result.iv\n data = result.data\n } else {\n data = plaintext\n }\n\n const record: StoragePresenceRecord = { userId: this.userId, lastSeen: now, iv, data }\n const json = JSON.stringify(record)\n\n // Use the sync adapter if available (so other devices can read it);\n // fall back to local adapter.\n const storeAdapter = this.syncAdapter ?? 
this.adapter\n const envelope = {\n _noydb: 1 as const,\n _v: 1,\n _ts: now,\n _iv: '',\n _data: json,\n }\n try {\n await storeAdapter.put(\n this.vault,\n this.storageCollection,\n this.userId,\n envelope,\n )\n } catch {\n // Presence write failure is non-fatal — the user is still present locally\n }\n }\n\n private async pollStoragePresence(): Promise<void> {\n if (this.stopped || this.subscribers.length === 0) return\n\n try {\n const storeAdapter = this.syncAdapter ?? this.adapter\n const ids = await storeAdapter.list(this.vault, this.storageCollection)\n const cutoff = new Date(Date.now() - this.staleMs).toISOString()\n const peers: PresencePeer<P>[] = []\n\n for (const id of ids) {\n if (id === this.userId) continue // skip ourselves\n const envelope = await storeAdapter.get(this.vault, this.storageCollection, id)\n if (!envelope) continue\n\n const record = JSON.parse(envelope._data) as StoragePresenceRecord\n if (record.lastSeen < cutoff) continue\n\n let peerPayload: P\n if (this.encrypted && this.presenceKey && record.iv) {\n const plaintext = await decrypt(record.iv, record.data, this.presenceKey)\n peerPayload = JSON.parse(plaintext) as P\n } else {\n peerPayload = JSON.parse(record.data) as P\n }\n\n peers.push({ userId: record.userId, payload: peerPayload, lastSeen: record.lastSeen })\n }\n\n for (const cb of this.subscribers) {\n cb(peers)\n }\n } catch {\n // Poll failure is non-fatal\n }\n }\n}\n","import type {\n NoydbStore,\n DirtyEntry,\n Conflict,\n ConflictStrategy,\n CollectionConflictResolver,\n PushOptions,\n PullOptions,\n PushResult,\n PullResult,\n SyncStatus,\n EncryptedEnvelope,\n SyncMetadata,\n SyncTargetRole,\n} from '../types.js'\nimport { NOYDB_SYNC_VERSION } from '../types.js'\nimport { ConflictError } from '../errors.js'\nimport type { NoydbEventEmitter } from '../events.js'\nimport type { SyncPolicy } from '../store/sync-policy.js'\nimport { SyncScheduler } from '../store/sync-policy.js'\n\n/** Sync engine: dirty tracking, push, pull, conflict resolution, scheduling. */\nexport class SyncEngine {\n private readonly local: NoydbStore\n private readonly remote: NoydbStore\n private readonly strategy: ConflictStrategy\n private readonly emitter: NoydbEventEmitter\n private readonly vault: string\n readonly role: SyncTargetRole\n readonly label: string | undefined\n\n private dirty: DirtyEntry[] = []\n private lastPush: string | null = null\n private lastPull: string | null = null\n private loaded = false\n private autoSyncInterval: ReturnType<typeof setInterval> | null = null\n private isOnline = true\n\n /** Sync scheduler. Manages push/pull timing. */\n readonly scheduler: SyncScheduler | null\n\n /** Per-collection conflict resolvers registered by Collection instances. */\n private readonly conflictResolvers = new Map<string, CollectionConflictResolver>()\n\n constructor(opts: {\n local: NoydbStore\n remote: NoydbStore\n vault: string\n strategy: ConflictStrategy\n emitter: NoydbEventEmitter\n syncPolicy?: SyncPolicy\n role?: SyncTargetRole\n label?: string\n }) {\n this.local = opts.local\n this.remote = opts.remote\n this.vault = opts.vault\n this.strategy = opts.strategy\n this.emitter = opts.emitter\n this.role = opts.role ?? 
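For orientation, a minimal sketch of driving the `PresenceHandle` above. The handle is constructed internally from `PresenceHandleOpts`, so the `declare`d instance stands in for however the application obtains one; only `update()`, `subscribe()`, and `stop()` are taken from the class itself, and the import path and payload shape are assumptions:

```ts
import type { PresenceHandle } from '@noy-db/hub' // import path assumed

interface CursorPayload { cursor: { x: number; y: number }; status: string }

// Hypothetical: in reality the handle is built internally from PresenceHandleOpts.
declare const presence: PresenceHandle<CursorPayload>

// Receive decrypted snapshots of active peers (self and stale entries excluded).
const unsubscribe = presence.subscribe((peers) => {
  for (const p of peers) console.log(p.userId, p.payload.status, p.lastSeen)
})

// Announce or refresh our own cursor/status; call again on every change.
await presence.update({ cursor: { x: 120, y: 48 }, status: 'editing' })

unsubscribe()   // drop our callback (the listener stops with the last subscriber)
presence.stop() // release the pub/sub subscription and poll timer for good
```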
'sync-peer'\n this.label = opts.label\n\n // Create scheduler if a policy is provided\n const policy = opts.syncPolicy\n if (policy && policy.push.mode !== 'manual') {\n this.scheduler = new SyncScheduler(policy, {\n push: () => this.push().then(() => {}),\n pull: () => this.pull().then(() => {}),\n getDirtyCount: () => this.dirty.length,\n })\n } else {\n this.scheduler = null\n }\n }\n\n /** Start the sync scheduler. Called after vault is fully opened. */\n startScheduler(): void {\n this.scheduler?.start()\n }\n\n /** Stop the sync scheduler. Called on close. */\n stopScheduler(): void {\n this.scheduler?.stop()\n }\n\n /**\n * Register a per-collection conflict resolver.\n * Called by Collection when `conflictPolicy` is set.\n */\n registerConflictResolver(collection: string, resolver: CollectionConflictResolver): void {\n this.conflictResolvers.set(collection, resolver)\n }\n\n /** Record a local change for later push. */\n async trackChange(collection: string, id: string, action: 'put' | 'delete', version: number): Promise<void> {\n await this.ensureLoaded()\n\n // Deduplicate: if same collection+id already in dirty, update it\n const idx = this.dirty.findIndex(d => d.collection === collection && d.id === id)\n const entry: DirtyEntry = {\n vault: this.vault,\n collection,\n id,\n action,\n version,\n timestamp: new Date().toISOString(),\n }\n\n if (idx >= 0) {\n this.dirty[idx] = entry\n } else {\n this.dirty.push(entry)\n }\n\n await this.persistMeta()\n\n // Notify scheduler of the write (triggers on-change or debounce)\n this.scheduler?.notifyChange()\n }\n\n /** Push dirty records to remote adapter. Accepts optional `PushOptions` for partial sync. */\n async push(options?: PushOptions): Promise<PushResult> {\n await this.ensureLoaded()\n\n let pushed = 0\n const conflicts: Conflict[] = []\n const errors: Error[] = []\n const completed: number[] = []\n\n for (let i = 0; i < this.dirty.length; i++) {\n const entry = this.dirty[i]!\n\n // Partial sync: skip collections not in the filter\n if (options?.collections && !options.collections.includes(entry.collection)) {\n continue\n }\n\n try {\n if (entry.action === 'delete') {\n await this.remote.delete(this.vault, entry.collection, entry.id)\n completed.push(i)\n pushed++\n } else {\n const envelope = await this.local.get(this.vault, entry.collection, entry.id)\n if (!envelope) {\n // Record was deleted locally after being marked dirty\n completed.push(i)\n continue\n }\n\n try {\n await this.remote.put(\n this.vault,\n entry.collection,\n entry.id,\n envelope,\n entry.version - 1,\n )\n completed.push(i)\n pushed++\n } catch (err) {\n if (err instanceof ConflictError) {\n const remoteEnvelope = await this.remote.get(this.vault, entry.collection, entry.id)\n if (remoteEnvelope) {\n const { handled, conflict } = await this.handleConflict(\n entry.collection,\n entry.id,\n envelope,\n remoteEnvelope,\n 'push',\n )\n conflicts.push(conflict)\n if (handled === 'local') {\n await this.remote.put(this.vault, entry.collection, entry.id, conflict.local)\n completed.push(i)\n pushed++\n } else if (handled === 'remote') {\n await this.local.put(this.vault, entry.collection, entry.id, conflict.remote)\n completed.push(i)\n } else if (handled === 'merged' && conflict.local !== envelope) {\n // Merged envelope is stored in conflict.local (the winner)\n const merged = conflict.local\n await this.remote.put(this.vault, entry.collection, entry.id, merged)\n await this.local.put(this.vault, entry.collection, entry.id, merged)\n completed.push(i)\n 
pushed++\n }\n // handled === 'deferred': leave in dirty log\n }\n } else {\n throw err\n }\n }\n }\n } catch (err) {\n errors.push(err instanceof Error ? err : new Error(String(err)))\n }\n }\n\n // Remove completed entries from dirty log (reverse order to preserve indices)\n for (const i of completed.sort((a, b) => b - a)) {\n this.dirty.splice(i, 1)\n }\n\n this.lastPush = new Date().toISOString()\n await this.persistMeta()\n\n const result: PushResult = { pushed, conflicts, errors }\n this.emitter.emit('sync:push', result)\n return result\n }\n\n /** Pull remote records to local adapter. Accepts optional `PullOptions` for partial sync. */\n async pull(options?: PullOptions): Promise<PullResult> {\n await this.ensureLoaded()\n\n let pulled = 0\n const conflicts: Conflict[] = []\n const errors: Error[] = []\n\n try {\n const remoteSnapshot = await this.remote.loadAll(this.vault)\n\n for (const [collName, records] of Object.entries(remoteSnapshot)) {\n // Partial sync: skip collections not in the filter\n if (options?.collections && !options.collections.includes(collName)) {\n continue\n }\n\n for (const [id, remoteEnvelope] of Object.entries(records)) {\n // Partial sync: modifiedSince filter\n if (options?.modifiedSince && remoteEnvelope._ts <= options.modifiedSince) {\n continue\n }\n\n try {\n const localEnvelope = await this.local.get(this.vault, collName, id)\n\n if (!localEnvelope) {\n // New record from remote\n await this.local.put(this.vault, collName, id, remoteEnvelope)\n pulled++\n } else if (remoteEnvelope._v > localEnvelope._v) {\n // Remote is newer — check if we have a dirty entry for this\n const isDirty = this.dirty.some(d => d.collection === collName && d.id === id)\n if (isDirty) {\n // Both changed — conflict\n const { handled, conflict } = await this.handleConflict(\n collName,\n id,\n localEnvelope,\n remoteEnvelope,\n 'pull',\n )\n conflicts.push(conflict)\n if (handled === 'remote') {\n await this.local.put(this.vault, collName, id, conflict.remote)\n this.dirty = this.dirty.filter(d => !(d.collection === collName && d.id === id))\n pulled++\n } else if (handled === 'merged' && conflict.local !== localEnvelope) {\n const merged = conflict.local\n await this.local.put(this.vault, collName, id, merged)\n this.dirty = this.dirty.filter(d => !(d.collection === collName && d.id === id))\n pulled++\n }\n // 'local' or 'deferred': push handles it\n } else {\n // Remote is newer, no local changes — update\n await this.local.put(this.vault, collName, id, remoteEnvelope)\n pulled++\n }\n }\n // Same version or local is newer — skip (push will handle)\n } catch (err) {\n errors.push(err instanceof Error ? err : new Error(String(err)))\n }\n }\n }\n } catch (err) {\n errors.push(err instanceof Error ? err : new Error(String(err)))\n }\n\n this.lastPull = new Date().toISOString()\n await this.persistMeta()\n\n const result: PullResult = { pulled, conflicts, errors }\n this.emitter.emit('sync:pull', result)\n return result\n }\n\n /** Bidirectional sync: pull then push. 
*/\n async sync(options?: { push?: PushOptions; pull?: PullOptions }): Promise<{ pull: PullResult; push: PushResult }> {\n const pullResult = await this.pull(options?.pull)\n const pushResult = await this.push(options?.push)\n return { pull: pullResult, push: pushResult }\n }\n\n /**\n * Push a specific subset of dirty entries (for sync transactions).\n * Entries are matched by collection+id from the dirty log; matched entries\n * are removed from the dirty log on success.\n */\n async pushFiltered(predicate: (entry: DirtyEntry) => boolean): Promise<PushResult> {\n await this.ensureLoaded()\n\n let pushed = 0\n const conflicts: Conflict[] = []\n const errors: Error[] = []\n const completed: number[] = []\n\n for (let i = 0; i < this.dirty.length; i++) {\n const entry = this.dirty[i]!\n if (!predicate(entry)) continue\n\n try {\n if (entry.action === 'delete') {\n await this.remote.delete(this.vault, entry.collection, entry.id)\n completed.push(i)\n pushed++\n } else {\n const envelope = await this.local.get(this.vault, entry.collection, entry.id)\n if (!envelope) {\n completed.push(i)\n continue\n }\n\n try {\n await this.remote.put(\n this.vault,\n entry.collection,\n entry.id,\n envelope,\n entry.version - 1,\n )\n completed.push(i)\n pushed++\n } catch (err) {\n if (err instanceof ConflictError) {\n const remoteEnvelope = await this.remote.get(this.vault, entry.collection, entry.id)\n if (remoteEnvelope) {\n const { handled, conflict } = await this.handleConflict(\n entry.collection,\n entry.id,\n envelope,\n remoteEnvelope,\n 'push',\n )\n conflicts.push(conflict)\n if (handled === 'local') {\n await this.remote.put(this.vault, entry.collection, entry.id, conflict.local)\n completed.push(i)\n pushed++\n } else if (handled === 'remote') {\n await this.local.put(this.vault, entry.collection, entry.id, conflict.remote)\n completed.push(i)\n } else if (handled === 'merged' && conflict.local !== envelope) {\n const merged = conflict.local\n await this.remote.put(this.vault, entry.collection, entry.id, merged)\n await this.local.put(this.vault, entry.collection, entry.id, merged)\n completed.push(i)\n pushed++\n }\n }\n } else {\n throw err\n }\n }\n }\n } catch (err) {\n errors.push(err instanceof Error ? err : new Error(String(err)))\n }\n }\n\n for (const i of completed.sort((a, b) => b - a)) {\n this.dirty.splice(i, 1)\n }\n\n this.lastPush = new Date().toISOString()\n await this.persistMeta()\n\n const result: PushResult = { pushed, conflicts, errors }\n this.emitter.emit('sync:push', result)\n return result\n }\n\n /** Get current sync status. */\n status(): SyncStatus {\n return {\n dirty: this.dirty.length,\n lastPush: this.lastPush,\n lastPull: this.lastPull,\n online: this.isOnline,\n }\n }\n\n // ─── Auto-Sync ───────────────────────────────────────────────────\n\n /** Start auto-sync: listen for online/offline events, optional periodic sync. */\n startAutoSync(intervalMs?: number): void {\n // Online/offline detection\n if (typeof globalThis.addEventListener === 'function') {\n globalThis.addEventListener('online', this.handleOnline)\n globalThis.addEventListener('offline', this.handleOffline)\n }\n\n // Periodic sync\n if (intervalMs && intervalMs > 0) {\n this.autoSyncInterval = setInterval(() => {\n if (this.isOnline) {\n void this.sync()\n }\n }, intervalMs)\n }\n }\n\n /** Stop auto-sync and scheduler. 
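A usage sketch of the engine's public surface as defined above. The engine instance is normally owned by the Noydb instance, so obtaining one directly is an assumption here; the option shapes (`collections`, `modifiedSince`) are the `PushOptions`/`PullOptions` fields the methods actually read:

```ts
declare const engine: SyncEngine // normally constructed internally by the Noydb instance

// Full bidirectional cycle: pull remote changes first, then push dirty records.
const { pull, push } = await engine.sync()
console.log(`pulled ${pull.pulled}, pushed ${push.pushed}, conflicts: ${push.conflicts.length}`)

// Partial sync: restrict to one collection, and only pull newer records.
await engine.push({ collections: ['invoices'] })
await engine.pull({ collections: ['invoices'], modifiedSince: '2025-01-01T00:00:00.000Z' })

// Dirty-count and connectivity snapshot for UI badges.
const { dirty, lastPush, lastPull, online } = engine.status()
console.log({ dirty, lastPush, lastPull, online })

// Background mode: re-sync on 'online' events, plus every 30 seconds.
engine.startAutoSync(30_000)
```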
*/\n stopAutoSync(): void {\n this.stopScheduler()\n if (typeof globalThis.removeEventListener === 'function') {\n globalThis.removeEventListener('online', this.handleOnline)\n globalThis.removeEventListener('offline', this.handleOffline)\n }\n if (this.autoSyncInterval) {\n clearInterval(this.autoSyncInterval)\n this.autoSyncInterval = null\n }\n }\n\n private handleOnline = (): void => {\n this.isOnline = true\n this.emitter.emit('sync:online', undefined as never)\n void this.sync()\n }\n\n private handleOffline = (): void => {\n this.isOnline = false\n this.emitter.emit('sync:offline', undefined as never)\n }\n\n /**\n * Resolve a conflict, checking per-collection resolvers first,\n * then falling back to the db-level `ConflictStrategy`.\n *\n * Returns the resolved `Conflict` object (possibly with `resolve` set for\n * manual mode) and a `handled` discriminant:\n * - `'local'` — keep the local envelope; push it to remote.\n * - `'remote'` — keep the remote envelope; update local.\n * - `'merged'` — a custom merge fn produced a new envelope stored as `conflict.local`.\n * - `'deferred'` — manual mode, resolve was not called synchronously.\n */\n private async handleConflict(\n collection: string,\n id: string,\n local: EncryptedEnvelope,\n remote: EncryptedEnvelope,\n _phase: 'push' | 'pull',\n ): Promise<{ handled: 'local' | 'remote' | 'merged' | 'deferred'; conflict: Conflict }> {\n const resolver = this.conflictResolvers.get(collection)\n\n if (resolver) {\n // Per-collection resolver is responsible for emitting sync:conflict\n // (manual policy emits with a resolve callback; LWW/FWW/custom are silent).\n const winner = await resolver(id, local, remote)\n const base: Conflict = {\n vault: this.vault,\n collection,\n id,\n local,\n remote,\n localVersion: local._v,\n remoteVersion: remote._v,\n }\n if (winner === null) return { handled: 'deferred', conflict: base }\n if (winner === local) return { handled: 'local', conflict: base }\n if (winner === remote) return { handled: 'remote', conflict: base }\n // Custom merge fn produced a new envelope — store as conflict.local for the caller\n return {\n handled: 'merged',\n conflict: { ...base, local: winner, localVersion: winner._v },\n }\n }\n\n // Fall back to db-level strategy — emit once\n const baseConflict: Conflict = {\n vault: this.vault,\n collection,\n id,\n local,\n remote,\n localVersion: local._v,\n remoteVersion: remote._v,\n }\n this.emitter.emit('sync:conflict', baseConflict)\n const side = this.legacyResolve(baseConflict)\n return { handled: side, conflict: baseConflict }\n }\n\n /** DB-level ConflictStrategy resolution (legacy, kept for backward compat). */\n private legacyResolve(conflict: Conflict): 'local' | 'remote' {\n if (typeof this.strategy === 'function') {\n return this.strategy(conflict)\n }\n switch (this.strategy) {\n case 'local-wins': return 'local'\n case 'remote-wins': return 'remote'\n case 'version':\n default:\n return conflict.localVersion >= conflict.remoteVersion ? 
'local' : 'remote'\n }\n }\n\n // ─── Persistence ─────────────────────────────────────────────────\n\n private async ensureLoaded(): Promise<void> {\n if (this.loaded) return\n\n const envelope = await this.local.get(this.vault, '_sync', 'meta')\n if (envelope) {\n const meta = JSON.parse(envelope._data) as SyncMetadata\n this.dirty = [...meta.dirty]\n this.lastPush = meta.last_push\n this.lastPull = meta.last_pull\n }\n\n this.loaded = true\n }\n\n private async persistMeta(): Promise<void> {\n const meta: SyncMetadata = {\n _noydb_sync: NOYDB_SYNC_VERSION,\n last_push: this.lastPush,\n last_pull: this.lastPull,\n dirty: this.dirty,\n }\n\n const envelope: EncryptedEnvelope = {\n _noydb: 1,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: '',\n _data: JSON.stringify(meta),\n }\n\n await this.local.put(this.vault, '_sync', 'meta', envelope)\n }\n}\n","import type { SyncTransactionResult } from '../types.js'\nimport type { SyncEngine } from './sync.js'\nimport type { Vault } from '../vault.js'\n\ninterface TxOp {\n readonly type: 'put' | 'delete'\n readonly collection: string\n readonly id: string\n readonly record?: unknown\n}\n\n/**\n * Sync transaction.\n *\n * Stages local writes and then pushes only those records to remote in a\n * single batch. If any record conflicts during the push, the result\n * carries `status: 'conflict'` — no automatic rollback is performed;\n * the caller handles conflict resolution.\n *\n * Obtain via `db.transaction(compartmentName)`.\n */\nexport class SyncTransaction {\n private readonly comp: Vault\n private readonly engine: SyncEngine\n private readonly ops: TxOp[] = []\n\n /** @internal — constructed by `Noydb.transaction()` */\n constructor(comp: Vault, engine: SyncEngine) {\n this.comp = comp\n this.engine = engine\n }\n\n /** Stage a record write. Does not write to any adapter until `commit()`. */\n put(collection: string, id: string, record: unknown): this {\n this.ops.push({ type: 'put', collection, id, record })\n return this\n }\n\n /** Stage a record delete. Does not write to any adapter until `commit()`. */\n delete(collection: string, id: string): this {\n this.ops.push({ type: 'delete', collection, id })\n return this\n }\n\n /**\n * Commit the transaction.\n *\n * Phase 1 — writes all staged operations to the local adapter via the\n * collection layer (encryption + dirty-log tracking).\n *\n * Phase 2 — pushes only the records that were written in this\n * transaction to the remote adapter. Existing dirty entries from\n * outside this transaction are not affected.\n *\n * If any record conflicts during the push, `status` is `'conflict'`\n * and `conflicts` lists the affected records. No automatic rollback is\n * performed.\n */\n async commit(): Promise<SyncTransactionResult> {\n // Phase 1: write all staged ops to local via collection layer\n for (const op of this.ops) {\n if (op.type === 'put') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n await (this.comp.collection<any>(op.collection)).put(op.id, op.record as any)\n } else {\n await this.comp.collection(op.collection).delete(op.id)\n }\n }\n\n // Phase 2: push only the records from this transaction\n const opSet = new Set<string>()\n for (const op of this.ops) {\n opSet.add(`${op.collection}::${op.id}`)\n }\n\n const pushResult = await this.engine.pushFiltered(\n (entry) => opSet.has(`${entry.collection}::${entry.id}`),\n )\n\n return {\n status: pushResult.conflicts.length > 0 ? 
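`legacyResolve` above also accepts a functional `ConflictStrategy`. A sketch of one, assuming the functional form takes a `Conflict` and returns a side exactly as `legacyResolve` invokes it (import path assumed):

```ts
import type { Conflict, ConflictStrategy } from './types.js' // path assumed

// Prefer whichever side carries the newer envelope timestamp; fall back to
// the higher version on a tie — a wall-clock-biased variant of the
// 'version' default.
const newerWins: ConflictStrategy = (c: Conflict) => {
  if (c.local._ts !== c.remote._ts) {
    return c.local._ts > c.remote._ts ? 'local' : 'remote'
  }
  return c.localVersion >= c.remoteVersion ? 'local' : 'remote'
}
```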
'conflict' : 'committed',\n pushed: pushResult.pushed,\n conflicts: pushResult.conflicts,\n }\n }\n}\n","/**\n * Multi-record atomic transactions.\n *\n * Lets an application stage writes across two or more collections (or\n * vaults) and commit them all-or-nothing.\n *\n * ```ts\n * await db.transaction(async (tx) => {\n * const inv = tx.vault('acme').collection<Invoice>('invoices')\n * const pay = tx.vault('acme').collection<Payment>('payments')\n * await inv.put(invoiceId, { ...invoice, status: 'paid' })\n * await pay.put(paymentId, { invoiceId, amount, paidAt })\n * })\n * // If the body throws before returning: nothing persisted.\n * // If the body returns: all puts committed; any CAS mismatch rolls\n * // the batch back and surfaces as ConflictError.\n * ```\n *\n * ## Atomicity semantics\n *\n * Ops are buffered during the body. On body-return the hub:\n *\n * 1. **Pre-flight** — re-reads every touched envelope and enforces\n * any caller-supplied `expectedVersion`. A mismatch throws\n * `ConflictError` with *no* writes performed.\n * 2. **Execute** — calls `Collection.put()` / `.delete()` for each\n * staged op in declaration order. History snapshots, ledger\n * appends, and change events fire as normal per op.\n * 3. **Unwind on failure** — if step 2 throws mid-batch, each\n * already-committed op is reverted via the raw store (restoring\n * the captured prior envelope, or deleting if none existed). The\n * ledger is NOT rewritten — audit history preserves the partial\n * commit and the revert.\n *\n * **Crash window.** Steps 2–3 are not a storage-layer transaction —\n * if the process dies between two executed ops, the on-disk state is\n * partial. True all-or-nothing atomicity requires a store that\n * implements `NoydbStore.tx()` (DynamoDB `TransactWriteItems`,\n * IndexedDB `readwrite` transaction, …). This executor declares\n * that future integration point via the `tx?()` method + the\n * `StoreCapabilities.txAtomic` bit, but does not yet delegate\n * to it — the cascade into `Fork · Stores` tracks the per-adapter\n * wire-up.\n *\n * ## Not covered\n *\n * - Cross-sync-peer atomicity. Transactions commit against the\n * primary store only; the sync engine pushes on its normal\n * schedule. For cross-peer two-phase commit use `SyncTransaction`\n * via `db.transaction(vaultName)`.\n * - Read-your-writes within the body. `tx.collection().get(id)`\n * returns the most-recently-staged value for that id when one\n * exists; if no staged op has touched the id, it reads the current\n * committed state. Version numbers returned by `get` reflect the\n * pre-transaction state (staged puts have no version yet).\n *\n * @module\n */\n\nimport type { Noydb } from '../noydb.js'\nimport type { Vault } from '../vault.js'\nimport type { Collection } from '../collection.js'\nimport type { EncryptedEnvelope } from '../types.js'\nimport { ConflictError } from '../errors.js'\n\n/** One op buffered inside a running `TxContext`. @internal */\ninterface StagedOp {\n type: 'put' | 'delete'\n vaultName: string\n collectionName: string\n id: string\n record?: unknown\n expectedVersion?: number\n}\n\n/**\n * Transaction handle passed to the user's body. 
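A sketch of the two-phase flow through `SyncTransaction`, obtained via `db.transaction(compartmentName)` as the docblock states; the vault and collection names are illustrative:

```ts
declare const db: { transaction(vault: string): SyncTransaction } // shape per the docblock

const tx = db.transaction('acme')

// Phase 1 on commit(): these land in the local adapter (encrypted, dirty-tracked).
tx.put('invoices', 'inv_001', { total: 1200, status: 'sent' })
  .put('payments', 'pay_001', { invoiceId: 'inv_001', amount: 1200 })
  .delete('drafts', 'inv_001')

// Phase 2: only these three records are pushed to the remote adapter.
const result = await tx.commit()
if (result.status === 'conflict') {
  // No rollback happened — the caller owns resolution.
  for (const c of result.conflicts) console.warn(`conflict: ${c.collection}/${c.id}`)
}
```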
Use\n * `tx.vault(name).collection<T>(name)` to get a per-collection\n * facade; its `put`/`delete`/`get` calls stage ops against the tx.\n */\nexport class TxContext {\n /** @internal */\n readonly _ops: StagedOp[] = []\n /** @internal */\n readonly _db: Noydb\n\n /** @internal */\n constructor(db: Noydb) {\n this._db = db\n }\n\n /** Scope subsequent `collection()` calls to the named vault. */\n vault(name: string): TxVault {\n const v = this._db.vault(name)\n return new TxVault(this, v)\n }\n}\n\n/** Per-vault facade inside a running transaction. */\nexport class TxVault {\n /** @internal */\n readonly _ctx: TxContext\n /** @internal */\n readonly _vault: Vault\n\n /** @internal */\n constructor(ctx: TxContext, vault: Vault) {\n this._ctx = ctx\n this._vault = vault\n }\n\n /** Scope subsequent op calls to the named collection. */\n collection<T>(name: string): TxCollection<T> {\n const c = this._vault.collection<T>(name)\n return new TxCollection<T>(this._ctx, this._vault, c, name)\n }\n}\n\n/** Per-collection facade inside a running transaction. */\nexport class TxCollection<T> {\n /** @internal */\n readonly _ctx: TxContext\n /** @internal */\n readonly _vault: Vault\n /** @internal */\n readonly _coll: Collection<T>\n /** @internal */\n readonly _name: string\n\n /** @internal */\n constructor(ctx: TxContext, vault: Vault, coll: Collection<T>, name: string) {\n this._ctx = ctx\n this._vault = vault\n this._coll = coll\n this._name = name\n }\n\n /**\n * Read the current committed value, or the most-recently-staged\n * value from the same transaction if one exists.\n */\n async get(id: string): Promise<T | null> {\n for (let i = this._ctx._ops.length - 1; i >= 0; i--) {\n const op = this._ctx._ops[i]!\n if (\n op.vaultName === this._vault.name &&\n op.collectionName === this._name &&\n op.id === id\n ) {\n if (op.type === 'delete') return null\n return op.record as T\n }\n }\n return this._coll.get(id)\n }\n\n /**\n * Stage a put. Does not write until the transaction body returns.\n * Supply `{ expectedVersion }` to enforce optimistic concurrency\n * during the commit pre-flight.\n */\n put(id: string, record: T, options?: { expectedVersion?: number }): void {\n const op: StagedOp = {\n type: 'put',\n vaultName: this._vault.name,\n collectionName: this._name,\n id,\n record,\n }\n if (options?.expectedVersion !== undefined) op.expectedVersion = options.expectedVersion\n this._ctx._ops.push(op)\n }\n\n /**\n * Stage a delete. Does not write until the transaction body returns.\n * Supply `{ expectedVersion }` to enforce optimistic concurrency\n * during the commit pre-flight.\n */\n delete(id: string, options?: { expectedVersion?: number }): void {\n const op: StagedOp = {\n type: 'delete',\n vaultName: this._vault.name,\n collectionName: this._name,\n id,\n }\n if (options?.expectedVersion !== undefined) op.expectedVersion = options.expectedVersion\n this._ctx._ops.push(op)\n }\n}\n\n/**\n * Commit plan: pre-flight check + execution + revert plan. 
Returned\n * from `runTransaction`.\n *\n * @internal — exposed only for the `Collection.putMany({atomic:true})`\n * wire-up so the bulk path can share the executor without creating\n * an outer TxContext.\n */\nexport async function runTransaction<T>(\n db: Noydb,\n fn: (tx: TxContext) => Promise<T> | T,\n): Promise<T> {\n const ctx = new TxContext(db)\n const bodyResult = await fn(ctx)\n\n if (ctx._ops.length === 0) return bodyResult\n\n // Phase 1 — pre-flight: snapshot every touched envelope and enforce\n // any caller-supplied expectedVersion. Same (vault, coll, id) touched\n // more than once in one tx snapshots only the *initial* committed\n // state; the in-order replay in Phase 2 takes care of successor ops.\n const priorEnvelopes = new Map<string, EncryptedEnvelope | null>()\n const store = db._store\n for (const op of ctx._ops) {\n const key = keyOf(op)\n if (!priorEnvelopes.has(key)) {\n const env = await store.get(op.vaultName, op.collectionName, op.id)\n priorEnvelopes.set(key, env)\n }\n if (op.expectedVersion !== undefined) {\n const env = priorEnvelopes.get(key) ?? null\n const actual = env?._v ?? 0\n if (actual !== op.expectedVersion) {\n throw new ConflictError(\n actual,\n `Transaction pre-flight: ${op.vaultName}/${op.collectionName}/${op.id} ` +\n `expected v${op.expectedVersion}, found v${actual}`,\n )\n }\n }\n }\n\n // Phase 2 — execute via the Collection layer so history snapshots,\n // ledger entries, and change events fire normally. We capture each\n // successful op so a mid-batch throw can revert in Phase 3.\n const executed: Array<{ op: StagedOp; priorEnvelope: EncryptedEnvelope | null }> = []\n try {\n for (const op of ctx._ops) {\n const coll = db.vault(op.vaultName).collection(op.collectionName)\n const key = keyOf(op)\n const prior = priorEnvelopes.get(key) ?? null\n if (op.type === 'put') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n await coll.put(op.id, op.record as any)\n } else {\n await coll.delete(op.id)\n }\n executed.push({ op, priorEnvelope: prior })\n }\n return bodyResult\n } catch (err) {\n // Phase 3 — best-effort revert. Restore captured prior envelopes\n // via the raw store to avoid re-firing Collection-level side\n // effects (we don't want a cascade of change events undoing\n // themselves). The ledger is left as-is: each committed op\n // appended an entry; the revert is deliberately not recorded as a\n // compensating entry because the transaction's contract is \"atomic or not at\n // all\" from the caller's view, not \"every write visible in the\n // audit trail.\" Auditors who need the intermediate state can still\n // reconstruct it by walking the ledger through the failed-tx\n // timestamp.\n for (const { op, priorEnvelope } of executed.slice().reverse()) {\n try {\n if (priorEnvelope) {\n await store.put(op.vaultName, op.collectionName, op.id, priorEnvelope)\n } else {\n await store.delete(op.vaultName, op.collectionName, op.id)\n }\n } catch {\n // swallow — best-effort. Surfacing the revert error would\n // mask the original one that triggered the rollback.\n }\n }\n throw err\n }\n}\n\nfunction keyOf(op: StagedOp): string {\n return `${op.vaultName}\\x00${op.collectionName}\\x00${op.id}`\n}\n","/**\n * i18nText schema type —\n *\n * `i18nText({ languages, required })` creates a descriptor for a\n * multi-language content field whose value is stored as a\n * `{ [locale]: string }` map (e.g. 
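To make the pre-flight concrete: a sketch of pinning a version with `expectedVersion`, assuming `db.transaction(async fn)` delegates to `runTransaction` as the module example shows, and that the expected version was learned from an earlier, out-of-band read (paths assumed):

```ts
import { ConflictError } from './errors.js' // path assumed
import type { Noydb } from './noydb.js'     // path assumed

declare const db: Noydb            // an open instance
declare const knownVersion: number // learned from an earlier read outside the tx

try {
  await db.transaction(async (tx) => {
    const invoices = tx.vault('acme').collection<{ status: string }>('invoices')
    const current = await invoices.get('inv_001')
    if (!current) return
    // Stage the write pinned to the version we believe is current. If another
    // writer commits first, the pre-flight throws and nothing is written.
    invoices.put('inv_001', { ...current, status: 'paid' }, { expectedVersion: knownVersion })
  })
} catch (err) {
  if (err instanceof ConflictError) {
    // Pre-flight mismatch: re-read, recompute, retry with the fresh version.
  } else {
    throw err
  }
}
```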
`{ en: 'Consulting', th: 'ที่ปรึกษา' }`).\n *\n * On put, the descriptor validates that required languages are present.\n * On read (when a `locale` option is passed), the map is collapsed to the\n * caller's locale string via the fallback chain.\n *\n * Design decisions\n * ────────────────\n *\n * **Descriptor pattern (not a Zod type).**\n * `i18nText()` returns a plain descriptor object used in the collection's\n * `i18nFields` option — same pattern as `ref()` / `dictKey()`. This keeps\n * `@noy-db/core` at zero runtime dependencies and avoids Zod v3 field-type\n * constraints. TypeScript inference is handled via the descriptor's type.\n *\n * **Enforcement at the collection boundary.**\n * The `required` option is checked by `Collection.put()` via the compartment's\n * registered `i18nFields`. Failed validation throws `MissingTranslationError`\n * — a distinct class from `SchemaValidationError` so callers can tell\n * \"wrong shape\" from \"missing translations\".\n *\n * **Resolution is post-decryption.**\n * Locale resolution happens AFTER `decryptRecord()`, as a pure in-memory\n * transform. No additional crypto work is needed. The resolved record is\n * returned in place of the stored one, with i18nText fields replaced by\n * their locale-resolved strings.\n *\n * **`locale: 'raw'`.**\n * Passing `{ locale: 'raw' }` skips resolution and returns the full\n * `{ [locale]: string }` map — useful for bilingual exports, admin UIs,\n * and any context where all translations must be visible at once.\n *\n * **Out of scope.**\n * Pluralization, RTL rendering, date/number formatting, per-locale CRDT\n * merging.\n */\n\nimport { MissingTranslationError, LocaleNotSpecifiedError } from '../errors.js'\n\n// ─── i18nText descriptor ───────────────────────────────────────────────\n\n/**\n * Options for `i18nText()`.\n *\n * `languages` declares the full set of supported locales. `required`\n * controls which must be present on every `put()`.\n *\n * `autoTranslate` is the per-field opt-in for the `plaintextTranslator`\n * hook. When `true` and a `plaintextTranslator` is configured\n * on `createNoydb()`, missing translations are generated before `put()`.\n * Default: `false`.\n */\nexport interface I18nTextOptions {\n /** All supported locale codes (BCP 47). */\n readonly languages: readonly string[]\n /**\n * Which locales must be present on every `put()`.\n *\n * - `'all'` — every declared language must be present.\n * - `'any'` — at least one declared language must be present.\n * - `string[]` — listed locales are required; others are optional.\n */\n readonly required: 'all' | 'any' | readonly string[]\n /**\n * Per-field opt-in for the `plaintextTranslator` hook.\n * When `true`, missing required translations are auto-generated\n * before `put()` if a translator is configured. Default: `false`.\n */\n readonly autoTranslate?: boolean\n}\n\n/**\n * Descriptor returned by `i18nText()`. 
Attach to the collection's\n * `i18nFields` option:\n *\n * ```ts\n * const lineItems = company.collection<LineItem>('line-items', {\n * i18nFields: {\n * description: i18nText({ languages: ['en', 'th'], required: 'all' }),\n * },\n * })\n * ```\n */\nexport interface I18nTextDescriptor {\n readonly _noydbI18nText: true\n readonly options: I18nTextOptions\n}\n\n/**\n * Create an `I18nTextDescriptor` for a multi-language content field.\n *\n * @param options Language list + enforcement mode.\n *\n * @example\n * ```ts\n * i18nText({ languages: ['en', 'th'], required: 'all' })\n * i18nText({ languages: ['en', 'th'], required: ['th'], autoTranslate: true })\n * ```\n */\nexport function i18nText(options: I18nTextOptions): I18nTextDescriptor {\n return { _noydbI18nText: true, options }\n}\n\n/** Runtime predicate for detecting an `I18nTextDescriptor`. */\nexport function isI18nTextDescriptor(x: unknown): x is I18nTextDescriptor {\n return (\n typeof x === 'object' &&\n x !== null &&\n (x as { _noydbI18nText?: unknown })._noydbI18nText === true\n )\n}\n\n// ─── Validation helpers ────────────────────────────────────────────────\n\n/**\n * Validate that a value is a valid `{ [locale]: string }` map and that\n * all required locales are present. Throws `MissingTranslationError`\n * when the required constraint is violated.\n *\n * Called by `Collection.put()` for each registered `i18nField`.\n *\n * @param value The raw field value from the record being put.\n * @param field The field name (used in the thrown error message).\n * @param descriptor The `i18nText()` descriptor for this field.\n */\nexport function validateI18nTextValue(\n value: unknown,\n field: string,\n descriptor: I18nTextDescriptor,\n): void {\n const { options } = descriptor\n\n // Must be a non-null object\n if (typeof value !== 'object' || value === null || Array.isArray(value)) {\n throw new MissingTranslationError(\n field,\n options.languages,\n `Field \"${field}\" must be a { [locale]: string } map, got ${typeof value}.`,\n )\n }\n\n const map = value as Record<string, unknown>\n\n // All values must be strings\n for (const [locale, v] of Object.entries(map)) {\n if (typeof v !== 'string') {\n throw new MissingTranslationError(\n field,\n [locale],\n `Field \"${field}\": locale \"${locale}\" must be a string, got ${typeof v}.`,\n )\n }\n }\n\n // Check required constraint\n const { required } = options\n if (required === 'all') {\n const missing = options.languages.filter(\n (lang) => !(lang in map) || map[lang] === '',\n )\n if (missing.length > 0) {\n throw new MissingTranslationError(\n field,\n missing,\n `Field \"${field}\" requires all declared languages. Missing: ${missing.join(', ')}.`,\n )\n }\n } else if (required === 'any') {\n const present = options.languages.some(\n (lang) => lang in map && map[lang] !== '',\n )\n if (!present) {\n throw new MissingTranslationError(\n field,\n options.languages,\n `Field \"${field}\" requires at least one declared language. None present.`,\n )\n }\n } else {\n // string[] — named required locales; TypeScript narrows required to readonly string[]\n const requiredList = required\n const missing = requiredList.filter(\n (lang) => !(lang in map) || map[lang] === '',\n )\n if (missing.length > 0) {\n throw new MissingTranslationError(\n field,\n missing,\n `Field \"${field}\" requires: ${requiredList.join(', ')}. 
Missing: ${missing.join(', ')}.`,\n )\n }\n }\n}\n\n// ─── Locale resolution ─────────────────────────────────────────────────\n\n/**\n * Resolve an i18nText value (`{ [locale]: string }` map) to a string\n * for the given locale.\n *\n * @param value The stored locale map.\n * @param locale The requested locale code, or `'raw'` to return the map.\n * @param fallback Single locale or ordered list; use `'any'` as the last\n * element to fall back to any available translation.\n * @param field Field name used in `LocaleNotSpecifiedError` messages.\n * @returns The resolved string, OR the original map when `locale === 'raw'`.\n */\nexport function resolveI18nText(\n value: Record<string, string>,\n locale: string,\n fallback?: string | readonly string[],\n field?: string,\n): string | Record<string, string> {\n if (locale === 'raw') {\n return value\n }\n\n if (!locale) {\n throw new LocaleNotSpecifiedError(field ?? '<unknown>')\n }\n\n // Primary locale\n if (value[locale] !== undefined && value[locale] !== '') {\n return value[locale]\n }\n\n // Fallback chain\n const chain: readonly string[] = Array.isArray(fallback)\n ? fallback\n : fallback\n ? [fallback]\n : []\n\n for (const fb of chain) {\n if (fb === 'any') {\n const any = Object.values(value).find((v) => v !== '')\n if (any !== undefined) return any\n } else if (value[fb] !== undefined && value[fb] !== '') {\n return value[fb]\n }\n }\n\n throw new LocaleNotSpecifiedError(\n field ?? '<unknown>',\n `No translation available for locale \"${locale}\"` +\n (chain.length > 0 ? ` or fallback chain [${chain.join(', ')}]` : '') +\n '.',\n )\n}\n\n/**\n * Apply locale resolution to a single record, in-place over a copy.\n *\n * For each field registered as an `i18nText` descriptor:\n * - If `locale === 'raw'`, the field value is left as the stored map.\n * - Otherwise, the field value is replaced with the resolved string.\n *\n * Records that are not plain objects (null, array, primitives) are\n * returned unchanged.\n *\n * @param record The decrypted record.\n * @param i18nFields Map of field name → `I18nTextDescriptor`.\n * @param locale The requested locale (or `'raw'`).\n * @param fallback Fallback chain (optional).\n */\nexport function applyI18nLocale(\n record: Record<string, unknown>,\n i18nFields: Record<string, I18nTextDescriptor>,\n locale: string,\n fallback?: string | readonly string[],\n): Record<string, unknown> {\n const fieldNames = Object.keys(i18nFields)\n if (fieldNames.length === 0) return record\n\n const result = { ...record }\n\n for (const field of fieldNames) {\n const raw = result[field]\n if (raw === undefined || raw === null) continue\n if (typeof raw !== 'object' || Array.isArray(raw)) continue\n\n result[field] = resolveI18nText(\n raw as Record<string, string>,\n locale,\n fallback,\n field,\n )\n }\n\n return result\n}\n","/**\n * _sync_credentials reserved collection —\n *\n * Stores per-adapter OAuth tokens (and any other long-lived sync secrets) as\n * encrypted records inside the vault itself. Tokens are wrapped with the\n * compartment's own DEK, live on disk as ciphertext like any other record, and\n * are accessed only through the dedicated API in this module — never via\n * `vault.collection('_sync_credentials')`.\n *\n * Design decisions\n * ────────────────\n *\n * **Why a reserved collection, not a separate store?**\n * The compartment's existing encryption stack (AES-256-GCM + collection DEK)\n * is exactly the right primitive for protecting OAuth tokens at rest. 
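How the helpers above compose, end to end. The module path is assumed; everything else uses the exported signatures as defined:

```ts
import { i18nText, validateI18nTextValue, resolveI18nText, applyI18nLocale } from './i18n-text.js' // path assumed

const description = i18nText({ languages: ['en', 'th'], required: 'all' })

const value = { en: 'Consulting', th: 'ที่ปรึกษา' }
// Throws MissingTranslationError if a declared language were absent or empty.
validateI18nTextValue(value, 'description', description)

// Collapse to one locale; 'de' is missing, so the chain falls back to 'en'.
const resolved = resolveI18nText(value, 'de', ['en', 'any'], 'description') // → 'Consulting'

// Resolve a whole record: registered i18n fields collapse, others pass through.
const record = { id: 'li_1', description: value, qty: 2 }
applyI18nLocale(record, { description }, 'th')
// → { id: 'li_1', description: 'ที่ปรึกษา', qty: 2 }
```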
Using a\n * separate store would require a new encryption surface, new adapter calls,\n * and a new backup/restore path — all of which already exist for collections.\n *\n * **Why not exposed as a regular collection?**\n * The same reason `_keyring` and `_ledger` aren't: they have invariants that\n * must be enforced (naming scheme, no cross-user leakage, no schema\n * validation, no history/ledger writes for privacy). Routing through a\n * dedicated API enforces those invariants.\n *\n * **Token lifecycle:**\n * - `putCredential(vault, adapterId, token)` — store or overwrite\n * - `getCredential(vault, adapterId)` — load and decrypt\n * - `deleteCredential(vault, adapterId)` — remove\n * - `listCredentials(vault)` — enumerate adapter IDs (not tokens)\n *\n * The `adapterId` is the record ID within the `_sync_credentials` collection.\n * It should be a stable, human-readable identifier for the adapter instance\n * (e.g. `'google-drive'`, `'dropbox'`, `'s3-prod'`).\n *\n * **ACL:** only `owner` and `admin` roles can read/write sync credentials.\n * Operators, viewers, and clients cannot call this API. The check is made\n * against the caller's keyring role at call time.\n */\n\nimport type { NoydbStore, EncryptedEnvelope } from '../types.js'\nimport { NOYDB_FORMAT_VERSION } from '../types.js'\nimport type { UnlockedKeyring } from './keyring.js'\nimport { encrypt, decrypt } from '../crypto.js'\nimport { ensureCollectionDEK } from './keyring.js'\nimport { PermissionDeniedError } from '../errors.js'\n\n/** The reserved collection name. Never collides with user collections. */\nexport const SYNC_CREDENTIALS_COLLECTION = '_sync_credentials'\n\n// ─── Token types ──────────────────────────────────────────────────────\n\n/**\n * An OAuth/auth token stored in `_sync_credentials`.\n *\n * Fields mirror the OAuth2 token response shape. `customData` is an escape\n * hatch for adapter-specific secrets (API keys, connection strings, etc.)\n * that don't fit the OAuth2 shape.\n */\nexport interface SyncCredential {\n /** Stable identifier for the adapter instance (e.g. 'google-drive'). */\n readonly adapterId: string\n /** OAuth token type, usually 'Bearer'. */\n readonly tokenType: string\n /** The access token. Expires at `expiresAt` if set. */\n readonly accessToken: string\n /** Long-lived refresh token for renewing the access token. */\n readonly refreshToken?: string\n /** ISO timestamp when `accessToken` expires. Absent means \"no expiry\". */\n readonly expiresAt?: string\n /** Space-separated OAuth scopes. */\n readonly scopes?: string\n /** Adapter-specific opaque data (API keys, endpoints, etc.). */\n readonly customData?: Record<string, string>\n}\n\n// ─── Access check ─────────────────────────────────────────────────────\n\nfunction requireAdminAccess(keyring: UnlockedKeyring): void {\n if (keyring.role !== 'owner' && keyring.role !== 'admin') {\n throw new PermissionDeniedError(\n `Sync credentials require owner or admin role. Current role: \"${keyring.role}\"`,\n )\n }\n}\n\n// ─── Public API ────────────────────────────────────────────────────────\n\n/**\n * Store or overwrite a sync credential for the given adapter.\n *\n * The credential is encrypted with the `_sync_credentials` collection DEK\n * (auto-generated on first use). 
The record ID is the `adapterId`.\n *\n * Requires owner or admin role.\n */\nexport async function putCredential(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n credential: SyncCredential,\n): Promise<void> {\n requireAdminAccess(keyring)\n\n const getDek = await ensureCollectionDEK(adapter, vault, keyring)\n const dek = await getDek(SYNC_CREDENTIALS_COLLECTION)\n\n const { iv, data } = await encrypt(JSON.stringify(credential), dek)\n\n const existing = await adapter.get(vault, SYNC_CREDENTIALS_COLLECTION, credential.adapterId)\n const version = existing ? existing._v + 1 : 1\n\n const envelope: EncryptedEnvelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: version,\n _ts: new Date().toISOString(),\n _iv: iv,\n _data: data,\n _by: keyring.userId,\n }\n\n await adapter.put(\n vault,\n SYNC_CREDENTIALS_COLLECTION,\n credential.adapterId,\n envelope,\n existing ? existing._v : undefined,\n )\n}\n\n/**\n * Load and decrypt a sync credential for the given adapter ID.\n *\n * Returns `null` if no credential exists for this adapter.\n * Requires owner or admin role.\n */\nexport async function getCredential(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n adapterId: string,\n): Promise<SyncCredential | null> {\n requireAdminAccess(keyring)\n\n const getDek = await ensureCollectionDEK(adapter, vault, keyring)\n const dek = await getDek(SYNC_CREDENTIALS_COLLECTION)\n\n const envelope = await adapter.get(vault, SYNC_CREDENTIALS_COLLECTION, adapterId)\n if (!envelope) return null\n\n const plaintext = await decrypt(envelope._iv, envelope._data, dek)\n return JSON.parse(plaintext) as SyncCredential\n}\n\n/**\n * Delete a sync credential by adapter ID.\n *\n * No-op if the credential doesn't exist. Requires owner or admin role.\n */\nexport async function deleteCredential(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n adapterId: string,\n): Promise<void> {\n requireAdminAccess(keyring)\n await adapter.delete(vault, SYNC_CREDENTIALS_COLLECTION, adapterId)\n}\n\n/**\n * List all adapter IDs that have stored credentials.\n *\n * Returns only the IDs, never the credential payloads. Useful for\n * displaying \"connected adapters\" in UI without decrypting tokens.\n * Requires owner or admin role.\n */\nexport async function listCredentials(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n): Promise<string[]> {\n requireAdminAccess(keyring)\n return adapter.list(vault, SYNC_CREDENTIALS_COLLECTION)\n}\n\n/**\n * Check whether a credential exists and whether its access token has expired.\n *\n * Returns `{ exists: false }` if no credential is stored, or\n * `{ exists: true, expired: boolean }` based on the `expiresAt` field.\n * Requires owner or admin role.\n */\nexport async function credentialStatus(\n adapter: NoydbStore,\n vault: string,\n keyring: UnlockedKeyring,\n adapterId: string,\n): Promise<{ exists: false } | { exists: true; expired: boolean }> {\n const credential = await getCredential(adapter, vault, keyring, adapterId)\n if (!credential) return { exists: false }\n\n const expired = credential.expiresAt\n ? Date.now() > new Date(credential.expiresAt).getTime()\n : false\n\n return { exists: true, expired }\n}\n","/**\n * Session policies —\n *\n * A `SessionPolicy` is a small declarative object that controls how long a\n * session lives and which operations require re-authentication. 
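The lifecycle in practice — a sketch with placeholder token values; `adapter` and `keyring` come from an open vault, and the keyring must carry the owner or admin role (import paths assumed):

```ts
import { putCredential, getCredential, credentialStatus } from './sync-credentials.js' // path assumed
import type { NoydbStore } from '../types.js'
import type { UnlockedKeyring } from './keyring.js'

declare const adapter: NoydbStore
declare const keyring: UnlockedKeyring // role must be 'owner' or 'admin'

// Store (or overwrite) the token under a stable, human-readable adapter ID.
await putCredential(adapter, 'acme', keyring, {
  adapterId: 'google-drive',
  tokenType: 'Bearer',
  accessToken: '<access-token>',
  refreshToken: '<refresh-token>',
  expiresAt: new Date(Date.now() + 3600_000).toISOString(),
})

// Cheap expiry probe before a sync run.
const status = await credentialStatus(adapter, 'acme', keyring, 'google-drive')
if (status.exists && status.expired) {
  const cred = await getCredential(adapter, 'acme', keyring, 'google-drive')
  // …exchange cred?.refreshToken for a fresh access token, then putCredential again.
}
```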
It is\n * evaluated by the `PolicyEnforcer` class, which the Noydb instance\n * integrates to replace the bare `sessionTimeout` timer.\n *\n * Design decisions\n * ────────────────\n * Policies are stateless value objects — no timers, no event listeners.\n * The Noydb instance is the stateful coordinator: it holds the enforcer,\n * calls `enforcer.touch()` on every operation, and calls\n * `enforcer.checkOperation()` before high-risk operations.\n *\n * This keeps the policy module easy to unit-test (no global timers to mock)\n * and avoids the \"who owns cleanup\" problem that comes with timer-based\n * callbacks embedded in a value object.\n *\n * `lockOnBackground` registers a `visibilitychange` listener on the document\n * at enforcer creation time and removes it on `destroy()`. It is a no-op in\n * non-browser environments (no `document`).\n */\n\nimport type { SessionPolicy, ReAuthOperation } from '../types.js'\nimport { SessionExpiredError, SessionPolicyError } from '../errors.js'\nimport { revokeSession } from './session.js'\n\n// ─── PolicyEnforcer ────────────────────────────────────────────────────\n\nexport interface PolicyEnforcerOptions {\n /** The policy to enforce. */\n policy: SessionPolicy\n /** The session ID to revoke when idle/absolute timeouts fire. */\n sessionId: string\n /**\n * Called when the policy decides the session should end (idle timeout,\n * absolute timeout, or lockOnBackground). Use this to trigger the\n * same cleanup that `Noydb.close()` would perform.\n */\n onRevoke: (reason: 'idle' | 'absolute' | 'background') => void\n}\n\n/**\n * Stateful enforcer for a single session policy.\n *\n * Create one per open session, call `touch()` on every operation,\n * call `checkOperation(op)` before export/grant/revoke/rotate/changeSecret,\n * and call `destroy()` when the session ends.\n */\nexport class PolicyEnforcer {\n private readonly policy: SessionPolicy\n private readonly sessionId: string\n private readonly onRevoke: PolicyEnforcerOptions['onRevoke']\n private readonly createdAt: number\n private lastActivityAt: number\n private idleTimer: ReturnType<typeof setTimeout> | null = null\n private absoluteTimer: ReturnType<typeof setTimeout> | null = null\n private visibilityHandler: (() => void) | null = null\n\n constructor(opts: PolicyEnforcerOptions) {\n this.policy = opts.policy\n this.sessionId = opts.sessionId\n this.onRevoke = opts.onRevoke\n this.createdAt = Date.now()\n this.lastActivityAt = Date.now()\n\n this.scheduleIdleTimer()\n this.scheduleAbsoluteTimer()\n this.registerBackgroundLock()\n }\n\n /**\n * Record an activity timestamp and reset the idle timer.\n * Call this at the top of every Noydb public method.\n */\n touch(): void {\n this.lastActivityAt = Date.now()\n this.scheduleIdleTimer()\n }\n\n /**\n * Check whether the given operation is allowed under the active policy.\n * Throws `SessionPolicyError` if the operation requires re-authentication.\n * Throws `SessionExpiredError` if the absolute timeout has been exceeded\n * (defensive check in case the timer fired before the call arrived).\n *\n * This is a synchronous check — callers don't await it.\n */\n checkOperation(op: ReAuthOperation): void {\n // Defensive absolute-timeout check (timer may have fired late)\n const { absoluteTimeoutMs } = this.policy\n if (absoluteTimeoutMs !== undefined && Date.now() - this.createdAt >= absoluteTimeoutMs) {\n this.expire('absolute')\n throw new SessionExpiredError(this.sessionId)\n }\n\n const required = this.policy.requireReAuthFor ?? 
[]\n if (required.includes(op)) {\n throw new SessionPolicyError(op)\n }\n }\n\n /**\n * Tear down timers and background-lock listener. Call from `Noydb.close()`\n * and whenever the session is revoked externally.\n */\n destroy(): void {\n if (this.idleTimer) {\n clearTimeout(this.idleTimer)\n this.idleTimer = null\n }\n if (this.absoluteTimer) {\n clearTimeout(this.absoluteTimer)\n this.absoluteTimer = null\n }\n if (this.visibilityHandler && typeof document !== 'undefined') {\n document.removeEventListener('visibilitychange', this.visibilityHandler)\n this.visibilityHandler = null\n }\n }\n\n /** How long since the last activity, in ms. */\n get idleMs(): number {\n return Date.now() - this.lastActivityAt\n }\n\n /** How long since session creation, in ms. */\n get ageMs(): number {\n return Date.now() - this.createdAt\n }\n\n // ── Private ──────────────────────────────────────────────────────────\n\n private scheduleIdleTimer(): void {\n const { idleTimeoutMs } = this.policy\n if (!idleTimeoutMs) return\n\n if (this.idleTimer) clearTimeout(this.idleTimer)\n this.idleTimer = setTimeout(() => {\n this.expire('idle')\n }, idleTimeoutMs)\n }\n\n private scheduleAbsoluteTimer(): void {\n const { absoluteTimeoutMs } = this.policy\n if (!absoluteTimeoutMs) return\n\n if (this.absoluteTimer) clearTimeout(this.absoluteTimer)\n this.absoluteTimer = setTimeout(() => {\n this.expire('absolute')\n }, absoluteTimeoutMs)\n }\n\n private registerBackgroundLock(): void {\n if (!this.policy.lockOnBackground) return\n if (typeof document === 'undefined') return\n\n this.visibilityHandler = () => {\n if (document.hidden) {\n this.expire('background')\n }\n }\n document.addEventListener('visibilitychange', this.visibilityHandler)\n }\n\n private expire(reason: 'idle' | 'absolute' | 'background'): void {\n this.destroy()\n revokeSession(this.sessionId)\n this.onRevoke(reason)\n }\n}\n\n// ─── Helpers ───────────────────────────────────────────────────────────\n\n/**\n * Build a `PolicyEnforcer` from a policy + session token, and return it\n * alongside a cleanup function. 
Convenience wrapper for Noydb.\n */\nexport function createEnforcer(opts: PolicyEnforcerOptions): PolicyEnforcer {\n return new PolicyEnforcer(opts)\n}\n\n/**\n * Validate that a `SessionPolicy` is well-formed.\n * Throws a plain `Error` (not `NoydbError`) because this is a developer\n * error — invalid policies passed at construction time, not at runtime.\n */\nexport function validateSessionPolicy(policy: SessionPolicy): void {\n const { idleTimeoutMs, absoluteTimeoutMs } = policy\n if (idleTimeoutMs !== undefined && (typeof idleTimeoutMs !== 'number' || idleTimeoutMs <= 0)) {\n throw new Error(`SessionPolicy.idleTimeoutMs must be a positive number, got ${idleTimeoutMs}`)\n }\n if (absoluteTimeoutMs !== undefined && (typeof absoluteTimeoutMs !== 'number' || absoluteTimeoutMs <= 0)) {\n throw new Error(`SessionPolicy.absoluteTimeoutMs must be a positive number, got ${absoluteTimeoutMs}`)\n }\n if (idleTimeoutMs !== undefined && absoluteTimeoutMs !== undefined && idleTimeoutMs >= absoluteTimeoutMs) {\n throw new Error(\n `SessionPolicy.idleTimeoutMs (${idleTimeoutMs}ms) must be less than absoluteTimeoutMs (${absoluteTimeoutMs}ms)`,\n )\n }\n}\n","/**\n * Session tokens —\n *\n * After a vault is unlocked (via passphrase, WebAuthn, OIDC, or magic-\n * link), the caller can call `createSession()` to get a session token that\n * allows re-establishing the unlocked keyring (its DEK set) for the session\n * lifetime without re-running PBKDF2 or any interactive auth challenge.\n *\n * Security model\n * ──────────────\n * A session consists of two pieces that must both be present to recover the\n * keyring:\n *\n * 1. The **session key** — a non-extractable AES-256-GCM CryptoKey that\n * exists only in memory. \"Non-extractable\" is enforced by the WebCrypto\n * API: the key object cannot be serialized, exported, or sent over\n * postMessage. When the JS context is GC'd (tab close, navigation away,\n * worker termination) the key becomes unrecoverable.\n *\n * 2. The **session token** — a JSON object that carries the keyring payload\n * (the DEK map plus user metadata) encrypted with the session key\n * (AES-256-GCM, fresh IV per session), plus\n * unencrypted session metadata (sessionId, userId, vault, role,\n * expiresAt). The token can be serialized to JSON and stored in\n * sessionStorage or passed across callsites within the same tab, but\n * it is useless without the session key.\n *\n * The session key is kept in a module-level Map indexed by sessionId. Callers\n * that need to re-use a session must hold on to the sessionId returned from\n * `createSession()`; the key is looked up automatically by `resolveSession()`.\n *\n * Revocation: `revokeSession()` removes the entry from the Map. Because the\n * key is non-extractable, removal is sufficient — no one holds a serializable\n * copy of the key.\n *\n * Tab-scoped lifetime: the module-level Map lives only as long as the JS\n * module. Tab close → module unloaded → Map GC'd → all session keys gone.\n * This is the zero-effort logout: closing the tab is always a secure logout.\n *\n * Expiry: `createSession()` accepts a `ttlMs` option. 
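Wiring an enforcer end to end. The timeout fields and `lockOnBackground` are exactly what the enforcer reads; the `requireReAuthFor` literals are assumed `ReAuthOperation` values, inferred from the "export/grant/revoke/rotate/changeSecret" list in the class docs (import paths assumed):

```ts
import { createEnforcer, validateSessionPolicy } from './policy.js' // path assumed
import type { SessionPolicy } from '../types.js'
import { SessionPolicyError } from '../errors.js'

declare const sessionId: string
declare function lockUi(reason: string): void
declare function promptReAuth(): void

const policy: SessionPolicy = {
  idleTimeoutMs: 15 * 60 * 1000,         // lock after 15 min of inactivity
  absoluteTimeoutMs: 8 * 60 * 60 * 1000, // hard cap at 8 h regardless of activity
  lockOnBackground: true,                // lock when the tab is hidden
  requireReAuthFor: ['export', 'grant'], // assumed ReAuthOperation literals
}
validateSessionPolicy(policy) // developer-error check, throws a plain Error

const enforcer = createEnforcer({
  policy,
  sessionId,
  onRevoke: (reason) => lockUi(reason), // same teardown Noydb.close() performs
})

enforcer.touch() // at the top of every public operation

try {
  enforcer.checkOperation('export') // before each high-risk operation
} catch (err) {
  if (err instanceof SessionPolicyError) promptReAuth()
  else throw err
}
```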
`resolveSession()`\n * checks `expiresAt` and throws `SessionExpiredError` if the token is stale,\n * even if the session key is still in the Map.\n */\n\nimport { bufferToBase64, base64ToBuffer } from '../crypto.js'\nimport { generateULID } from '../bundle/ulid.js'\nimport type { Role } from '../types.js'\nimport type { UnlockedKeyring } from '../team/keyring.js'\nimport { SessionExpiredError, SessionNotFoundError } from '../errors.js'\n\nconst subtle = globalThis.crypto.subtle\n\n// Default session TTL: 60 minutes\nconst DEFAULT_TTL_MS = 60 * 60 * 1000\n\n// Module-level session key store. Tab-scoped by construction.\nconst sessionKeyStore = new Map<string, CryptoKey>()\n\n// ─── Public types ──────────────────────────────────────────────────────\n\n/** The serializable part of a session token. Safe to store in sessionStorage. */\nexport interface SessionToken {\n readonly _noydb_session: 1\n /** Unique session identifier (ULID). Use this as the handle for resolve/revoke. */\n readonly sessionId: string\n readonly userId: string\n readonly vault: string\n readonly role: Role\n /** ISO timestamp — resolveSession() rejects this token after this time. */\n readonly expiresAt: string\n /** Keyring payload (DEK map + user metadata) encrypted with the session key (AES-256-GCM). Base64. */\n readonly wrappedKek: string\n /** IV used for the payload encryption. Base64. */\n readonly kekIv: string\n}\n\n/** Result returned from `createSession()`. */\nexport interface CreateSessionResult {\n /** Serializable token — store in sessionStorage or pass to `resolveSession()`. */\n token: SessionToken\n /** The sessionId — use this handle for `resolveSession()` and `revokeSession()`. */\n sessionId: string\n}\n\n/** Options for `createSession()`. */\nexport interface CreateSessionOptions {\n /**\n * Session lifetime in milliseconds. Defaults to 60 minutes.\n * After this duration, `resolveSession()` throws `SessionExpiredError`.\n */\n ttlMs?: number\n}\n\n// ─── Core session operations ───────────────────────────────────────────\n\n/**\n * Create a session for an already-unlocked keyring.\n *\n * Call this after any successful unlock (passphrase, WebAuthn, OIDC,\n * magic-link). The returned `sessionId` is the handle for later\n * `resolveSession()` and `revokeSession()` calls.\n *\n * The session key is generated fresh (non-extractable) and stored in the\n * module-level Map. The KEK itself is derived with extractable: false and\n * cannot be exported, so it is not carried in the token. Instead, each DEK\n * from `keyring.deks` (DEKs are unwrapped with extractable: true) is\n * exported, and the serialized DEK map is encrypted with the session key.\n *\n * @param keyring - An already-unlocked keyring whose DEKs are available.\n * @param vault - The vault name this session is scoped to.\n * @param options - Optional session configuration.\n */\nexport async function createSession(\n keyring: UnlockedKeyring,\n vault: string,\n options: CreateSessionOptions = {},\n): Promise<CreateSessionResult> {\n const ttlMs = options.ttlMs ?? 
DEFAULT_TTL_MS\n const sessionId = generateULID()\n const expiresAt = new Date(Date.now() + ttlMs).toISOString()\n\n // Generate a fresh non-extractable session key.\n // AES-256-GCM is used here (rather than AES-KW) because the session key\n // wraps raw key bytes (the exported KEK) rather than a CryptoKey object.\n const sessionKey = await subtle.generateKey(\n { name: 'AES-GCM', length: 256 },\n false, // non-extractable — this is the tab-scope security invariant\n ['encrypt', 'decrypt'],\n )\n\n // Export the KEK as raw bytes so we can wrap it.\n // The KEK is AES-256-KW, which must have been importable (extractable: true)\n // to allow wrapKey — it is, because unwrapKey sets extractable: true for\n // DEKs, but the KEK itself is derived with extractable: false (see\n // crypto.ts deriveKey). We use a separate raw export + encrypt path.\n //\n // Wait — the KEK is AES-KW with extractable:false. We cannot export it.\n // Instead, we wrap the DEKs (which ARE extractable) and the salt+role+userId\n // metadata together. This means resolveSession() reconstructs an\n // UnlockedKeyring by re-wrapping the DEKs list from the token.\n //\n // Simpler approach: export each DEK (they're extractable) and encrypt\n // the serialized DEK map with the session key. The keyring is reconstructed\n // from the session token without the original KEK — only DEKs matter for\n // record operations.\n //\n // This is the right design: sessions don't need the KEK (no re-grant,\n // no re-derive during session lifetime). They need the DEK set.\n\n const dekMap: Record<string, string> = {}\n for (const [collName, dek] of keyring.deks) {\n const raw = await subtle.exportKey('raw', dek)\n dekMap[collName] = bufferToBase64(raw)\n }\n\n const payload = JSON.stringify({\n userId: keyring.userId,\n displayName: keyring.displayName,\n role: keyring.role,\n permissions: keyring.permissions,\n deks: dekMap,\n salt: bufferToBase64(keyring.salt),\n })\n\n const iv = globalThis.crypto.getRandomValues(new Uint8Array(12))\n const encrypted = await subtle.encrypt(\n { name: 'AES-GCM', iv },\n sessionKey,\n new TextEncoder().encode(payload),\n )\n\n const token: SessionToken = {\n _noydb_session: 1,\n sessionId,\n userId: keyring.userId,\n vault,\n role: keyring.role,\n expiresAt,\n wrappedKek: bufferToBase64(encrypted),\n kekIv: bufferToBase64(iv),\n }\n\n sessionKeyStore.set(sessionId, sessionKey)\n return { token, sessionId }\n}\n\n/**\n * Resolve a session token back into an UnlockedKeyring.\n *\n * Looks up the session key by `sessionId`, checks the token is not expired,\n * then decrypts the payload to reconstruct the keyring's DEK set.\n *\n * Throws `SessionExpiredError` if the token's `expiresAt` is in the past.\n * Throws `SessionNotFoundError` if the session key is not in the store\n * (tab was reloaded, session was revoked, or the sessionId is wrong).\n *\n * @param token - The SessionToken from `createSession()`.\n */\nexport async function resolveSession(token: SessionToken): Promise<UnlockedKeyring> {\n // Expiry check first — fast path without touching crypto\n if (Date.now() > new Date(token.expiresAt).getTime()) {\n sessionKeyStore.delete(token.sessionId)\n throw new SessionExpiredError(token.sessionId)\n }\n\n const sessionKey = sessionKeyStore.get(token.sessionId)\n if (!sessionKey) {\n throw new SessionNotFoundError(token.sessionId)\n }\n\n const iv = base64ToBuffer(token.kekIv)\n const ciphertext = base64ToBuffer(token.wrappedKek)\n\n let plaintext: ArrayBuffer\n try {\n plaintext = await 
\n { name: 'AES-GCM', iv },\n sessionKey,\n ciphertext,\n )\n } catch {\n // Treat an undecryptable token the same as a missing session.\n throw new SessionNotFoundError(token.sessionId)\n }\n\n const payload = JSON.parse(new TextDecoder().decode(plaintext)) as {\n userId: string\n displayName: string\n role: Role\n permissions: Record<string, 'rw' | 'ro'>\n deks: Record<string, string>\n salt: string\n }\n\n const deks = new Map<string, CryptoKey>()\n for (const [collName, rawBase64] of Object.entries(payload.deks)) {\n const dek = await subtle.importKey(\n 'raw',\n base64ToBuffer(rawBase64),\n { name: 'AES-GCM', length: 256 },\n true,\n ['encrypt', 'decrypt'],\n )\n deks.set(collName, dek)\n }\n\n return {\n userId: payload.userId,\n displayName: payload.displayName,\n role: payload.role,\n permissions: payload.permissions,\n deks,\n kek: null as unknown as CryptoKey, // KEK not available in session context\n salt: base64ToBuffer(payload.salt),\n }\n}\n\n/**\n * Revoke a session by removing its key from the store.\n *\n * After revocation, `resolveSession()` will throw `SessionNotFoundError`\n * for this sessionId. The session token (if held by the caller) becomes\n * permanently useless. This is the explicit logout path.\n *\n * No-op if the session was already expired or does not exist.\n */\nexport function revokeSession(sessionId: string): void {\n sessionKeyStore.delete(sessionId)\n}\n\n/**\n * Check if a session is still alive (key in store + not expired).\n * Does not decrypt anything — purely a metadata check.\n */\nexport function isSessionAlive(token: SessionToken): boolean {\n if (Date.now() > new Date(token.expiresAt).getTime()) return false\n return sessionKeyStore.has(token.sessionId)\n}\n\n/**\n * Revoke all active sessions. Used by `Noydb.close()` to ensure that\n * closing the instance destroys all session state, not just the keyring\n * cache.\n */\nexport function revokeAllSessions(): void {\n sessionKeyStore.clear()\n}\n\n/**\n * Return the number of active sessions currently in the store.\n * Useful for diagnostics and tests.\n */\nexport function activeSessionCount(): number {\n return sessionKeyStore.size\n}\n","/**\n * Dev-mode persistent unlock —\n *\n * Solves the developer inner-loop friction: hot-reload destroys the session\n * (page navigation semantics), forcing a passphrase re-entry every refresh.\n *\n * This module provides an opt-in, deliberately-named escape hatch that lets\n * developers store the keyring payload in sessionStorage or localStorage so\n * the vault auto-unlocks on every page load — without a passphrase,\n * without a biometric prompt, without any OIDC flow.\n *\n * ⚠️ WARNING — this is a loaded footgun ⚠️\n * ─────────────────────────────────────────\n * The keyring payload stored by this module contains the DEKs. Whoever has\n * access to sessionStorage/localStorage has access to the DEKs. On a shared\n * development machine, a compromised browser extension, or a mis-configured\n * origin, this is a complete key exposure.\n *\n * This module is ONLY safe for local development. It must NEVER be active\n * in production builds.\n *\n * Guardrails (all enforced by the module, not by the caller)\n * ──────────────────────────────────────────────────────────\n * 1. **Production guard:** `enableDevUnlock()` throws immediately if\n * `process.env.NODE_ENV === 'production'` or if the build is marked as\n * production (the `__vite_is_production__` global checked by\n * `assertDevEnvironment()`, set by Vite-style build tooling). Also throws\n * if the hostname is not local (`localhost`, `127.0.0.1`, `::1`, or a\n * `*.local` name).\n *\n * 2. 
**Explicit acknowledgement string:** the caller must pass\n * `acknowledge: 'I-UNDERSTAND-THIS-DISABLES-UNLOCK-SECURITY'` or the call\n * throws. This string appears in every grep for `devUnlock` in the codebase,\n * making it impossible to enable this feature accidentally.\n *\n * 3. **Scope is vault + userId:** the storage key includes both the\n * vault name and the userId, so dev-unlock for vault-A does\n * NOT auto-unlock vault-B.\n *\n * 4. **Storage scope:** default is `sessionStorage` (cleared on tab close).\n * `localStorage` is opt-in and requires an additional\n * `persistAcrossTabs: true` flag in the options.\n *\n * 5. **Clear method:** `clearDevUnlock()` removes the stored payload. Wire\n * this to a dev toolbar button or `Ctrl+Shift+L` so clearing is one action.\n *\n * 6. **Console banner:** on first enable, a highly visible console warning\n * fires. Cannot be suppressed.\n *\n * Usage\n * ─────\n * ```ts\n * // In your dev entry point only (guarded by import.meta.env.DEV):\n * if (import.meta.env.DEV) {\n * const { enableDevUnlock, loadDevUnlock } = await import('@noy-db/hub')\n * enableDevUnlock('my-compartment', 'alice', keyring, {\n * acknowledge: 'I-UNDERSTAND-THIS-DISABLES-UNLOCK-SECURITY',\n * })\n * }\n *\n * // On page load:\n * if (import.meta.env.DEV) {\n * const keyring = await loadDevUnlock('my-compartment', 'alice')\n * if (keyring) {\n * // Skip unlock prompt, use keyring directly\n * }\n * }\n * ```\n */\n\nimport { bufferToBase64, base64ToBuffer } from '../crypto.js'\nimport { ValidationError } from '../errors.js'\nimport type { UnlockedKeyring } from '../team/keyring.js'\nimport type { Role } from '../types.js'\n\n// The exact acknowledgement string callers must pass\nconst REQUIRED_ACKNOWLEDGE = 'I-UNDERSTAND-THIS-DISABLES-UNLOCK-SECURITY'\n\nconst STORAGE_PREFIX = 'noydb:dev-unlock:'\n\n// ─── Options ──────────────────────────────────────────────────────────\n\nexport interface DevUnlockOptions {\n /**\n * Required: the exact string 'I-UNDERSTAND-THIS-DISABLES-UNLOCK-SECURITY'.\n * Any other value causes `enableDevUnlock()` to throw.\n */\n acknowledge: string\n /**\n * If `true`, stores in localStorage (persists across tabs and browser restarts).\n * If `false` (default), stores in sessionStorage (cleared on tab close).\n */\n persistAcrossTabs?: boolean\n}\n\n// ─── Production guard ─────────────────────────────────────────────────\n\nfunction assertDevEnvironment(): void {\n // Node.js: check NODE_ENV\n if (\n typeof process !== 'undefined' &&\n process.env.NODE_ENV === 'production'\n ) {\n throw new ValidationError(\n 'devUnlock is not available in production builds. ' +\n 'process.env.NODE_ENV is \"production\".',\n )\n }\n\n // Vite / build tool convention\n if (\n typeof globalThis !== 'undefined' &&\n (globalThis as Record<string, unknown>).__vite_is_production__ === true\n ) {\n throw new ValidationError('devUnlock is not available in production builds.')\n }\n\n // Browser: only allow on localhost\n if (\n typeof window !== 'undefined' &&\n typeof window.location !== 'undefined'\n ) {\n const host = window.location.hostname\n if (host !== 'localhost' && host !== '127.0.0.1' && host !== '::1' && !host.endsWith('.local')) {\n throw new ValidationError(\n `devUnlock is only available on localhost. Current hostname: \"${host}\". 
` +\n 'Set NODE_ENV=development and run on localhost to use dev unlock.',\n )\n }\n }\n}\n\n// ─── Storage key ──────────────────────────────────────────────────────\n\nfunction storageKey(vault: string, userId: string): string {\n return `${STORAGE_PREFIX}${vault}:${userId}`\n}\n\nfunction resolveStorage(persistAcrossTabs?: boolean): Storage {\n if (typeof window === 'undefined') {\n throw new ValidationError('devUnlock requires a browser environment (window.sessionStorage / window.localStorage).')\n }\n return persistAcrossTabs ? window.localStorage : window.sessionStorage\n}\n\n// ─── Public API ────────────────────────────────────────────────────────\n\n/**\n * Serialize and store a keyring to browser storage for dev-mode auto-unlock.\n *\n * Throws immediately if:\n * - The acknowledge string is wrong.\n * - Running in a production environment (NODE_ENV=production).\n * - Running on a non-localhost hostname.\n *\n * Emits a highly visible console warning that cannot be suppressed.\n *\n * @param vault - The vault name.\n * @param userId - The user ID.\n * @param keyring - The unlocked keyring to persist.\n * @param options - Options including the required acknowledge string.\n */\nexport async function enableDevUnlock(\n vault: string,\n userId: string,\n keyring: UnlockedKeyring,\n options: DevUnlockOptions,\n): Promise<void> {\n if (options.acknowledge !== REQUIRED_ACKNOWLEDGE) {\n throw new ValidationError(\n `devUnlock requires acknowledge: '${REQUIRED_ACKNOWLEDGE}'. ` +\n `Got: '${options.acknowledge}'. This is intentional — the full string must appear in your source.`,\n )\n }\n\n assertDevEnvironment()\n\n const storage = resolveStorage(options.persistAcrossTabs)\n\n const dekMap: Record<string, string> = {}\n for (const [collName, dek] of keyring.deks) {\n const raw = await globalThis.crypto.subtle.exportKey('raw', dek)\n dekMap[collName] = bufferToBase64(raw)\n }\n\n const payload = JSON.stringify({\n _noydb_dev_unlock: 1,\n userId: keyring.userId,\n displayName: keyring.displayName,\n role: keyring.role,\n permissions: keyring.permissions,\n deks: dekMap,\n salt: bufferToBase64(keyring.salt),\n })\n\n storage.setItem(storageKey(vault, userId), payload)\n\n // Visible warning that cannot be suppressed\n console.warn(\n '%c⚠️ NOYDB DEV UNLOCK ACTIVE ⚠️',\n 'color: red; font-size: 16px; font-weight: bold',\n `\\n\\nVault \"${vault}\" user \"${userId}\" has its DEKs stored in ` +\n `${options.persistAcrossTabs ? 'localStorage' : 'sessionStorage'} in PLAINTEXT.\\n` +\n 'This is ONLY safe for local development. Never use in production.\\n' +\n 'Call clearDevUnlock() to remove.',\n )\n}\n\n/**\n * Load a dev-mode keyring from browser storage.\n *\n * Returns `null` if no dev-unlock state is stored for this vault + user,\n * or if the stored payload is malformed.\n *\n * Does NOT perform the production environment check — it's safe to CALL\n * `loadDevUnlock` in production (it will simply return `null` because no\n * dev-unlock state was ever written). 
The guard only fires on `enableDevUnlock`.\n *\n * @param vault - The vault name.\n * @param userId - The user ID.\n * @param options - Optional storage override.\n */\nexport async function loadDevUnlock(\n vault: string,\n userId: string,\n options: { persistAcrossTabs?: boolean } = {},\n): Promise<UnlockedKeyring | null> {\n if (typeof window === 'undefined') return null\n\n const storage = resolveStorage(options.persistAcrossTabs)\n const raw = storage.getItem(storageKey(vault, userId))\n if (!raw) return null\n\n let parsed: {\n _noydb_dev_unlock?: number\n userId: string\n displayName: string\n role: Role\n permissions: Record<string, 'rw' | 'ro'>\n deks: Record<string, string>\n salt: string\n }\n try {\n parsed = JSON.parse(raw)\n } catch {\n return null\n }\n\n if (parsed._noydb_dev_unlock !== 1) return null\n\n const deks = new Map<string, CryptoKey>()\n for (const [collName, rawBase64] of Object.entries(parsed.deks)) {\n const dek = await globalThis.crypto.subtle.importKey(\n 'raw',\n base64ToBuffer(rawBase64),\n { name: 'AES-GCM', length: 256 },\n true,\n ['encrypt', 'decrypt'],\n )\n deks.set(collName, dek)\n }\n\n return {\n userId: parsed.userId,\n displayName: parsed.displayName,\n role: parsed.role,\n permissions: parsed.permissions,\n deks,\n kek: null as unknown as CryptoKey,\n salt: base64ToBuffer(parsed.salt),\n }\n}\n\n/**\n * Remove dev-unlock state from browser storage.\n *\n * Safe to call in production (no-op if no dev state exists).\n */\nexport function clearDevUnlock(\n vault: string,\n userId: string,\n options: { persistAcrossTabs?: boolean } = {},\n): void {\n if (typeof window === 'undefined') return\n const storage = resolveStorage(options.persistAcrossTabs)\n storage.removeItem(storageKey(vault, userId))\n}\n\n/**\n * Check if dev-unlock state exists for this vault + user.\n *\n * Safe to call in production (returns false if nothing is stored).\n */\nexport function isDevUnlockActive(\n vault: string,\n userId: string,\n options: { persistAcrossTabs?: boolean } = {},\n): boolean {\n if (typeof window === 'undefined') return false\n const storage = resolveStorage(options.persistAcrossTabs)\n return storage.getItem(storageKey(vault, userId)) !== null\n}\n","/**\n * Zero-dependency JSON diff.\n * Produces a flat list of changes between two plain objects.\n */\n\nexport type ChangeType = 'added' | 'removed' | 'changed'\n\nexport interface DiffEntry {\n /** Dot-separated path to the changed field (e.g. \"address.city\"). */\n readonly path: string\n /** Type of change. */\n readonly type: ChangeType\n /** Previous value (undefined for 'added'). */\n readonly from?: unknown\n /** New value (undefined for 'removed'). 
*/\n readonly to?: unknown\n}\n\n/**\n * Compute differences between two objects.\n * Returns an array of DiffEntry describing each changed field.\n * Returns empty array if objects are identical.\n */\nexport function diff(oldObj: unknown, newObj: unknown, basePath = ''): DiffEntry[] {\n const changes: DiffEntry[] = []\n\n // Both primitives or nulls\n if (oldObj === newObj) return changes\n\n // One is null/undefined\n if (oldObj == null && newObj != null) {\n return [{ path: basePath || '(root)', type: 'added', to: newObj }]\n }\n if (oldObj != null && newObj == null) {\n return [{ path: basePath || '(root)', type: 'removed', from: oldObj }]\n }\n\n // Different types\n if (typeof oldObj !== typeof newObj) {\n return [{ path: basePath || '(root)', type: 'changed', from: oldObj, to: newObj }]\n }\n\n // Both primitives (and not equal — checked above)\n if (typeof oldObj !== 'object') {\n return [{ path: basePath || '(root)', type: 'changed', from: oldObj, to: newObj }]\n }\n\n // Both arrays\n if (Array.isArray(oldObj) && Array.isArray(newObj)) {\n const maxLen = Math.max(oldObj.length, newObj.length)\n for (let i = 0; i < maxLen; i++) {\n const p = basePath ? `${basePath}[${i}]` : `[${i}]`\n if (i >= oldObj.length) {\n changes.push({ path: p, type: 'added', to: newObj[i] })\n } else if (i >= newObj.length) {\n changes.push({ path: p, type: 'removed', from: oldObj[i] })\n } else {\n changes.push(...diff(oldObj[i], newObj[i], p))\n }\n }\n return changes\n }\n\n // Both objects\n const oldRecord = oldObj as Record<string, unknown>\n const newRecord = newObj as Record<string, unknown>\n const allKeys = new Set([...Object.keys(oldRecord), ...Object.keys(newRecord)])\n\n for (const key of allKeys) {\n const p = basePath ? `${basePath}.${key}` : key\n if (!(key in oldRecord)) {\n changes.push({ path: p, type: 'added', to: newRecord[key] })\n } else if (!(key in newRecord)) {\n changes.push({ path: p, type: 'removed', from: oldRecord[key] })\n } else {\n changes.push(...diff(oldRecord[key], newRecord[key], p))\n }\n }\n\n return changes\n}\n\n/** Format a diff as a human-readable string. */\nexport function formatDiff(changes: DiffEntry[]): string {\n if (changes.length === 0) return '(no changes)'\n return changes.map(c => {\n switch (c.type) {\n case 'added':\n return `+ ${c.path}: ${JSON.stringify(c.to)}`\n case 'removed':\n return `- ${c.path}: ${JSON.stringify(c.from)}`\n case 'changed':\n return `~ ${c.path}: ${JSON.stringify(c.from)} → ${JSON.stringify(c.to)}`\n }\n }).join('\\n')\n}\n","/**\n * Vault-level diff orchestrator.\n *\n * Compares a live `Vault`'s plaintext state against a candidate state\n * (another vault, a plain `{ collection: records[] }` map, or a vault\n * dump JSON) and returns a structured `VaultDiff` plan listing the\n * records that would be added, modified, or deleted to bring the live\n * vault into the candidate's shape.\n *\n * Builds on two existing record-level helpers:\n *\n * 1. `diff(a, b)` from `./history/diff.ts` — emits dot-pathed\n * `DiffEntry[]` with `type: 'added' | 'removed' | 'changed'` for\n * each changed field of two records. Used here for the\n * `fieldDiffs` of every `modified` entry, and (with empty result)\n * as the default deep-equal check.\n *\n * 2. `Vault.exportStream()` from `./vault.ts` — the canonical\n * decrypt-and-stream-records iterator. Used to walk both sides\n * when the candidate is itself a `Vault`. 
ACL-scoped: collections\n * the caller can't read silently drop out, the same way every\n * other plaintext-emitting export pipeline filters them.\n *\n * The new orchestration is the **vault-level** enumeration: bucket\n * each record id into added (only in candidate), deleted (only in\n * vault), or modified (in both with field changes); leave the\n * field-level granularity to the existing `diff()`.\n *\n * Use cases:\n *\n * - Import preview (`@noy-db/as-*` `fromString` returns a plan\n * whose body is a `VaultDiff`).\n * - Backup verification (\"does this `.noydb` bundle from yesterday\n * match the current vault?\").\n * - Two-vault reconciliation (\"what's different between Office A\n * and Office B before we sync?\").\n * - Test assertions (golden-file testing with one-liner\n * `expect(plan.summary).toEqual(...)`).\n *\n * @module\n */\n\nimport type { Vault } from './vault.js'\nimport { diff as fieldDiff, type DiffEntry as FieldDiffEntry } from './history/diff.js'\n\n// ─── Public types ──────────────────────────────────────────────────────\n\n/** Per-record entry shape — added and deleted records carry only the record value. */\nexport interface VaultDiffEntry<T = unknown> {\n readonly collection: string\n readonly id: string\n readonly record: T\n}\n\n/** Modified records carry both halves of the diff plus the field-level breakdown. */\nexport interface VaultDiffModifiedEntry<T = unknown> extends VaultDiffEntry<T> {\n /** The record as it stands in the live vault. */\n readonly before: T\n /** Top-level keys whose values differ between `before` and `record`. */\n readonly fieldsChanged: readonly string[]\n /**\n * Field-level diff entries from `diff(before, record)`. Reuses the\n * existing per-record diff helper so consumers can render git-style\n * `path: from → to` rows without re-walking the records.\n */\n readonly fieldDiffs: readonly FieldDiffEntry[]\n}\n\nexport interface VaultDiff<T = unknown> {\n readonly added: readonly VaultDiffEntry<T>[]\n readonly modified: readonly VaultDiffModifiedEntry<T>[]\n readonly deleted: readonly VaultDiffEntry<T>[]\n /** Only populated when `options.includeUnchanged: true`. */\n readonly unchanged: readonly VaultDiffEntry<T>[] | undefined\n readonly summary: {\n readonly add: number\n readonly modify: number\n readonly delete: number\n readonly total: number\n }\n /**\n * Format the diff as a human-readable string.\n *\n * - `'count'` — one line, just the numbers (`12 added · 3 modified · 0 deleted`)\n * - `'one-line'` — alias of `'count'`: the same numbers, guaranteed to\n * stay on a single line\n * - `'full'` — count + one row per added/modified/deleted record (default)\n */\n format(opts?: { detail?: 'count' | 'one-line' | 'full' }): string\n}\n\nexport interface DiffOptions {\n /** Restrict the diff to a subset of collections. */\n readonly collections?: readonly string[]\n /** Field on each record that carries its id. Defaults to `'id'`. */\n readonly idKey?: string\n /** Override the default deep-equal check for \"modified vs unchanged\". */\n readonly compareFn?: (a: unknown, b: unknown) => boolean\n /** If true, include unchanged records in the diff (off by default to save memory). */\n readonly includeUnchanged?: boolean\n}\n\n/**\n * Candidate state to diff the vault against:\n *\n * - A `Vault` instance — both sides are walked via `exportStream()`.\n * - A `Record<collection, records[]>` map — same shape `as-json.toObject()`\n * produces. Useful for diffing parsed file content against the live vault.\n * - A `VaultDump` (output of `vault.dump()`) — a JSON string carrying the\n * full vault state. Parsed and reduced to the map shape above.\n */\nexport type DiffCandidate<T = unknown> =\n | Vault\n | Record<string, readonly T[]>\n | string\n\n// ─── Implementation ────────────────────────────────────────────────────\n\n/**\n * Compute the diff between a live vault and a candidate state.\n *\n * Returns a fully buffered `VaultDiff` — no streaming. Memory cost is\n * O(n + m) in the row count of vault + candidate. For the documented\n * 1K-50K-record vaults this is fine; a streaming variant can follow if a\n * >100K-record consumer arrives.\n *
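\n * A minimal usage sketch (the collection name and records are hypothetical;\n * records are assumed to carry `id` fields):\n *\n * ```ts\n * const plan = await diffVault(vault, { users: parsedUsers })\n * console.log(plan.summary) // e.g. { add: 2, modify: 1, delete: 0, total: 3 }\n * console.log(plan.format({ detail: 'count' }))\n * ```\n *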
\n */\nexport async function diffVault<T = unknown>(\n vault: Vault,\n candidate: DiffCandidate<T>,\n options: DiffOptions = {},\n): Promise<VaultDiff<T>> {\n const idKey = options.idKey ?? 'id'\n const filter = options.collections ? new Set(options.collections) : null\n const compareFn =\n options.compareFn ?? ((a: unknown, b: unknown) => fieldDiff(a, b).length === 0)\n\n // Side A — walk the live vault via exportStream(). Each chunk arrives\n // already decrypted and ACL-scoped, so collections the caller can't\n // read silently drop out. exportStream's records are typed `unknown[]`\n // — diffVault is the type-erasure boundary; the caller asserts the\n // record shape via the function's `<T>` generic.\n const live = new Map<string, Map<string, T>>()\n for await (const chunk of vault.exportStream({ granularity: 'collection' })) {\n if (filter && !filter.has(chunk.collection)) continue\n const collection = live.get(chunk.collection) ?? new Map<string, T>()\n for (const record of chunk.records) {\n const id = readIdField(record, idKey)\n if (!id) continue\n collection.set(id, record as T)\n }\n live.set(chunk.collection, collection)\n }\n\n // Side B — normalise the candidate into the same shape.\n const cand = await normaliseCandidate<T>(candidate, idKey, filter)\n\n // Walk every (collection, id) on either side and bucket.\n const added: VaultDiffEntry<T>[] = []\n const modified: VaultDiffModifiedEntry<T>[] = []\n const deleted: VaultDiffEntry<T>[] = []\n const unchanged: VaultDiffEntry<T>[] | undefined = options.includeUnchanged ? [] : undefined\n\n const collectionNames = new Set([...live.keys(), ...cand.keys()])\n for (const collection of [...collectionNames].sort()) {\n const liveColl = live.get(collection) ?? new Map<string, T>()\n const candColl = cand.get(collection) ?? 
new Map<string, T>()\n const allIds = new Set([...liveColl.keys(), ...candColl.keys()])\n\n for (const id of [...allIds].sort()) {\n const before = liveColl.get(id)\n const after = candColl.get(id)\n\n if (before === undefined && after !== undefined) {\n added.push({ collection, id, record: after })\n } else if (before !== undefined && after === undefined) {\n deleted.push({ collection, id, record: before })\n } else if (before !== undefined && after !== undefined) {\n if (compareFn(before, after)) {\n unchanged?.push({ collection, id, record: after })\n } else {\n const fieldDiffs = fieldDiff(before, after)\n const fieldsChanged = uniqueTopLevelKeys(fieldDiffs)\n modified.push({\n collection,\n id,\n record: after,\n before,\n fieldsChanged,\n fieldDiffs,\n })\n }\n }\n }\n }\n\n const summary = {\n add: added.length,\n modify: modified.length,\n delete: deleted.length,\n total: added.length + modified.length + deleted.length,\n }\n\n return {\n added,\n modified,\n deleted,\n unchanged,\n summary,\n format(opts) {\n return formatDiff(opts?.detail ?? 'full', { added, modified, deleted, summary })\n },\n }\n}\n\n// ─── Internals ─────────────────────────────────────────────────────────\n\nasync function normaliseCandidate<T>(\n candidate: DiffCandidate<T>,\n idKey: string,\n filter: Set<string> | null,\n): Promise<Map<string, Map<string, T>>> {\n const out = new Map<string, Map<string, T>>()\n\n // Vault instance — duck-type via the exportStream method (matches\n // vault.ts's structural shape without forcing a runtime instanceof check\n // that would import the class and risk circular deps).\n if (\n typeof candidate === 'object' &&\n candidate !== null &&\n 'exportStream' in candidate &&\n typeof (candidate as Vault).exportStream === 'function'\n ) {\n for await (const chunk of (candidate as Vault).exportStream({ granularity: 'collection' })) {\n if (filter && !filter.has(chunk.collection)) continue\n const collection = out.get(chunk.collection) ?? new Map<string, T>()\n for (const record of chunk.records) {\n const id = readIdField(record, idKey)\n if (!id) continue\n collection.set(id, record as T)\n }\n out.set(chunk.collection, collection)\n }\n return out\n }\n\n // String — assume a vault.dump() JSON string. 
Parse and reduce to the map shape.\n if (typeof candidate === 'string') {\n let parsed: unknown\n try {\n parsed = JSON.parse(candidate)\n } catch (err) {\n throw new Error(\n `diffVault: candidate string is not valid JSON (${(err as Error).message})`,\n )\n }\n return collectionsFromObject<T>(parsed, idKey, filter)\n }\n\n // Plain object — `Record<collection, records[]>` (same shape as-json.toObject() returns).\n return collectionsFromObject<T>(candidate, idKey, filter)\n}\n\nfunction collectionsFromObject<T>(\n raw: unknown,\n idKey: string,\n filter: Set<string> | null,\n): Map<string, Map<string, T>> {\n const out = new Map<string, Map<string, T>>()\n if (raw === null || typeof raw !== 'object') {\n throw new Error('diffVault: candidate must be a Vault, an object, or a JSON string')\n }\n // A vault dump JSON has a top-level shape like { _compartment, _keyring, <coll>: <records[]> }.\n // We accept both: keys starting with `_` are skipped (they're metadata), the rest are collections.\n for (const [key, value] of Object.entries(raw)) {\n if (key.startsWith('_')) continue\n if (filter && !filter.has(key)) continue\n if (!Array.isArray(value)) continue\n const collection = new Map<string, T>()\n for (const record of value as readonly T[]) {\n if (record === null || typeof record !== 'object') continue\n const id = readIdField(record, idKey)\n if (!id) continue\n collection.set(id, record)\n }\n out.set(key, collection)\n }\n return out\n}\n\nfunction uniqueTopLevelKeys(diffs: readonly FieldDiffEntry[]): readonly string[] {\n const keys = new Set<string>()\n for (const d of diffs) {\n // path is dot-separated; the top-level key is everything before the\n // first `.` or `[`. (`a.b.c` → `a`, `tags[0]` → `tags`, `(root)` → `(root)`).\n const m = /^[^.[]+/.exec(d.path)\n if (m) keys.add(m[0])\n }\n return [...keys]\n}\n\n/**\n * Pull the id field off a record without going through `String(obj)`,\n * which would emit `[object Object]` for nested objects and silently\n * collapse rows that share the same parent. 
Only string and number ids\n * are accepted; anything else returns the empty string and the record\n * is skipped at the call site.\n */\nfunction readIdField(record: unknown, idKey: string): string {\n if (record === null || typeof record !== 'object') return ''\n const v = (record as Record<string, unknown>)[idKey]\n if (typeof v === 'string') return v\n if (typeof v === 'number' && Number.isFinite(v)) return String(v)\n return ''\n}\n\ninterface FormatBuckets<T> {\n readonly added: readonly VaultDiffEntry<T>[]\n readonly modified: readonly VaultDiffModifiedEntry<T>[]\n readonly deleted: readonly VaultDiffEntry<T>[]\n readonly summary: VaultDiff<T>['summary']\n}\n\nfunction formatDiff<T>(\n detail: 'count' | 'one-line' | 'full',\n b: FormatBuckets<T>,\n): string {\n const head = `${b.summary.add} added · ${b.summary.modify} modified · ${b.summary.delete} deleted`\n // 'count' and 'one-line' always return the single summary line; only\n // 'full' may append per-record rows or the '(no changes)' marker.\n if (detail === 'count' || detail === 'one-line') return head\n if (b.summary.total === 0) return head + '\\n(no changes)'\n\n const rows: string[] = [head, '']\n for (const e of b.added) rows.push(`${e.collection}/${e.id}\\tadded`)\n for (const e of b.modified) {\n const fields = e.fieldDiffs\n .map((f) => `${f.path}: ${shortJSON(f.from)} → ${shortJSON(f.to)}`)\n .join(', ')\n rows.push(`${e.collection}/${e.id}\\tmodified\\t${fields}`)\n }\n for (const e of b.deleted) rows.push(`${e.collection}/${e.id}\\tdeleted`)\n return rows.join('\\n')\n}\n\nfunction shortJSON(value: unknown): string {\n if (value === undefined) return 'undefined'\n const s = JSON.stringify(value)\n // JSON.stringify returns string for any JSON value except `undefined`\n // (handled above), `function`, and `symbol`. Fall back to a static\n // tag for those — never let an arbitrary object hit the default\n // stringifier (which the lint rule explicitly bans).\n if (typeof s !== 'string') return '<unrepresentable>'\n return s.length > 60 ? s.slice(0, 57) + '...' : s\n}\n","import { ValidationError } from './errors.js'\n\n/**\n * Validate passphrase strength.\n * Checks length and basic entropy heuristics.\n * Throws ValidationError if too weak.\n */\nexport function validatePassphrase(passphrase: string): void {\n if (passphrase.length < 8) {\n throw new ValidationError(\n 'Passphrase too short — minimum 8 characters. ' +\n 'Recommended: 12+ characters or a 4+ word passphrase.',\n )\n }\n\n const entropy = estimateEntropy(passphrase)\n if (entropy < 28) {\n throw new ValidationError(\n 'Passphrase too weak — too little entropy. 
' +\n 'Use a mix of uppercase, lowercase, numbers, and symbols, ' +\n 'or use a 4+ word passphrase.',\n )\n }\n}\n\n/**\n * Estimate passphrase entropy in bits.\n * Uses character class analysis (not dictionary-based).\n */\nexport function estimateEntropy(passphrase: string): number {\n let charsetSize = 0\n\n if (/[a-z]/.test(passphrase)) charsetSize += 26\n if (/[A-Z]/.test(passphrase)) charsetSize += 26\n if (/[0-9]/.test(passphrase)) charsetSize += 10\n if (/[^a-zA-Z0-9]/.test(passphrase)) charsetSize += 32\n\n if (charsetSize === 0) charsetSize = 26 // fallback\n\n return Math.floor(passphrase.length * Math.log2(charsetSize))\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AA+XO,SAAS,YACd,SACmC;AACnC,SAAO;AACT;AAnYA,IAyCa,sBAGA,uBAGA,sBAGA;AAlDb;AAAA;AAAA;AAyCO,IAAM,uBAAuB;AAG7B,IAAM,wBAAwB;AAG9B,IAAM,uBAAuB;AAG7B,IAAM,qBAAqB;AAAA;AAAA;;;AClDlC,IA4Ea,YAqBA,iBAgBA,eAgBA,iBAiBA,eAeA,eAkBA,wBAmBA,oBAmBA,uBA4BA,uBAmCA,qBA+BA,uBAiEA,sBAiBA,0BA+BA,mBA+BA,qBAoBA,uBAoBA,sBAiBA,uBAeA,8BAwBA,eAuBA,uBAsBA,4BAkBA,cAgBA,eAeA,iBA2BA,uBAwCA,uBAuCA,oBAmCA,wBAwCA,sBAkBA,6BAwBA,qBAyBA,mBAsCA,yBA6BA,yBA4BA,8BA+BA,mBAkBA,sBA4BA,qBAqBA,sBAmBA,oBAgCA,mBAoCA,wBAyBA,2BAaA;AA3rCb;AAAA;AAAA;AA4EO,IAAM,aAAN,cAAyB,MAAM;AAAA;AAAA,MAE3B;AAAA,MAET,YAAY,MAAc,SAAiB;AACzC,cAAM,OAAO;AACb,aAAK,OAAO;AACZ,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAYO,IAAM,kBAAN,cAA8B,WAAW;AAAA,MAC9C,YAAY,UAAU,qBAAqB;AACzC,cAAM,qBAAqB,OAAO;AAClC,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAWO,IAAM,gBAAN,cAA4B,WAAW;AAAA,MAC5C,YAAY,UAAU,yEAAoE;AACxF,cAAM,YAAY,OAAO;AACzB,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAWO,IAAM,kBAAN,cAA8B,WAAW;AAAA,MAC9C,YAAY,UAAU,4DAAuD;AAC3E,cAAM,eAAe,OAAO;AAC5B,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAYO,IAAM,gBAAN,cAA4B,WAAW;AAAA,MAC5C,YAAY,UAAU,iEAA4D;AAChF,cAAM,aAAa,OAAO;AAC1B,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAUO,IAAM,gBAAN,cAA4B,WAAW;AAAA,MAC5C,YAAY,UAAU,8DAAyD;AAC7E,cAAM,aAAa,OAAO;AAC1B,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAaO,IAAM,yBAAN,cAAqC,WAAW;AAAA,MACrD,YAAY,WAAmB,WAAmB;AAChD;AAAA,UACE;AAAA,UACA,UAAU,SAAS,kCAAkC,SAAS;AAAA,QAChE;AACA,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAWO,IAAM,qBAAN,cAAiC,WAAW;AAAA,MACjD,YAAY,WAAmB;AAC7B;AAAA,UACE;AAAA,UACA,UAAU,SAAS;AAAA,QACrB;AACA,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAWO,IAAM,wBAAN,cAAoC,WAAW;AAAA,MACpD,YAAY,UAAU,iEAA4D;AAChF,cAAM,qBAAqB,OAAO;AAClC,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAuBO,IAAM,wBAAN,cAAoC,WAAW;AAAA,MAC3C;AAAA,MACA;AAAA,MACA;AAAA,MAET,YAAY,MAKT;AACD,cAAM,MACJ,KAAK,YACJ,KAAK,SAAS,cACX,4CAAuC,KAAK,MAAM,4DAA4D,KAAK,UAAU,WAAW,gGAAgG,KAAK,UAAU,UAAU,aACjQ,4CAAuC,KAAK,MAAM;AACxD,cAAM,qBAAqB,GAAG;AAC9B,aAAK,OAAO;AACZ,aAAK,OAAO,KAAK;AACjB,aAAK,SAAS,KAAK;AACnB,YAAI,KAAK,WAAW,OAAW,MAAK,SAAS,KAAK;AAAA,MACpD;AAAA,IACF;AAaO,IAAM,sBAAN,cAAkC,WAAW;AAAA,MACzC;AAAA,MACA;AAAA,MACT,YAAY,MAA6C;AACvD;AAAA,UACE;AAAA,UACA,YAAY,KAAK,MAAM,gBAAgB,KAAK,SAAS;AAAA,QAEvD;AACA,aAAK,OAAO;AACZ,aAAK,SAAS,KAAK;AACnB,aAAK,YAAY,KAAK;AAAA,MACxB;AAAA,IACF;AAkBO,IAAM,wBAAN,cAAoC,WAAW;AAAA,MAC3C;AAAA,MACA;AAAA,MACA;AAAA,MAET,YAAY,MAKT;AACD,cAAM,MACJ,KAAK,YACJ,KAAK,SAAS,cACX,4CAAuC,KAAK,MAAM,4DAA4D,KAAK,UAAU,WAAW,gGAAgG,KAAK,UAAU,UAAU,aACjQ,4CAAuC,KAAK,MAAM;AACxD,cAAM,qBAAqB,GAAG;AAC9B,aAAK,OAAO;AACZ,aAAK,OAAO,KAAK;AACjB,aAAK,SAAS,KAAK;AACnB,YAAI,KAAK,WAAW,OAAW,MAAK,SAAS,KAAK;AAAA,MACpD;AAAA,IACF;AA2CO,IAAM,uBAAN,cAAmC,WAAW;AAAA;AAAA,MAE1C;AAAA,MAET,YAAY,YAAoB,WAAmB,WAAoB;AACrE;AAAA,UACE;AAAA,UACA,GAAG,SAAS,4CAA4C,UAAU,yBACzC,YAAY,KAAK,SAAS,MAAM,EAAE,sDAC3B,UAAU;AAAA,QAE5C;AACA,aAAK,OAAO;AACZ,aAAK,aAAa;AAAA,MACpB;AAAA,IACF;AAEO,IAAM,2BAAN,cAAuC,WAAW;AAAA,MAC9C;AAAA,MAET,YAAY,qBAA6B,SAAkB;AACzD;AAAA,UACE;AAAA,UACA,WACE,4DAA4D,mBAAmB;AAAA,QACnF;AACA,aAAK,OAAO;AACZ,aA
AK,sBAAsB;AAAA,MAC7B;AAAA,IACF;AAmBO,IAAM,oBAAN,cAAgC,WAAW;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MAET,YAAY,YAAoB,SAAiB,UAAkB;AACjE;AAAA,UACE;AAAA,UACA,sCAAsC,QAAQ,qCACxC,UAAU,eAAe,OAAO;AAAA,QAExC;AACA,aAAK,OAAO;AACZ,aAAK,aAAa;AAClB,aAAK,UAAU;AACf,aAAK,WAAW;AAAA,MAClB;AAAA,IACF;AAcO,IAAM,sBAAN,cAAkC,WAAW;AAAA,MACzC;AAAA,MACA;AAAA,MAET,YAAY,YAAoB,MAAc;AAC5C;AAAA,UACE;AAAA,UACA,4BAA4B,IAAI,mBAAmB,UAAU;AAAA,QAC/D;AACA,aAAK,OAAO;AACZ,aAAK,aAAa;AAClB,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAOO,IAAM,wBAAN,cAAoC,WAAW;AAAA,MAC3C;AAAA,MACA;AAAA,MAET,YAAY,MAA2C;AACrD;AAAA,UACE;AAAA,UACA,qBAAqB,KAAK,IAAI,eAAe,IAAI,KAAK,KAAK,SAAS,EAAE,YAAY,CAAC;AAAA,QACrF;AACA,aAAK,OAAO;AACZ,aAAK,OAAO,KAAK;AACjB,aAAK,YAAY,KAAK;AAAA,MACxB;AAAA,IACF;AAOO,IAAM,uBAAN,cAAmC,WAAW;AAAA,MAC1C;AAAA,MAET,YAAY,YAAoB;AAC9B;AAAA,UACE;AAAA,UACA,qCAAqC,UAAU;AAAA,QACjD;AACA,aAAK,OAAO;AACZ,aAAK,aAAa;AAAA,MACpB;AAAA,IACF;AAMO,IAAM,wBAAN,cAAoC,WAAW;AAAA,MACpD,YAAY,IAAY,MAAc;AACpC;AAAA,UACE;AAAA,UACA,6DAA6D,EAAE,eAAe,IAAI;AAAA,QACpF;AACA,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAOO,IAAM,+BAAN,cAA2C,WAAW;AAAA,MAClD;AAAA,MAET,YAAY,QAAgB;AAC1B;AAAA,UACE;AAAA,UACA,2BAA2B,MAAM;AAAA,QACnC;AACA,aAAK,OAAO;AACZ,aAAK,SAAS;AAAA,MAChB;AAAA,IACF;AAaO,IAAM,gBAAN,cAA4B,WAAW;AAAA;AAAA,MAEnC;AAAA,MAET,YAAY,SAAiB,UAAU,oBAAoB;AACzD,cAAM,YAAY,OAAO;AACzB,aAAK,OAAO;AACZ,aAAK,UAAU;AAAA,MACjB;AAAA,IACF;AAcO,IAAM,wBAAN,cAAoC,WAAW;AAAA,MAC3C;AAAA,MAET,YAAY,UAAkB;AAC5B;AAAA,UACE;AAAA,UACA,0DAA0D,QAAQ;AAAA,QACpE;AACA,aAAK,OAAO;AACZ,aAAK,WAAW;AAAA,MAClB;AAAA,IACF;AAWO,IAAM,6BAAN,cAAyC,WAAW;AAAA;AAAA,MAEhD;AAAA,MAET,YAAY,eAAuB,UAAU,0DAAqD;AAChG,cAAM,2BAA2B,OAAO;AACxC,aAAK,OAAO;AACZ,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF;AASO,IAAM,eAAN,cAA2B,WAAW;AAAA,MAC3C,YAAY,UAAU,iBAAiB;AACrC,cAAM,iBAAiB,OAAO;AAC9B,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAWO,IAAM,gBAAN,cAA4B,WAAW;AAAA,MAC5C,YAAY,UAAU,oBAAoB;AACxC,cAAM,aAAa,OAAO;AAC1B,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAUO,IAAM,kBAAN,cAA8B,WAAW;AAAA,MAC9C,YAAY,UAAU,oBAAoB;AACxC,cAAM,oBAAoB,OAAO;AACjC,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAsBO,IAAM,wBAAN,cAAoC,WAAW;AAAA,MAC3C;AAAA,MACA;AAAA,MAET,YACE,SACA,QACA,WACA;AACA,cAAM,4BAA4B,OAAO;AACzC,aAAK,OAAO;AACZ,aAAK,SAAS;AACd,aAAK,YAAY;AAAA,MACnB;AAAA,IACF;AA0BO,IAAM,wBAAN,cAAoC,WAAW;AAAA;AAAA,MAE3C;AAAA;AAAA,MAEA;AAAA;AAAA,MAEA;AAAA,MAET,YAAY,OAAe,aAAqB,WAAmB;AACjE;AAAA,UACE;AAAA,UACA,aAAa,KAAK,eAAe,WAAW,mCACzB,SAAS;AAAA,QAM9B;AACA,aAAK,OAAO;AACZ,aAAK,QAAQ;AACb,aAAK,cAAc;AACnB,aAAK,YAAY;AAAA,MACnB;AAAA,IACF;AAeO,IAAM,qBAAN,cAAiC,WAAW;AAAA,MACxC;AAAA,MACA;AAAA,MACA;AAAA,MAET,YAAY,MAAkG;AAC5G;AAAA,UACE;AAAA,UACA,eAAe,KAAK,UAAU,+DACjB,KAAK,cAAc,KAAK,IAAI,CAAC;AAAA,QAE5C;AACA,aAAK,OAAO;AACZ,aAAK,aAAa,KAAK;AACvB,aAAK,gBAAgB,CAAC,GAAG,KAAK,aAAa;AAC3C,aAAK,gBAAgB,CAAC,GAAG,KAAK,aAAa;AAAA,MAC7C;AAAA,IACF;AAkBO,IAAM,yBAAN,cAAqC,WAAW;AAAA,MAC5C;AAAA,MACA;AAAA,MACA;AAAA,MACS;AAAA,MAElB,YAAY,MAAiF;AAC3F;AAAA,UACE;AAAA,UACA,kBAAkB,KAAK,EAAE,sBAAsB,KAAK,KAAK,gBAAgB,KAAK,QAAQ;AAAA,QACxF;AACA,aAAK,OAAO;AACZ,aAAK,WAAW,KAAK;AACrB,aAAK,QAAQ,KAAK;AAClB,aAAK,KAAK,KAAK;AACf,aAAK,QAAQ,KAAK;AAAA,MACpB;AAAA,IACF;AAuBO,IAAM,uBAAN,cAAmC,WAAW;AAAA,MACnD,YAAY,SAAiB;AAC3B,cAAM,oBAAoB,yCAAyC,OAAO,EAAE;AAC5E,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAaO,IAAM,8BAAN,cAA0C,WAAW;AAAA;AAAA,MAEjD;AAAA,MAET,YAAY,gBAAwB;AAClC;AAAA,UACE;AAAA,UACA,IAAI,cAAc,0DACS,eAAe,QAAQ,WAAW,EAAE,CAAC;AAAA,QAElE;AACA,aAAK,OAAO;AACZ,aAAK,iBAAiB;AAAA,MACxB;AAAA,IACF;AAUO,IAAM,sBAAN,cAAkC,WAAW;AAAA;AAAA,MAEzC;AAAA;AAAA,MAEA;AAAA,MAET,YAAY,gBAAwB,KAAa;AAC/C;AAAA,UACE;AAAA,UACA,eAAe,cAAc,2BAA2B,GAAG;AAAA,QAC7D;AACA,aAAK,OAAO;AACZ,aAAK,iBAAiB;AACtB,aAAK,MAAM;AAAA,MACb;AAAA,IACF;AAUO,IAAM,oBAAN,cAAgC,WAAW;
AAAA;AAAA,MAEvC;AAAA;AAAA,MAEA;AAAA;AAAA,MAEA;AAAA;AAAA,MAEA;AAAA,MAET,YACE,gBACA,KACA,QACAA,QACA;AACA;AAAA,UACE;AAAA,UACA,sBAAsB,GAAG,sBAAsB,cAAc,MACxDA,MAAK,kBAAkB,MAAM,gDACN,GAAG;AAAA,QACjC;AACA,aAAK,OAAO;AACZ,aAAK,iBAAiB;AACtB,aAAK,MAAM;AACX,aAAK,SAAS;AACd,aAAK,QAAQA;AAAA,MACf;AAAA,IACF;AAUO,IAAM,0BAAN,cAAsC,WAAW;AAAA;AAAA,MAE7C;AAAA;AAAA,MAEA;AAAA,MAET,YAAY,OAAe,SAA4B,SAAkB;AACvE;AAAA,UACE;AAAA,UACA,WACE,UAAU,KAAK,uCAAuC,QAAQ,KAAK,IAAI,CAAC;AAAA,QAC5E;AACA,aAAK,OAAO;AACZ,aAAK,QAAQ;AACb,aAAK,UAAU;AAAA,MACjB;AAAA,IACF;AAaO,IAAM,0BAAN,cAAsC,WAAW;AAAA;AAAA,MAE7C;AAAA,MAET,YAAY,OAAe,SAAkB;AAC3C;AAAA,UACE;AAAA,UACA,WACE,+BAA+B,KAAK;AAAA,QAGxC;AACA,aAAK,OAAO;AACZ,aAAK,QAAQ;AAAA,MACf;AAAA,IACF;AAaO,IAAM,+BAAN,cAA2C,WAAW;AAAA;AAAA,MAElD;AAAA;AAAA,MAEA;AAAA,MAET,YAAY,OAAe,YAAoB;AAC7C;AAAA,UACE;AAAA,UACA,UAAU,KAAK,oBAAoB,UAAU;AAAA,QAG/C;AACA,aAAK,OAAO;AACZ,aAAK,QAAQ;AACb,aAAK,aAAa;AAAA,MACpB;AAAA,IACF;AAcO,IAAM,oBAAN,cAAgC,WAAW;AAAA;AAAA,MAEvC;AAAA,MAET,YAAY,SAAiB,YAAqB;AAChD,cAAM,iBAAiB,OAAO;AAC9B,aAAK,OAAO;AACZ,YAAI,eAAe,OAAW,MAAK,aAAa;AAAA,MAClD;AAAA,IACF;AASO,IAAM,uBAAN,cAAmC,WAAW;AAAA;AAAA,MAE1C;AAAA,MACA;AAAA,MAET,YAAY,YAAoB,IAAY,SAAiB;AAC3D,cAAM,oBAAoB,OAAO;AACjC,aAAK,OAAO;AACZ,aAAK,aAAa;AAClB,aAAK,KAAK;AAAA,MACZ;AAAA,IACF;AAiBO,IAAM,sBAAN,cAAkC,WAAW;AAAA,MACzC;AAAA,MAET,YAAY,WAAmB;AAC7B,cAAM,mBAAmB,YAAY,SAAS,uCAAuC;AACrF,aAAK,OAAO;AACZ,aAAK,YAAY;AAAA,MACnB;AAAA,IACF;AAaO,IAAM,uBAAN,cAAmC,WAAW;AAAA,MAC1C;AAAA,MAET,YAAY,WAAmB;AAC7B,cAAM,qBAAqB,oBAAoB,SAAS,sEAAsE;AAC9H,aAAK,OAAO;AACZ,aAAK,YAAY;AAAA,MACnB;AAAA,IACF;AAWO,IAAM,qBAAN,cAAiC,WAAW;AAAA,MACxC;AAAA,MAET,YAAY,WAAmB,SAAkB;AAC/C;AAAA,UACE;AAAA,UACA,WAAW,cAAc,SAAS;AAAA,QACpC;AACA,aAAK,OAAO;AACZ,aAAK,YAAY;AAAA,MACnB;AAAA,IACF;AAqBO,IAAM,oBAAN,cAAgC,WAAW;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAET,YAAY,MAMT;AACD,cAAM,kBAAkB,KAAK,OAAO;AACpC,aAAK,OAAO;AACZ,aAAK,WAAW,KAAK;AACrB,aAAK,YAAY,KAAK;AACtB,aAAK,UAAU,KAAK;AACpB,aAAK,OAAO,KAAK;AAAA,MACnB;AAAA,IACF;AAgBO,IAAM,yBAAN,cAAqC,WAAW;AAAA,MAC5C;AAAA,MACA;AAAA,MACA;AAAA,MAET,YAAY,MAKT;AACD,cAAM,sBAAsB,KAAK,OAAO;AACxC,aAAK,OAAO;AACZ,aAAK,QAAQ,KAAK;AAClB,aAAK,SAAS,KAAK;AACnB,aAAK,QAAQ,KAAK;AAAA,MACpB;AAAA,IACF;AAQO,IAAM,4BAAN,cAAwC,WAAW;AAAA,MACxD,YAAY,SAAiB;AAC3B,cAAM,yBAAyB,OAAO;AACtC,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAQO,IAAM,kBAAN,cAA8B,WAAW;AAAA,MACrC;AAAA,MACA;AAAA,MAET,YAAY,MAAgD;AAC1D;AAAA,UACE;AAAA,UACA,uBAAuB,KAAK,SAAS,kCAAkC,KAAK,SAAS;AAAA,QACvF;AACA,aAAK,OAAO;AACZ,aAAK,YAAY,KAAK;AACtB,aAAK,YAAY,KAAK;AAAA,MACxB;AAAA,IACF;AAAA;AAAA;;;ACxsCA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkDA,eAAsB,UACpB,YACA,MACoB;AACpB,QAAM,cAAc,MAAM,OAAO;AAAA,IAC/B;AAAA,IACA,IAAI,YAAY,EAAE,OAAO,UAAU;AAAA,IACnC;AAAA,IACA;AAAA,IACA,CAAC,WAAW;AAAA,EACd;AAEA,SAAO,OAAO;AAAA,IACZ;AAAA,MACE,MAAM;AAAA,MACN;AAAA,MACA,YAAY;AAAA,MACZ,MAAM;AAAA,IACR;AAAA,IACA;AAAA,IACA,EAAE,MAAM,UAAU,QAAQ,SAAS;AAAA,IACnC;AAAA,IACA,CAAC,WAAW,WAAW;AAAA,EACzB;AACF;AAKA,eAAsB,cAAkC;AACtD,SAAO,OAAO;AAAA,IACZ,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,IACpC;AAAA;AAAA,IACA,CAAC,WAAW,SAAS;AAAA,EACvB;AACF;AAKA,eAAsB,QAAQ,KAAgB,KAAiC;AAC7E,QAAM,UAAU,MAAM,OAAO,QAAQ,OAAO,KAAK,KAAK,QAAQ;AAC9D,SAAO,eAAe,OAAO;AAC/B;AAGA,eAAsB,UACpB,eACA,KACoB;AACpB,MAAI;AACF,WAAO,MAAM,OAAO;AAAA,MAClB;AAAA,MACA,eAAe,aAAa;AAAA,MAC5B;AAAA,MACA;AAAA,MACA,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,MACpC;AAAA,MACA,CAAC,WAAW,SAAS;AAAA,IACvB;AAAA,EACF,QAAQ;AACN,UAAM,IAAI,gBAAgB;AAAA,EAC5B;AACF;AAUA,eAAsB,QACpB,WACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,UAAU,IAAI,YAAY,EAAE,OAAO,SAAS;AAElD,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,
GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAGA,eAAsB,QACpB,UACA,YACA,KACiB;AACjB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAE5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B,EAAE,MAAM,WAAW,GAAuB;AAAA,MAC1C;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,YAAY,EAAE,OAAO,SAAS;AAAA,EAC3C,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAUA,eAAsB,aACpB,MACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAMA,eAAsB,aACpB,UACA,YACA,KACqB;AACrB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAC5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B,EAAE,MAAM,WAAW,GAAuB;AAAA,MAC1C;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,WAAW,SAAS;AAAA,EACjC,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAQA,eAAsB,UAAU,MAAmC;AACjE,QAAM,OAAO,MAAM,OAAO,OAAO,WAAW,IAA+B;AAC3E,SAAO,MAAM,KAAK,IAAI,WAAW,IAAI,CAAC,EACnC,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAC1C,KAAK,EAAE;AACZ;AAgBA,eAAsB,cAAc,KAAgB,MAAmC;AAErF,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAChD,QAAM,UAAU,MAAM,OAAO;AAAA,IAC3B;AAAA,IACA;AAAA,IACA,EAAE,MAAM,QAAQ,MAAM,UAAU;AAAA,IAChC;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AACA,QAAM,MAAM,MAAM,OAAO,KAAK,QAAQ,SAAS,IAA+B;AAC9E,SAAO,MAAM,KAAK,IAAI,WAAW,GAAG,CAAC,EAClC,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAC1C,KAAK,EAAE;AACZ;AAgBA,eAAsB,oBACpB,MACA,KACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B;AAAA,MACE,MAAM;AAAA,MACN;AAAA,MACA,gBAAgB;AAAA,IAClB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AASA,eAAsB,oBACpB,UACA,YACA,KACA,KACqB;AACrB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAC5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B;AAAA,QACE,MAAM;AAAA,QACN;AAAA,QACA,gBAAgB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,WAAW,SAAS;AAAA,EACjC,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAiBA,eAAsB,kBAAkB,KAAgB,gBAA4C;AAElG,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAGhD,QAAM,UAAU,MAAM,OAAO;AAAA,IAC3B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,CAAC,YAAY;AAAA,EACf;AAGA,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,gBAAgB;AACtD,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,cAAc;AACpD,QAAM,OAAO,MAAM,OAAO;AAAA,IACxB,EAAE,MAAM,QAAQ,MAAM,WAAW,MAAM,KAAK;AAAA,IAC5C;AAAA,IACA;AAAA,EACF;AAGA,SAAO,OAAO;AAAA,IACZ;AAAA,IACA;AAAA,IACA,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,IACpC;AAAA,IACA,CAAC,WAAW,SAAS;AAAA,EACvB;AACF;AA2BA,eAAe,sBACb,KACA,SACA,WACqB;AACrB,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAChD,QAAM,UAAU,MAAM,OAAO,UAAU,OAAO,QAAQ,QAAQ,OAAO,CAAC,YAAY,CAAC;AACnF,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,wBAAwB;AAC9D,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,GAAG,OAAO,KAAO,SAAS,EAAE;AAClE,QAAM,OAAO,MAAM,OAAO;AAAA,IACxB,EAAE,MAAM,QAAQ,MAAM,WAAW,MAAM,KAAK;AAAA,IAC5C;AAAA,IACA,WAAW;AAAA,EACb;AACA,SAAO,IAAI,WAAW,IAAI;AAC5B;AAgBA,eAAsB,qBACpB,WACA,KACA,SACwB;AACxB,QAAM,KAAK,MAAM,sBAAsB,KAAK,SAAS,SAAS;AAC9D,QAAM,UAAU,IAAI,YAAY,EAAE,OAAO,SAAS;AAClD,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAQA,eAAsB,qBACpB,UACA,YACA,KACiB;AACjB,SAAO,QAAQ,UAAU,YAAY,GAAG;AAC1C;AAKO,SAAS,aAAyB;AACvC,SAAO,WAAW,OAAO,gBAAgB,IAAI,W
AAW,QAAQ,CAAC;AACnE;AAGO,SAAS,eAA2B;AACzC,SAAO,WAAW,OAAO,gBAAgB,IAAI,WAAW,UAAU,CAAC;AACrE;AAIO,SAAS,eAAe,QAA0C;AACvE,QAAM,QAAQ,kBAAkB,aAAa,SAAS,IAAI,WAAW,MAAM;AAC3E,MAAI,SAAS;AACb,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAU,OAAO,aAAa,MAAM,CAAC,CAAE;AAAA,EACzC;AACA,SAAO,KAAK,MAAM;AACpB;AAEO,SAAS,eAAe,QAAyC;AACtE,QAAM,SAAS,KAAK,MAAM;AAC1B,QAAM,QAAQ,IAAI,WAAW,OAAO,MAAM;AAC1C,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,CAAC,IAAI,OAAO,WAAW,CAAC;AAAA,EAChC;AACA,SAAO;AACT;AAnfA,IAwCM,mBACA,YACA,UACA,UAEA;AA7CN;AAAA;AAAA;AAsCA;AAEA,IAAM,oBAAoB;AAC1B,IAAM,aAAa;AACnB,IAAM,WAAW;AACjB,IAAM,WAAW;AAEjB,IAAM,SAAS,WAAW,OAAO;AAAA;AAAA;;;AC7CjC;AAAA;AAAA;AAAA;AAAA;AA4DA,SAAS,aAAa,OAAe,QAAwB;AAC3D,MAAI,MAAM;AACV,MAAI,IAAI;AACR,WAAS,IAAI,GAAG,IAAI,QAAQ,KAAK;AAC/B,UAAM,mBAAmB,IAAI,EAAE,IAAK;AACpC,QAAI,KAAK,MAAM,IAAI,EAAE;AAAA,EACvB;AACA,SAAO;AACT;AAcO,SAAS,eAAuB;AACrC,QAAM,MAAM,KAAK,IAAI;AAQrB,QAAM,gBAAgB,KAAK,MAAM,MAAM,QAAS;AAChD,QAAM,eAAe,MAAM;AAC3B,QAAM,SACJ,aAAa,eAAe,CAAC,IAAI,aAAa,cAAc,CAAC;AAM/D,QAAM,YAAY,IAAI,WAAW,EAAE;AACnC,SAAO,gBAAgB,SAAS;AAMhC,QAAM,QACJ,UAAU,CAAC,IAAK,KAAK,MACpB,UAAU,CAAC,KAAM,OAAO,MACxB,UAAU,CAAC,KAAM,OACjB,UAAU,CAAC,KAAM,KAClB,UAAU,CAAC;AAEb,QAAM,QACJ,UAAU,CAAC,IAAK,KAAK,MACpB,UAAU,CAAC,KAAM,OAAO,MACxB,UAAU,CAAC,KAAM,OACjB,UAAU,CAAC,KAAM,KAClB,UAAU,CAAC;AACb,QAAM,WAAW,aAAa,OAAO,CAAC,IAAI,aAAa,OAAO,CAAC;AAE/D,SAAO,SAAS;AAClB;AAQO,SAAS,OAAO,OAAwB;AAC7C,SAAO,2BAA2B,KAAK,KAAK;AAC9C;AArIA,IAiDM;AAjDN;AAAA;AAAA;AAiDA,IAAM,qBAAqB;AAAA;AAAA;;;ACgHpB,SAAS,cAAc,OAAwB;AACpD,MAAI,UAAU,KAAM,QAAO;AAC3B,MAAI,OAAO,UAAU,UAAW,QAAO,QAAQ,SAAS;AACxD,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI,CAAC,OAAO,SAAS,KAAK,GAAG;AAC3B,YAAM,IAAI;AAAA,QACR,uDAAuD,OAAO,KAAK,CAAC;AAAA,MACtE;AAAA,IACF;AACA,WAAO,KAAK,UAAU,KAAK;AAAA,EAC7B;AACA,MAAI,OAAO,UAAU,SAAU,QAAO,KAAK,UAAU,KAAK;AAC1D,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AACA,MAAI,OAAO,UAAU,eAAe,OAAO,UAAU,YAAY;AAC/D,UAAM,IAAI;AAAA,MACR,qCAAqC,OAAO,KAAK;AAAA,IACnD;AAAA,EACF;AACA,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,WAAO,MAAM,MAAM,IAAI,CAAC,MAAM,cAAc,CAAC,CAAC,EAAE,KAAK,GAAG,IAAI;AAAA,EAC9D;AACA,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,MAAM;AACZ,UAAM,OAAO,OAAO,KAAK,GAAG,EAAE,KAAK;AACnC,UAAM,QAAkB,CAAC;AACzB,eAAW,OAAO,MAAM;AACtB,YAAM,KAAK,KAAK,UAAU,GAAG,IAAI,MAAM,cAAc,IAAI,GAAG,CAAC,CAAC;AAAA,IAChE;AACA,WAAO,MAAM,MAAM,KAAK,GAAG,IAAI;AAAA,EACjC;AACA,QAAM,IAAI,MAAM,yCAAyC,OAAO,KAAK,EAAE;AACzE;AAUA,eAAsBC,WAAU,OAAgC;AAC9D,QAAM,QAAQ,IAAI,YAAY,EAAE,OAAO,KAAK;AAC5C,QAAM,SAAS,MAAM,WAAW,OAAO,OAAO,OAAO,WAAW,KAAK;AACrE,SAAO,WAAW,IAAI,WAAW,MAAM,CAAC;AAC1C;AAQA,eAAsB,UAAU,OAAqC;AACnE,SAAOA,WAAU,cAAc,KAAK,CAAC;AACvC;AAGA,SAAS,WAAW,OAA2B;AAC7C,QAAMC,OAAM,IAAI,MAAc,MAAM,MAAM;AAC1C,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AAIrC,IAAAA,KAAI,CAAC,KAAK,MAAM,CAAC,KAAK,GAAG,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAAA,EACvD;AACA,SAAOA,KAAI,KAAK,EAAE;AACpB;AAQO,SAAS,YAAY,OAAuB;AACjD,SAAO,OAAO,KAAK,EAAE,SAAS,IAAI,GAAG;AACvC;AAGO,SAAS,WAAW,KAAqB;AAC9C,SAAO,OAAO,SAAS,KAAK,EAAE;AAChC;AArPA;AAAA;AAAA;AAAA;AAAA;;;AC4EO,SAAS,aAAa,MAAe,MAA0B;AACpE,QAAM,MAAqB,CAAC;AAC5B,OAAK,MAAM,MAAM,IAAI,GAAG;AACxB,SAAO;AACT;AAEA,SAAS,KACP,MACA,MACA,MACA,KACM;AAGN,MAAI,SAAS,KAAM;AAGnB,MAAI,SAAS,QAAQ,SAAS,MAAM;AAClC,QAAI,KAAK,EAAE,IAAI,WAAW,MAAM,OAAO,KAAK,CAAC;AAC7C;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,QAAQ,IAAI;AACtC,QAAM,cAAc,MAAM,QAAQ,IAAI;AACtC,QAAM,eAAe,OAAO,SAAS,YAAY,CAAC;AAClD,QAAM,eAAe,OAAO,SAAS,YAAY,CAAC;AAGlD,MAAI,gBAAgB,eAAe,iBAAiB,cAAc;AAChE,QAAI,KAAK,EAAE,IAAI,WAAW,MAAM,OAAO,KAAK,CAAC;AAC7C;AAAA,EACF;AAKA,MAAI,eAAe,aAAa;AAC9B,QAAI,CAAC,eAAe,MAAmB,IAAiB,GAAG;AACzD,UAAI,KAAK,EAAE,IAAI,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,IAC/C;AACA;AAAA,E
ACF;AAGA,MAAI,gBAAgB,cAAc;AAChC,UAAM,UAAU;AAChB,UAAM,UAAU;AAChB,UAAM,WAAW,OAAO,KAAK,OAAO;AACpC,UAAM,WAAW,OAAO,KAAK,OAAO;AAGpC,eAAW,OAAO,UAAU;AAC1B,YAAM,YAAY,OAAO,MAAM,kBAAkB,GAAG;AACpD,UAAI,EAAE,OAAO,UAAU;AACrB,YAAI,KAAK,EAAE,IAAI,UAAU,MAAM,UAAU,CAAC;AAAA,MAC5C,OAAO;AACL,aAAK,QAAQ,GAAG,GAAG,QAAQ,GAAG,GAAG,WAAW,GAAG;AAAA,MACjD;AAAA,IACF;AAEA,eAAW,OAAO,UAAU;AAC1B,UAAI,EAAE,OAAO,UAAU;AACrB,YAAI,KAAK;AAAA,UACP,IAAI;AAAA,UACJ,MAAM,OAAO,MAAM,kBAAkB,GAAG;AAAA,UACxC,OAAO,QAAQ,GAAG;AAAA,QACpB,CAAC;AAAA,MACH;AAAA,IACF;AACA;AAAA,EACF;AAGA,MAAI,KAAK,EAAE,IAAI,WAAW,MAAM,OAAO,KAAK,CAAC;AAC/C;AAEA,SAAS,eAAe,GAAc,GAAuB;AAC3D,MAAI,EAAE,WAAW,EAAE,OAAQ,QAAO;AAClC,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,QAAI,CAAC,UAAU,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,EAAG,QAAO;AAAA,EACrC;AACA,SAAO;AACT;AAEA,SAAS,UAAU,GAAY,GAAqB;AAClD,MAAI,MAAM,EAAG,QAAO;AACpB,MAAI,MAAM,QAAQ,MAAM,KAAM,QAAO;AACrC,MAAI,OAAO,MAAM,OAAO,EAAG,QAAO;AAClC,MAAI,OAAO,MAAM,SAAU,QAAO;AAClC,QAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,QAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,MAAI,WAAW,OAAQ,QAAO;AAC9B,MAAI,UAAU,OAAQ,QAAO,eAAe,GAAG,CAAc;AAC7D,QAAM,OAAO;AACb,QAAM,OAAO;AACb,QAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,QAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,MAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAC1C,aAAW,OAAO,OAAO;AACvB,QAAI,EAAE,OAAO,MAAO,QAAO;AAC3B,QAAI,CAAC,UAAU,KAAK,GAAG,GAAG,KAAK,GAAG,CAAC,EAAG,QAAO;AAAA,EAC/C;AACA,SAAO;AACT;AAsBO,SAAS,WAAwB,MAAS,OAAqB;AACpE,MAAI,SAAkB,MAAM,IAAI;AAChC,aAAW,MAAM,OAAO;AACtB,aAAS,QAAQ,QAAQ,EAAE;AAAA,EAC7B;AACA,SAAO;AACT;AAEA,SAAS,QAAQ,KAAc,IAA0B;AAIvD,MAAI,GAAG,SAAS,IAAI;AAClB,QAAI,GAAG,OAAO,SAAU,QAAO;AAC/B,WAAO,MAAM,GAAG,KAAK;AAAA,EACvB;AAEA,QAAM,WAAW,UAAU,GAAG,IAAI;AAClC,SAAO,aAAa,KAAK,UAAU,EAAE;AACvC;AAEA,SAAS,aACP,KACA,UACA,IACS;AACT,MAAI,SAAS,WAAW,GAAG;AAEzB,UAAM,IAAI,MAAM,+CAA+C;AAAA,EACjE;AAEA,QAAM,CAAC,MAAM,GAAG,IAAI,IAAI;AACxB,MAAI,SAAS,OAAW,OAAM,IAAI,MAAM,iCAAiC;AAEzE,MAAI,KAAK,WAAW,GAAG;AACrB,WAAO,gBAAgB,KAAK,MAAM,EAAE;AAAA,EACtC;AAIA,MAAI,MAAM,QAAQ,GAAG,GAAG;AACtB,UAAM,MAAM,gBAAgB,MAAM,IAAI,MAAM;AAC5C,UAAM,QAAQ,IAAI,GAAG;AACrB,UAAM,WAAW,aAAa,OAAO,MAAM,EAAE;AAC7C,UAAM,OAAO,IAAI,MAAM;AACvB,SAAK,GAAG,IAAI;AACZ,WAAO;AAAA,EACT;AACA,MAAI,QAAQ,QAAQ,OAAO,QAAQ,UAAU;AAC3C,UAAM,MAAM;AACZ,QAAI,EAAE,QAAQ,MAAM;AAClB,YAAM,IAAI,MAAM,6BAA6B,IAAI,uBAAuB;AAAA,IAC1E;AACA,UAAM,WAAW,aAAa,IAAI,IAAI,GAAG,MAAM,EAAE;AACjD,WAAO,EAAE,GAAG,KAAK,CAAC,IAAI,GAAG,SAAS;AAAA,EACpC;AACA,QAAM,IAAI;AAAA,IACR,gCAAgC,OAAO,GAAG,gBAAgB,IAAI;AAAA,EAChE;AACF;AAEA,SAAS,gBACP,KACA,SACA,IACS;AACT,MAAI,MAAM,QAAQ,GAAG,GAAG;AACtB,UAAM,MACJ,YAAY,MAAM,IAAI,SAAS,gBAAgB,SAAS,IAAI,SAAS,CAAC;AACxE,UAAM,OAAO,IAAI,MAAM;AACvB,QAAI,GAAG,OAAO,UAAU;AACtB,WAAK,OAAO,KAAK,CAAC;AAClB,aAAO;AAAA,IACT;AACA,QAAI,GAAG,OAAO,OAAO;AACnB,WAAK,OAAO,KAAK,GAAG,MAAM,GAAG,KAAK,CAAC;AACnC,aAAO;AAAA,IACT;AACA,QAAI,GAAG,OAAO,WAAW;AACvB,UAAI,OAAO,IAAI,QAAQ;AACrB,cAAM,IAAI;AAAA,UACR,oDAAoD,GAAG;AAAA,QACzD;AAAA,MACF;AACA,WAAK,GAAG,IAAI,MAAM,GAAG,KAAK;AAC1B,aAAO;AAAA,IACT;AAAA,EACF;AACA,MAAI,QAAQ,QAAQ,OAAO,QAAQ,UAAU;AAC3C,UAAM,MAAM;AACZ,QAAI,GAAG,OAAO,UAAU;AACtB,UAAI,EAAE,WAAW,MAAM;AACrB,cAAM,IAAI;AAAA,UACR,sCAAsC,OAAO;AAAA,QAC/C;AAAA,MACF;AACA,YAAM,OAAO,EAAE,GAAG,IAAI;AACtB,aAAO,KAAK,OAAO;AACnB,aAAO;AAAA,IACT;AACA,QAAI,GAAG,OAAO,OAAO;AAEnB,aAAO,EAAE,GAAG,KAAK,CAAC,OAAO,GAAG,MAAM,GAAG,KAAK,EAAE;AAAA,IAC9C;AACA,QAAI,GAAG,OAAO,WAAW;AACvB,UAAI,EAAE,WAAW,MAAM;AACrB,cAAM,IAAI;AAAA,UACR,uCAAuC,OAAO;AAAA,QAChD;AAAA,MACF;AACA,aAAO,EAAE,GAAG,KAAK,CAAC,OAAO,GAAG,MAAM,GAAG,KAAK,EAAE;AAAA,IAC9C;AAAA,EACF;AACA,QAAM,IAAI;AAAA,IACR,4BAA4B,GAAG,EAAE,yBAAyB,OAAO;AAAA,EACnE;AACF;AAYA,SAAS,kBAAkB,SAAyB;AAClD,SAAO,QAAQ,QAAQ,MAAM,IAAI,EAAE,QAAQ,OAAO,IAAI;AACxD;AAEA,SAAS,oBAA
[Machine-generated source map data: this portion of the diff consists of the base64 VLQ "mappings" payloads of the package's dist/*.js.map files. The payloads encode only build-tool position mappings for the bundled chunks and are not human-readable; no reviewable source content survives here.]
O;AAChB;AAcA,eAAsB,qBACpB,QACA,OACA,SACiB;AACjB,QAAM,SAAS,MAAM,OAAO,WAAW,EAAE,SAAS,KAAK;AACvD,MAAI,OAAO,WAAW,UAAa,OAAO,OAAO,SAAS,GAAG;AAC3D,UAAM,IAAI;AAAA,MACR,mBAAmB,OAAO,2DACP,gBAAgB,OAAO,MAAM,CAAC;AAAA,MACjD,OAAO;AAAA,MACP;AAAA,IACF;AAAA,EACF;AACA,SAAO,OAAO;AAChB;AAWA,SAAS,gBACP,QACQ;AACR,QAAM,QAAQ,OAAO,MAAM,GAAG,CAAC,EAAE,IAAI,CAAC,UAAU;AAC9C,UAAM,UAAU,WAAW,MAAM,IAAI;AACrC,WAAO,GAAG,OAAO,KAAK,MAAM,OAAO;AAAA,EACrC,CAAC;AACD,QAAM,SAAS,OAAO,SAAS,IAAI,MAAM,OAAO,SAAS,CAAC,WAAW;AACrE,SAAO,MAAM,KAAK,IAAI,IAAI;AAC5B;AAEA,SAAS,WACP,MACQ;AACR,MAAI,CAAC,QAAQ,KAAK,WAAW,EAAG,QAAO;AACvC,SAAO,KACJ;AAAA,IAAI,CAAC,YACJ,OAAO,YAAY,YAAY,YAAY,OACvC,OAAO,QAAQ,GAAG,IAClB,OAAO,OAAO;AAAA,EACpB,EACC,KAAK,GAAG;AACb;;;ACzMA,IAAM,qBAAqB;AAuB3B,SAAS,cAAc,IAAY,YAAoB,UAA4B;AACjF,MAAI,UAAU;AACZ,WAAO,GAAG,WAAW,GAAG,UAAU,IAAI,QAAQ,GAAG;AAAA,EACnD;AACA,SAAO,GAAG,WAAW,GAAG,UAAU,GAAG;AACvC;AAeA,eAAsB,WACpB,SACA,OACA,YACA,UACA,SAC8B;AAC9B,QAAM,SAAS,MAAM,QAAQ,KAAK,OAAO,kBAAkB;AAC3D,QAAM,cAAc,OACjB,OAAO,QAAM,cAAc,IAAI,YAAY,QAAQ,CAAC,EACpD,KAAK,EACL,QAAQ;AAEX,QAAM,UAA+B,CAAC;AAEtC,aAAW,MAAM,aAAa;AAC5B,UAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,oBAAoB,EAAE;AAChE,QAAI,CAAC,SAAU;AAGf,QAAI,SAAS,QAAQ,SAAS,MAAM,QAAQ,KAAM;AAClD,QAAI,SAAS,MAAM,SAAS,MAAM,QAAQ,GAAI;AAE9C,YAAQ,KAAK,QAAQ;AAErB,QAAI,SAAS,SAAS,QAAQ,UAAU,QAAQ,MAAO;AAAA,EACzD;AAEA,SAAO;AACT;;;AC9BA;AACA;AA4CO,IAAM,eAAN,MAAmB;AAAA,EACxB,YACmB,QAED,WAChB;AAHiB;AAED;AAAA,EACf;AAAA,EAHgB;AAAA,EAED;AAAA;AAAA,EAIlB,WAAwB,MAAoC;AAC1D,WAAO,IAAI,kBAAqB,KAAK,QAAQ,KAAK,WAAW,IAAI;AAAA,EACnE;AACF;AAYO,IAAM,oBAAN,MAAqC;AAAA,EAC1C,YACmB,QACA,UACD,MAChB;AAHiB;AACA;AACD;AAAA,EACf;AAAA,EAHgB;AAAA,EACA;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQlB,MAAM,IAAI,IAA+B;AACvC,UAAM,WAAW,MAAM,KAAK,gBAAgB,EAAE;AAC9C,QAAI,CAAC,SAAU,QAAO;AACtB,UAAM,YAAY,KAAK,OAAO,YAC1B,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,MAAM,KAAK,OAAO,OAAO,KAAK,IAAI,CAAC,IAC/E,SAAS;AACb,WAAO,KAAK,MAAM,SAAS;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,OAA0B;AAC9B,UAAM,aAAa,MAAM,kBAAkB,KAAK,OAAO,SAAS,KAAK,OAAO,MAAM,KAAK,IAAI;AAC3F,UAAM,UAAU,MAAM,KAAK,OAAO,QAAQ,KAAK,KAAK,OAAO,MAAM,KAAK,IAAI;AAC1E,UAAM,eAAe,oBAAI,IAAY,CAAC,GAAG,YAAY,GAAG,OAAO,CAAC;AAChE,UAAM,QAAkB,CAAC;AACzB,eAAW,MAAM,cAAc;AAC7B,YAAM,MAAM,MAAM,KAAK,gBAAgB,EAAE;AACzC,UAAI,IAAK,OAAM,KAAK,EAAE;AAAA,IACxB;AACA,WAAO,MAAM,KAAK;AAAA,EACpB;AAAA;AAAA,EAIA,MAAM,IAAI,KAAa,SAA4B;AACjD,UAAM,IAAI,uBAAuB,OAAO,KAAK,QAAQ;AAAA,EACvD;AAAA,EACA,MAAM,OAAO,KAA6B;AACxC,UAAM,IAAI,uBAAuB,UAAU,KAAK,QAAQ;AAAA,EAC1D;AAAA,EACA,MAAM,OAAO,KAAa,QAAoC;AAC5D,UAAM,IAAI,uBAAuB,UAAU,KAAK,QAAQ;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkCA,MAAc,gBAAgB,IAA+C;AAC3E,UAAM,SAAS,KAAK,OAAO,UAAU;AACrC,QAAI,QAAQ;AACV,aAAO,KAAK,iBAAiB,IAAI,MAAM;AAAA,IACzC;AACA,WAAO,KAAK,qBAAqB,EAAE;AAAA,EACrC;AAAA,EAEA,MAAc,iBAAiB,IAAY,QAAwD;AACjG,UAAM,UAAU,MAAM,OAAO,QAAQ;AAErC,QAAI,SAA2D;AAC/D,eAAW,KAAK,SAAS;AACvB,UAAI,EAAE,eAAe,KAAK,QAAQ,EAAE,OAAO,GAAI;AAC/C,UAAI,EAAE,KAAK,KAAK,SAAU;AAC1B,eAAS,EAAE,IAAI,EAAE,IAAI,SAAS,EAAE,QAAQ;AAAA,IAC1C;AACA,QAAI,CAAC,OAAQ,QAAO;AACpB,QAAI,OAAO,OAAO,SAAU,QAAO;AACnC,WAAO,KAAK,YAAY,IAAI,OAAO,OAAO;AAAA,EAC5C;AAAA,EAEA,MAAc,qBAAqB,IAA+C;AAChF,UAAM,UAAU,MAAM;AAAA,MACpB,KAAK,OAAO;AAAA,MAAS,KAAK,OAAO;AAAA,MAAM,KAAK;AAAA,MAAM;AAAA,IACpD;AACA,UAAM,OAAO,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO,MAAM,KAAK,MAAM,EAAE;AAC1E,UAAM,YAAY,oBAAI,IAA+B;AACrD,eAAW,KAAK,QAAS,WAAU,IAAI,EAAE,IAAI,CAAC;AAC9C,QAAI,KAAM,WAAU,IAAI,KAAK,IAAI,IAAI;AACrC,UAAM,SAAS,CAAC,GAAG,UAAU,OAAO,CAAC,EAAE;AAAA,MAAK,CAAC
,GAAG,MAC9C,EAAE,MAAM,EAAE,MAAM,IAAI,EAAE,MAAM,EAAE,MAAM,KAAK;AAAA,IAC3C;AACA,WAAO,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,KAAK,QAAQ,KAAK;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,YAAY,IAAY,SAAoD;AACxF,UAAM,OAAO,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO,MAAM,KAAK,MAAM,EAAE;AAC1E,QAAI,QAAQ,KAAK,OAAO,QAAS,QAAO;AAGxC,UAAM,YAAY,GAAG,KAAK,IAAI,IAAI,EAAE,IAAI,OAAO,OAAO,EAAE,SAAS,IAAI,GAAG,CAAC;AACzE,WAAO,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO,MAAM,YAAY,SAAS;AAAA,EAC9E;AACF;AASA,eAAe,kBACb,SACA,OACA,YACmB;AACnB,QAAM,MAAM,MAAM,QAAQ,KAAK,OAAO,UAAU;AAChD,QAAM,SAAS,GAAG,UAAU;AAC5B,QAAM,OAAO,oBAAI,IAAY;AAC7B,aAAW,OAAO,KAAK;AACrB,QAAI,CAAC,IAAI,WAAW,MAAM,EAAG;AAC7B,UAAM,YAAY,IAAI,YAAY,GAAG;AACrC,QAAI,aAAa,OAAO,OAAQ;AAChC,UAAM,SAAS,IAAI,MAAM,OAAO,QAAQ,SAAS;AACjD,SAAK,IAAI,MAAM;AAAA,EACjB;AACA,SAAO,CAAC,GAAG,IAAI;AACjB;;;AChPA;AAMO,IAAM,aAAN,MAAiB;AAAA,EACtB,YAA6B,OAAc;AAAd;AAAA,EAAe;AAAA,EAAf;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ7B,WAAwB,MAAkC;AACxD,WAAO,IAAI,gBAAmB,KAAK,MAAM,WAAc,IAAI,GAAG,IAAI;AAAA,EACpE;AAAA;AAAA,EAGA,MAAM,cAAiC;AACrC,WAAO,KAAK,MAAM,YAAY;AAAA,EAChC;AACF;AAOO,IAAM,kBAAN,MAAmC;AAAA,EACxC,YACmB,OAGD,MAChB;AAJiB;AAGD;AAAA,EACf;AAAA,EAJgB;AAAA,EAGD;AAAA;AAAA,EAKlB,IAAI,IAAY,QAA+C;AAC7D,WAAO,KAAK,MAAM,IAAI,IAAI,MAAM;AAAA,EAClC;AAAA,EAEA,KAAK,QAA0C;AAC7C,WAAO,KAAK,MAAM,KAAK,MAAM;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAS,MAA8E;AACrF,WAAO,KAAK,MAAM,MAAM,GAAG,IAAI;AAAA,EACjC;AAAA;AAAA,EAGA,WAAW,MAAkF;AAC3F,WAAO,KAAK,MAAM,QAAQ,GAAG,IAAI;AAAA,EACnC;AAAA,EAEA,WAAW,IAAY,SAAoC;AACzD,WAAO,KAAK,MAAM,WAAW,IAAI,OAAO;AAAA,EAC1C;AAAA;AAAA,EAIA,MAAM,IAAI,KAAa,SAA4B;AACjD,UAAM,IAAI,mBAAmB,KAAK;AAAA,EACpC;AAAA,EACA,MAAM,OAAO,KAA6B;AACxC,UAAM,IAAI,mBAAmB,QAAQ;AAAA,EACvC;AAAA,EACA,MAAM,OAAO,KAAa,QAAoC;AAC5D,UAAM,IAAI,mBAAmB,QAAQ;AAAA,EACvC;AAAA,EACA,MAAM,OAAO,KAAa,UAAkC;AAC1D,UAAM,IAAI,mBAAmB,QAAQ;AAAA,EACvC;AAAA,EACA,MAAM,QAAQ,UAA+D;AAC3E,UAAM,IAAI,mBAAmB,SAAS;AAAA,EACxC;AAAA,EACA,MAAM,WAAW,MAAyC;AACxD,UAAM,IAAI,mBAAmB,YAAY;AAAA,EAC3C;AACF;;;ACzEO,IAAM,2BAA2B;;;Ab8NxC;;;Ac3OA;AA6BO,IAAM,oBAAN,cAAgC,WAAW;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,YAAY,MAOT;AACD,UAAM,iBAAiB,KAAK,OAAO;AACnC,SAAK,OAAO;AACZ,SAAK,aAAa,KAAK;AACvB,SAAK,KAAK,KAAK;AACf,SAAK,QAAQ,KAAK;AAClB,SAAK,QAAQ,KAAK;AAClB,SAAK,QAAQ,KAAK;AAAA,EACpB;AACF;AAQO,IAAM,gBAAN,cAA4B,WAAW;AAAA,EAC5C,YAAY,QAAgB;AAC1B;AAAA,MACE;AAAA,MACA,kEAA6D,MAAM;AAAA,IAGrE;AACA,SAAK,OAAO;AAAA,EACd;AACF;AAUO,SAAS,IAAI,QAAgB,OAAgB,UAAyB;AAC3E,MAAI,OAAO,SAAS,GAAG,GAAG;AACxB,UAAM,IAAI,cAAc,MAAM;AAAA,EAChC;AACA,MAAI,CAAC,UAAU,OAAO,WAAW,GAAG,GAAG;AACrC,UAAM,IAAI;AAAA,MACR,uHAAuH,MAAM;AAAA,IAC/H;AAAA,EACF;AACA,SAAO,EAAE,QAAQ,KAAK;AACxB;AAyBO,IAAM,cAAN,MAAkB;AAAA,EACN,WAAW,oBAAI,IAA2C;AAAA,EAC1D,UAAU,oBAAI,IAG7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUF,SAAS,YAAoB,MAA2C;AACtE,UAAM,WAAW,KAAK,SAAS,IAAI,UAAU;AAC7C,QAAI,UAAU;AAEZ,YAAM,eAAe,OAAO,KAAK,QAAQ,EAAE,KAAK;AAChD,YAAM,UAAU,OAAO,KAAK,IAAI,EAAE,KAAK;AACvC,UAAI,aAAa,KAAK,GAAG,MAAM,QAAQ,KAAK,GAAG,GAAG;AAChD,cAAM,IAAI;AAAA,UACR,6DAA6D,UAAU;AAAA,QACzE;AAAA,MACF;AACA,iBAAW,KAAK,cAAc;AAC5B,cAAM,IAAI,SAAS,CAAC;AACpB,cAAM,IAAI,KAAK,CAAC;AAChB,YAAI,CAAC,KAAK,CAAC,KAAK,EAAE,WAAW,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM;AAC1D,gBAAM,IAAI;AAAA,YACR,6DAA6D,UAAU,YAAY,CAAC;AAAA,UACtF;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AACA,SAAK,SAAS,IAAI,YAAY,EAAE,GAAG,KAAK,CAAC;AACzC,eAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,YAAM,OAAO,KAAK,QAAQ,IAAI,KAAK,MAAM,KAAK,CAAC;AAC/C,WAAK,KAAK,EAAE,YAAY,OAAO,MAAM,KAAK,KAAK,CAAC;AAChD,WAAK,QAAQ,IAAI,KAAK,QAAQ,IAAI;AAAA,IACpC;AAAA,EACF;AAAA;AAAA,EAGA,YAAY,YAAmD;AAC7D,WAAO,KAAK,SAAS,IA
AI,UAAU,KAAK,CAAC;AAAA,EAC3C;AAAA;AAAA,EAGA,WACE,QACqE;AACrE,WAAO,KAAK,QAAQ,IAAI,MAAM,KAAK,CAAC;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAA0D;AACxD,WAAO,CAAC,GAAG,KAAK,SAAS,QAAQ,CAAC;AAAA,EACpC;AAAA;AAAA,EAGA,QAAc;AACZ,SAAK,SAAS,MAAM;AACpB,SAAK,QAAQ,MAAM;AAAA,EACrB;AACF;;;ACpOA;AACA;AAWA;AA0BA,IAAM,0BAA2C,CAAC,YAAY,UAAU,UAAU,OAAO;AAEzF,SAAS,SAAS,YAAkB,YAA2B;AAC7D,MAAI,eAAe,QAAS,QAAO;AACnC,MAAI,eAAe,QAAS,QAAO,wBAAwB,SAAS,UAAU;AAC9E,SAAO;AACT;AAEA,SAAS,UAAU,YAAkB,YAA2B;AAC9D,MAAI,eAAe,QAAS,QAAO;AACnC,MAAI,eAAe,QAAS,QAAO;AACnC,MAAI,eAAe,QAAS,QAAO,wBAAwB,SAAS,UAAU;AAC9E,SAAO;AACT;AA+BA,eAAsB,YACpB,SACA,OACA,QACA,YAC0B;AAC1B,QAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,YAAY,MAAM;AAE5D,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,cAAc,8BAA8B,MAAM,eAAe,KAAK,GAAG;AAAA,EACrF;AAEA,QAAM,cAAc,KAAK,MAAM,SAAS,KAAK;AAO7C,MAAI,YAAY,eAAe,QAAW;AACxC,UAAM,SAAS,KAAK,MAAM,YAAY,UAAU;AAChD,QAAI,OAAO,SAAS,MAAM,KAAK,KAAK,IAAI,KAAK,QAAQ;AACnD,YAAM,IAAI,oBAAoB,EAAE,QAAQ,YAAY,SAAS,WAAW,YAAY,WAAW,CAAC;AAAA,IAClG;AAAA,EACF;AAEA,QAAM,OAAO,eAAe,YAAY,IAAI;AAC5C,QAAM,MAAM,MAAM,UAAU,YAAY,IAAI;AAE5C,QAAM,OAAO,oBAAI,IAAuB;AACxC,aAAW,CAAC,UAAU,UAAU,KAAK,OAAO,QAAQ,YAAY,IAAI,GAAG;AACrE,UAAM,MAAM,MAAM,UAAU,YAAY,GAAG;AAC3C,SAAK,IAAI,UAAU,GAAG;AAAA,EACxB;AAEA,SAAO;AAAA,IACL,QAAQ,YAAY;AAAA,IACpB,aAAa,YAAY;AAAA,IACzB,MAAM,YAAY;AAAA,IAClB,aAAa,YAAY;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAI,YAAY,sBAAsB,UAAa,EAAE,kBAAkB,YAAY,kBAAkB;AAAA,IACrG,GAAI,YAAY,sBAAsB,UAAa,EAAE,kBAAkB,YAAY,kBAAkB;AAAA,EACvG;AACF;AAGA,eAAsB,mBACpB,SACA,OACA,QACA,YAC0B;AAC1B,QAAM,OAAO,aAAa;AAC1B,QAAM,MAAM,MAAM,UAAU,YAAY,IAAI;AAE5C,QAAM,cAA2B;AAAA,IAC/B,gBAAgB;AAAA,IAChB,SAAS;AAAA,IACT,cAAc;AAAA,IACd,MAAM;AAAA,IACN,aAAa,CAAC;AAAA,IACd,MAAM,CAAC;AAAA,IACP,MAAM,eAAe,IAAI;AAAA,IACzB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,YAAY;AAAA,EACd;AAEA,QAAM,iBAAiB,SAAS,OAAO,QAAQ,WAAW;AAE1D,SAAO;AAAA,IACL;AAAA,IACA,aAAa;AAAA,IACb,MAAM;AAAA,IACN,aAAa,CAAC;AAAA,IACd,MAAM,oBAAI,IAAI;AAAA,IACd;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAsB,MACpB,SACA,OACA,eACA,SACe;AACf,MAAI,CAAC,SAAS,cAAc,MAAM,QAAQ,IAAI,GAAG;AAC/C,UAAM,IAAI;AAAA,MACR,SAAS,cAAc,IAAI,wBAAwB,QAAQ,IAAI;AAAA,IACjE;AAAA,EACF;AAGA,QAAM,cAAc,mBAAmB,QAAQ,MAAM,QAAQ,WAAW;AAGxE,QAAM,UAAU,aAAa;AAC7B,QAAM,SAAS,MAAM,UAAU,QAAQ,YAAY,OAAO;AAG1D,QAAM,cAAsC,CAAC;AAC7C,aAAW,YAAY,OAAO,KAAK,WAAW,GAAG;AAC/C,UAAM,MAAM,cAAc,KAAK,IAAI,QAAQ;AAC3C,QAAI,KAAK;AACP,kBAAY,QAAQ,IAAI,MAAM,QAAQ,KAAK,MAAM;AAAA,IACnD;AAAA,EACF;AAGA,MAAI,QAAQ,SAAS,WAAW,QAAQ,SAAS,WAAW,QAAQ,SAAS,UAAU;AACrF,eAAW,CAAC,UAAU,GAAG,KAAK,cAAc,MAAM;AAChD,UAAI,EAAE,YAAY,cAAc;AAC9B,oBAAY,QAAQ,IAAI,MAAM,QAAQ,KAAK,MAAM;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AAgBA,aAAW,CAAC,UAAU,GAAG,KAAK,cAAc,MAAM;AAChD,QAAI,SAAS,WAAW,GAAG,KAAK,EAAE,YAAY,cAAc;AAC1D,kBAAY,QAAQ,IAAI,MAAM,QAAQ,KAAK,MAAM;AAAA,IACnD;AAAA,EACF;AAWA,aAAW,YAAY,OAAO,KAAK,WAAW,GAAG;AAC/C,QAAI,CAAC,cAAc,KAAK,IAAI,QAAQ,GAAG;AACrC,YAAM,IAAI,yBAAyB,QAAQ;AAAA,IAC7C;AAAA,EACF;AAEA,QAAM,cAA2B;AAAA,IAC/B,gBAAgB;AAAA,IAChB,SAAS,QAAQ;AAAA,IACjB,cAAc,QAAQ;AAAA,IACtB,MAAM,QAAQ;AAAA,IACd;AAAA,IACA,MAAM;AAAA,IACN,MAAM,eAAe,OAAO;AAAA,IAC5B,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,YAAY,cAAc;AAAA,IAC1B,GAAI,QAAQ,qBAAqB,UAAa,EAAE,mBAAmB,QAAQ,iBAAiB;AAAA,IAC5F,GAAI,QAAQ,qBAAqB,UAAa,EAAE,mBAAmB,QAAQ,iBAAiB;AAAA,EAC9F;AAEA,QAAM,iBAAiB,SAAS,OAAO,QAAQ,QAAQ,WAAW;AACpE;AAoBA,eAAe,qBACb,SACA,OACA,YACmB;AACnB,QAAM,aAAa,MAAM,QAAQ,KAAK,OAAO,UAAU;AAKvD,QAAM,mBAAmB,oBAAI,IAAsB;AACnD,aAAW,UAAU,YAAY;AAC/B,UAAM,MAAM,MAAM,QAAQ,IAAI,OAAO,YAAY,MAAM;AACvD,QAAI,CAAC,IAAK;AACV,UAAM,KAAK,KAAK,MAAM,IAAI,KAAK;AAC/B,QAAI,GAAG,SAAS,QAAS;AACzB,QAAI,GAAG,YAAY,WAAY;AAC/B,UAAM,OAAO,iBAAiB,IAAI,GAAG,UAAU,KAAK,CAAC;AAC
rD,SAAK,KAAK,GAAG,OAAO;AACpB,qBAAiB,IAAI,GAAG,YAAY,IAAI;AAAA,EAC1C;AAEA,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,QAAkB,CAAC;AACzB,QAAM,QAAkB,CAAC,GAAI,iBAAiB,IAAI,UAAU,KAAK,CAAC,CAAE;AACpE,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,OAAO,MAAM,IAAI;AACvB,QAAI,QAAQ,IAAI,IAAI,EAAG;AACvB,YAAQ,IAAI,IAAI;AAChB,UAAM,KAAK,IAAI;AACf,eAAW,cAAc,iBAAiB,IAAI,IAAI,KAAK,CAAC,GAAG;AACzD,UAAI,CAAC,QAAQ,IAAI,UAAU,EAAG,OAAM,KAAK,UAAU;AAAA,IACrD;AAAA,EACF;AACA,SAAO;AACT;AAGA,eAAsB,OACpB,SACA,OACA,eACA,SACe;AAEf,QAAM,iBAAiB,MAAM,QAAQ,IAAI,OAAO,YAAY,QAAQ,MAAM;AAC1E,MAAI,CAAC,gBAAgB;AACnB,UAAM,IAAI,cAAc,SAAS,QAAQ,MAAM,8BAA8B,KAAK,GAAG;AAAA,EACvF;AAEA,QAAM,gBAAgB,KAAK,MAAM,eAAe,KAAK;AAErD,MAAI,CAAC,UAAU,cAAc,MAAM,cAAc,IAAI,GAAG;AACtD,UAAM,IAAI;AAAA,MACR,SAAS,cAAc,IAAI,yBAAyB,cAAc,IAAI;AAAA,IACxE;AAAA,EACF;AAKA,QAAM,cAAc,QAAQ,WAAW;AACvC,QAAM,gBAA0B,CAAC,QAAQ,MAAM;AAC/C,QAAM,sBAAsB,IAAI,IAAI,OAAO,KAAK,cAAc,IAAI,CAAC;AAEnE,MAAI,cAAc,SAAS,SAAS;AAClC,UAAM,cAAc,MAAM,qBAAqB,SAAS,OAAO,QAAQ,MAAM;AAC7E,QAAI,YAAY,SAAS,GAAG;AAC1B,UAAI,gBAAgB,QAAQ;AAM1B,gBAAQ;AAAA,UACN,mBAAmB,QAAQ,MAAM,oCAC5B,YAAY,MAAM,kCAClB,YAAY,KAAK,IAAI,CAAC;AAAA,QAE7B;AAAA,MACF,OAAO;AAIL,mBAAW,UAAU,aAAa;AAChC,gBAAM,UAAU,MAAM,QAAQ,IAAI,OAAO,YAAY,MAAM;AAC3D,cAAI,CAAC,QAAS;AACd,gBAAM,SAAS,KAAK,MAAM,QAAQ,KAAK;AACvC,wBAAc,KAAK,MAAM;AACzB,qBAAW,KAAK,OAAO,KAAK,OAAO,IAAI,EAAG,qBAAoB,IAAI,CAAC;AAAA,QACrE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAKA,aAAW,UAAU,eAAe;AAClC,UAAM,QAAQ,OAAO,OAAO,YAAY,MAAM;AAAA,EAChD;AAOA,MAAI,QAAQ,eAAe,SAAS,oBAAoB,OAAO,GAAG;AAChE,UAAM,WAAW,SAAS,OAAO,eAAe,CAAC,GAAG,mBAAmB,CAAC;AAAA,EAC1E;AACF;AAUA,eAAsB,WACpB,SACA,OACA,eACA,aACe;AAEf,QAAM,UAAU,oBAAI,IAAuB;AAC3C,aAAW,YAAY,aAAa;AAClC,YAAQ,IAAI,UAAU,MAAM,YAAY,CAAC;AAAA,EAC3C;AAGA,aAAW,YAAY,aAAa;AAClC,UAAM,SAAS,cAAc,KAAK,IAAI,QAAQ;AAC9C,UAAM,SAAS,QAAQ,IAAI,QAAQ;AACnC,QAAI,CAAC,OAAQ;AAEb,UAAM,MAAM,MAAM,QAAQ,KAAK,OAAO,QAAQ;AAC9C,eAAW,MAAM,KAAK;AACpB,YAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,UAAU,EAAE;AACtD,UAAI,CAAC,YAAY,CAAC,SAAS,IAAK;AAGhC,YAAM,YAAY,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,MAAM;AAGpE,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,WAAW,MAAM;AACpD,YAAM,cAAiC;AAAA,QACrC,QAAQ;AAAA,QACR,IAAI,SAAS;AAAA,QACb,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,QAC5B,KAAK;AAAA,QACL,OAAO;AAAA,MACT;AACA,YAAM,QAAQ,IAAI,OAAO,UAAU,IAAI,WAAW;AAAA,IACpD;AAAA,EACF;AAGA,aAAW,CAAC,UAAU,MAAM,KAAK,SAAS;AACxC,kBAAc,KAAK,IAAI,UAAU,MAAM;AAAA,EACzC;AACA,QAAM,eAAe,SAAS,OAAO,aAAa;AAGlD,QAAM,UAAU,MAAM,QAAQ,KAAK,OAAO,UAAU;AACpD,aAAW,UAAU,SAAS;AAC5B,QAAI,WAAW,cAAc,OAAQ;AAErC,UAAM,eAAe,MAAM,QAAQ,IAAI,OAAO,YAAY,MAAM;AAChE,QAAI,CAAC,aAAc;AAEnB,UAAM,kBAAkB,KAAK,MAAM,aAAa,KAAK;AAyDrD,UAAM,cAAc,EAAE,GAAG,gBAAgB,KAAK;AAC9C,eAAW,YAAY,aAAa;AAClC,aAAO,YAAY,QAAQ;AAAA,IAC7B;AAEA,UAAM,qBAAqB,EAAE,GAAG,gBAAgB,YAAY;AAC5D,eAAW,YAAY,aAAa;AAClC,aAAO,mBAAmB,QAAQ;AAAA,IACpC;AAEA,UAAM,iBAA8B;AAAA,MAClC,GAAG;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAEA,UAAM,iBAAiB,SAAS,OAAO,QAAQ,cAAc;AAAA,EAC/D;AACF;AAKA,eAAsB,aACpB,SACA,OACA,SACA,eAC0B;AAC1B,QAAM,UAAU,aAAa;AAC7B,QAAM,SAAS,MAAM,UAAU,eAAe,OAAO;AAGrD,QAAM,cAAsC,CAAC;AAC7C,aAAW,CAAC,UAAU,GAAG,KAAK,QAAQ,MAAM;AAC1C,gBAAY,QAAQ,IAAI,MAAM,QAAQ,KAAK,MAAM;AAAA,EACnD;AAEA,QAAM,cAA2B;AAAA,IAC/B,gBAAgB;AAAA,IAChB,SAAS,QAAQ;AAAA,IACjB,cAAc,QAAQ;AAAA,IACtB,MAAM,QAAQ;AAAA,IACd,aAAa,QAAQ;AAAA,IACrB,MAAM;AAAA,IACN,MAAM,eAAe,OAAO;AAAA,IAC5B,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,YAAY,QAAQ;AAAA,EACtB;AAEA,QAAM,iBAAiB,SAAS,OAAO,QAAQ,QAAQ,WAAW;AAElE,SAAO;AAAA,IACL,QAAQ,QAAQ;AAAA,IAChB,aAAa,QAAQ;AAAA,IACrB,MAAM,QAAQ;AAAA,IACd,aAAa,QAAQ;AAAA,IACrB,MAAM,QAAQ;AAAA;AAAA,IACd,KAAK;AAAA,IACL,MAAM;AAAA,EACR;AACF;AA2DA,eAAsB,0BACpB,eACA,WACsB;AACtB,QAAM,OAAa,UAAU,QAAQ;AACrC,QAAM,cAAc,mBAAmB,MAAM
,UAAU,WAAW;AAElE,QAAM,UAAU,aAAa;AAC7B,QAAM,SAAS,MAAM,UAAU,UAAU,YAAY,OAAO;AAE5D,QAAM,cAAsC,CAAC;AAG7C,aAAW,YAAY,OAAO,KAAK,WAAW,GAAG;AAC/C,UAAM,MAAM,cAAc,KAAK,IAAI,QAAQ;AAC3C,QAAI,KAAK;AACP,kBAAY,QAAQ,IAAI,MAAM,QAAQ,KAAK,MAAM;AAAA,IACnD;AAAA,EACF;AAGA,MAAI,SAAS,WAAW,SAAS,WAAW,SAAS,UAAU;AAC7D,eAAW,CAAC,UAAU,GAAG,KAAK,cAAc,MAAM;AAChD,UAAI,EAAE,YAAY,cAAc;AAC9B,oBAAY,QAAQ,IAAI,MAAM,QAAQ,KAAK,MAAM;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AAIA,aAAW,CAAC,UAAU,GAAG,KAAK,cAAc,MAAM;AAChD,QAAI,SAAS,WAAW,GAAG,KAAK,EAAE,YAAY,cAAc;AAC1D,kBAAY,QAAQ,IAAI,MAAM,QAAQ,KAAK,MAAM;AAAA,IACnD;AAAA,EACF;AAIA,aAAW,YAAY,OAAO,KAAK,WAAW,GAAG;AAC/C,QAAI,CAAC,cAAc,KAAK,IAAI,QAAQ,GAAG;AACrC,YAAM,IAAI,yBAAyB,QAAQ;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AAAA,IACL,gBAAgB;AAAA,IAChB,SAAS,UAAU;AAAA,IACnB,cAAc,UAAU,eAAe,UAAU;AAAA,IACjD;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,MAAM,eAAe,OAAO;AAAA,IAC5B,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,YAAY,cAAc;AAAA,IAC1B,GAAI,UAAU,qBAAqB,SAC/B,EAAE,mBAAmB,UAAU,iBAAiB,IAChD,CAAC;AAAA,IACL,GAAI,UAAU,qBAAqB,SAC/B,EAAE,mBAAmB,UAAU,iBAAiB,IAChD,CAAC;AAAA,IACL,GAAI,UAAU,cAAc,SACxB,EAAE,YAAY,UAAU,UAAU,IAClC,CAAC;AAAA,EACP;AACF;AAKA,eAAsB,UACpB,SACA,OACqB;AACrB,QAAM,UAAU,MAAM,QAAQ,KAAK,OAAO,UAAU;AACpD,QAAM,QAAoB,CAAC;AAE3B,aAAW,UAAU,SAAS;AAC5B,UAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,YAAY,MAAM;AAC5D,QAAI,CAAC,SAAU;AACf,UAAM,KAAK,KAAK,MAAM,SAAS,KAAK;AACpC,UAAM,KAAK;AAAA,MACT,QAAQ,GAAG;AAAA,MACX,aAAa,GAAG;AAAA,MAChB,MAAM,GAAG;AAAA,MACT,aAAa,GAAG;AAAA,MAChB,WAAW,GAAG;AAAA,MACd,WAAW,GAAG;AAAA,IAChB,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,eAAsB,oBACpB,SACA,OACA,SACyD;AAOzD,QAAM,WAAW,oBAAI,IAAgC;AACrD,SAAO,OAAO,mBAA+C;AAC3D,UAAM,WAAW,QAAQ,KAAK,IAAI,cAAc;AAChD,QAAI,SAAU,QAAO;AACrB,UAAM,UAAU,SAAS,IAAI,cAAc;AAC3C,QAAI,QAAS,QAAO;AAEpB,UAAM,WAAW,YAAY;AAC3B,YAAM,MAAM,MAAM,YAAY;AAC9B,cAAQ,KAAK,IAAI,gBAAgB,GAAG;AACpC,YAAM,eAAe,SAAS,OAAO,OAAO;AAC5C,aAAO;AAAA,IACT,GAAG;AACH,aAAS,IAAI,gBAAgB,OAAO;AACpC,QAAI;AACF,aAAO,MAAM;AAAA,IACf,UAAE;AACA,eAAS,OAAO,cAAc;AAAA,IAChC;AAAA,EACF;AACF;AAKO,SAAS,mBAAmB,SAA0B,gBAAiC;AAC5F,MAAI,QAAQ,SAAS,WAAW,QAAQ,SAAS,QAAS,QAAO;AACjE,MAAI,QAAQ,SAAS,YAAY,QAAQ,SAAS,SAAU,QAAO;AACnE,SAAO,QAAQ,YAAY,cAAc,MAAM;AACjD;AAGO,SAAS,UAAU,SAA0B,gBAAiC;AACnF,MAAI,QAAQ,SAAS,WAAW,QAAQ,SAAS,WAAW,QAAQ,SAAS,SAAU,QAAO;AAC9F,SAAO,kBAAkB,QAAQ;AACnC;AAKA,eAAsB,eACpB,SACA,OACA,SACe;AACf,QAAM,cAAsC,CAAC;AAC7C,aAAW,CAAC,UAAU,GAAG,KAAK,QAAQ,MAAM;AAC1C,gBAAY,QAAQ,IAAI,MAAM,QAAQ,KAAK,QAAQ,GAAG;AAAA,EACxD;AAEA,QAAM,cAA2B;AAAA,IAC/B,gBAAgB;AAAA,IAChB,SAAS,QAAQ;AAAA,IACjB,cAAc,QAAQ;AAAA,IACtB,MAAM,QAAQ;AAAA,IACd,aAAa,QAAQ;AAAA,IACrB,MAAM;AAAA,IACN,MAAM,eAAe,QAAQ,IAAI;AAAA,IACjC,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,YAAY,QAAQ;AAAA,IACpB,GAAI,QAAQ,qBAAqB,UAAa,EAAE,mBAAmB,QAAQ,iBAAiB;AAAA,IAC5F,GAAI,QAAQ,qBAAqB,UAAa,EAAE,mBAAmB,QAAQ,iBAAiB;AAAA,EAC9F;AAEA,QAAM,iBAAiB,SAAS,OAAO,QAAQ,QAAQ,WAAW;AACpE;AAkBA,SAAS,wBAAwB,MAAqB;AACpD,SAAO,SAAS,WAAW,SAAS;AACtC;AA4BO,SAAS,oBACd,SACA,MACA,QACS;AACT,QAAM,MAAM,QAAQ;AACpB,MAAI,SAAS,aAAa;AACxB,UAAM,UAAU,KAAK,aAAa,CAAC;AACnC,WAAO,QAAQ,SAAS,GAAG,KAAM,WAAW,UAAa,QAAQ,SAAS,MAAM;AAAA,EAClF;AAEA,SAAO,KAAK,UAAU,wBAAwB,QAAQ,IAAI;AAC5D;AAkBO,SAAS,yBACd,YACA,MACA,MACA,QACS;AACT,MAAI,SAAS,aAAa;AACxB,UAAM,UAAU,YAAY,aAAa,CAAC;AAC1C,WAAO,QAAQ,SAAS,GAAG,KAAM,WAAW,UAAa,QAAQ,SAAS,MAAM;AAAA,EAClF;AACA,SAAO,YAAY,UAAU,wBAAwB,IAAI;AAC3D;AA0BO,SAAS,oBACd,SACA,MACA,QACS;AACT,QAAM,MAAM,QAAQ;AACpB,MAAI,SAAS,aAAa;AACxB,UAAM,UAAU,KAAK,aAAa,CAAC;AACnC,WAAO,QAAQ,SAAS,GAAG,KAAM,WAAW,UAAa,QAAQ,SAAS,MAAM;AAAA,EAClF;AAEA,SAAO,KAAK,WAAW;AACzB;AAoBO,SAAS,yBACd,YACA,OACA,MACA,QACS;AACT,MAAI,SAAS,aAAa;AACxB,UAAM,UAAU,YAAY,aAAa,CAAC;AAC1C,WAAO,QAAQ,SAAS,GAAG,KAAM,WAAW
,UAAa,QAAQ,SAAS,MAAM;AAAA,EAClF;AACA,SAAO,YAAY,WAAW;AAChC;AAEA,SAAS,mBAAmB,MAAY,UAAqC;AAC3E,MAAI,SAAS,WAAW,SAAS,WAAW,SAAS,SAAU,QAAO,CAAC;AACvE,SAAO,YAAY,CAAC;AACtB;AAEA,eAAe,iBACb,SACA,OACA,QACA,aACe;AACf,QAAM,WAAW;AAAA,IACf,QAAQ;AAAA,IACR,IAAI;AAAA,IACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,IAC5B,KAAK;AAAA,IACL,OAAO,KAAK,UAAU,WAAW;AAAA,EACnC;AACA,QAAM,QAAQ,IAAI,OAAO,YAAY,QAAQ,QAAQ;AACvD;;;AC77BA;;;ACNA;;;ACdA;;;ACoCA,IAAM,cAAc,IAAI;AAAA,EACtB;AAEF;AAUO,IAAM,UAAwB;AAAA,EACnC,mBAAmB;AAAE,UAAM;AAAA,EAAY;AAAA,EACvC,gBAAgB;AAAE,UAAM;AAAA,EAAY;AAAA,EACpC,kBAAkB;AAAE,UAAM;AAAA,EAAY;AAAA,EACtC,sBAAsB;AAAE,UAAM;AAAA,EAAY;AAC5C;;;ACgDA,SAAS,WAAW,IAAmB;AACrC,SAAO,IAAI;AAAA,IACT,GAAG,EAAE;AAAA,EAGP;AACF;AAQO,IAAM,UAAwB;AAAA,EACnC,gBAAgB,QAAQ;AAAE,WAAO;AAAA,EAAO;AAAA,EACxC,wBAAwB;AAAE,UAAM,WAAW,2BAA2B;AAAA,EAAE;AAAA,EACxE,wBAAwB;AAAE,UAAM,WAAW,oBAAoB;AAAA,EAAE;AACnE;;;AFlHA;AACA;AACA;;;AG2JA,SAASE,YAAW,IAAmB;AACrC,SAAO,IAAI;AAAA,IACT,GAAG,EAAE;AAAA,EAGP;AACF;AAUO,IAAM,aAA8B;AAAA,EACzC,MAAM,cAAc;AAAA,EAAC;AAAA,EACrB,MAAM,oBAAoB;AAAE,UAAMA,YAAW,sBAAsB;AAAA,EAAE;AAAA,EACrE,MAAM,qBAAqB;AAAE,UAAMA,YAAW,yBAAyB;AAAA,EAAE;AAAA,EACzE,MAAM,eAAe;AAAE,WAAO;AAAA,EAAE;AAAA,EAChC,MAAM,eAAe;AAAE,WAAO;AAAA,EAAE;AAAA,EAChC,MAAM,sBAAsB;AAAE,WAAO;AAAA,EAAG;AAAA,EACxC,eAAe;AAAE,WAAO,CAAC;AAAA,EAAE;AAAA,EAC3B,OAAO;AAAE,UAAMA,YAAW,mBAAmB;AAAA,EAAE;AAAA,EAC/C,cAAc;AAAE,WAAO;AAAA,EAAK;AAAA,EAC5B,oBAAoB;AAAE,UAAMA,YAAW,kCAAkC;AAAA,EAAE;AAC7E;;;ACtIO,SAAS,SAAS,QAAiB,MAAuB;AAC/D,MAAI,WAAW,QAAQ,WAAW,OAAW,QAAO;AACpD,MAAI,CAAC,KAAK,SAAS,GAAG,GAAG;AACvB,WAAQ,OAAmC,IAAI;AAAA,EACjD;AACA,QAAM,WAAW,KAAK,MAAM,GAAG;AAC/B,MAAI,SAAkB;AACtB,aAAW,WAAW,UAAU;AAC9B,QAAI,WAAW,QAAQ,WAAW,OAAW,QAAO;AACpD,aAAU,OAAmC,OAAO;AAAA,EACtD;AACA,SAAO;AACT;AAOO,SAAS,oBAAoB,QAAiB,QAA8B;AACjF,QAAM,SAAS,SAAS,QAAQ,OAAO,KAAK;AAC5C,QAAM,EAAE,IAAI,MAAM,IAAI;AAEtB,UAAQ,IAAI;AAAA,IACV,KAAK;AACH,aAAO,WAAW;AAAA,IACpB,KAAK;AACH,aAAO,WAAW;AAAA,IACpB,KAAK;AACH,aAAO,aAAa,QAAQ,KAAK,KAAM,SAAqB;AAAA,IAC9D,KAAK;AACH,aAAO,aAAa,QAAQ,KAAK,KAAM,UAAsB;AAAA,IAC/D,KAAK;AACH,aAAO,aAAa,QAAQ,KAAK,KAAM,SAAqB;AAAA,IAC9D,KAAK;AACH,aAAO,aAAa,QAAQ,KAAK,KAAM,UAAsB;AAAA,IAC/D,KAAK;AACH,aAAO,MAAM,QAAQ,KAAK,KAAK,MAAM,SAAS,MAAM;AAAA,IACtD,KAAK;AACH,UAAI,OAAO,WAAW,SAAU,QAAO,OAAO,UAAU,YAAY,OAAO,SAAS,KAAK;AACzF,UAAI,MAAM,QAAQ,MAAM,EAAG,QAAO,OAAO,SAAS,KAAK;AACvD,aAAO;AAAA,IACT,KAAK;AACH,aAAO,OAAO,WAAW,YAAY,OAAO,UAAU,YAAY,OAAO,WAAW,KAAK;AAAA,IAC3F,KAAK,WAAW;AACd,UAAI,CAAC,MAAM,QAAQ,KAAK,KAAK,MAAM,WAAW,EAAG,QAAO;AACxD,YAAM,CAAC,IAAI,EAAE,IAAI;AACjB,UAAI,CAAC,aAAa,QAAQ,EAAE,KAAK,CAAC,aAAa,QAAQ,EAAE,EAAG,QAAO;AACnE,aAAQ,UAAsB,MAAkB,UAAsB;AAAA,IACxE;AAAA,IACA,SAAS;AAEP,YAAM,cAAqB;AAC3B,WAAK;AACL,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAMA,SAAS,aAAa,GAAY,GAAqB;AACrD,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO;AAC3D,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO;AAC3D,MAAI,aAAa,QAAQ,aAAa,KAAM,QAAO;AACnD,SAAO;AACT;AAOO,SAAS,eAAe,QAAiB,QAAyB;AACvE,UAAQ,OAAO,MAAM;AAAA,IACnB,KAAK;AACH,aAAO,oBAAoB,QAAQ,MAAM;AAAA,IAC3C,KAAK;AACH,aAAO,OAAO,GAAG,MAAM;AAAA,IACzB,KAAK;AACH,UAAI,OAAO,OAAO,OAAO;AACvB,mBAAW,SAAS,OAAO,SAAS;AAClC,cAAI,CAAC,eAAe,QAAQ,KAAK,EAAG,QAAO;AAAA,QAC7C;AACA,eAAO;AAAA,MACT,OAAO;AACL,mBAAW,SAAS,OAAO,SAAS;AAClC,cAAI,eAAe,QAAQ,KAAK,EAAG,QAAO;AAAA,QAC5C;AACA,eAAO;AAAA,MACT;AAAA,EACJ;AACF;;;ACtGA;AAMO,IAAM,wBAAwB;AAQrC,IAAM,qBAAqB;AA4G3B,SAAS,aAAa,OAA+B;AACnD,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,OAAO,UAAU,SAAU,QAAO;AACtC,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,SAAU,QAAO,OAAO,KAAK;AAC/E,SAAO;AACT;AAOA,IAAM,qBAAqB,oBAAI,IAAY;AAC3C,SAAS,iBAAiB,OAAe,QAAgB,OAAqB;AAC5E,QAAM,MAAM,GAAG,KAAK,SAAI,MAAM,IAAI,KAAK;AACvC,MAAI,mBAAmB,IAAI,GAAG,EAAG;AACjC
,qBAAmB,IAAI,GAAG;AAC1B,UAAQ;AAAA,IACN,oEACY,KAAK,aAAQ,MAAM,IAAI,KAAK;AAAA,EAC1C;AACF;AAOA,IAAM,oBAAoB,oBAAI,IAAY;AAC1C,SAAS,uBACP,QACA,MACA,MACA,SACM;AACN,QAAM,MAAM,GAAG,MAAM,IAAI,IAAI;AAC7B,MAAI,kBAAkB,IAAI,GAAG,EAAG;AAChC,oBAAkB,IAAI,GAAG;AACzB,QAAM,MAAM,KAAK,MAAO,OAAO,UAAW,GAAG;AAC7C,UAAQ;AAAA,IACN,oBAAoB,IAAI,eAAe,GAAG,YAAY,OAAO,4BACpC,MAAM,MAAM,IAAI;AAAA,EAE3C;AACF;AA6BO,SAAS,WACd,MACA,OACA,SACW;AACX,MAAI,MAAM,WAAW,EAAG,QAAO,CAAC,GAAG,IAAI;AAEvC,MAAI,SAAoB,CAAC,GAAG,IAAI;AAChC,aAAW,OAAO,OAAO;AACvB,aAAS,aAAa,QAAQ,KAAK,OAAO;AAAA,EAC5C;AACA,SAAO;AACT;AAEA,SAAS,aACP,UACA,KACA,SACW;AAGX,MAAI,IAAI,YAAY;AAClB,UAAM,aAAa,QAAQ,oBAAoB,IAAI,KAAK;AACxD,QAAI,CAAC,YAAY;AACf,YAAM,IAAI;AAAA,QACR,kBAAkB,IAAI,KAAK,SAAS,QAAQ,cAAc;AAAA,MAG5D;AAAA,IACF;AACA,UAAM,MAAiB,CAAC;AACxB,UAAM,WAAW,WAAW,SAAS;AACrC,UAAM,UAAU,oBAAI,IAAqB;AACzC,eAAW,SAAS,UAAU;AAC5B,YAAM,IAAI,SAAS,OAAO,KAAK;AAC/B,UAAI,OAAO,MAAM,SAAU,SAAQ,IAAI,GAAG,KAAK;AAAA,IACjD;AACA,eAAW,QAAQ,UAAU;AAC3B,YAAM,QAAQ,SAAS,MAAM,IAAI,KAAK;AACtC,YAAM,MAAM,aAAa,KAAK;AAC9B,YAAM,YAAY,QAAQ,OAAO,SAAY,QAAQ,IAAI,GAAG;AAC5D,UAAI,KAAK,EAAE,GAAI,MAAkC,CAAC,IAAI,EAAE,GAAG,aAAa,KAAK,CAAC;AAAA,IAChF;AACA,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,QAAQ,cAAc,IAAI,MAAM;AAC/C,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI;AAAA,MACR,6CAA6C,IAAI,MAAM,6BAC1B,IAAI,KAAK,SAAS,QAAQ,cAAc;AAAA,IAGvE;AAAA,EACF;AAEA,QAAM,UAAU,IAAI,WAAW;AAO/B,MAAI,SAAS,SAAS,SAAS;AAC7B,UAAM,IAAI,kBAAkB;AAAA,MAC1B,UAAU,SAAS;AAAA,MACnB,WAAW;AAAA,MACX;AAAA,MACA,MAAM;AAAA,MACN,SACE,yBAAyB,SAAS,MAAM,wBAAwB,OAAO,4BAChD,IAAI,MAAM;AAAA,IAGrC,CAAC;AAAA,EACH;AACA,MAAI,SAAS,SAAS,UAAU,oBAAoB;AAClD,2BAAuB,IAAI,QAAQ,QAAQ,SAAS,QAAQ,OAAO;AAAA,EACrE;AAEA,QAAM,gBAAgB,OAAO,SAAS;AACtC,MAAI,cAAc,SAAS,SAAS;AAClC,UAAM,IAAI,kBAAkB;AAAA,MAC1B,UAAU,SAAS;AAAA,MACnB,WAAW,cAAc;AAAA,MACzB;AAAA,MACA,MAAM;AAAA,MACN,SACE,uBAAuB,IAAI,MAAM,SAAS,cAAc,MAAM,wBAC7C,OAAO;AAAA,IAE5B,CAAC;AAAA,EACH;AACA,MAAI,cAAc,SAAS,UAAU,oBAAoB;AACvD,2BAAuB,IAAI,QAAQ,SAAS,cAAc,QAAQ,OAAO;AAAA,EAC3E;AAKA,QAAM,WACJ,IAAI,aAAa,OAAO,aAAa,WAAW;AAElD,MAAI,aAAa,YAAY,OAAO,YAAY;AAI9C,UAAM,SAAS,CAAC,OAAwB,OAAO,aAAa,EAAE;AAC9D,WAAO,eAAe,UAAU,KAAK,MAAM;AAAA,EAC7C;AACA,SAAO,SAAS,UAAU,KAAK,aAAa;AAC9C;AAEA,SAAS,eACP,UACA,KACA,YACW;AACX,QAAM,MAAiB,CAAC;AACxB,aAAW,QAAQ,UAAU;AAC3B,UAAM,QAAQ,SAAS,MAAM,IAAI,KAAK;AACtC,UAAM,MAAM,aAAa,KAAK;AAC9B,UAAM,QAAQ,QAAQ,OAAO,SAAY,WAAW,GAAG;AACvD,QAAI,KAAK,WAAW,MAAM,KAAK,OAAO,KAAK,CAAC;AAAA,EAC9C;AACA,SAAO;AACT;AAEA,SAAS,SACP,UACA,KACA,eACW;AAIX,QAAM,WAAW,oBAAI,IAAqB;AAC1C,aAAW,UAAU,eAAe;AAClC,UAAM,QAAQ,SAAS,QAAQ,IAAI;AACnC,UAAM,MAAM,aAAa,KAAK;AAC9B,QAAI,QAAQ,MAAM;AAChB,eAAS,IAAI,KAAK,MAAM;AAAA,IAC1B;AAAA,EACF;AACA,QAAM,MAAiB,CAAC;AACxB,aAAW,QAAQ,UAAU;AAC3B,UAAM,QAAQ,SAAS,MAAM,IAAI,KAAK;AACtC,UAAM,MAAM,aAAa,KAAK;AAC9B,UAAM,QAAQ,QAAQ,OAAO,SAAY,SAAS,IAAI,GAAG;AACzD,QAAI,KAAK,WAAW,MAAM,KAAK,OAAO,KAAK,CAAC;AAAA,EAC9C;AACA,SAAO;AACT;AAgBA,SAAS,WACP,MACA,KACA,OACA,OACS;AACT,MAAI,SAAS,QAAQ,OAAO,SAAS,UAAU;AAI7C,WAAO;AAAA,EACT;AACA,QAAM,SAAkC,EAAE,GAAI,KAAiC;AAM/E,QAAM,SAAS,aAAa,KAAK;AACjC,MAAI,UAAU,QAAW;AACvB,QAAI,WAAW,QAAQ,IAAI,SAAS,UAAU;AAC5C,YAAM,IAAI,uBAAuB;AAAA,QAC/B,OAAO,IAAI;AAAA,QACX,QAAQ,IAAI;AAAA,QACZ,OAAO;AAAA,QACP,SACE,+CAA+C,IAAI,MAAM,IAAI,MAAM,gBACrD,IAAI,KAAK;AAAA,MAG3B,CAAC;AAAA,IACH;AACA,QAAI,WAAW,QAAQ,IAAI,SAAS,QAAQ;AAC1C,uBAAiB,IAAI,OAAO,IAAI,QAAQ,MAAM;AAAA,IAChD;AAIA,WAAO,IAAI,EAAE,IAAI;AAAA,EACnB,OAAO;AACL,WAAO,IAAI,EAAE,IAAI;AAAA,EACnB;AACA,SAAO;AACT;AAQO,SAAS,oBAA0B;AACxC,qBAAmB,MAAM;AACzB,oBAAkB,MAAM;AAC1B;;;ACjWO,SAAS,eACd,WACA,WACc;AACd,SAAO,IAAI,cAAiB,WAAW,SAAS;AAClD;AAEA,IAAM,gBAAN,MAA+C;AAAA,EAO7C,YACmB,WACjB,WACA;AAFiB;AAOjB,SAAK,QAAQ;AACb,eAAW,YAAY,WA
AW;AAChC,UAAI;AACF,aAAK,OAAO,KAAK,SAAS,UAAU,KAAK,gBAAgB,CAAC;AAAA,MAC5D,SAAS,KAAK;AAMZ,aAAK,SAAS,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAAA,MAClE;AAAA,IACF;AAAA,EACF;AAAA,EApBmB;AAAA,EAPX,SAAuB,CAAC;AAAA,EACxB,SAAuB;AAAA,EACd,YAAY,oBAAI,IAAgB;AAAA,EAChC,SAA4B,CAAC;AAAA,EACtC,UAAU;AAAA,EAyBlB,IAAI,QAAsB;AACxB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,QAAsB;AACxB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOiB,mBAAmB,MAAY;AAC9C,SAAK,QAAQ;AACb,eAAW,MAAM,KAAK,WAAW;AAC/B,UAAI;AACF,WAAG;AAAA,MACL,QAAQ;AAAA,MAGR;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,UAAgB;AACtB,QAAI,KAAK,QAAS;AAClB,QAAI;AACF,WAAK,SAAS,KAAK,UAAU;AAC7B,WAAK,SAAS;AAAA,IAChB,SAAS,KAAK;AACZ,WAAK,SAAS,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAAA,IAKlE;AAAA,EACF;AAAA,EAEA,UAAU,IAA4B;AACpC,QAAI,KAAK,QAAS,QAAO,MAAM;AAAA,IAAC;AAChC,SAAK,UAAU,IAAI,EAAE;AACrB,WAAO,MAAM,KAAK,UAAU,OAAO,EAAE;AAAA,EACvC;AAAA,EAEA,OAAa;AACX,QAAI,KAAK,QAAS;AAClB,SAAK,UAAU;AACf,eAAW,SAAS,KAAK,QAAQ;AAC/B,UAAI;AACF,cAAM;AAAA,MACR,QAAQ;AAAA,MAGR;AAAA,IACF;AACA,SAAK,OAAO,SAAS;AACrB,SAAK,UAAU,MAAM;AAAA,EACvB;AACF;;;AC7IA,IAAMC,eAAc,IAAI;AAAA,EACtB;AAGF;AAUO,IAAM,eAAkC;AAAA,EAC7C,YAAY;AAAE,UAAMA;AAAA,EAAY;AAAA,EAChC,UAAU;AAAE,UAAMA;AAAA,EAAY;AAAA,EAC9B,gBAAgB;AAAE,UAAMA;AAAA,EAAY;AACtC;;;ACvCA,IAAM,aAAwB;AAAA,EAC5B,SAAS,CAAC;AAAA,EACV,SAAS,CAAC;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO,CAAC;AACV;AA8CO,IAAM,QAAN,MAAM,OAAS;AAAA,EACH;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEjB,YACE,QACA,OAAkB,YAClB,aACA,oBAAuC,cACvC;AACA,SAAK,SAAS;AACd,SAAK,OAAO;AACZ,SAAK,cAAc;AACnB,SAAK,oBAAoB;AAAA,EAC3B;AAAA;AAAA,EAGA,MAAM,OAAe,IAAc,OAA0B;AAC3D,UAAM,SAAsB,EAAE,MAAM,SAAS,OAAO,IAAI,MAAM;AAC9D,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,EAAE,GAAG,KAAK,MAAM,SAAS,CAAC,GAAG,KAAK,KAAK,SAAS,MAAM,EAAE;AAAA,MACxD,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,GAAG,SAA8C;AAC/C,UAAM,MAAM;AAAA,MACV,IAAI,OAAS,KAAK,QAA0B,YAAY,KAAK,aAAa,KAAK,iBAAiB;AAAA,IAClG;AACA,UAAM,QAAqB;AAAA,MACzB,MAAM;AAAA,MACN,IAAI;AAAA,MACJ,SAAS,IAAI,KAAK;AAAA,IACpB;AACA,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,EAAE,GAAG,KAAK,MAAM,SAAS,CAAC,GAAG,KAAK,KAAK,SAAS,KAAK,EAAE;AAAA,MACvD,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,IAAI,SAA8C;AAChD,UAAM,MAAM;AAAA,MACV,IAAI,OAAS,KAAK,QAA0B,YAAY,KAAK,aAAa,KAAK,iBAAiB;AAAA,IAClG;AACA,UAAM,QAAqB;AAAA,MACzB,MAAM;AAAA,MACN,IAAI;AAAA,MACJ,SAAS,IAAI,KAAK;AAAA,IACpB;AACA,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,EAAE,GAAG,KAAK,MAAM,SAAS,CAAC,GAAG,KAAK,KAAK,SAAS,KAAK,EAAE;AAAA,MACvD,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA,EAGA,OAAO,IAAsC;AAC3C,UAAM,SAAuB;AAAA,MAC3B,MAAM;AAAA,MACN;AAAA,IACF;AACA,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,EAAE,GAAG,KAAK,MAAM,SAAS,CAAC,GAAG,KAAK,KAAK,SAAS,MAAM,EAAE;AAAA,MACxD,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA,EAGA,QAAQ,OAAe,YAA4B,OAAiB;AAClE,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,EAAE,GAAG,KAAK,MAAM,SAAS,CAAC,GAAG,KAAK,KAAK,SAAS,EAAE,OAAO,UAAU,CAAC,EAAE;AAAA,MACtE,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,GAAqB;AACzB,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,EAAE,GAAG,KAAK,MAAM,OAAO,EAAE;AAAA,MACzB,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA,EAGA,OAAO,GAAqB;AAC1B,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,EAAE,GAAG,KAAK,MAAM,QAAQ,EAAE;AAAA,MAC1B,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA4DA,KACE,OACA,MACiC;AACjC,QAAI,CAAC,KAAK,aA
Aa;AACrB,YAAM,IAAI;AAAA,QACR;AAAA,MAIF;AAAA,IACF;AACA,UAAM,aAAa,KAAK,YAAY,WAAW,KAAK;AAEpD,UAAM,kBAAkB,CAAC,cAAc,KAAK,YAAY,oBAAoB,KAAK,KAAK;AACtF,QAAI,CAAC,cAAc,CAAC,iBAAiB;AACnC,YAAM,IAAI;AAAA,QACR,8CAA8C,KAAK,oBAC7C,KAAK,YAAY,cAAc,kBACxB,KAAK;AAAA,MAEpB;AAAA,IACF;AACA,UAAM,MAAe,aACjB;AAAA,MACE;AAAA,MACA,IAAI,KAAK;AAAA,MACT,QAAQ,WAAW;AAAA,MACnB,MAAM,WAAW;AAAA,MACjB,UAAU,KAAK;AAAA,MACf,SAAS,KAAK;AAAA;AAAA,MAEd,gBAAgB;AAAA,IAClB,IACA;AAAA;AAAA,MAEE;AAAA,MACA,IAAI,KAAK;AAAA,MACT,QAAQ;AAAA;AAAA,MACR,MAAM;AAAA,MACN,UAAU,KAAK;AAAA,MACf,SAAS,KAAK;AAAA,MACd,gBAAgB;AAAA,MAChB,YAAY;AAAA,IACd;AACJ,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,EAAE,GAAG,KAAK,MAAM,OAAO,CAAC,GAAG,KAAK,KAAK,OAAO,GAAG,EAAE;AAAA,MACjD,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAe;AACb,UAAM,OAAO,sBAAsB,KAAK,QAAQ,KAAK,IAAI;AACzD,QAAI,KAAK,KAAK,MAAM,WAAW,EAAG,QAAO;AACzC,QAAI,CAAC,KAAK,aAAa;AAGrB,YAAM,IAAI;AAAA,QACR,iCAAiC,KAAK,KAAK,MAAM,MAAM;AAAA,MAIzD;AAAA,IACF;AACA,WAAO,WAAW,MAAM,KAAK,KAAK,OAAO,KAAK,WAAW;AAAA,EAC3D;AAAA;AAAA,EAGA,QAAkB;AAChB,UAAM,MAAM,KAAK,MAAM,CAAC,EAAE,QAAQ;AAClC,WAAO,IAAI,CAAC,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,QAAgB;AAId,UAAM,EAAE,YAAY,iBAAiB,IAAI,iBAAiB,KAAK,QAAQ,KAAK,KAAK,OAAO;AACxF,QAAI,iBAAiB,WAAW,EAAG,QAAO,WAAW;AACrD,WAAO,cAAc,YAAY,gBAAgB,EAAE;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA0CA,UACE,MACoC;AAMpC,UAAM,SAAS,KAAK;AACpB,UAAM,UAAU,KAAK,KAAK;AAC1B,UAAM,iBAAiB,MAA0B;AAC/C,YAAM,EAAE,YAAY,iBAAiB,IAAI,iBAAiB,QAAQ,OAAO;AACzE,aAAO,iBAAiB,WAAW,IAC/B,aACA,cAAc,YAAY,gBAAgB;AAAA,IAChD;AAKA,UAAM,YAAmC,CAAC;AAC1C,QAAI,OAAO,WAAW;AACpB,YAAM,YAAY,OAAO,UAAU,KAAK,MAAM;AAC9C,gBAAU,KAAK,EAAE,WAAW,CAAC,OAAmB,UAAU,EAAE,EAAE,CAAC;AAAA,IACjE;AAEA,WAAO,KAAK,kBAAkB,UAAgB,gBAAgB,MAAM,SAAS;AAAA,EAC/E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgDA,QAA0B,OAA8B;AAKtD,UAAM,SAAS,KAAK;AACpB,UAAM,UAAU,KAAK,KAAK;AAC1B,UAAM,iBAAiB,MAA0B;AAC/C,YAAM,EAAE,YAAY,iBAAiB,IAAI,iBAAiB,QAAQ,OAAO;AACzE,aAAO,iBAAiB,WAAW,IAC/B,aACA,cAAc,YAAY,gBAAgB;AAAA,IAChD;AAEA,UAAM,YAAmC,CAAC;AAC1C,QAAI,OAAO,WAAW;AACpB,YAAM,YAAY,OAAO,UAAU,KAAK,MAAM;AAC9C,gBAAU,KAAK,EAAE,WAAW,CAAC,OAAmB,UAAU,EAAE,EAAE,CAAC;AAAA,IACjE;AAGA,UAAM,UAAU,KAAK;AACrB,UAAM,oBAAoB,SAAS,qBAC9B,MAAM;AACL,YAAM,aAAa,QAAQ,kBAAkB,KAAK;AAClD,UAAI,CAAC,WAAY,QAAO;AACxB,YAAM,WAAW,WAAW,SAAS;AACrC,YAAM,UAAU,oBAAI,IAAoC;AACxD,iBAAW,SAAS,UAAU;AAC5B,cAAM,IAAK,MAAkC,KAAK;AAClD,cAAM,SAAU,MAAkC,QAAQ;AAC1D,YAAI,OAAO,MAAM,YAAY,UAAU,OAAO,WAAW,UAAU;AACjE,kBAAQ,IAAI,GAAG,MAAgC;AAAA,QACjD;AAAA,MACF;AACA,aAAO,OACL,KACA,QACA,aACgC;AAChC,cAAM,SAAS,QAAQ,IAAI,GAAG;AAC9B,YAAI,CAAC,OAAQ,QAAO;AACpB,YAAI,OAAO,MAAM,MAAM,OAAW,QAAO,OAAO,MAAM;AACtD,cAAM,QAAQ,MAAM,QAAQ,QAAQ,IAC/B,WACD,WACE,CAAC,QAAkB,IACnB,CAAC;AACP,mBAAW,MAAM,OAAO;AACtB,cAAI,OAAO,OAAO;AAChB,kBAAM,MAAM,OAAO,OAAO,MAAM,EAAE,CAAC;AACnC,gBAAI,QAAQ,OAAW,QAAO;AAAA,UAChC,WAAW,OAAO,EAAE,MAAM,QAAW;AACnC,mBAAO,OAAO,EAAE;AAAA,UAClB;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAAA,IACF,GAAG,IACH;AAEJ,WAAO,KAAK,kBAAkB,QAAc,gBAAgB,OAAO,WAAW,iBAAiB;AAAA,EACjG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,UAAU,IAAuC;AAC/C,QAAI,CAAC,KAAK,OAAO,WAAW;AAC1B,YAAM,IAAI,MAAM,uFAAuF;AAAA,IACzG;AACA,OAAG,KAAK,QAAQ,CAAC;AACjB,WAAO,KAAK,OAAO,UAAU,MAAM,GAAG
,KAAK,QAAQ,CAAC,CAAC;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoDA,OAAqB;AACnB,UAAM,YAA4B,CAAC;AAInC,QAAI,KAAK,OAAO,WAAW;AACzB,YAAM,gBAAgB,KAAK,OAAO,UAAU,KAAK,KAAK,MAAM;AAC5D,gBAAU,KAAK;AAAA,QACb,WAAW,CAAC,OAAmB,cAAc,EAAE;AAAA,MACjD,CAAC;AAAA,IACH;AAMA,QAAI,KAAK,KAAK,MAAM,SAAS,KAAK,KAAK,aAAa;AAClD,YAAM,aAAa,oBAAI,IAAY;AACnC,iBAAW,OAAO,KAAK,KAAK,OAAO;AACjC,YAAI,WAAW,IAAI,IAAI,MAAM,EAAG;AAChC,mBAAW,IAAI,IAAI,MAAM;AACzB,cAAM,cAAc,KAAK,YAAY,cAAc,IAAI,MAAM;AAC7D,YAAI,aAAa,WAAW;AAC1B,gBAAM,iBAAiB,YAAY,UAAU,KAAK,WAAW;AAC7D,oBAAU,KAAK;AAAA,YACb,WAAW,CAAC,OAAmB,eAAe,EAAE;AAAA,UAClD,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAIA,WAAO,eAAkB,MAAM,KAAK,QAAQ,GAAG,SAAS;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAkB;AAChB,WAAO,cAAc,KAAK,IAAI;AAAA,EAChC;AACF;AAOA,SAAS,sBAAsB,QAAwB,MAA4B;AACjF,QAAM,EAAE,YAAY,iBAAiB,IAAI,iBAAiB,QAAQ,KAAK,OAAO;AAK9E,MAAI,SAAS,iBAAiB,WAAW,IACrC,CAAC,GAAG,UAAU,IACd,cAAc,YAAY,gBAAgB;AAC9C,MAAI,KAAK,QAAQ,SAAS,GAAG;AAC3B,aAAS,YAAY,QAAQ,KAAK,OAAO;AAAA,EAC3C;AACA,MAAI,KAAK,SAAS,GAAG;AACnB,aAAS,OAAO,MAAM,KAAK,MAAM;AAAA,EACnC;AACA,MAAI,KAAK,UAAU,QAAW;AAC5B,aAAS,OAAO,MAAM,GAAG,KAAK,KAAK;AAAA,EACrC;AACA,SAAO;AACT;AAsBA,SAAS,iBAAiB,QAAwB,SAA6C;AAC7F,QAAM,UAAU,OAAO,aAAa;AACpC,MAAI,CAAC,WAAW,CAAC,OAAO,cAAc,QAAQ,WAAW,GAAG;AAC1D,WAAO,EAAE,YAAY,OAAO,SAAS,GAAG,kBAAkB,QAAQ;AAAA,EACpE;AAGA,QAAM,aAAa,CAAC,OAAwB,OAAO,aAAa,EAAE;AAElE,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,UAAM,SAAS,QAAQ,CAAC;AACxB,QAAI,OAAO,SAAS,QAAS;AAC7B,QAAI,CAAC,QAAQ,IAAI,OAAO,KAAK,EAAG;AAEhC,QAAI,MAAkC;AACtC,QAAI,OAAO,OAAO,MAAM;AACtB,YAAM,QAAQ,YAAY,OAAO,OAAO,OAAO,KAAK;AAAA,IACtD,WAAW,OAAO,OAAO,QAAQ,MAAM,QAAQ,OAAO,KAAK,GAAG;AAC5D,YAAM,QAAQ,SAAS,OAAO,OAAO,OAAO,KAAK;AAAA,IACnD;AAEA,QAAI,QAAQ,MAAM;AAGhB,YAAM,YAAsB,CAAC;AAC7B,eAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,YAAI,MAAM,EAAG,WAAU,KAAK,QAAQ,CAAC,CAAE;AAAA,MACzC;AACA,aAAO;AAAA,QACL,YAAY,eAAe,KAAK,UAAU;AAAA,QAC1C,kBAAkB;AAAA,MACpB;AAAA,IACF;AAAA,EAGF;AAGA,SAAO,EAAE,YAAY,OAAO,SAAS,GAAG,kBAAkB,QAAQ;AACpE;AAEA,SAAS,eACP,KACA,YACW;AACX,QAAM,MAAiB,CAAC;AACxB,aAAW,MAAM,KAAK;AACpB,UAAM,SAAS,WAAW,EAAE;AAC5B,QAAI,WAAW,OAAW,KAAI,KAAK,MAAM;AAAA,EAC3C;AACA,SAAO;AACT;AASO,SAAS,YAAY,SAA6B,MAA4B;AACnF,MAAI,SAAS,cAAc,SAAS,KAAK,OAAO;AAChD,MAAI,KAAK,QAAQ,SAAS,GAAG;AAC3B,aAAS,YAAY,QAAQ,KAAK,OAAO;AAAA,EAC3C;AACA,MAAI,KAAK,SAAS,GAAG;AACnB,aAAS,OAAO,MAAM,KAAK,MAAM;AAAA,EACnC;AACA,MAAI,KAAK,UAAU,QAAW;AAC5B,aAAS,OAAO,MAAM,GAAG,KAAK,KAAK;AAAA,EACrC;AACA,SAAO;AACT;AAEA,SAAS,cAAc,SAA6B,SAAuC;AACzF,MAAI,QAAQ,WAAW,EAAG,QAAO,CAAC,GAAG,OAAO;AAC5C,QAAM,MAAiB,CAAC;AACxB,aAAW,KAAK,SAAS;AACvB,QAAI,UAAU;AACd,eAAW,UAAU,SAAS;AAC5B,UAAI,CAAC,eAAe,GAAG,MAAM,GAAG;AAC9B,kBAAU;AACV;AAAA,MACF;AAAA,IACF;AACA,QAAI,QAAS,KAAI,KAAK,CAAC;AAAA,EACzB;AACA,SAAO;AACT;AAEA,SAAS,YAAY,SAAoB,SAAwC;AAE/E,SAAO,CAAC,GAAG,OAAO,EAAE,KAAK,CAAC,GAAG,MAAM;AACjC,eAAW,EAAE,OAAO,UAAU,KAAK,SAAS;AAC1C,YAAM,KAAK,UAAU,GAAG,KAAK;AAC7B,YAAM,KAAK,UAAU,GAAG,KAAK;AAC7B,YAAM,MAAM,cAAc,IAAI,EAAE;AAChC,UAAI,QAAQ,EAAG,QAAO,cAAc,QAAQ,MAAM,CAAC;AAAA,IACrD;AACA,WAAO;AAAA,EACT,CAAC;AACH;AAEA,SAAS,UAAU,QAAiB,OAAwB;AAC1D,MAAI,WAAW,QAAQ,WAAW,OAAW,QAAO;AACpD,MAAI,CAAC,MAAM,SAAS,GAAG,GAAG;AACxB,WAAQ,OAAmC,KAAK;AAAA,EAClD;AACA,QAAM,WAAW,MAAM,MAAM,GAAG;AAChC,MAAI,SAAkB;AACtB,aAAW,WAAW,UAAU;AAC9B,QAAI,WAAW,QAAQ,WAAW,OAAW,QAAO;AACpD,aAAU,OAAmC,OAAO;AAAA,EACtD;AACA,SAAO;AACT;AAEA,SAAS,cAAc,GAAY,GAAoB;AAErD,MAAI,MAAM,UAAa,MAAM,KAAM,QAAO,MAAM,UAAa,MAAM,O
AAO,IAAI;AAC9E,MAAI,MAAM,UAAa,MAAM,KAAM,QAAO;AAC1C,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI;AAC/D,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI;AACpF,MAAI,aAAa,QAAQ,aAAa,KAAM,QAAO,EAAE,QAAQ,IAAI,EAAE,QAAQ;AAG3E,SAAO;AACT;AAEA,SAAS,cAAc,MAA0B;AAC/C,SAAO;AAAA,IACL,SAAS,KAAK,QAAQ,IAAI,eAAe;AAAA,IACzC,SAAS,KAAK;AAAA,IACd,OAAO,KAAK;AAAA,IACZ,QAAQ,KAAK;AAAA,IACb,OAAO,KAAK;AAAA,EACd;AACF;AAEA,SAAS,gBAAgB,QAAyB;AAChD,MAAI,OAAO,SAAS,UAAU;AAC5B,WAAO,EAAE,MAAM,UAAU,IAAI,aAAa;AAAA,EAC5C;AACA,MAAI,OAAO,SAAS,SAAS;AAC3B,WAAO;AAAA,MACL,MAAM;AAAA,MACN,IAAI,OAAO;AAAA,MACX,SAAS,OAAO,QAAQ,IAAI,eAAe;AAAA,IAC7C;AAAA,EACF;AACA,SAAO;AACT;;;ACpyBO,IAAM,oBAAN,MAAwB;AAAA,EACZ,UAAU,oBAAI,IAAuB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMtD,QAAQ,OAAqB;AAC3B,QAAI,KAAK,QAAQ,IAAI,KAAK,EAAG;AAC7B,SAAK,QAAQ,IAAI,OAAO,EAAE,OAAO,SAAS,oBAAI,IAAI,EAAE,CAAC;AAAA,EACvD;AAAA;AAAA,EAGA,IAAI,OAAwB;AAC1B,WAAO,KAAK,QAAQ,IAAI,KAAK;AAAA,EAC/B;AAAA;AAAA,EAGA,SAAmB;AACjB,WAAO,CAAC,GAAG,KAAK,QAAQ,KAAK,CAAC;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAS,SAAyD;AAChE,eAAW,OAAO,KAAK,QAAQ,OAAO,GAAG;AACvC,UAAI,QAAQ,MAAM;AAClB,iBAAW,EAAE,IAAI,OAAO,KAAK,SAAS;AACpC,mBAAW,KAAK,IAAI,MAAM;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAU,IAAY,WAAc,gBAAgC;AAClE,QAAI,KAAK,QAAQ,SAAS,EAAG;AAC7B,QAAI,mBAAmB,MAAM;AAC3B,WAAK,OAAO,IAAI,cAAc;AAAA,IAChC;AACA,eAAW,OAAO,KAAK,QAAQ,OAAO,GAAG;AACvC,iBAAW,KAAK,IAAI,SAAS;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAU,IAAY,QAAiB;AACrC,QAAI,KAAK,QAAQ,SAAS,EAAG;AAC7B,eAAW,OAAO,KAAK,QAAQ,OAAO,GAAG;AACvC,sBAAgB,KAAK,IAAI,MAAM;AAAA,IACjC;AAAA,EACF;AAAA;AAAA,EAGA,QAAc;AACZ,eAAW,OAAO,KAAK,QAAQ,OAAO,GAAG;AACvC,UAAI,QAAQ,MAAM;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,YAAY,OAAe,OAA4C;AACrE,UAAM,MAAM,KAAK,QAAQ,IAAI,KAAK;AAClC,QAAI,CAAC,IAAK,QAAO;AACjB,UAAM,MAAM,aAAa,KAAK;AAC9B,WAAO,IAAI,QAAQ,IAAI,GAAG,KAAK;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,SAAS,OAAe,QAAwD;AAC9E,UAAM,MAAM,KAAK,QAAQ,IAAI,KAAK;AAClC,QAAI,CAAC,IAAK,QAAO;AACjB,UAAM,MAAM,oBAAI,IAAY;AAC5B,eAAW,SAAS,QAAQ;AAC1B,YAAM,MAAM,aAAa,KAAK;AAC9B,YAAM,SAAS,IAAI,QAAQ,IAAI,GAAG;AAClC,UAAI,QAAQ;AACV,mBAAW,MAAM,OAAQ,KAAI,IAAI,EAAE;AAAA,MACrC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;AAEA,IAAM,YAAiC,oBAAI,IAAI;AAW/C,SAAS,aAAa,OAAwB;AAC5C,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,OAAO,UAAU,SAAU,QAAO;AACtC,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAAW,QAAO,OAAO,KAAK;AAChF,MAAI,iBAAiB,KAAM,QAAO,MAAM,YAAY;AACpD,SAAO;AACT;AAEA,SAAS,WAAc,KAAgB,IAAY,QAAiB;AAClE,QAAM,QAAQ,SAAS,QAAQ,IAAI,KAAK;AACxC,MAAI,UAAU,QAAQ,UAAU,OAAW;AAC3C,QAAM,MAAM,aAAa,KAAK;AAC9B,MAAI,SAAS,IAAI,QAAQ,IAAI,GAAG;AAChC,MAAI,CAAC,QAAQ;AACX,aAAS,oBAAI,IAAI;AACjB,QAAI,QAAQ,IAAI,KAAK,MAAM;AAAA,EAC7B;AACA,SAAO,IAAI,EAAE;AACf;AAEA,SAAS,gBAAmB,KAAgB,IAAY,QAAiB;AACvE,QAAM,QAAQ,SAAS,QAAQ,IAAI,KAAK;AACxC,MAAI,UAAU,QAAQ,UAAU,OAAW;AAC3C,QAAM,MAAM,aAAa,KAAK;AAC9B,QAAM,SAAS,IAAI,QAAQ,IAAI,GAAG;AAClC,MAAI,CAAC,OAAQ;AACb,SAAO,OAAO,EAAE;AAEhB,MAAI,OAAO,SAAS,EAAG,KAAI,QAAQ,OAAO,GAAG;AAC/C;;;AC9GO,SAAS,MAAM,MAAgD;AAGpE,QAAM,QAAQ,MAAM;AACpB,OAAK;AACL,SAAO;AAAA,IACL,MAAM,MAAM;AAAA,IACZ,MAAM,CAAC,UAAU,QAAQ;AAAA,IACzB,QAAQ,CAAC,UAAU,QAAQ;AAAA,IAC3B,UAAU,CAAC,UAAU;AAAA,EACvB;AACF;AAQO,SAAS,IACd,OACA,MACiB;AACjB,QAAM,QAAQ,MAAM;AACpB,OAAK;AACL,SAAO;AAAA,IACL,MAAM,MAAM;AAAA,IACZ,MAAM,CAAC,OAAO,WAAW,QAAQ,WAAW,QAAQ,KAAK;AAAA,IACzD,QAAQ,CAAC,OAAO,WAAW,QAAQ,WAAW,QAAQ,KAAK;AAAA,IAC3D,UAAU,CAAC,UAAU;AAAA,EACvB;AACF;AAgBO,SAAS,IACd,OACA,MACwD;AACxD,QAAM,QAAQ,MAAM;AACpB,OAAK;AACL,SAAO;AAAA,IACL,MAAM,OAAO,EAAE,KAAK,GAAG,OAAO,EAAE;AAAA,IAChC,MAAM,CAAC,OAAO,YAAY;AAAA,MACxB,KAAK,MAAM,MAAM,WAAW,QA
AQ,KAAK;AAAA,MACzC,OAAO,MAAM,QAAQ;AAAA,IACvB;AAAA,IACA,QAAQ,CAAC,OAAO,YAAY;AAAA,MAC1B,KAAK,MAAM,MAAM,WAAW,QAAQ,KAAK;AAAA,MACzC,OAAO,MAAM,QAAQ;AAAA,IACvB;AAAA,IACA,UAAU,CAAC,UAAW,MAAM,UAAU,IAAI,OAAO,MAAM,MAAM,MAAM;AAAA,EACrE;AACF;AAcA,SAAS,UAAU,OAAoB,OAA4B;AACjE,SAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,QAAQ,KAAK,EAAE;AAC5C;AAEA,SAAS,YAAY,OAAoB,OAA4B;AAInE,QAAM,MAAM,MAAM,OAAO,QAAQ,KAAK;AACtC,MAAI,MAAM,EAAG,QAAO;AACpB,QAAM,OAAO,MAAM,OAAO,MAAM;AAChC,OAAK,OAAO,KAAK,CAAC;AAClB,SAAO,EAAE,QAAQ,KAAK;AACxB;AAcO,SAAS,IACd,OACA,MACqC;AACrC,QAAM,QAAQ,MAAM;AACpB,OAAK;AACL,SAAO;AAAA,IACL,MAAM,OAAO,EAAE,QAAQ,CAAC,EAAE;AAAA,IAC1B,MAAM,CAAC,OAAO,WAAW,UAAU,OAAO,WAAW,QAAQ,KAAK,CAAC;AAAA,IACnE,QAAQ,CAAC,OAAO,WAAW,YAAY,OAAO,WAAW,QAAQ,KAAK,CAAC;AAAA,IACvE,UAAU,CAAC,UAAU;AACnB,UAAI,MAAM,OAAO,WAAW,EAAG,QAAO;AACtC,UAAI,MAAM,MAAM,OAAO,CAAC;AACxB,eAAS,IAAI,GAAG,IAAI,MAAM,OAAO,QAAQ,KAAK;AAC5C,cAAM,IAAI,MAAM,OAAO,CAAC;AACxB,YAAI,IAAI,IAAK,OAAM;AAAA,MACrB;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAOO,SAAS,IACd,OACA,MACqC;AACrC,QAAM,QAAQ,MAAM;AACpB,OAAK;AACL,SAAO;AAAA,IACL,MAAM,OAAO,EAAE,QAAQ,CAAC,EAAE;AAAA,IAC1B,MAAM,CAAC,OAAO,WAAW,UAAU,OAAO,WAAW,QAAQ,KAAK,CAAC;AAAA,IACnE,QAAQ,CAAC,OAAO,WAAW,YAAY,OAAO,WAAW,QAAQ,KAAK,CAAC;AAAA,IACvE,UAAU,CAAC,UAAU;AACnB,UAAI,MAAM,OAAO,WAAW,EAAG,QAAO;AACtC,UAAI,MAAM,MAAM,OAAO,CAAC;AACxB,eAAS,IAAI,GAAG,IAAI,MAAM,OAAO,QAAQ,KAAK;AAC5C,cAAM,IAAI,MAAM,OAAO,CAAC;AACxB,YAAI,IAAI,IAAK,OAAM;AAAA,MACrB;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAaA,SAAS,WAAW,QAAiB,OAAuB;AAC1D,QAAM,QAAQ,SAAS,QAAQ,KAAK;AACpC,SAAO,OAAO,UAAU,YAAY,OAAO,SAAS,KAAK,IAAI,QAAQ;AACvE;;;ACjMO,SAAS,cACd,SACA,MACuB;AAEvB,QAAM,QAAiC,CAAC;AACxC,aAAW,OAAO,OAAO,KAAK,IAAI,GAAG;AACnC,UAAM,GAAG,IAAI,KAAK,GAAG,EAAG,KAAK;AAAA,EAC/B;AACA,aAAW,UAAU,SAAS;AAC5B,eAAW,OAAO,OAAO,KAAK,IAAI,GAAG;AACnC,YAAM,GAAG,IAAI,KAAK,GAAG,EAAG,KAAK,MAAM,GAAG,GAAG,MAAM;AAAA,IACjD;AAAA,EACF;AACA,QAAM,SAAkC,CAAC;AACzC,aAAW,OAAO,OAAO,KAAK,IAAI,GAAG;AACnC,WAAO,GAAG,IAAI,KAAK,GAAG,EAAG,SAAS,MAAM,GAAG,CAAC;AAAA,EAC9C;AACA,SAAO;AACT;AA4DA,IAAM,sBAAN,MAA2D;AAAA,EAOzD,YACmB,WACjB,WACA;AAFiB;AASjB,QAAI;AACF,WAAK,QAAQ,UAAU;AACvB,WAAK,QAAQ;AAAA,IACf,SAAS,KAAK;AACZ,WAAK,QAAQ;AACb,WAAK,QAAQ;AAAA,IACf;AAIA,eAAW,YAAY,WAAW;AAChC,YAAM,QAAQ,SAAS,UAAU,MAAM,KAAK,QAAQ,CAAC;AACrD,WAAK,aAAa,KAAK,KAAK;AAAA,IAC9B;AAAA,EACF;AAAA,EAvBmB;AAAA,EAPZ;AAAA,EACA;AAAA,EACU,YAAY,oBAAI,IAAgB;AAAA,EAChC,eAAkC,CAAC;AAAA,EAC5C,UAAU;AAAA,EA4BV,UAAgB;AACtB,QAAI,KAAK,QAAS;AAClB,QAAI;AACF,WAAK,QAAQ,KAAK,UAAU;AAC5B,WAAK,QAAQ;AAAA,IACf,SAAS,KAAK;AAIZ,WAAK,QAAQ;AAAA,IACf;AACA,eAAW,YAAY,KAAK,WAAW;AACrC,UAAI;AACF,iBAAS;AAAA,MACX,SAAS,KAAK;AAGZ,gBAAQ,KAAK,4CAA4C,GAAG;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAAA,EAEA,UAAU,IAA4B;AACpC,QAAI,KAAK,SAAS;AAGhB,aAAO,MAAM;AAAA,MAAC;AAAA,IAChB;AACA,SAAK,UAAU,IAAI,EAAE;AACrB,WAAO,MAAM;AACX,WAAK,UAAU,OAAO,EAAE;AAAA,IAC1B;AAAA,EACF;AAAA,EAEA,OAAa;AACX,QAAI,KAAK,QAAS;AAClB,SAAK,UAAU;AACf,eAAW,SAAS,KAAK,cAAc;AACrC,UAAI;AACF,cAAM;AAAA,MACR,SAAS,KAAK;AACZ,gBAAQ,KAAK,wDAAwD,GAAG;AAAA,MAC1E;AAAA,IACF;AACA,SAAK,aAAa,SAAS;AAC3B,SAAK,UAAU,MAAM;AAAA,EACvB;AACF;AAkBO,IAAM,cAAN,MAAqB;AAAA,EAC1B,YACmB,gBACA,MACA,WACjB;AAHiB;AACA;AACA;AAAA,EAChB;AAAA,EAHgB;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASnB,MAAS;AACP,WAAO,cAAc,KAAK,eAAe,GAAG,KAAK,IAAI;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBA,OAA2B;AACzB,UAAM,YAAY,MAChB,cAAc,KAAK,eAAe,GAAG,KAAK,IAAI;AAChD,WAAO,IAAI,oBAAuB,WAAW,KAAK,SAAS;AAAA,EAC7D;AACF;AAUO,SAAS,qBACd,WACA,WACoB;AACpB,SAAO,IAAI,oBAAuB,WAAW,SAAS;AACxD;;;ACvOA;AAQO,IAAM,2BAA2B;AACjC,IAAM,0BAA0B;AASvC,IAAM,0BAA0B,oBAAI,IAAY;AAC
hD,SAAS,2BAA2B,OAAe,UAAwB;AACzE,MAAI,wBAAwB,IAAI,KAAK,EAAG;AACxC,0BAAwB,IAAI,KAAK;AACjC,UAAQ;AAAA,IACN,sBAAsB,KAAK,eAAe,QAAQ,qBAC7C,KAAK,MAAO,WAAW,0BAA2B,GAAG,CAAC,YACtD,uBAAuB;AAAA,EAE9B;AACF;AAmCO,IAAM,eAAN,MAAwC;AAAA,EAC7C,YACmB,gBACA,OACA,WAKA,mBAKjB;AAZiB;AACA;AACA;AAKA;AAAA,EASnB;AAAA,EAhBmB;AAAA,EACA;AAAA,EACA;AAAA,EAKA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBnB,UACE,MAC0D;AAC1D,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AACF;AAaO,SAAS,eACd,SACA,OACA,MACK;AAGL,QAAM,UAAU,oBAAI,IAAwB;AAC5C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,SAAS,QAAQ,KAAK;AAClC,QAAI,SAAS,QAAQ,IAAI,GAAG;AAC5B,QAAI,WAAW,QAAW;AACxB,UAAI,QAAQ,QAAQ,yBAAyB;AAC3C,cAAM,IAAI;AAAA,UACR;AAAA,UACA,QAAQ,OAAO;AAAA,UACf;AAAA,QACF;AAAA,MACF;AACA,eAAS,CAAC;AACV,cAAQ,IAAI,KAAK,MAAM;AAAA,IACzB;AACA,WAAO,KAAK,MAAM;AAAA,EACpB;AAEA,MAAI,QAAQ,QAAQ,0BAA0B;AAC5C,+BAA2B,OAAO,QAAQ,IAAI;AAAA,EAChD;AAOA,QAAM,OAAO,OAAO,KAAK,IAAI;AAC7B,QAAM,MAAW,CAAC;AAClB,aAAW,CAAC,UAAU,aAAa,KAAK,SAAS;AAC/C,UAAM,QAAiC,CAAC;AACxC,eAAW,OAAO,MAAM;AACtB,YAAM,GAAG,IAAI,KAAK,GAAG,EAAG,KAAK;AAAA,IAC/B;AACA,eAAW,UAAU,eAAe;AAClC,iBAAW,OAAO,MAAM;AACtB,cAAM,GAAG,IAAI,KAAK,GAAG,EAAG,KAAK,MAAM,GAAG,GAAG,MAAM;AAAA,MACjD;AAAA,IACF;AACA,UAAM,MAA+B,EAAE,CAAC,KAAK,GAAG,SAAS;AACzD,eAAW,OAAO,MAAM;AACtB,UAAI,GAAG,IAAI,KAAK,GAAG,EAAG,SAAS,MAAM,GAAG,CAAC;AAAA,IAC3C;AACA,QAAI,KAAK,GAAmB;AAAA,EAC9B;AACA,SAAO;AACT;AAYO,IAAM,qBAAN,MAA4B;AAAA,EACjC,YACmB,gBACA,OACA,MACA,WAKA,mBAKjB;AAbiB;AACA;AACA;AACA;AAKA;AAAA,EAKhB;AAAA,EAbgB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAKA;AAAA;AAAA,EAQnB,MAAW;AACT,WAAO,eAAkB,KAAK,eAAe,GAAG,KAAK,OAAO,KAAK,IAAI;AAAA,EACvE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,SAAS,MAGE;AACf,UAAM,OAAO,eAAkB,KAAK,eAAe,GAAG,KAAK,OAAO,KAAK,IAAI;AAC3E,QAAI,CAAC,MAAM,UAAU,CAAC,KAAK,kBAAmB,QAAO;AAErD,UAAM,UAAU,KAAK;AACrB,UAAM,SAAS,KAAK;AACpB,UAAM,WAAW,KAAK;AACtB,UAAM,WAAW,GAAG,KAAK,KAAK;AAE9B,WAAO,QAAQ;AAAA,MACb,KAAK,IAAI,OAAO,QAAQ;AACtB,cAAM,MAAO,IAAgC,KAAK,KAAK;AACvD,YAAI,OAAO,QAAQ,SAAU,QAAO;AACpC,cAAM,QAAQ,MAAM,QAAQ,KAAK,QAAQ,QAAQ;AACjD,eAAO,EAAE,GAAI,KAAiC,CAAC,QAAQ,GAAG,MAAM;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBA,OAA6B;AAC3B,UAAM,YAAY,MAChB,eAAkB,KAAK,eAAe,GAAG,KAAK,OAAO,KAAK,IAAI;AAChE,WAAO,qBAA0B,WAAW,KAAK,SAAS;AAAA,EAC5D;AACF;;;AC9PA;AAgBA,IAAM,yBAAyB;AAYxB,IAAM,cAAN,MAAM,aAA2C;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA;AAAA,EAEjB,YACE,cACA,WAAmB,wBACnB,UAA6B,CAAC,GAC9B,QAA4B,CAAC,GAC7B,aACA;AACA,SAAK,eAAe;AACpB,SAAK,WAAW;AAChB,SAAK,UAAU;AACf,SAAK,QAAQ;AACb,SAAK,cAAc;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,OAAe,IAAc,OAAgC;AACjE,UAAM,SAAsB,EAAE,MAAM,SAAS,OAAO,IAAI,MAAM;AAC9D,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AAAA,MACL,CAAC,GAAG,KAAK,SAAS,MAAM;AAAA,MACxB,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,IAA4C;AACjD,UAAM,SAAiB;AAAA,MACrB,MAAM;AAAA,MACN;AAAA,IACF;AACA,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AAAA,MACL,CAAC,GAAG,KAAK,SAAS,MAAM;AAAA,MACxB,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgFA,KACE,OACA,MACuC;AACvC,QAAI,CAAC,KAAK,aAAa;AACrB,YAAM,IAAI;AAAA,QACR;AAAA,MAKF;AAAA,IACF;AACA,UAAM,aAAa,KAAK,YAAY,WAAW,KAAK;AACpD,QAAI,CAAC,YAAY;AACf,YAAM,IAAI;AAAA,QACR,oDAAoD,KAAK,oBACxC,KAAK,YAAY,cAAc,kBACnC,KAAK;AAAA,MAEpB;AAAA,IACF;AACA,UAAM,MAAe;AAAA,MACnB;AAAA,MACA,IAAI,KAAK;AAAA,MACT,QAAQ,WAAW;AAAA,MACnB,MAAM,WAAW;AAAA,MACjB,UAAU;AAAA,MACV,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,MAKT,gBAAgB;AAAA,IAClB;AACA,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,CAAC,GAAG,KAAK,OAAO,GAAG;AAAA,MACnB,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,QAAQ,OAAO,aAAa,IAAsB;AAWhD,UAAM,gBAAgB,KAAK,MAAM,WAAW,IAAI,OAAO,KAAK,mBAAmB;AAE/E,QAAI,OAAO,MAAM,KAAK,aAAa,SAAS,EAAE,OAAO,KAAK,SAAS,CAAC;AACpE,WAAO,MAAM;AACX,iBAAW,UAAU,KAAK,OAAO;AAC/B,YAAI,CAAC,KAAK,cAAc,MAAM,EAAG;AACjC,YAAI,kBAAkB,MAAM;AAC1B,gBAAM;AAAA,QACR,OAAO;AAKL,cAAI,WAAoB;AACxB,qBAAW,YAAY,eAAe;AACpC,uBAAW,KAAK,sBAAsB,UAAU,QAAQ;AAAA,UAC1D;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AACA,UAAI,KAAK,eAAe,KAAM;AAC9B,aAAO,MAAM,KAAK,aAAa,SAAS;AAAA,QACtC,QAAQ,KAAK;AAAA,QACb,OAAO,KAAK;AAAA,MACd,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBQ,qBAML;AACD,QAAI,CAAC,KAAK,aAAa;AAMrB,YAAM,IAAI;AAAA,QACR,yBAAyB,KAAK,MAAM,MAAM;AAAA,MAG5C;AAAA,IACF;AACA,UAAM,YAMD,CAAC;AACN,eAAW,OAAO,KAAK,OAAO;AAC5B,YAAM,SAAS,KAAK,YAAY,cAAc,IAAI,MAAM;AACxD,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI;AAAA,UACR,wDACM,IAAI,MAAM,6BAA6B,IAAI,KAAK,SAChD,KAAK,YAAY,cAAc;AAAA,QAGvC;AAAA,MACF;AAIA,UAAI,aAA+C;AACnD,UAAI,mBAAwD;AAC5D,UAAI,OAAO,YAAY;AAIrB,cAAM,KAAK,OAAO,WAAW,KAAK,MAAM;AACxC,qBAAa,CAAC,OAAwB,GAAG,EAAE;AAAA,MAC7C,OAAO;AACL,cAAM,MAAM,oBAAI,IAAqB;AACrC,mBAAW,UAAU,OAAO,SAAS,GAAG;AACtC,gBAAM,QAAQ,SAAS,QAAQ,IAAI;AACnC,gBAAM,MAAMC,cAAa,KAAK;AAC9B,cAAI,QAAQ,KAAM,KAAI,IAAI,KAAK,MAAM;AAAA,QACvC;AACA,2BAAmB;AAAA,MACrB;AACA,gBAAU,KAAK;AAAA,QACb;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,YAAY,oBAAI,IAAY;AAAA,MAC9B,CAAC;AAAA,IACH;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcQ,sBACN,MACA,UAOS;AACT,QAAI,SAAS,QAAQ,OAAO,SAAS,UAAU;AAE7C,aAAO;AAAA,IACT;AACA,UAAM,EAAE,IAAI,IAAI;AAChB,UAAM,QAAQ,SAAS,MAAM,IAAI,KAAK;AACtC,UAAM,SAASA,cAAa,KAAK;AACjC,QAAI,QAAiB;AACrB,QAAI,WAAW,MAAM;AACnB,UAAI,SAAS,eAAe,MAAM;AAChC,gBAAQ,SAAS,WAAW,MAAM;AAAA,MACpC,WAAW,SAAS,qBAAqB,MAAM;AAC7C,gBAAQ,SAAS,iBAAiB,IAAI,MAAM;AAAA,MAC9C;AAAA,IACF;AAEA,UAAM,SAAkC;AAAA,MACtC,GAAI;AAAA,IACN;AACA,QAAI,UAAU,QAAW;AAGvB,UAAI,WAAW,QAAQ,IAAI,SAAS,UAAU;AAC5C,cAAM,IAAI,uBAAuB;AAAA,UAC/B,OAAO,IAAI;AAAA,UACX,QAAQ,IAAI;AAAA,UACZ,OAAO;AAAA,UACP,SACE,0DACI,IAAI,MAAM,IAAI,MAAM,gBAAgB,IAAI,KAAK;AAAA,QAIrD,CAAC;AAAA,MACH;AACA,UAAI,WAAW,QAAQ,IAAI,SAAS,QAAQ;AAC1C,cAAM,WAAW,GAAG,IAAI,KAAK,SAAI,IAAI,MAAM,IAAI,MAAM;AACrD,YAAI,CAAC,SAAS,WAAW,IAAI,QAAQ,GAAG;AACtC,mBAAS,WAAW,IAAI,QAAQ;AAChC,kBAAQ;AAAA,YACN,+EACyB,IAAI,KAAK,aAAQ,IAAI,MAAM,IAC/C,MAAM;AAAA,UACb;AAAA,QACF;AAAA,MACF;AAGA,aAAO,IAAI,EAAE,IAAI;AAAA,IACnB,OAAO;AACL,aAAO,IAAI,EAAE,IAAI;AAAA,IACnB;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsCA,MAAM,UACJ,MACgC;AAChC,UAAM,OAAO,OAAO,KAAK,IAAI;AAG7B,UAAM,QAAiC,CAAC;AACxC,eAAW,OAAO,MAAM;AACtB,YAAM,GAAG,IAAI,KAAK,GAAG,EAAG,KAAK;AAAA,IAC/B;AAMA,qBAAiB,UAAU,MAAM;AAC/B,iBAAW,OAAO,MAAM;AACtB,cAAM,GAAG,IAAI,KAAK,GAAG,EAAG,KAAK,MAAM,GAAG,GAAG,MAAM;AAAA,MACjD;AAAA,IACF;AAEA,UAAM,SAAkC,CAAC;AACzC,eAAW,OAAO,MAAM;AACtB,aAAO,GAAG,IAAI,K
[generated source map data omitted: base64-VLQ "mappings" payload for the bundled dist output]
YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAU9B,oBAAoB,oBAAI,IAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ7C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,iBAAwC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcxC,cAAqC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAU5B,uBAAuB,oBAAI,IAG1C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASe,oBAAoB,oBAAI,IAGvC;AAAA;AAAA,EAGe,kBAAkB,oBAAI,IAA8B;AAAA;AAAA,EAGpD,gBAAgB,oBAAI,IAA2C;AAAA;AAAA,EAGxE,kBAKG;AAAA;AAAA;AAAA;AAAA;AAAA,EAMM;AAAA,EAIjB,YAAY,MAuCT;AACD,SAAK,UAAU,KAAK;AACpB,SAAK,OAAO,KAAK;AACjB,SAAK,QAAQ,KAAK;AAClB,SAAK,UAAU,KAAK;AACpB,SAAK,YAAY,KAAK;AACtB,SAAK,UAAU,KAAK;AACpB,SAAK,UAAU,KAAK;AACpB,SAAK,6BAA6B,KAAK;AACvC,SAAK,cAAc,KAAK;AACxB,SAAK,eAAe,KAAK;AACzB,SAAK,gBAAgB,KAAK;AAC1B,SAAK,oBAAoB,KAAK;AAC9B,SAAK,eAAe,KAAK;AACzB,SAAK,kBAAkB,KAAK,mBAAmB;AAC/C,SAAK,kBAAkB,KAAK,mBAAmB;AAC/C,SAAK,iBAAiB,KAAK,kBAAkB;AAC7C,SAAK,kBAAkB,KAAK,mBAAmB;AAC/C,SAAK,eAAe,KAAK,gBAAgB;AACzC,SAAK,eAAe,KAAK,gBAAgB;AACzC,SAAK,gBAAgB,KAAK,iBAAiB,EAAE,SAAS,KAAK;AAC3D,SAAK,gBAAgB,KAAK;AAC1B,SAAK,SAAS,KAAK;AACnB,SAAK,gBAAgB,KAAK;AAQ1B,SAAK,SAAS,KAAK,WAAW;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,aAA6D;AACnE,QAAI,WAAoE;AACxE,WAAO,OAAO,mBAA+C;AAC3D,UAAI,CAAC,UAAU;AACb,mBAAW,MAAM,oBAAoB,KAAK,SAAS,KAAK,MAAM,KAAK,OAAO;AAAA,MAC5E;AACA,aAAO,SAAS,cAAc;AAAA,IAChC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6BA,WAAc,gBAAwB,SAkCpB;AAEhB,QAAI,qBAAqB,cAAc,GAAG;AACxC,YAAM,IAAI,4BAA4B,cAAc;AAAA,IACtD;AAEA,QAAI,OAAO,KAAK,gBAAgB,IAAI,cAAc;AAClD,QAAI,CAAC,MAAM;AAKT,UAAI,SAAS,MAAM;AACjB,aAAK,YAAY,SAAS,gBAAgB,QAAQ,IAAI;AAAA,MACxD;AAGA,UAAI,SAAS,YAAY;AACvB,aAAK,kBAAkB,IAAI,gBAAgB,QAAQ,UAAU;AAAA,MAC/D;AAGA,UAAI,SAAS,YAAY;AACvB,aAAK,mBAAmB,IAAI,gBAAgB,QAAQ,UAAuC;AAAA,MAC7F;AAGA,UAAI,SAAS,eAAe;AAC1B,cAAM,eAAuC,CAAC;AAC9C,mBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,QAAQ,aAAa,GAAG;AACjE,uBAAa,KAAK,IAAI,KAAK;AAAA,QAC7B;AACA,aAAK,qBAAqB,IAAI,gBAAgB,YAAY;AAAA,MAC5D;AAEA,YAAM,WAA2D;AAAA,QAC/D,SAAS,KAAK;AAAA,QACd,OAAO,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,QACd,WAAW,KAAK;AAAA,QAChB,SAAS,KAAK;AAAA,QACd,QAAQ,KAAK;AAAA,QACb,SAAS,KAAK;AAAA,QACd,eAAe,KAAK;AAAA;AAAA;AAAA;AAAA,QAIpB,GAAI,KAAK,iBAAiB,SAAY,EAAE,cAAc,KAAK,aAAa,IAAI,CAAC;AAAA,QAC7E,GAAI,KAAK,kBAAkB,SAAY,EAAE,eAAe,KAAK,cAAc,IAAI,CAAC;AAAA,QAChF,GAAI,KAAK,sBAAsB,SAAY,EAAE,mBAAmB,KAAK,kBAAkB,IAAI,CAAC;AAAA,QAC5F,GAAI,KAAK,iBAAiB,SAAY,EAAE,cAAc,KAAK,aAAa,IAAI,CAAC;AAAA,QAC7E,iBAAiB,KAAK;AAAA,QACtB,cAAc,KAAK;AAAA,QACnB,cAAc,KAAK;AAAA,QACnB,QAAQ,KAAK,gBAAgB,KAAK;AAAA,QAClC,aAAa;AAAA,QACb,cAAc;AAAA,QACd,eAAe,KAAK;AAAA,QACpB,4BAA4B,KAAK;AAAA,QACjC,UAAU,CAAC,IAAI,OAAO,KAAK,YAAY,IAAI,gBAAgB,EAAE;AAAA,QAC7D,aAAa,CAAC,UAAU,aAAa,KAAK,kBAAkB,UAAU,QAAQ;AAAA,MAChF;AACA,UAAI,SAAS,YAAY,OAAW,UAAS,UAAU,QAAQ;AAC/D,UAAI,SAAS,oBAAoB,OAAW,UAAS,kBAAkB,QAAQ;AAC/E,UAAI,SAAS,aAAa,OAAW,UAAS,WAAW,QAAQ;AACjE,UAAI,SAAS,UAAU,OAAW,UAAS,QAAQ,QAAQ;AAC3D,UAAI,SAAS,WAAW,OAAW,UAAS,SAAS,QAAQ;AAC7D,UAAI,SAAS,mBAAmB,OAAW,UAAS,iBAAiB,QAAQ;AAC7E,UAAI,SAAS,SAAS,OAAW,UAAS,OAAO,QAAQ;AACzD,UAAI,SAAS,wBAAwB,QAAW;AAC9C,iBAAS,sBAAsB,QAAQ;AAAA,MACzC;AACA,UAAI,SAAS,iCAAiC,QAAW;AACvD,iBAAS,+BAA+B,QAAQ;AAAA,MAClD;AACA,UAAI,SAAS,UAAU,OAAW,UAAS,QAAQ,QAAQ;AAC3D,UAAI,SAAS,aAAa,OAAW,UAAS,WAAW,QAAQ;AACjE,eAAS,oBAAoB,CAAC,UAAU,KAAK,cAAc,KAAK;AAChE,UAAI,KAAK,gBAAgB,OAAW,UAAS,cAAc,KAAK;AAChE,UAAI,SAAS,eAAe,OAAW,UAAS,aAAa,QAAQ;AACrE,UAAI,SAAS,kBAAkB,QAAW;AAExC,iBAAS,oBAAoB,OAAO,UAAU,KAAK,QAAQ,aAAa;AACtE,gBAAM,SAAS,KAAK,WAAW,QAAQ;AACvC,iBAAO,OAAO,aAAa
,KAAK,QAAQ,QAAQ;AAAA,QAClD;AACA,iBAAS,gBAAgB,QAAQ;AAAA,MACnC;AAEA,UAAI,SAAS,eAAe,UAAa,SAAS,kBAAkB,QAAW;AAC7E,iBAAS,mBAAmB,CAAC,WAAoB;AAC/C,eAAK,iBAAiB,gBAAgB,MAAM;AAAA,QAC9C;AAAA,MACF;AAEA,UAAI,SAAS,eAAe,UAAa,KAAK,eAAe;AAC3D,iBAAS,oBAAoB,KAAK;AAAA,MACpC;AACA,aAAO,IAAI,WAAc,QAAQ;AACjC,WAAK,gBAAgB,IAAI,gBAAgB,IAAI;AAAA,IAC/C;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,gBAAwB,QAAuB;AAC9D,UAAM,aAAa,KAAK,kBAAkB,IAAI,cAAc;AAC5D,QAAI,CAAC,cAAc,OAAO,KAAK,UAAU,EAAE,WAAW,EAAG;AACzD,QAAI,CAAC,UAAU,OAAO,WAAW,SAAU;AAE3C,UAAM,MAAM;AACZ,eAAW,CAAC,OAAO,UAAU,KAAK,OAAO,QAAQ,UAAU,GAAG;AAC5D,YAAM,QAAQ,IAAI,KAAK;AACvB,UAAI,UAAU,UAAa,UAAU,KAAM;AAC3C,WAAK,aAAa,sBAAsB,OAAO,OAAO,UAAU;AAAA,IAClE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,YACJ,gBACA,QACA,YACkC;AAClC,UAAM,SAAS,WAAW,UAAU,KAAK;AACzC,QAAI,CAAC,OAAQ,QAAO;AAEpB,QAAI,SAAS;AAGb,UAAM,aAAa,KAAK,kBAAkB,IAAI,cAAc;AAC5D,QAAI,cAAc,OAAO,KAAK,UAAU,EAAE,SAAS,GAAG;AACpD,eAAS,KAAK,aAAa,gBAAgB,QAAQ,YAAY,QAAQ,WAAW,QAAQ;AAAA,IAC5F;AAGA,UAAM,aAAa,KAAK,qBAAqB,IAAI,cAAc;AAC/D,QAAI,cAAc,OAAO,KAAK,UAAU,EAAE,SAAS,KAAK,WAAW,OAAO;AACxE,YAAM,aAAa,EAAE,GAAG,OAAO;AAC/B,iBAAW,CAAC,OAAO,QAAQ,KAAK,OAAO,QAAQ,UAAU,GAAG;AAC1D,cAAM,MAAM,OAAO,KAAK;AACxB,YAAI,OAAO,QAAQ,SAAU;AAC7B,cAAM,SAAS,KAAK,WAAW,QAAQ;AACvC,cAAM,QAAQ,MAAM,OAAO,aAAa,KAAK,QAAQ,WAAW,QAAQ;AACxE,YAAI,UAAU,QAAW;AACvB,qBAAW,GAAG,KAAK,OAAO,IAAI;AAAA,QAChC;AAAA,MACF;AACA,eAAS;AAAA,IACX;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBA,WACE,MACA,UAA6B,CAAC,GACN;AACxB,QAAI,SAAS,KAAK,gBAAgB,IAAI,IAAI;AAC1C,QAAI,CAAC,QAAQ;AACX,eAAS,KAAK,aAAa,sBAA4B;AAAA,QACrD,SAAS,KAAK;AAAA,QACd,iBAAiB,KAAK;AAAA,QACtB,gBAAgB;AAAA,QAChB,SAAS,KAAK;AAAA,QACd,QAAQ,KAAK;AAAA,QACb,WAAW,KAAK;AAAA,QAChB,QAAQ,KAAK,gBAAgB,KAAK;AAAA,QAClC;AAAA;AAAA;AAAA,QAGA,yBAAyB,OAAO,gBAAgB,QAAQ,WAAW;AACjE,qBAAW,CAAC,gBAAgB,UAAU,KAAK,KAAK,sBAAsB;AAEpE,kBAAM,SAAS,OAAO,QAAQ,UAAU,EACrC,OAAO,CAAC,CAAC,EAAE,EAAE,MAAM,OAAO,cAAc,EACxC,IAAI,CAAC,CAAC,KAAK,MAAM,KAAK;AACzB,gBAAI,OAAO,WAAW,EAAG;AAEzB,kBAAM,OAAO,KAAK,WAAoC,cAAc;AACpE,kBAAM,UAAU,MAAM,KAAK,KAAK;AAChC,uBAAW,UAAU,SAAS;AAC5B,kBAAI,UAAU;AACd,oBAAM,UAAU,EAAE,GAAG,OAAO;AAC5B,yBAAW,SAAS,QAAQ;AAC1B,oBAAI,QAAQ,KAAK,MAAM,QAAQ;AAC7B,0BAAQ,KAAK,IAAI;AACjB,4BAAU;AAAA,gBACZ;AAAA,cACF;AACA,kBAAI,SAAS;AACX,sBAAM,KAAM,OAAO,IAAI;AACvB,oBAAI,OAAO,QAAW;AACpB,wBAAM,KAAK,IAAI,IAAI,OAAO;AAAA,gBAC5B;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA,SAAS,KAAK;AAAA,MAChB,CAAC;AACD,WAAK,gBAAgB,IAAI,MAAM,MAAM;AAAA,IACvC;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2BA,kBAAkB,gBAAwB,OAAsC;AAC9E,UAAM,aAAa,KAAK,qBAAqB,IAAI,cAAc;AAC/D,QAAI,CAAC,cAAc,EAAE,SAAS,YAAa,QAAO;AAClD,UAAM,WAAW,WAAW,KAAK;AACjC,QAAI,CAAC,SAAU,QAAO;AACtB,UAAM,SAAS,KAAK,WAAW,QAAQ;AACvC,WAAO;AAAA,MACL,WAA+B;AAC7B,eAAO,OAAO,gBAAgB;AAAA,MAChC;AAAA,MACA,WAAW,IAAqB;AAC9B,cAAM,UAAU,OAAO,gBAAgB;AACvC,eAAO,QAAQ,KAAK,CAAC,MAAM,EAAE,KAAK,MAAM,EAAE;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,UAAU,QAAkC;AAC1C,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA,EAGA,YAAgC;AAC9B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAI,SAAiB;AACnB,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,OAAa;AACf,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,MAAM,6BACJ,YACsC;AACtC,UAAM,SAAsC,CAAC;AAC7C,eAAW,aAAa,YAAY;AAClC,UAAI,UAAU,MAAM,QAAQ;AAC1B,cAAM,IAAI,MAAM,yDAAyD,UAAU,EAAE,GAAG;AAAA,MAC1F;AACA,a
AAO,UAAU,EAAE,IAAI,MAAM,0BAA0B,KAAK,SAAS,SAAS;AAAA,IAChF;AACA,WAAO;AAAA,EACT;AAAA,EAoBA,gBAAgB,MAA8B,QAA6B;AACzE,QAAI,SAAS,aAAa;AACxB,UAAI,WAAW,QAAW;AACxB,cAAM,IAAI,MAAM,yDAAyD;AAAA,MAC3E;AACA,UAAI,CAAC,oBAAoB,KAAK,SAAS,aAAa,MAAM,GAAG;AAC3D,cAAM,IAAI,sBAAsB;AAAA,UAC9B,MAAM;AAAA,UACN,QAAQ,KAAK,QAAQ;AAAA,UACrB;AAAA,QACF,CAAC;AAAA,MACH;AACA;AAAA,IACF;AACA,QAAI,CAAC,oBAAoB,KAAK,SAAS,QAAQ,GAAG;AAChD,YAAM,IAAI,sBAAsB;AAAA,QAC9B,MAAM;AAAA,QACN,QAAQ,KAAK,QAAQ;AAAA,MACvB,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAqBA,gBAAgB,MAA8B,QAA6B;AACzE,QAAI,SAAS,aAAa;AACxB,UAAI,WAAW,QAAW;AACxB,cAAM,IAAI,MAAM,yDAAyD;AAAA,MAC3E;AACA,UAAI,CAAC,oBAAoB,KAAK,SAAS,aAAa,MAAM,GAAG;AAC3D,cAAM,IAAI,sBAAsB;AAAA,UAC9B,MAAM;AAAA,UACN,QAAQ,KAAK,QAAQ;AAAA,UACrB;AAAA,QACF,CAAC;AAAA,MACH;AACA;AAAA,IACF;AACA,QAAI,CAAC,oBAAoB,KAAK,SAAS,QAAQ,GAAG;AAChD,YAAM,IAAI,sBAAsB;AAAA,QAC9B,MAAM;AAAA,QACN,QAAQ,KAAK,QAAQ;AAAA,MACvB,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,MAAM,QAAQ,UAA6B,CAAC,GAA8B;AACxE,WAAO,cAAc;AAAA,MACnB,SAAS,KAAK;AAAA,MACd,OAAO,KAAK;AAAA,MACZ,OAAO,KAAK,QAAQ;AAAA,MACpB,WAAW,KAAK;AAAA,MAChB,QAAQ,KAAK;AAAA,MACb,eAAe,CAAI,SAChB,KAAK,mBAAmB,IAAI,IAAI,KAAyC;AAAA,MAC5E,iBAAiB,MAAM,KAAK,YAAY;AAAA,MACxC,aAAa,CAAC,SAAiB,KAAK,QAAQ,KAAK,KAAK,MAAM,IAAI;AAAA,MAChE,WAAW,OAAU,MAAc,OAAe;AAChD,cAAM,OAAO,KAAK,WAAc,IAAI;AACpC,eAAO,KAAK,IAAI,EAAE;AAAA,MACpB;AAAA,MACA,WAAW,OAAO,MAAc,OAAe;AAC7C,cAAM,OAAO,KAAK,WAAW,IAAI;AACjC,eAAO,KAAK,KAAK,EAAE,EAAE,KAAK;AAAA,MAC5B;AAAA,MACA,YAAY,OAAO,MAAc,IAAY,aAAqB;AAChE,cAAM,OAAO,KAAK,WAAW,IAAI;AACjC,cAAM,KAAK,KAAK,EAAE,EAAE,OAAO,QAAQ;AAAA,MACrC;AAAA,IACF,GAAG,OAAO;AAAA,EACZ;AAAA,EAEA,YAAY,UAA8B,CAAC,GAAsB;AAC/D,SAAK,gBAAgB,aAAa,MAAM;AACxC,WAAO;AAAA,MACL,KAAK,QAAQ;AAAA,MACb,MAAM,KAAK,YAAY;AAAA,MACvB,CAAC,SAAS,KAAK,WAAW,IAAI;AAAA,MAC9B,CAAC,UAAU,KAAK,iBAAiB,KAAK;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,iBAAiB,OAA6C;AAC1E,UAAM,OAAO,KAAK,UAAU,KAAK;AACjC,UAAM,WAA8B,KAAK,YACrC,OAAO,YAAY;AACjB,YAAM,MAAM,MAAM,KAAK,OAAO,uBAAuB;AACrD,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,aAAO,EAAE,QAAQ,sBAAsB,IAAI,GAAG,KAAK,MAAM,WAAW,KAAK,IAAI,OAAO,MAAM,KAAK,MAAM,MAAM;AAAA,IAC7G,GAAG,IACH,EAAE,QAAQ,sBAAsB,IAAI,GAAG,KAAK,MAAM,WAAW,KAAK,IAAI,OAAO,MAAM,KAAK,MAAM,MAAM;AACxG,UAAM,KAAK,QAAQ,IAAI,KAAK,MAAM,yBAAyB,MAAM,IAAI,QAAQ;AAAA,EAC/E;AAAA,EASA,UAAU,MAA8B,QAAgC;AACtE,QAAI,SAAS,aAAa;AACxB,UAAI,WAAW,OAAW,QAAO;AACjC,aAAO,oBAAoB,KAAK,SAAS,aAAa,MAAM;AAAA,IAC9D;AACA,WAAO,oBAAoB,KAAK,SAAS,QAAQ;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,gCACJ,KACA,gBACkB;AAClB,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO,KAAK,MAAM,IAAI,KAAK;AAAA,IAC7B;AACA,UAAM,MAAM,MAAM,KAAK,OAAO,cAAc;AAC5C,UAAM,OAAO,MAAM,QAAQ,IAAI,KAAK,IAAI,OAAO,GAAG;AAClD,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB;AAAA,EASA,UAAU,MAA8B,QAAgC;AACtE,QAAI,SAAS,aAAa;AACxB,UAAI,WAAW,OAAW,QAAO;AACjC,aAAO,oBAAoB,KAAK,SAAS,aAAa,MAAM;AAAA,IAC9D;AACA,WAAO,oBAAoB,KAAK,SAAS,QAAQ;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,iBAAiB,gBAAwB,QAAgC;AAC7E,UAAM,WAAW,KAAK,YAAY,YAAY,cAAc;AAC5D,QAAI,OAAO,KAAK,QAAQ,EAAE,WAAW,EAAG;AACxC,QAAI,CAAC,UAAU,OAAO,WAAW,SAAU;AAC3C,UAAM,MAAM;AAEZ,eAAW,CAAC,OAAO,UAAU,KAAK,OAAO,QAAQ,QAAQ,GAAG;AAC1D,UAAI,WAAW,SAAS,SAAU;AAClC,YAAM,QAAQ,IAAI,KAAK;AAIvB,UAAI,UAAU,QAAQ,UAAU,OAAW;AAI3C,UAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAAU;AAC1D,cAAM,IAAI,kBAAkB;AAAA,UAC1B,YAAY;AAAA,UACZ,IAAK,IAAI,IAAI,KAA4B;AAAA,UACzC;AAAA,UACA,OAAO,WAAW;AAAA,UAClB,OAAO;AAAA,UACP,SACE,cAAc,cAAc,IAAI,KAA
K,qCAAqC,OAAO,KAAK;AAAA,QAC1F,CAAC;AAAA,MACH;AACA,YAAM,QAAQ,OAAO,KAAK;AAC1B,YAAM,SAAS,KAAK,WAAoC,WAAW,MAAM;AACzE,YAAM,SAAS,MAAM,OAAO,IAAI,KAAK;AACrC,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI,kBAAkB;AAAA,UAC1B,YAAY;AAAA,UACZ,IAAK,IAAI,IAAI,KAA4B;AAAA,UACzC;AAAA,UACA,OAAO,WAAW;AAAA,UAClB;AAAA,UACA,SACE,eAAe,cAAc,IAAI,KAAK,aAAQ,WAAW,MAAM,qCAC5B,KAAK,mBAAmB,WAAW,MAAM;AAAA,QAChF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,oBAAoB,gBAAwB,IAA2B;AAC3E,UAAM,MAAM,GAAG,cAAc,IAAI,EAAE;AACnC,QAAI,KAAK,kBAAkB,IAAI,GAAG,EAAG;AACrC,SAAK,kBAAkB,IAAI,GAAG;AAE9B,QAAI;AACF,YAAM,UAAU,KAAK,YAAY,WAAW,cAAc;AAC1D,iBAAW,QAAQ,SAAS;AAC1B,cAAM,iBAAiB,KAAK,WAAoC,KAAK,UAAU;AAI/E,cAAM,aAAa,MAAM,eAAe,KAAK;AAC7C,cAAM,UAAU,WAAW,OAAO,CAAC,QAAQ;AACzC,gBAAM,MAAM,IAAI,KAAK,KAAK;AAI1B,cAAI,OAAO,QAAQ,YAAY,OAAO,QAAQ,SAAU,QAAO;AAC/D,iBAAO,OAAO,GAAG,MAAM;AAAA,QACzB,CAAC;AACD,YAAI,QAAQ,WAAW,EAAG;AAE1B,YAAI,KAAK,SAAS,UAAU;AAC1B,gBAAM,QAAQ,QAAQ,CAAC;AACvB,gBAAM,IAAI,kBAAkB;AAAA,YAC1B,YAAY,KAAK;AAAA,YACjB,IAAK,QAAQ,IAAI,KAA4B;AAAA,YAC7C,OAAO,KAAK;AAAA,YACZ,OAAO;AAAA,YACP,OAAO;AAAA,YACP,SACE,kBAAkB,cAAc,MAAM,EAAE,MACrC,QAAQ,MAAM,kBAAkB,KAAK,UAAU,wCAAwC,KAAK,KAAK;AAAA,UACxG,CAAC;AAAA,QACH;AACA,YAAI,KAAK,SAAS,WAAW;AAC3B,qBAAW,SAAS,SAAS;AAC3B,kBAAM,UAAW,MAAM,IAAI,KAA4B;AACvD,gBAAI,YAAY,KAAM;AAGtB,kBAAM,eAAe,OAAO,OAAO;AAAA,UACrC;AAAA,QACF;AAAA,MAEF;AAAA,IACF,UAAE;AACA,WAAK,kBAAkB,OAAO,GAAG;AAAA,IACnC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,WAAW,gBAAwB,OAAqC;AACtE,UAAM,WAAW,KAAK,YAAY,YAAY,cAAc;AAC5D,WAAO,SAAS,KAAK,KAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,cAAc,gBAA+C;AAG3D,QAAI,eAAe,WAAW,GAAG,EAAG,QAAO;AAC3C,UAAM,OAAO,KAAK,gBAAgB,IAAI,cAAc;AACpD,QAAI,CAAC,KAAM,QAAO;AAMlB,WAAQ,KAEL,mBAAmB;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,iBAA0D;AAC9D,UAAM,aAA6B,CAAC;AACpC,eAAW,CAAC,gBAAgB,IAAI,KAAK,KAAK,YAAY,QAAQ,GAAG;AAC/D,YAAM,OAAO,KAAK,WAAoC,cAAc;AACpE,YAAM,UAAU,MAAM,KAAK,KAAK;AAChC,iBAAW,UAAU,SAAS;AAC5B,cAAM,QAAS,OAAO,IAAI,KAA4B;AACtD,mBAAW,CAAC,OAAO,UAAU,KAAK,OAAO,QAAQ,IAAI,GAAG;AACtD,gBAAM,QAAQ,OAAO,KAAK;AAC1B,cAAI,UAAU,QAAQ,UAAU,OAAW;AAK3C,cAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAAU;AAC1D,uBAAW,KAAK;AAAA,cACd,YAAY;AAAA,cACZ,IAAI;AAAA,cACJ;AAAA,cACA,OAAO,WAAW;AAAA,cAClB,OAAO;AAAA,cACP,MAAM,WAAW;AAAA,YACnB,CAAC;AACD;AAAA,UACF;AACA,gBAAM,QAAQ,OAAO,KAAK;AAC1B,gBAAM,SAAS,KAAK,WAAoC,WAAW,MAAM;AACzE,gBAAM,SAAS,MAAM,OAAO,IAAI,KAAK;AACrC,cAAI,CAAC,QAAQ;AACX,uBAAW,KAAK;AAAA,cACd,YAAY;AAAA,cACZ,IAAI;AAAA,cACJ;AAAA,cACA,OAAO,WAAW;AAAA,cAClB,OAAO;AAAA,cACP,MAAM,WAAW;AAAA,YACnB,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,WAAO,EAAE,WAAW;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,SAAsB;AACpB,UAAM,QAAQ,KAAK,gBAAgB;AACnC,QAAI,CAAC,OAAO;AACV,YAAM,IAAI;AAAA,QACR;AAAA,MAGF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,kBAAsC;AAC5C,QAAI,CAAC,KAAK,aAAa;AACrB,WAAK,cAAc,KAAK,gBAAgB,YAAY;AAAA,QAClD,SAAS,KAAK;AAAA,QACd,OAAO,KAAK;AAAA,QACZ,WAAW,KAAK;AAAA,QAChB,QAAQ,KAAK;AAAA,QACb,OAAO,KAAK,QAAQ;AAAA,MACtB,CAAC;AAAA,IACH;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,GAAG,WAAwC;AACzC,UAAM,MAAM,qBAAqB,OAAO,UAAU,YAAY,IAAI;AAClE,WAAO,KAAK,gBAAgB;AAAA,MAC1B;AAAA,QACE,SAAS,KAAK;AAAA,QACd,MAAM,KAAK;AAAA,QACX,WAAW,KAAK;AAAA,QAChB,QAAQ,KAAK;AAAA,QACb,WAAW,MAAO,KAAK,cAAc,YAAY,QAAQ,OAAO,KAAK,gBAAgB;AAAA,MACvF;AAAA,MACA;A
AAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBA,QAAoB;AAClB,WAAO,KAAK,eAAe,WAAW,IAAI;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyBA,MAAM,YAAe,KAAqB,IAAkC;AAC1E,UAAM,QAAQ,KAAK;AACnB,SAAK,iBAAiB;AACtB,QAAI;AACF,aAAO,MAAM,GAAG;AAAA,IAClB,UAAE;AACA,WAAK,iBAAiB;AAAA,IACxB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,aAAa,SAA6B,CAAC,GAAiC;AAChF,WAAO,KAAK,gBAAgB,KAAK,KAAK,SAAS,KAAK,MAAM,KAAK,WAAW,KAAK,QAAQ,MAAM;AAAA,EAC/F;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,YAAY,IAAe,YAAoB,UAAiC;AACpF,UAAM,MAAM,KAAK;AACjB,QAAI,CAAC,IAAK;AACV,UAAM,KAAK,gBAAgB;AAAA,MACzB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,QACE,OAAO,KAAK,QAAQ;AAAA,QACpB,SAAS,IAAI;AAAA,QACb,aAAa,IAAI;AAAA,QACjB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,kBACE,UACY;AACZ,SAAK,cAAc,IAAI,QAAQ;AAC/B,WAAO,MAAM,KAAK,cAAc,OAAO,QAAQ;AAAA,EACjD;AAAA,EAEQ,cAAc,OAAmC;AACvD,eAAW,OAAO,KAAK,eAAe;AACpC,UAAI;AACF,YAAI,KAAK;AAAA,MACX,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,SAAS,MAAwD;AACrE,UAAM,EAAE,iBAAAC,kBAAiB,wBAAAC,wBAAuB,IAAI,MAAM;AAQ1D,UAAM,YAAY,KAAK,QAAQ;AAC/B,UAAM,iBAAiB,MAAM,KAAK,OAAOA,uBAAsB;AAC/D,WAAOD;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,IAA2B;AAChD,UAAM,EAAE,kBAAAE,mBAAkB,wBAAAD,wBAAuB,IAAI,MAAM;AAC3D,UAAMC,kBAAiB,KAAK,SAAS,KAAK,MAAM,EAAE;AAElD,SAAKD;AAAA,EACP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6BA,MAAM,QACJ,MACA,SACyB;AACzB,QAAI,CAAC,OAAO,UAAU,IAAI,KAAK,QAAQ,GAAG;AACxC,YAAM,IAAI,gBAAgB,iDAAiD,IAAI,EAAE;AAAA,IACnF;AACA,QAAI,CAAC,WAAW,OAAO,QAAQ,WAAW,YAAY,QAAQ,OAAO,WAAW,GAAG;AACjF,YAAM,IAAI,gBAAgB,gDAAgD;AAAA,IAC5E;AACA,QAAI,OAAO,QAAQ,UAAU,YAAY,QAAQ,SAAS,GAAG;AAC3D,YAAM,IAAI,gBAAgB,0CAA0C;AAAA,IACtE;AACA,QAAI,KAAK,iBAAiB;AACxB,YAAM,IAAI,qBAAqB,KAAK,gBAAgB,IAAI;AAAA,IAC1D;AAIA,QAAI,KAAK,QAAQ,SAAS,WAAW,KAAK,QAAQ,SAAS,SAAS;AAClE,YAAM,SAAS,IAAI,IAAI;AACvB,UAAI,QAAQ;AACZ,iBAAW,KAAK,KAAK,QAAQ,KAAK,KAAK,GAAG;AACxC,YAAI,EAAE,SAAS,MAAM,GAAG;AAAE,kBAAQ;AAAM;AAAA,QAAM;AAAA,MAChD;AACA,UAAI,CAAC,OAAO;AAGV,cAAM,IAAI,oBAAoB,oBAAoB,IAAI;AAAA,MACxD;AAAA,IACF;AAEA,UAAM,YAAY,oBAAI,KAAK;AAC3B,UAAM,YAAY,UAAU,QAAQ,IAAI,QAAQ;AAChD,UAAM,SAAS,QAAQ;AAEvB,UAAM,SAAS,IAAI,eAAe;AAAA,MAChC,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA,WAAW,MAAM;AACf,YAAI,KAAK,mBAAmB,KAAK,gBAAgB,WAAW,QAAQ;AAClE,eAAK,kBAAkB;AAAA,QACzB;AAAA,MACF;AAAA,IACF,CAAC;AAED,SAAK,kBAAkB,EAAE,MAAM,WAAW,QAAQ,OAAO;AACzD,UAAM,KAAK,oBAAoB;AAAA,MAC7B,OAAO,KAAK,QAAQ;AAAA,MACpB;AAAA,MACA;AAAA,MACA,OAAO,QAAQ;AAAA,MACf,WAAW,UAAU,YAAY;AAAA,MACjC,WAAW,IAAI,KAAK,SAAS,EAAE,YAAY;AAAA,IAC7C,CAAC;AACD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,aACJ,gBACA,IACA,QACA,MACA,QACe;AACf,UAAM,OAAO,KAAK,WAAc,cAAc;AAC9C,UAAM,KAAK,UAAU,IAAI,QAAQ,MAAM;AAAA,MACrC,WAAW,EAAE,QAAQ,UAAU,EAAE;AAAA,IACnC,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,oBAAoB,OAOhB;AAChB,UAAM,KAAK,QAAQ,KAAK,IAAI,EAAE,SAAS,EAAE,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,MAAM,GAAG,EAAE,CAAC;AACrF,UAAM,OAAO,KAAK,UAAU,EAAE,IAAI,GAAG,MAAM,CAAC;AAC5C,UAAM,WAA8B,KAAK,YACrC,OAAO,YAAY;AACjB,YAAM,MAAM,MAAM,KAAK,OAAO,0BAA0B;AACxD,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,IAAI;AAAA,QACJ,KAAK,MAAM;AAAA,QACX,KAAK;AAAA,QACL,OAA
O;AAAA,QACP,KAAK,MAAM;AAAA,MACb;AAAA,IACF,GAAG,IACH;AAAA,MACE,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,KAAK,MAAM;AAAA,MACX,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK,MAAM;AAAA,IACb;AACJ,UAAM,KAAK,QAAQ,IAAI,KAAK,MAAM,4BAA4B,IAAI,QAAQ;AAAA,EAC5E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,oBACJ,YACA,UACA,UACA,MAC+B;AAC/B,WAAO;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,MAAM,YAAY,SAAoD;AACpE,UAAM,WAAW,MAAM,KAAK,kBAAkB;AAC9C,SAAK,gBAAgB,mBAAmB,QAAQ,MAAM,QAAQ;AAC9D,QAAI,OAAO,QAAQ,YAAY,YAAY,QAAQ,QAAQ,WAAW,GAAG;AACvE,YAAM,IAAI,gBAAgB,sDAAsD;AAAA,IAClF;AACA,UAAM,SAAS,MAAM,KAAK,gBAAgB,YAAY,QAAQ;AAC9D,UAAM,SAAuB;AAAA,MAC3B,MAAM,QAAQ;AAAA,MACd,MAAM;AAAA,MACN,SAAS,QAAQ;AAAA,MACjB,WAAU,oBAAI,KAAK,GAAE,YAAY;AAAA,MACjC,UAAU,KAAK,QAAQ;AAAA,MACvB,iBAAiB,OAAO;AAAA,MACxB,GAAI,OAAO,oBAAoB,UAAa,EAAE,iBAAiB,OAAO,gBAAgB;AAAA,MACtF,GAAI,QAAQ,cAAc,UAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,IACxE;AACA,UAAM,WAAW,MAAM,KAAK,mBAAmB,MAAM;AACrD,UAAM,KAAK,gBAAgB,wBAAwB,KAAK,gBAAgB,GAAG,KAAK,QAAQ,QAAQ,UAAU,OAAO,IAAI;AACrH,aAAS,KAAK,MAAM;AACpB,SAAK,cAAc;AACnB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,MAAM,WACJ,SACuB;AACvB,UAAM,WAAW,MAAM,KAAK,kBAAkB;AAC9C,SAAK,gBAAgB,mBAAmB,QAAQ,MAAM,QAAQ;AAC9D,UAAM,QAAQ,SAAS,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ,UAAU;AAChE,QAAI,CAAC,OAAO;AACV,YAAM,IAAI;AAAA,QACR,2BAA2B,QAAQ,UAAU;AAAA,MAC/C;AAAA,IACF;AACA,QAAI,MAAM,SAAS,UAAU;AAC3B,YAAM,IAAI;AAAA,QACR,2BAA2B,QAAQ,UAAU,iBAAiB,MAAM,IAAI;AAAA,MAC1E;AAAA,IACF;AAOA,UAAM,MAAM;AAAA,MACV,cAAc,MAAM;AAAA,MACpB,YAAY,CAAc,SAAiB;AACzC,cAAM,IAAI,KAAK,WAAc,IAAI;AACjC,eAAO;AAAA,UACL,KAAK,CAAC,OAAe,EAAE,IAAI,EAAE;AAAA,UAC7B,MAAM,MAAM,EAAE,KAAK;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AACA,UAAM,WAAW,MAAM,QAAQ,aAAa,GAAG;AAM/C,UAAM,qBAA+B,CAAC;AACtC,eAAW,CAAC,UAAU,OAAO,KAAK,OAAO,QAAQ,QAAQ,GAAG;AAC1D,UAAI,CAAC,WAAW,OAAO,YAAY,SAAU;AAC7C,YAAM,gBAAgB,OAAO,QAAQ,OAAO;AAC5C,UAAI,cAAc,WAAW,EAAG;AAChC,YAAM,OAAO,KAAK,WAAW,QAAQ;AACrC,iBAAW,CAAC,IAAIE,OAAM,KAAK,eAAe;AAExC,cAAM,KAAK,IAAI,IAAIA,OAAa;AAAA,MAClC;AACA,yBAAmB,KAAK,QAAQ;AAAA,IAClC;AAEA,UAAM,SAAS,MAAM,KAAK,gBAAgB,YAAY,QAAQ;AAC9D,UAAM,SAAuB;AAAA,MAC3B,MAAM,QAAQ;AAAA,MACd,MAAM;AAAA,MACN,WAAW,QAAQ;AAAA,MACnB,SAAS,MAAM;AAAA;AAAA,MACf,WAAU,oBAAI,KAAK,GAAE,YAAY;AAAA,MACjC,UAAU,KAAK,QAAQ;AAAA,MACvB,iBAAiB,OAAO;AAAA,MACxB,iBAAiB,OAAO,mBAAmB,MAAM;AAAA,MACjD,GAAI,mBAAmB,SAAS,KAAK,EAAE,mBAAmB;AAAA,IAC5D;AACA,UAAM,WAAW,MAAM,KAAK,mBAAmB,MAAM;AACrD,UAAM,KAAK,gBAAgB,wBAAwB,KAAK,gBAAgB,GAAG,KAAK,QAAQ,QAAQ,UAAU,OAAO,IAAI;AACrH,aAAS,KAAK,MAAM;AACpB,SAAK,cAAc;AACnB,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,cAAgD;AACpD,WAAO,CAAC,GAAI,MAAM,KAAK,kBAAkB,CAAE;AAAA,EAC7C;AAAA;AAAA,EAGA,MAAM,UAAU,MAA4C;AAC1D,UAAM,MAAM,MAAM,KAAK,kBAAkB;AACzC,WAAO,IAAI,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI,KAAK;AAAA,EAC7C;AAAA;AAAA,EAGA,MAAM,kBACJ,UACA,UACe;AAGf,QAAI,aAAa,QAAQ,aAAa,KAAM;AAC5C,QAAI,KAAK,gBAAgB,MAAM;AAC7B,WAAK,cAAc,MAAM,KAAK,gBAAgB;AAAA,QAC5C,KAAK;AAAA,QACL,KAAK;AAAA,QACL,CAAC,QAAQ,KAAK,qBAAqB,GAAG;AAAA,MACxC;AAAA,IACF;AACA,QAAI,KAAK,YAAY,WAAW,EAAG;AACnC,SAAK,gBAAgB,iBAAiB,UAAU,UAAU,KAAK,WAAW;AAAA,EAC5E;AAAA,EAEA,MAAc,oBAA6C;AACzD,QAAI,KAAK,gBAAgB,KAAM,QAAO,KAAK;AAC3C,UAAM,SAAS,MAAM,KAAK,gBAAgB;AAAA,MACxC,KAAK;AAAA,MACL,KAAK;AAAA,MACL,CAAC,QAA2B,KAAK,qBAAqB,GAAG;AAAA,IAC3D;AACA,SAAK,cAAc;AACnB,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,mBAAmB,QAAkD;AACjF,UAAM,OAAO,KAAK,UAAU,MAAM;AAClC,QAAI;AACJ,QAAI,KAAK,WAAW;AAClB,YAAM,MAAM,MAAM,KAAK,OAAO,kBAAkB;AAChD,YAAM,EAAE,IAAI,KAAK,IA
AI,MAAM,QAAQ,MAAM,GAAG;AAC5C,iBAAW;AAAA,QACT,QAAQ;AAAA,QACR,IAAI;AAAA,QACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,QAC5B,KAAK;AAAA,QACL,OAAO;AAAA,QACP,KAAK,KAAK,QAAQ;AAAA,MACpB;AAAA,IACF,OAAO;AACL,iBAAW;AAAA,QACT,QAAQ;AAAA,QACR,IAAI;AAAA,QACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,QAC5B,KAAK;AAAA,QACL,OAAO;AAAA,QACP,KAAK,KAAK,QAAQ;AAAA,MACpB;AAAA,IACF;AACA,UAAM,KAAK,QAAQ,IAAI,KAAK,MAAM,oBAAoB,OAAO,MAAM,QAAQ;AAC3E,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,qBAAqB,UAAoD;AACrF,QAAI;AACJ,QAAI,KAAK,WAAW;AAClB,YAAM,MAAM,MAAM,KAAK,OAAO,kBAAkB;AAChD,aAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAAA,IACxD,OAAO;AACL,aAAO,SAAS;AAAA,IAClB;AACA,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB;AAAA;AAAA,EAGA,MAAM,cAAiC;AACrC,UAAM,WAAW,MAAM,KAAK,QAAQ,QAAQ,KAAK,IAAI;AACrD,WAAO,OAAO,KAAK,QAAQ;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqCA,MAAM,kBAAmC;AACvC,UAAM,WAAW,MAAM,KAAK,QAAQ,IAAI,KAAK,MAAM,SAAS,QAAQ;AACpE,QAAI,UAAU;AACZ,UAAI;AACF,cAAM,SAAS,KAAK,MAAM,SAAS,KAAK;AACxC,YAAI,WAAW,QAAQ,OAAO,WAAW,YAAY,YAAY,QAAQ;AACvE,gBAAMC,UAAU,OAA+B;AAC/C,cAAI,OAAOA,YAAW,YAAY,2BAA2B,KAAKA,OAAM,GAAG;AACzE,mBAAOA;AAAA,UACT;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAIR;AAAA,IACF;AAOA,UAAM,EAAE,cAAAC,cAAa,IAAI,MAAM;AAC/B,UAAM,SAASA,cAAa;AAC5B,UAAM,WAA8B;AAAA,MAClC,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,MAC5B,KAAK;AAAA,MACL,OAAO,KAAK,UAAU,EAAE,OAAO,CAAC;AAAA,IAClC;AACA,UAAM,KAAK,QAAQ,IAAI,KAAK,MAAM,SAAS,UAAU,QAAQ;AAC7D,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,OAAwB;AAC5B,UAAM,WAAW,MAAM,KAAK,QAAQ,QAAQ,KAAK,IAAI;AAIrD,UAAM,aAAa,MAAM,KAAK,QAAQ,KAAK,KAAK,MAAM,UAAU;AAChE,UAAM,WAAoC,CAAC;AAC3C,eAAW,aAAa,YAAY;AAClC,YAAM,WAAW,MAAM,KAAK,QAAQ,IAAI,KAAK,MAAM,YAAY,SAAS;AACxE,UAAI,UAAU;AACZ,iBAAS,SAAS,IAAI,KAAK,MAAM,SAAS,KAAK;AAAA,MACjD;AAAA,IACF;AAMA,UAAM,mBAAkC,CAAC;AACzC,eAAW,gBAAgB,CAAC,mBAAmB,wBAAwB,GAAG;AACxE,YAAM,MAAM,MAAM,KAAK,QAAQ,KAAK,KAAK,MAAM,YAAY;AAC3D,UAAI,IAAI,WAAW,EAAG;AACtB,YAAM,UAA6C,CAAC;AACpD,iBAAW,MAAM,KAAK;AACpB,cAAM,WAAW,MAAM,KAAK,QAAQ,IAAI,KAAK,MAAM,cAAc,EAAE;AACnE,YAAI,SAAU,SAAQ,EAAE,IAAI;AAAA,MAC9B;AACA,uBAAiB,YAAY,IAAI;AAAA,IACnC;AAQA,UAAM,gBAAgB,KAAK,gBAAgB;AAC3C,UAAM,OAAO,gBAAgB,MAAM,cAAc,KAAK,IAAI;AAC1D,UAAM,SAAsB;AAAA,MAC1B,eAAe;AAAA,MACf,cAAc,KAAK;AAAA,MACnB,eAAc,oBAAI,KAAK,GAAE,YAAY;AAAA,MACrC,cAAc,KAAK,QAAQ;AAAA,MAC3B;AAAA,MACA,aAAa;AAAA,MACb,GAAI,OAAO,KAAK,gBAAgB,EAAE,SAAS,IACvC,EAAE,WAAW,iBAAiB,IAC9B,CAAC;AAAA,MACL,GAAI,OACA;AAAA,QACE,YAAY;AAAA,UACV,MAAM,KAAK;AAAA,UACX,OAAO,KAAK,MAAM;AAAA,UAClB,IAAI,KAAK,MAAM;AAAA,QACjB;AAAA,MACF,IACA,CAAC;AAAA,IACP;AAEA,WAAO,KAAK,UAAU,MAAM;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBA,MAAM,KAAK,YAAmC;AAC5C,UAAM,SAAS,KAAK,MAAM,UAAU;AAGpC,UAAM,KAAK,QAAQ,QAAQ,KAAK,MAAM,OAAO,WAAW;AAGxD,eAAW,CAAC,QAAQ,WAAW,KAAK,OAAO,QAAQ,OAAO,QAAQ,GAAG;AACnE,YAAM,WAAW;AAAA,QACf,QAAQ;AAAA,QACR,IAAI;AAAA,QACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,QAC5B,KAAK;AAAA,QACL,OAAO,KAAK,UAAU,WAAW;AAAA,MACnC;AACA,YAAM,KAAK,QAAQ,IAAI,KAAK,MAAM,YAAY,QAAQ,QAAQ;AAAA,IAChE;AAIA,QAAI,OAAO,WAAW;AACpB,iBAAW,CAAC,cAAc,OAAO,KAAK,OAAO,QAAQ,OAAO,SAAS,GAAG;AACtE,mBAAW,CAAC,IAAI,QAAQ,KAAK,OAAO,QAAQ,OAAO,GAAG;AACpD,gBAAM,KAAK,QAAQ,IAAI,KAAK,MAAM,cAAc,IAAI,QAAQ;AAAA,QAC9D;AAAA,MACF;AAAA,IACF;AAUA,QAAI,KAAK,eAAe;AACtB,WAAK,UAAU,MAAM,KAAK,cAAc;AAIxC,WAAK,SAAS,KAAK,WAAW;AAAA,IAChC;AAKA,SAAK,gBAAgB,MAAM;AAC3B,SAAK,cAAc;AAKnB,QAAI,CAAC,OAAO,YAAY;AACtB,cAAQ;AAAA,QACN;AAAA,MAGF;AACA;AAAA,IACF;AAEA,UAA
M,SAAS,MAAM,KAAK,sBAAsB;AAChD,QAAI,CAAC,OAAO,IAAI;AAGd,UAAI,OAAO,SAAS,QAAQ;AAC1B,cAAM,IAAI;AAAA,UACR,OAAO;AAAA,UACP,OAAO;AAAA,UACP,OAAO;AAAA,QACT;AAAA,MACF;AACA,YAAM,IAAI,kBAAkB,OAAO,SAAS,OAAO,UAAU;AAAA,IAC/D;AAKA,QAAI,OAAO,SAAS,OAAO,WAAW,MAAM;AAC1C,YAAM,IAAI;AAAA,QACR,0CAA0C,OAAO,WAAW,IAAI,wBAC1C,OAAO,IAAI;AAAA,MACnC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BA,MAAM,wBAeJ;AAIA,UAAM,kBAAkB,KAAK,gBAAgB;AAC7C,QAAI,CAAC,iBAAiB;AACpB,aAAO,EAAE,IAAI,MAAM,MAAM,IAAI,QAAQ,EAAE;AAAA,IACzC;AACA,UAAM,cAAc,MAAM,gBAAgB,OAAO;AACjD,QAAI,CAAC,YAAY,IAAI;AACnB,aAAO;AAAA,QACL,IAAI;AAAA,QACJ,MAAM;AAAA,QACN,YAAY,YAAY;AAAA,QACxB,SACE,kCAAkC,YAAY,UAAU,wBAClC,YAAY,QAAQ,gBAAgB,YAAY,MAAM;AAAA,MAChF;AAAA,IACF;AASA,UAAM,aAAa,MAAM,gBAAgB,eAAe;AAKxD,UAAM,OAAO,oBAAI,IAAY;AAC7B,UAAM,SAAS,oBAAI,IAGjB;AACF,aAAS,IAAI,WAAW,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,YAAM,QAAQ,WAAW,CAAC;AAC1B,UAAI,CAAC,MAAO;AACZ,YAAM,MAAM,GAAG,MAAM,UAAU,IAAI,MAAM,EAAE;AAC3C,UAAI,KAAK,IAAI,GAAG,EAAG;AACnB,WAAK,IAAI,GAAG;AAGZ,UAAI,MAAM,OAAO,SAAU;AAC3B,aAAO,IAAI,KAAK;AAAA,QACd,YAAY,MAAM;AAAA,QAClB,IAAI,MAAM;AAAA,QACV,cAAc,MAAM;AAAA,MACtB,CAAC;AAAA,IACH;AAEA,eAAW,EAAE,YAAY,IAAI,aAAa,KAAK,OAAO,OAAO,GAAG;AAC9D,YAAM,WAAW,MAAM,KAAK,QAAQ,IAAI,KAAK,MAAM,YAAY,EAAE;AACjE,UAAI,CAAC,UAAU;AACb,eAAO;AAAA,UACL,IAAI;AAAA,UACJ,MAAM;AAAA,UACN;AAAA,UACA;AAAA,UACA,SACE,+BAA+B,UAAU,IAAI,EAAE;AAAA,QAEnD;AAAA,MACF;AACA,YAAM,aAAa,MAAMC,WAAU,SAAS,KAAK;AACjD,UAAI,eAAe,cAAc;AAC/B,eAAO;AAAA,UACL,IAAI;AAAA,UACJ,MAAM;AAAA,UACN;AAAA,UACA;AAAA,UACA,SACE,kBAAkB,UAAU,IAAI,EAAE,mDACT,YAAY,WAAW,UAAU;AAAA,QAC9D;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,MAAM,YAAY;AAAA,MAClB,QAAQ,YAAY;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgEA,OAAO,aAAa,OAA4B,CAAC,GAAuC;AACtF,UAAM,cAAc,KAAK,eAAe;AAKxC,UAAM,WAAW,MAAM,KAAK,QAAQ,QAAQ,KAAK,IAAI;AACrD,UAAM,kBAAkB,OAAO,KAAK,QAAQ,EAAE,KAAK;AAQnD,UAAM,aAAa,KAAK,iBACpB,OAAO,YAAY;AACjB,YAAM,SAAS,KAAK,gBAAgB;AACpC,UAAI,CAAC,OAAQ,QAAO;AACpB,YAAM,OAAO,MAAM,OAAO,KAAK;AAC/B,aAAO,OACH,EAAE,MAAM,KAAK,MAAM,OAAO,KAAK,MAAM,OAAO,IAAI,KAAK,MAAM,GAAG,IAC9D;AAAA,IACN,GAAG,IACH;AAOJ,UAAM,oBAAoB,oBAAI,IAG5B;AACF,eAAW,kBAAkB,iBAAiB;AAC5C,YAAM,aAAa,KAAK,qBAAqB,IAAI,cAAc;AAC/D,UAAI,cAAc,OAAO,KAAK,UAAU,EAAE,SAAS,GAAG;AACpD,cAAM,OAA+D,CAAC;AACtE,mBAAW,CAAC,WAAW,QAAQ,KAAK,OAAO,QAAQ,UAAU,GAAG;AAC9D,gBAAM,UAAU,MAAM,KAAK,WAAW,QAAQ,EAAE,KAAK;AACrD,gBAAM,SAAiD,CAAC;AACxD,qBAAW,SAAS,SAAS;AAC3B,mBAAO,MAAM,GAAG,IAAI,MAAM;AAAA,UAC5B;AACA,eAAK,SAAS,IAAI;AAAA,QACpB;AACA,0BAAkB,IAAI,gBAAgB,IAAI;AAAA,MAC5C;AAAA,IACF;AAEA,eAAW,kBAAkB,iBAAiB;AAI5C,UAAI,CAAC,UAAU,KAAK,SAAS,cAAc,EAAG;AAE9C,YAAM,OAAO,KAAK,WAAW,cAAc;AAC3C,YAAM,SAAS,KAAK,UAAU,KAAK;AACnC,YAAM,OAAO,KAAK,YAAY,YAAY,cAAc;AACxD,YAAM,MAAM,OAAO,KAAK,SAAS,cAAc,KAAK,CAAC,CAAC;AAEtD,YAAM,eAAe,kBAAkB,IAAI,cAAc;AAEzD,UAAI,gBAAgB,cAAc;AAKhC,cAAM,UAAqB,CAAC;AAC5B,mBAAW,MAAM,KAAK;AACpB,gBAAM,SAAS,MAAM,KAAK,IAAI,EAAE;AAChC,cAAI,WAAW,KAAM,SAAQ,KAAK,MAAM;AAAA,QAC1C;AACA,cAAM,QAAqB;AAAA,UACzB,YAAY;AAAA,UACZ;AAAA,UACA;AAAA,UACA;AAAA,UACA,GAAI,iBAAiB,SAAY,EAAE,aAAa,IAAI,CAAC;AAAA,UACrD,GAAI,aAAa,EAAE,WAAW,IAAI,CAAC;AAAA,QACrC;AACA,cAAM;AAAA,MACR,OAAO;AAIL,mBAAW,MAAM,KAAK;AACpB,gBAAM,SAAS,MAAM,KAAK,IAAI,EAAE;AAChC,cAAI,WAAW,KAAM;AACrB,gBAAM
,QAAqB;AAAA,YACzB,YAAY;AAAA,YACZ;AAAA,YACA;AAAA,YACA,SAAS,CAAC,MAAM;AAAA,YAChB,GAAI,iBAAiB,SAAY,EAAE,aAAa,IAAI,CAAC;AAAA,YACrD,GAAI,aAAa,EAAE,WAAW,IAAI,CAAC;AAAA,UACrC;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqFA,MAAM,WAAW,OAA4B,CAAC,GAAoB;AAGhE,UAAM,cAOF,CAAC;AACL,QAAI;AAGJ,UAAM,kBAGF,CAAC;AAEL,qBAAiB,SAAS,KAAK,aAAa;AAAA,MAC1C,aAAa;AAAA,MACb,gBAAgB,KAAK,mBAAmB;AAAA,IAC1C,CAAC,GAAG;AACF,kBAAY,MAAM,UAAU,IAAI;AAAA,QAC9B,QAAQ;AAAA;AAAA,QACR,MAAM,MAAM;AAAA,QACZ,SAAS,MAAM;AAAA,MACjB;AACA,UAAI,MAAM,WAAY,cAAa,MAAM;AAEzC,UAAI,CAAC,KAAK,iBAAiB,MAAM,cAAc;AAC7C,wBAAgB,MAAM,UAAU,IAAI,MAAM;AAAA,MAC5C;AAAA,IACF;AAEA,UAAM,kBAAkB,OAAO,KAAK,eAAe,EAAE,SAAS;AAC9D,WAAO,KAAK,UAAU;AAAA,MACpB,eAAe;AAAA,MACf,cAAc,KAAK;AAAA,MACnB,eAAc,oBAAI,KAAK,GAAE,YAAY;AAAA,MACrC,cAAc,KAAK,QAAQ;AAAA,MAC3B;AAAA,MACA,GAAI,kBAAkB,EAAE,eAAe,gBAAgB,IAAI,CAAC;AAAA,MAC5D,GAAI,aAAa,EAAE,WAAW,IAAI,CAAC;AAAA,IACrC,CAAC;AAAA,EACH;AACF;AAQO,IAAM,6BAA6B;AAiBnC,IAAM,iBAAN,MAAqB;AAAA;AAAA,EAEjB;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA,EACD,WAAW;AAAA,EACF;AAAA,EACA;AAAA,EAEjB,YAAY,MAMT;AACD,SAAK,QAAQ,KAAK;AAClB,SAAK,OAAO,KAAK;AACjB,SAAK,SAAS,KAAK;AACnB,SAAK,YAAY,KAAK;AACtB,SAAK,YAAY,KAAK;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,WAAc,MAA6D;AAKzE,WAAO;AAAA,MACL,KAAK,OAAO,IAAY,WAA6B;AACnD,aAAK,aAAa;AAClB,cAAM,KAAK,MAAM,aAAgB,MAAM,IAAI,QAAQ,KAAK,MAAM,KAAK,MAAM;AAAA,MAC3E;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAU;AACnB,SAAK,WAAW;AAChB,SAAK,UAAU;AAAA,EACjB;AAAA,EAEQ,eAAqB;AAC3B,QAAI,KAAK,UAAU;AACjB,YAAM,IAAI,sBAAsB,EAAE,MAAM,KAAK,MAAM,WAAW,KAAK,UAAU,CAAC;AAAA,IAChF;AACA,QAAI,KAAK,IAAI,IAAI,KAAK,WAAW;AAI/B,WAAK,WAAW;AAChB,WAAK,UAAU;AACf,YAAM,IAAI,sBAAsB,EAAE,MAAM,KAAK,MAAM,WAAW,KAAK,UAAU,CAAC;AAAA,IAChF;AAAA,EACF;AACF;;;A8BliFO,IAAM,oBAAN,MAAwB;AAAA,EACZ,YAAY,oBAAI,IAAwC;AAAA,EAEzE,GACE,OACA,SACM;AACN,QAAI,MAAM,KAAK,UAAU,IAAI,KAAe;AAC5C,QAAI,CAAC,KAAK;AACR,YAAM,oBAAI,IAAI;AACd,WAAK,UAAU,IAAI,OAAiB,GAAG;AAAA,IACzC;AACA,QAAI,IAAI,OAAgC;AAAA,EAC1C;AAAA,EAEA,IACE,OACA,SACM;AACN,SAAK,UAAU,IAAI,KAAe,GAAG,OAAO,OAAgC;AAAA,EAC9E;AAAA,EAEA,KAAoC,OAAU,MAA8B;AAC1E,UAAM,MAAM,KAAK,UAAU,IAAI,KAAe;AAC9C,QAAI,KAAK;AACP,iBAAW,WAAW,KAAK;AACzB,gBAAQ,IAAI;AAAA,MACd;AAAA,IACF;AAAA,EACF;AAAA,EAEA,qBAA2B;AACzB,SAAK,UAAU,MAAM;AAAA,EACvB;AACF;;;ACjBA,IAAMC,eAAc,IAAI;AAAA,EACtB;AAGF;AAKO,IAAM,QAAoB;AAAA,EAC/B,MAAM,iBAAiB;AAAE,UAAMA;AAAA,EAAY;AAC7C;;;ACIA,SAASC,YAAW,IAAmB;AACrC,SAAO,IAAI;AAAA,IACT,GAAG,EAAE;AAAA,EAGP;AACF;AASO,IAAM,aAA8B;AAAA,EACzC,wBAAwB;AAAE,UAAMA,YAAW,eAAe;AAAA,EAAE;AAAA,EAC5D,iBAAiB;AAAE,UAAMA,YAAW,4BAA4B;AAAA,EAAE;AAAA,EAClE,oBAAoB;AAAA,EAAC;AACvB;;;AjCLA,IAAM,YAAkC;AAAA,EACtC,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,OAAO;AAAA,EACP,OAAO;AACT;AAGA,SAAS,uBAAuB,QAAiC;AAC/D,SAAO;AAAA,IACL;AAAA,IACA,aAAa;AAAA,IACb,MAAM;AAAA,IACN,aAAa,CAAC;AAAA,IACd,MAAM,oBAAI,IAAI;AAAA,IACd,KAAK;AAAA,IACL,MAAM,IAAI,WAAW,CAAC;AAAA,EACxB;AACF;AAGO,IAAM,QAAN,MAAY;AAAA,EACA;AAAA,EACA,UAAU,IAAI,kBAAkB;AAAA,EAChC,aAAa,oBAAI,IAAmB;AAAA,EACpC,eAAe,oBAAI,IAA6B;AAAA,EAChD,cAAc,oBAAI,IAAwB;AAAA,EACnD,SAAS;AAAA,EACT,eAAqD;AAAA;AAAA,EAE5C,kBAAkB,oBAAI,IAA4B;AAAA,EAClD;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAA
A;AAAA;AAAA;AAAA,EAOA,kBAAkB,oBAAI,IAAoB;AAAA;AAAA,EAE1C,sBAA8C,CAAC;AAAA,EAEhE,YAAY,SAAuB;AACjC,SAAK,UAAU;AACf,SAAK,aAAa,QAAQ,cAAc;AACxC,SAAK,kBAAkB,QAAQ,mBAAmB;AAClD,SAAK,eAAe,QAAQ,gBAAgB;AAI5C,QAAI,QAAQ,eAAe;AACzB,WAAK,gBAAgB,sBAAsB,QAAQ,aAAa;AAAA,IAClE;AACA,SAAK,kBAAkB;AAAA,EACzB;AAAA,EAEQ,oBAA0B;AAChC,QAAI,KAAK,aAAc,cAAa,KAAK,YAAY;AAGrD,UAAM,SAAS,KAAK,QAAQ,eAAe,iBAAiB,KAAK,QAAQ;AACzE,QAAI,UAAU,SAAS,GAAG;AACxB,WAAK,eAAe,WAAW,MAAM;AACnC,aAAK,MAAM;AAAA,MACb,GAAG,MAAM;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,qBAAqB,OAAe,WAAyB;AACnE,UAAM,SAAS,KAAK,QAAQ;AAC5B,QAAI,CAAC,OAAQ;AAGb,SAAK,gBAAgB,IAAI,KAAK,GAAG,QAAQ;AAEzC,UAAM,WAAW,KAAK,gBAAgB,eAAe;AAAA,MACnD;AAAA,MACA;AAAA,MACA,UAAU,CAAC,YAAY;AACrB,aAAK,aAAa,OAAO,KAAK;AAC9B,aAAK,WAAW,OAAO,KAAK;AAC5B,aAAK,gBAAgB,OAAO,KAAK;AAAA,MACnC;AAAA,IACF,CAAC;AACD,SAAK,gBAAgB,IAAI,OAAO,QAAQ;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,YAAY,OAAsB;AACxC,SAAK,kBAAkB;AACvB,QAAI,OAAO;AACT,WAAK,gBAAgB,IAAI,KAAK,GAAG,MAAM;AAAA,IACzC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,qBAAqB,OAAe,IAA2B;AACrE,SAAK,gBAAgB,IAAI,KAAK,GAAG,eAAe,EAAE;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,UACJ,MACA,MACgB;AAChB,QAAI,KAAK,OAAQ,OAAM,IAAI,gBAAgB,oBAAoB;AAC/D,SAAK,YAAY,IAAI;AAErB,QAAI,OAAO,KAAK,WAAW,IAAI,IAAI;AACnC,QAAI,MAAM;AAER,UAAI,MAAM,WAAW,QAAW;AAC9B,aAAK,UAAU,KAAK,MAAM;AAAA,MAC5B;AACA,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,MAAM,KAAK,WAAW,IAAI;AAG1C,QAAI;AACJ,UAAM,UAAU,qBAAqB,KAAK,QAAQ,IAAI;AACtD,QAAI,QAAQ,SAAS,GAAG;AAEtB,YAAM,UAAU,QAAQ,KAAK,OAAK,EAAE,SAAS,WAAW,KAAK,QAAQ,CAAC;AACtE,YAAM,kBAAkB,KAAK,QAAQ,cAAc,QAAQ,UAAU;AACrE,mBAAa,KAAK,aAAa,gBAAgB;AAAA,QAC7C,OAAO,KAAK,QAAQ;AAAA,QACpB,QAAQ,QAAQ;AAAA,QAChB,OAAO;AAAA,QACP,UAAU,KAAK,QAAQ,YAAY;AAAA,QACnC,SAAS,KAAK;AAAA,QACd,YAAY;AAAA,QACZ,MAAM,QAAQ;AAAA,QACd,GAAI,QAAQ,UAAU,SAAY,EAAE,OAAO,QAAQ,MAAM,IAAI,CAAC;AAAA,MAChE,CAAC;AACD,WAAK,YAAY,IAAI,MAAM,UAAU;AAGrC,iBAAW,UAAU,SAAS;AAC5B,YAAI,WAAW,QAAS;AACxB,cAAM,eAAe,OAAO,UAAU,KAAK,QAAQ,cAAc;AACjE,cAAM,SAAS,KAAK,aAAa,gBAAgB;AAAA,UAC/C,OAAO,KAAK,QAAQ;AAAA,UACpB,QAAQ,OAAO;AAAA,UACf,OAAO;AAAA,UACP,UAAU,KAAK,QAAQ,YAAY;AAAA,UACnC,SAAS,KAAK;AAAA,UACd,YAAY;AAAA,UACZ,MAAM,OAAO;AAAA,UACb,GAAI,OAAO,UAAU,SAAY,EAAE,OAAO,OAAO,MAAM,IAAI,CAAC;AAAA,QAC9D,CAAC;AACD,cAAM,MAAM,GAAG,IAAI,KAAK,OAAO,SAAS,OAAO,IAAI;AACnD,aAAK,YAAY,IAAI,KAAK,MAAM;AAAA,MAClC;AAAA,IACF;AAEA,WAAO,IAAI,MAAM;AAAA,MACf,SAAS,KAAK,QAAQ;AAAA,MACtB;AAAA,MACA,OAAO;AAAA,MACP;AAAA,MACA,WAAW,KAAK,QAAQ,YAAY;AAAA,MACpC,SAAS,KAAK;AAAA,MACd,SAAS,QAAQ,SAAS,IACtB,OAAO,MAAM,IAAI,QAAQ,YAAY;AAEnC,mBAAW,CAAC,KAAK,MAAM,KAAK,KAAK,aAAa;AAC5C,cAAI,QAAQ,QAAQ,IAAI,WAAW,GAAG,IAAI,IAAI,GAAG;AAC/C,iBAAK,OAAO,YAAY,MAAM,IAAI,QAAQ,OAAO;AAAA,UACnD;AAAA,QACF;AAAA,MACF,IACA;AAAA,MACJ,4BAA4B,aACxB,CAAC,cAAc,aAAa,WAAW,yBAAyB,cAAc,QAAQ,IACtF;AAAA,MACJ,aAAa,QAAQ,SAAS,IAAI,QAAQ,CAAC,EAAG,QAAQ;AAAA,MACtD,eAAe,KAAK,QAAQ;AAAA,MAC5B,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,MAC7F,GAAI,KAAK,QAAQ,kBAAkB,SAAY,EAAE,eAAe,KAAK,QAAQ,cAAc,IAAI,CAAC;AAAA,MAChG,GAAI,KAAK,QAAQ,sBAAsB,SAAY,EAAE,mBAAmB,KAAK,QAAQ,kBAAkB,IAAI,CAAC;AAAA,MAC5G,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,MAC7F,GAAI,KAAK,QAAQ,oBAAoB,SAAY,EAAE,iBAAiB,KAAK,QAAQ,gBAAgB,IAAI,CAAC;AAAA,MACtG,GAAI,KAAK,QAAQ,oBAAoB,SAAY,EAAE,iBAAiB,KAAK,QAAQ,gBAAgB,IAAI,CAAC;AAAA,MACtG,GAAI,KAAK,QAAQ,mBAAmB,SAAY,EAAE,gBAAgB,KAAK,QAAQ,eAAe,IAAI,CAAC;AAAA,MACnG,GAAI,KAAK,QAAQ,oBAAoB,SAAY,EAAE,iBAAiB,KAAK,QAAQ,gBAAgB,IAAI,CAAC;AAAA,MACtG,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,MAC7F,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,C
AAC;AAAA,MAC7F,QAAQ,MAAM;AAAA;AAAA,MAEd,qBAAqB,KAAK,QAAQ,sBAC9B,CAAC,MAAM,MAAM,IAAI,OAAO,eACtB,KAAK,iBAAiB,MAAM,MAAM,IAAI,OAAO,UAAU,IACzD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMJ,eACE,KAAK,QAAQ,YAAY,SAAS,KAAK,QAAQ,SAC3C,YAAY;AAKV,aAAK,aAAa,OAAO,IAAI;AAC7B,cAAM,YAAY,MAAM;AAAA,UACtB,KAAK,QAAQ;AAAA,UACb;AAAA,UACA,KAAK,QAAQ;AAAA,UACb,KAAK,QAAQ;AAAA,QACf;AACA,aAAK,aAAa,IAAI,MAAM,SAAS;AACrC,eAAO;AAAA,MACT,IACA;AAAA,IACR,CAAC;AACD,SAAK,WAAW,IAAI,MAAM,IAAI;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,MAAqB;AACzB,UAAM,SAAS,KAAK,WAAW,IAAI,IAAI;AACvC,QAAI,OAAQ,QAAO;AAGnB,QAAI,KAAK,QAAQ,YAAY,OAAO;AAClC,YAAMC,WAAU,uBAAuB,KAAK,QAAQ,IAAI;AACxD,YAAMC,QAAO,IAAI,MAAM;AAAA,QACrB,SAAS,KAAK,QAAQ;AAAA,QACtB;AAAA,QACA,OAAO;AAAA,QACP,SAAAD;AAAA,QACA,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,eAAe,KAAK,QAAQ;AAAA,QAC9B,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,QAC7F,GAAI,KAAK,QAAQ,kBAAkB,SAAY,EAAE,eAAe,KAAK,QAAQ,cAAc,IAAI,CAAC;AAAA,QAChG,GAAI,KAAK,QAAQ,sBAAsB,SAAY,EAAE,mBAAmB,KAAK,QAAQ,kBAAkB,IAAI,CAAC;AAAA,QAC5G,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,QAC7F,GAAI,KAAK,QAAQ,oBAAoB,SAAY,EAAE,iBAAiB,KAAK,QAAQ,gBAAgB,IAAI,CAAC;AAAA,QACtG,GAAI,KAAK,QAAQ,oBAAoB,SAAY,EAAE,iBAAiB,KAAK,QAAQ,gBAAgB,IAAI,CAAC;AAAA,QACtG,GAAI,KAAK,QAAQ,mBAAmB,SAAY,EAAE,gBAAgB,KAAK,QAAQ,eAAe,IAAI,CAAC;AAAA,QACnG,GAAI,KAAK,QAAQ,oBAAoB,SAAY,EAAE,iBAAiB,KAAK,QAAQ,gBAAgB,IAAI,CAAC;AAAA,QACtG,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,QAC7F,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,MAC7F,CAAC;AACD,WAAK,WAAW,IAAI,MAAMC,KAAI;AAC9B,aAAOA;AAAA,IACT;AAEA,UAAM,UAAU,KAAK,aAAa,IAAI,IAAI;AAC1C,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI;AAAA,QACR,UAAU,IAAI,yCAAyC,IAAI;AAAA,MAC7D;AAAA,IACF;AAEA,UAAM,OAAO,IAAI,MAAM;AAAA,MACrB,SAAS,KAAK,QAAQ;AAAA,MACtB;AAAA,MACA,OAAO;AAAA,MACP;AAAA,MACA,WAAW;AAAA,MACX,eAAe,KAAK,QAAQ;AAAA,MAC5B,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,MAC7F,GAAI,KAAK,QAAQ,kBAAkB,SAAY,EAAE,eAAe,KAAK,QAAQ,cAAc,IAAI,CAAC;AAAA,MAChG,GAAI,KAAK,QAAQ,sBAAsB,SAAY,EAAE,mBAAmB,KAAK,QAAQ,kBAAkB,IAAI,CAAC;AAAA,MAC5G,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,MAC7F,GAAI,KAAK,QAAQ,oBAAoB,SAAY,EAAE,iBAAiB,KAAK,QAAQ,gBAAgB,IAAI,CAAC;AAAA,MACtG,GAAI,KAAK,QAAQ,oBAAoB,SAAY,EAAE,iBAAiB,KAAK,QAAQ,gBAAgB,IAAI,CAAC;AAAA,MACtG,GAAI,KAAK,QAAQ,mBAAmB,SAAY,EAAE,gBAAgB,KAAK,QAAQ,eAAe,IAAI,CAAC;AAAA,MACnG,GAAI,KAAK,QAAQ,oBAAoB,SAAY,EAAE,iBAAiB,KAAK,QAAQ,gBAAgB,IAAI,CAAC;AAAA,MACtG,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,MAC7F,GAAI,KAAK,QAAQ,iBAAiB,SAAY,EAAE,cAAc,KAAK,QAAQ,aAAa,IAAI,CAAC;AAAA,MAC7F,SAAS,KAAK;AAAA,IAChB,CAAC;AACD,SAAK,WAAW,IAAI,MAAM,IAAI;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,MAAM,OAAe,SAAsC;AAC/D,SAAK,qBAAqB,OAAO,OAAO;AACxC,UAAM,UAAU,MAAM,KAAK,WAAW,KAAK;AAC3C,UAAM,MAAa,KAAK,QAAQ,OAAO,OAAO,SAAS,OAAO;AAAA,EAChE;AAAA;AAAA,EAGA,MAAM,OAAO,OAAe,SAAuC;AACjE,SAAK,qBAAqB,OAAO,QAAQ;AACzC,UAAM,UAAU,MAAM,KAAK,WAAW,KAAK;AAC3C,UAAM,OAAc,KAAK,QAAQ,OAAO,OAAO,SAAS,OAAO;AAAA,EACjE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBA,MAAM,OAAO,OAAe,aAAsC;AAChE,SAAK,qBAAqB,OAAO,QAAQ;AACzC,UAAM,UAAU,MAAM,KAAK,WAAW,KAAK;AAC3C,UAAM,WAAc,KAAK,QAAQ,OAAO,OAAO,SAAS,WAAW;AAInE,SAAK,aAAa,IAAI,OAAO,OAAO;AAAA,EACtC;AAAA;AAAA,EAGA,MAAM,UAAU,OAAoC;AAClD,WAAO,UAAiB,KAAK,QAAQ,OAAO,KAAK;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAA
A;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiEA,MAAM,qBACJ,UAAuC,CAAC,GACZ;AAC5B,QAAI,KAAK,OAAQ,OAAM,IAAI,gBAAgB,oBAAoB;AAC/D,SAAK,kBAAkB;AAEvB,UAAM,UAAU,KAAK,QAAQ;AAC7B,QAAI,OAAO,QAAQ,eAAe,YAAY;AAC5C,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,MACV;AAAA,IACF;AAEA,QAAI,KAAK,QAAQ,YAAY,OAAO;AAGlC,YAAM,MAAM,MAAM,QAAQ,WAAW;AACrC,aAAO,IAAI,IAAI,CAAC,QAAQ,EAAE,IAAI,MAAM,QAAgB,EAAE;AAAA,IACxD;AAEA,QAAI,CAAC,KAAK,QAAQ,QAAQ;AACxB,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,UAAM,UAAU,UAAU,QAAQ,WAAW,QAAQ;AACrD,UAAM,WAAW,MAAM,QAAQ,WAAW;AAC1C,UAAM,aAAgC,CAAC;AAEvC,eAAW,SAAS,UAAU;AAQ5B,UAAI;AACJ,UAAI;AACF,kBAAU,MAAM;AAAA,UACd;AAAA,UACA;AAAA,UACA,KAAK,QAAQ;AAAA,UACb,KAAK,QAAQ;AAAA,QACf;AAAA,MACF,SAAS,KAAK;AACZ,YAAI,eAAe,iBAAiB,eAAe,iBAAiB;AAClE;AAAA,QACF;AACA,cAAM;AAAA,MACR;AAEA,UAAI,UAAU,QAAQ,IAAI,IAAI,QAAS;AACvC,iBAAW,KAAK,EAAE,IAAI,OAAO,MAAM,QAAQ,KAAK,CAAC;AAKjD,WAAK,aAAa,IAAI,OAAO,OAAO;AAAA,IACtC;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkDA,MAAM,YACJ,UACA,IACA,UAA8B,CAAC,GACE;AACjC,QAAI,KAAK,OAAQ,OAAM,IAAI,gBAAgB,oBAAoB;AAC/D,SAAK,kBAAkB;AAEvB,UAAM,cAAc,KAAK,IAAI,GAAG,QAAQ,eAAe,CAAC;AACxD,UAAM,UAAkC,IAAI,MAAM,SAAS,MAAM;AAOjE,QAAI,YAAY;AAChB,UAAM,WAA+B,oBAAI,IAAI;AAE7C,UAAM,SAAS,MAA4B;AACzC,UAAI,aAAa,SAAS,OAAQ,QAAO;AACzC,YAAM,MAAM;AACZ,YAAM,UAAU,SAAS,GAAG;AAC5B,YAAM,QAAQ,YAAY;AACxB,YAAI;AACF,gBAAM,OAAO,MAAM,KAAK,UAAU,OAAO;AACzC,gBAAM,SAAS,MAAM,GAAG,IAAI;AAC5B,kBAAQ,GAAG,IAAI,EAAE,OAAO,SAAS,OAAO;AAAA,QAC1C,SAAS,KAAK;AACZ,kBAAQ,GAAG,IAAI;AAAA,YACb,OAAO;AAAA,YACP,OAAO,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAAA,UAC3D;AAAA,QACF;AAAA,MACF,GAAG;AACH,eAAS,IAAI,IAAI;AAKjB,WAAK,KAAK,QAAQ,MAAM,SAAS,OAAO,IAAI,CAAC;AAC7C,aAAO;AAAA,IACT;AAGA,aAAS,IAAI,GAAG,IAAI,aAAa,KAAK;AACpC,UAAI,OAAO,MAAM,KAAM;AAAA,IACzB;AAMA,WAAO,SAAS,OAAO,GAAG;AACxB,YAAM,QAAQ,KAAK,QAAQ;AAC3B,aAAO,SAAS,OAAO,eAAe,YAAY,SAAS,QAAQ;AACjE,YAAI,OAAO,MAAM,KAAM;AAAA,MACzB;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,aAAa,OAAe,eAAsC;AACtE,SAAK,qBAAqB,OAAO,cAAc;AAC/C,UAAM,UAAU,MAAM,KAAK,WAAW,KAAK;AAC3C,UAAM,UAAU,MAAM;AAAA,MACpB,KAAK,QAAQ;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,SAAK,aAAa,IAAI,OAAO,OAAO;AAAA,EACtC;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,OAAe,SAA4C;AACpE,UAAM,SAAS,KAAK,cAAc,KAAK;AACvC,WAAO,OAAO,KAAK,OAAO;AAAA,EAC5B;AAAA;AAAA,EAGA,MAAM,KAAK,OAAe,SAA4C;AACpE,UAAM,SAAS,KAAK,cAAc,KAAK;AACvC,WAAO,OAAO,KAAK,OAAO;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,KAAK,OAAe,SAAuG;AAC/H,UAAM,UAAU,KAAK,cAAc,KAAK;AACxC,UAAM,SAAS,MAAM,QAAQ,KAAK,OAAO;AAGzC,eAAW,CAAC,KAAK,MAAM,KAAK,KAAK,aAAa;AAC5C,UAAI,QAAQ,MAAO;AACnB,UAAI,CAAC,IAAI,WAAW,GAAG,KAAK,IAAI,EAAG;AACnC,UAAI,OAAO,SAAS,aAAa;AAC/B,cAAM,OAAO,KAAK,OAAO,EAAE,MAAM,CAAC,QAAe;AAC/C,eAAK,QAAQ,KAAK,qBAAqB;AAAA,YACrC;AAAA,YACA,QAAQ,OAAO,SAAS,OAAO;AAAA,YAC/B,OAAO;AAAA,UACT,CAAC;AAAA,QACH,CAAC;AAAA,MACH,OAAO;AAEL,cAAM,OAAO,KAAK,SAAS,IAAI,EAAE,MAAM,CAAC,QAAe;AACrD,eAAK,QAAQ,KAAK,qBAAqB;AAAA,YACrC;AAAA,YACA,QAAQ,OAAO,SAAS,OAAO;AAAA,YAC/B,OAAO;AAAA,UACT,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAyBA,YACE,KAC8B;AAC9B,QAAI,OAAO,QAAQ,YAAY;AAC7B,aAAO,KAAK,WAAW,eAAe,MAAM,GAAG;AAAA,IACjD;AACA,UAAM,QAAQ;AACd,UAAM,OAAO,KAAK,WAAW,IAAI,KAAK;AACtC,QAAI,CAAC,MAAM;AACT,YAAM,IAAI;AAAA,QACR,UAAU,KAAK;AAAA,MACjB;AAAA,IACF;AACA,UAAM,SAAS,KAAK,cAAc,KAAK;AACvC,WAAO,KAAK,aAAa,qBAAqB,MAAM,MAAM;AAAA,EAC5D;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA;AAAA,EASA,IAAI,SAAqB;AACvB,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA;AAAA,EAGA,WAAW,OAA2B;AACpC,UAAM,SAAS,KAAK,YAAY,IAAI,KAAK;AACzC,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,OAAO,GAAG,UAAU,MAAM,UAAU,MAAM,QAAQ,KAAK;AAAA,IAClE;AACA,WAAO,OAAO,OAAO;AAAA,EACvB;AAAA,EAEQ,cAAc,OAA2B;AAC/C,UAAM,SAAS,KAAK,YAAY,IAAI,KAAK;AACzC,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,gBAAgB,qEAAqE;AAAA,IACjG;AACA,WAAO;AAAA,EACT;AAAA;AAAA,EAIA,GAAkC,OAAU,SAAiD;AAC3F,SAAK,QAAQ,GAAG,OAAO,OAAO;AAAA,EAChC;AAAA,EAEA,IAAmC,OAAU,SAAiD;AAC5F,SAAK,QAAQ,IAAI,OAAO,OAAO;AAAA,EACjC;AAAA,EAEA,QAAc;AACZ,SAAK,SAAS;AACd,QAAI,KAAK,cAAc;AACrB,mBAAa,KAAK,YAAY;AAC9B,WAAK,eAAe;AAAA,IACtB;AAEA,eAAW,YAAY,KAAK,gBAAgB,OAAO,GAAG;AACpD,eAAS,QAAQ;AAAA,IACnB;AACA,SAAK,gBAAgB,MAAM;AAE3B,SAAK,gBAAgB,kBAAkB;AAEvC,eAAW,UAAU,KAAK,YAAY,OAAO,GAAG;AAC9C,aAAO,aAAa;AAAA,IACtB;AACA,SAAK,YAAY,MAAM;AACvB,SAAK,aAAa,MAAM;AACxB,SAAK,WAAW,MAAM;AACtB,SAAK,QAAQ,mBAAmB;AAEhC,SAAK,gBAAgB,MAAM;AAC3B,SAAK,oBAAoB,SAAS;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,qBAAsD;AACpD,WAAO,CAAC,GAAG,KAAK,mBAAmB;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,iBACJ,MACA,MACA,IACA,OACA,YACiB;AACjB,UAAM,WAAW,GAAG,KAAK,KAAO,UAAU,KAAO,IAAI,KAAO,EAAE,KAAO,IAAI;AACzE,UAAM,iBAAiB,KAAK,QAAQ,2BAA2B;AAE/D,UAAM,SAAS,KAAK,gBAAgB,IAAI,QAAQ;AAChD,QAAI,WAAW,QAAW;AACxB,WAAK,oBAAoB,KAAK;AAAA,QAC5B,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,YAAY;AAAA,QACZ,UAAU;AAAA,QACV;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,QAAQ;AAAA,MACV,CAAC;AACD,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,MAAM,KAAK,QAAQ,oBAAqB,EAAE,MAAM,MAAM,IAAI,OAAO,WAAW,CAAC;AAC5F,SAAK,gBAAgB,IAAI,UAAU,MAAM;AACzC,SAAK,oBAAoB,KAAK;AAAA,MAC5B,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ,UAAU;AAAA,MACV;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC,CAAC;AACD,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAc,WAAW,OAAyC;AAChE,QAAI,KAAK,QAAQ,YAAY,OAAO;AAClC,aAAO,uBAAuB,KAAK,QAAQ,IAAI;AAAA,IACjD;AAEA,UAAM,SAAS,KAAK,aAAa,IAAI,KAAK;AAC1C,QAAI,OAAQ,QAAO;AAEnB,QAAI,CAAC,KAAK,QAAQ,QAAQ;AACxB,YAAM,IAAI,gBAAgB,8DAA8D;AAAA,IAC1F;AAEA,QAAI;AACJ,QAAI;AACF,gBAAU,MAAM,YAAY,KAAK,QAAQ,OAAO,OAAO,KAAK,QAAQ,MAAM,KAAK,QAAQ,MAAM;AAAA,IAC/F,SAAS,KAAK;AAGZ,UAAI,eAAe,eAAe;AAChC,kBAAU,MAAM,mBAAmB,KAAK,QAAQ,OAAO,OAAO,KAAK,QAAQ,MAAM,KAAK,QAAQ,MAAM;AAAA,MACtG,OAAO;AACL,cAAM;AAAA,MACR;AAAA,IACF;AAEA,SAAK,aAAa,IAAI,OAAO,OAAO;AACpC,WAAO;AAAA,EACT;AACF;AAGA,eAAsB,YAAY,SAAuC;AACvE,QAAM,YAAY,QAAQ,YAAY;AAEtC,MAAI,aAAa,CAAC,QAAQ,QAAQ;AAChC,UAAM,IAAI,gBAAgB,8DAA8D;AAAA,EAC1F;AAEA,SAAO,IAAI,MAAM,OAAO;AAC1B;AAQA,SAAS,qBACP,MACc;AACd,MAAI,CAAC,KAAM,QAAO,CAAC;AACnB,MAAI,MAAM,QAAQ,IAAI,EAAG,QAAO;AAEhC,MAAI,UAAU,QAAQ,OAAO,KAAK,SAAS,UAAU;AACnD,WAAO,CAAC,IAAI;AAAA,EACd;AAEA,SAAO,CAAC,EAAE,OAAO,MAAoB,MAAM,YAAY,CAAC;AAC1D;;;AkCn2BO,SAAS,oBAAoB,OAA2B;AAC7D,UAAQ,MAAM,OAAO;AAAA,IACnB,KAAK,WAAW;AACd,YAAM,SAAkC,CAAC;AACzC,iBAAW,CAAC,OAAO,GAAG,KAAK,OAAO,QAAQ,MAAM,MAAM,GAAG;AACvD,eAAO,KAAK,IAAI,IAAI;AAAA,MACtB;AACA,aAAO;AAAA,IACT;AAAA,IACA,KAAK,OAAO;AACV,YAAM,OAAO,IAAI,IAAI,MAAM,UAAU;AACrC,aAAO,MAAM,MAAM,OAAO,OAAK,CAAC,KAAK,IAAI,EAAE,GAAG,CAAC,EAAE,IAAI,OAAK,EAAE,CAAC;AAAA,IAC/D;AAAA,IACA,KAAK;AACH,aAAO,MAAM;AAAA,EACjB;AACF;AAWO,SAAS,gBAAgB,GAAc,GAAyB;AAErE,MAAI,EAAE,UAAU,EAAE,MAAO,QAAO;AAEhC,UAAQ,EAAE,OAAO;AAAA,IACf,KAAK;AACH,aAAO,YAAY,GAAG,CAAgB;AAAA,IACxC,KAAK;AACH,aAAO,SAAS,GAAG,CAAa;AAAA,IAClC,KAAK;AAEH,aAAO;AAAA,EACX;AACF;AAEA,SAAS,YAAY,GAAgB,GAA6B;AAChE,QAAM,SAAqD,CAAC;AAC5D,QAAM,YAAY,oBAAI,IAAI,CAAC,GAAG,OAAO,KAAK,EAAE,MAAM,GAAG,GAAG,OAAO,KAAK,EAAE,MAAM,CAAC,CAAC;AAC9E,aAAW,SAAS,WAAW;AAC7B,UAAM,KAAK,EAAE,OAAO,KAAK;AACzB,UAAM,KAAK,EAAE,OAAO,KAAK;AACzB,QAAI,CAAC,IAAI;AAAE,aAAO,KAAK,IAAI;AAAA,IAAI,WACtB,
CAAC,IAAI;AAAE,aAAO,KAAK,IAAI;AAAA,IAAG,OAC9B;AAAE,aAAO,KAAK,IAAI,GAAG,MAAM,GAAG,KAAK,KAAK;AAAA,IAAG;AAAA,EAClD;AACA,SAAO,EAAE,OAAO,WAAW,QAAQ,OAAO;AAC5C;AAEA,SAAS,SAAS,GAAa,GAAuB;AAEpD,QAAM,gBAAgB,oBAAI,IAAI,CAAC,GAAG,EAAE,YAAY,GAAG,EAAE,UAAU,CAAC;AAEhE,QAAM,WAAW,IAAI,IAAI,EAAE,MAAM,IAAI,OAAK,EAAE,GAAG,CAAC;AAChD,QAAM,SAA6C;AAAA,IACjD,GAAG,EAAE;AAAA,IACL,GAAG,EAAE,MAAM,OAAO,OAAK,CAAC,SAAS,IAAI,EAAE,GAAG,CAAC;AAAA,EAC7C;AACA,SAAO,EAAE,OAAO,OAAO,OAAO,QAAQ,YAAY,CAAC,GAAG,aAAa,EAAE;AACvE;;;AC7GA;AAoCO,IAAM,iBAAN,MAAwB;AAAA,EACZ;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,cAAgC;AAAA,EAChC,cAAyD,CAAC;AAAA,EAC1D,oBAAyC;AAAA,EACzC,YAAmD;AAAA,EACnD,UAAU;AAAA,EAElB,YAAY,MAA0B;AACpC,SAAK,UAAU,KAAK;AACpB,SAAK,cAAc,KAAK;AACxB,SAAK,QAAQ,KAAK;AAClB,SAAK,iBAAiB,KAAK;AAC3B,SAAK,SAAS,KAAK;AACnB,SAAK,YAAY,KAAK;AACtB,SAAK,SAAS,KAAK;AACnB,SAAK,UAAU,KAAK,WAAW;AAC/B,SAAK,iBAAiB,KAAK,kBAAkB;AAG7C,SAAK,UAAU,GAAG,KAAK,KAAK,IAAI,KAAK,cAAc;AAEnD,SAAK,oBAAoB,aAAa,KAAK,cAAc;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAO,SAA2B;AACtC,QAAI,KAAK,QAAS;AAElB,UAAM,MAAM,MAAM,KAAK,eAAe;AACtC,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,UAAM,YAAY,KAAK,UAAU,EAAE,QAAQ,KAAK,QAAQ,UAAU,KAAK,QAAQ,CAAC;AAChF,QAAI;AAEJ,QAAI,KAAK,aAAa,KAAK;AACzB,YAAM,KAAK,WAAW;AACtB,YAAM,QAAQ,eAAe,EAAE;AAC/B,YAAM,EAAE,KAAK,IAAI,MAAM,QAAQ,WAAW,GAAG;AAC7C,yBAAmB,KAAK,UAAU,EAAE,IAAI,OAAO,KAAK,CAAC;AAAA,IACvD,OAAO;AACL,yBAAmB;AAAA,IACrB;AAGA,UAAM,aAAa,KAAK,iBAAiB;AACzC,QAAI,YAAY,iBAAiB;AAC/B,YAAM,WAAW,gBAAgB,KAAK,SAAS,gBAAgB;AAAA,IACjE;AAGA,UAAM,KAAK,mBAAmB,SAAS,GAAG;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,IAAoD;AAC5D,QAAI,KAAK,QAAS,QAAO,MAAM;AAAA,IAAC;AAEhC,SAAK,YAAY,KAAK,EAAE;AAGxB,QAAI,KAAK,YAAY,WAAW,GAAG;AACjC,WAAK,eAAe;AAAA,IACtB;AAEA,WAAO,MAAM;AACX,WAAK,cAAc,KAAK,YAAY,OAAO,OAAK,MAAM,EAAE;AACxD,UAAI,KAAK,YAAY,WAAW,EAAG,MAAK,cAAc;AAAA,IACxD;AAAA,EACF;AAAA;AAAA,EAGA,OAAa;AACX,SAAK,UAAU;AACf,SAAK,cAAc;AACnB,SAAK,cAAc,CAAC;AAAA,EACtB;AAAA;AAAA,EAIA,MAAc,iBAA4C;AACxD,QAAI,CAAC,KAAK,UAAW,QAAO;AAC5B,QAAI,CAAC,KAAK,aAAa;AACrB,UAAI;AACF,cAAM,MAAM,MAAM,KAAK,OAAO,KAAK,cAAc;AACjD,aAAK,cAAc,MAAM,kBAAkB,KAAK,KAAK,cAAc;AAAA,MACrE,QAAQ;AAAA,MAER;AAAA,IACF;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEQ,mBAA2C;AAEjD,QAAI,KAAK,aAAa,gBAAiB,QAAO,KAAK;AACnD,QAAI,KAAK,QAAQ,gBAAiB,QAAO,KAAK;AAC9C,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAuB;AAC7B,UAAM,aAAa,KAAK,iBAAiB;AAEzC,QAAI,YAAY,mBAAmB;AAEjC,WAAK,oBAAoB,WAAW;AAAA,QAClC,KAAK;AAAA,QACL,CAAC,qBAAqB;AAAE,eAAK,KAAK,oBAAoB,gBAAgB;AAAA,QAAE;AAAA,MAC1E;AAAA,IACF,OAAO;AAEL,WAAK,YAAY;AAAA,QACf,MAAM;AAAE,eAAK,KAAK,oBAAoB;AAAA,QAAE;AAAA,QACxC,KAAK;AAAA,MACP;AAEA,WAAK,KAAK,oBAAoB;AAAA,IAChC;AAAA,EACF;AAAA,EAEQ,gBAAsB;AAC5B,QAAI,KAAK,mBAAmB;AAC1B,WAAK,kBAAkB;AACvB,WAAK,oBAAoB;AAAA,IAC3B;AACA,QAAI,KAAK,WAAW;AAClB,oBAAc,KAAK,SAAS;AAC5B,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA,EAEA,MAAc,oBAAoB,kBAAyC;AACzE,QAAI;AACF,YAAM,OAAO,MAAM,KAAK,uBAAuB,gBAAgB;AAC/D,UAAI,CAAC,QAAQ,KAAK,WAAW,KAAK,OAAQ;AAE1C,YAAM,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,OAAO,EAAE,YAAY;AAC/D,UAAI,KAAK,WAAW,OAAQ;AAK5B,YAAM,KAAK,oBAAoB;AAAA,IACjC,QAAQ;AAAA,IAER;AAAA,EACF;AAAA,EAEA,MAAc,uBACZ,kBACkE;AAClE,UAAM,MAAM,MAAM,KAAK,eAAe;AAEtC,QAAI,CAAC,KAAK,aAAa,CAAC,KAAK;AAC3B,aAAO,KAAK,MAAM,gBAAgB;AAAA,IACpC;AAEA,UAAM,EAAE,IAAI,OAAO,KAAK,IAAI,KAAK,MAAM,gBAAgB;AACvD,UAAM,YAAY,MAAM,QAAQ,OAAO,MAAM,GAAG;AAChD,WAAO,KAAK,MAAM,SAAS;AAAA,EAC7B;AAAA,EAEA,MAAc,mBAAmB,SAAY,KAA4B;AACvE,UAAM,MAAM,MAAM,KAAK,eAAe;AACtC,UAAM,YAAY,KAAK,UAAU,OAAO;AACxC,QAAI,KAAK;AACT,QAAI;AAEJ,QAAI,KAAK,aAAa,KAAK;AACzB,YAAM,UAAU,WAAW;AAC3B,WAAK,eAAe,OAAO;AAC3B,YAAM,SAAS,
MAAM,QAAQ,WAAW,GAAG;AAC3C,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO;AAAA,IACT;AAEA,UAAM,SAAgC,EAAE,QAAQ,KAAK,QAAQ,UAAU,KAAK,IAAI,KAAK;AACrF,UAAM,OAAO,KAAK,UAAU,MAAM;AAIlC,UAAM,eAAe,KAAK,eAAe,KAAK;AAC9C,UAAM,WAAW;AAAA,MACf,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AAAA,MACL,OAAO;AAAA,IACT;AACA,QAAI;AACF,YAAM,aAAa;AAAA,QACjB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAAA,EAEA,MAAc,sBAAqC;AACjD,QAAI,KAAK,WAAW,KAAK,YAAY,WAAW,EAAG;AAEnD,QAAI;AACF,YAAM,eAAe,KAAK,eAAe,KAAK;AAC9C,YAAM,MAAM,MAAM,aAAa,KAAK,KAAK,OAAO,KAAK,iBAAiB;AACtE,YAAM,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,OAAO,EAAE,YAAY;AAC/D,YAAM,QAA2B,CAAC;AAElC,iBAAW,MAAM,KAAK;AACpB,YAAI,OAAO,KAAK,OAAQ;AACxB,cAAM,WAAW,MAAM,aAAa,IAAI,KAAK,OAAO,KAAK,mBAAmB,EAAE;AAC9E,YAAI,CAAC,SAAU;AAEf,cAAM,SAAS,KAAK,MAAM,SAAS,KAAK;AACxC,YAAI,OAAO,WAAW,OAAQ;AAE9B,YAAI;AACJ,YAAI,KAAK,aAAa,KAAK,eAAe,OAAO,IAAI;AACnD,gBAAM,YAAY,MAAM,QAAQ,OAAO,IAAI,OAAO,MAAM,KAAK,WAAW;AACxE,wBAAc,KAAK,MAAM,SAAS;AAAA,QACpC,OAAO;AACL,wBAAc,KAAK,MAAM,OAAO,IAAI;AAAA,QACtC;AAEA,cAAM,KAAK,EAAE,QAAQ,OAAO,QAAQ,SAAS,aAAa,UAAU,OAAO,SAAS,CAAC;AAAA,MACvF;AAEA,iBAAW,MAAM,KAAK,aAAa;AACjC,WAAG,KAAK;AAAA,MACV;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AACF;;;AnD0BA;;;AoDhUA;AACA;AAMO,IAAM,aAAN,MAAiB;AAAA,EACL;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACR;AAAA,EACA;AAAA,EAED,QAAsB,CAAC;AAAA,EACvB,WAA0B;AAAA,EAC1B,WAA0B;AAAA,EAC1B,SAAS;AAAA,EACT,mBAA0D;AAAA,EAC1D,WAAW;AAAA;AAAA,EAGV;AAAA;AAAA,EAGQ,oBAAoB,oBAAI,IAAwC;AAAA,EAEjF,YAAY,MAST;AACD,SAAK,QAAQ,KAAK;AAClB,SAAK,SAAS,KAAK;AACnB,SAAK,QAAQ,KAAK;AAClB,SAAK,WAAW,KAAK;AACrB,SAAK,UAAU,KAAK;AACpB,SAAK,OAAO,KAAK,QAAQ;AACzB,SAAK,QAAQ,KAAK;AAGlB,UAAM,SAAS,KAAK;AACpB,QAAI,UAAU,OAAO,KAAK,SAAS,UAAU;AAC3C,WAAK,YAAY,IAAI,cAAc,QAAQ;AAAA,QACzC,MAAM,MAAM,KAAK,KAAK,EAAE,KAAK,MAAM;AAAA,QAAC,CAAC;AAAA,QACrC,MAAM,MAAM,KAAK,KAAK,EAAE,KAAK,MAAM;AAAA,QAAC,CAAC;AAAA,QACrC,eAAe,MAAM,KAAK,MAAM;AAAA,MAClC,CAAC;AAAA,IACH,OAAO;AACL,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA,EAGA,iBAAuB;AACrB,SAAK,WAAW,MAAM;AAAA,EACxB;AAAA;AAAA,EAGA,gBAAsB;AACpB,SAAK,WAAW,KAAK;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,yBAAyB,YAAoB,UAA4C;AACvF,SAAK,kBAAkB,IAAI,YAAY,QAAQ;AAAA,EACjD;AAAA;AAAA,EAGA,MAAM,YAAY,YAAoB,IAAY,QAA0B,SAAgC;AAC1G,UAAM,KAAK,aAAa;AAGxB,UAAM,MAAM,KAAK,MAAM,UAAU,OAAK,EAAE,eAAe,cAAc,EAAE,OAAO,EAAE;AAChF,UAAM,QAAoB;AAAA,MACxB,OAAO,KAAK;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAEA,QAAI,OAAO,GAAG;AACZ,WAAK,MAAM,GAAG,IAAI;AAAA,IACpB,OAAO;AACL,WAAK,MAAM,KAAK,KAAK;AAAA,IACvB;AAEA,UAAM,KAAK,YAAY;AAGvB,SAAK,WAAW,aAAa;AAAA,EAC/B;AAAA;AAAA,EAGA,MAAM,KAAK,SAA4C;AACrD,UAAM,KAAK,aAAa;AAExB,QAAI,SAAS;AACb,UAAM,YAAwB,CAAC;AAC/B,UAAM,SAAkB,CAAC;AACzB,UAAM,YAAsB,CAAC;AAE7B,aAAS,IAAI,GAAG,IAAI,KAAK,MAAM,QAAQ,KAAK;AAC1C,YAAM,QAAQ,KAAK,MAAM,CAAC;AAG1B,UAAI,SAAS,eAAe,CAAC,QAAQ,YAAY,SAAS,MAAM,UAAU,GAAG;AAC3E;AAAA,MACF;AAEA,UAAI;AACF,YAAI,MAAM,WAAW,UAAU;AAC7B,gBAAM,KAAK,OAAO,OAAO,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AAC/D,oBAAU,KAAK,CAAC;AAChB;AAAA,QACF,OAAO;AACL,gBAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AAC5E,cAAI,CAAC,UAAU;AAEb,sBAAU,KAAK,CAAC;AAChB;AAAA,UACF;AAEA,cAAI;AACF,kBAAM,KAAK,OAAO;AAAA,cAChB,KAAK;AAAA,cACL,MAAM;AAAA,cACN,MAAM;AAAA,cACN;AAAA,cACA,MAAM,UAAU;AAAA,YAClB;AACA,sBAAU,KAAK,CAAC;AAChB;AAAA,UACF,SAAS,KAAK;AACZ,gBAAI,eAAe,eAAe;AAChC,oBAAM,iBAAiB,MAAM,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AACnF,kBAAI,gBAAgB;AAClB,sBAAM,EAAE,SAAS,SAAS,IAAI,MAAM,KAAK;AAAA,kBACvC,MAAM;AAAA,kBACN,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,0BAAU,KAAK,QAAQ;AACvB,oBAAI,YAAY,SAAS;AACvB,wBAAM,KAA
K,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,SAAS,KAAK;AAC5E,4BAAU,KAAK,CAAC;AAChB;AAAA,gBACF,WAAW,YAAY,UAAU;AAC/B,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,SAAS,MAAM;AAC5E,4BAAU,KAAK,CAAC;AAAA,gBAClB,WAAW,YAAY,YAAY,SAAS,UAAU,UAAU;AAE9D,wBAAM,SAAS,SAAS;AACxB,wBAAM,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,MAAM;AACpE,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,MAAM;AACnE,4BAAU,KAAK,CAAC;AAChB;AAAA,gBACF;AAAA,cAEF;AAAA,YACF,OAAO;AACL,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,MACjE;AAAA,IACF;AAGA,eAAW,KAAK,UAAU,KAAK,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG;AAC/C,WAAK,MAAM,OAAO,GAAG,CAAC;AAAA,IACxB;AAEA,SAAK,YAAW,oBAAI,KAAK,GAAE,YAAY;AACvC,UAAM,KAAK,YAAY;AAEvB,UAAM,SAAqB,EAAE,QAAQ,WAAW,OAAO;AACvD,SAAK,QAAQ,KAAK,aAAa,MAAM;AACrC,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,KAAK,SAA4C;AACrD,UAAM,KAAK,aAAa;AAExB,QAAI,SAAS;AACb,UAAM,YAAwB,CAAC;AAC/B,UAAM,SAAkB,CAAC;AAEzB,QAAI;AACF,YAAM,iBAAiB,MAAM,KAAK,OAAO,QAAQ,KAAK,KAAK;AAE3D,iBAAW,CAAC,UAAU,OAAO,KAAK,OAAO,QAAQ,cAAc,GAAG;AAEhE,YAAI,SAAS,eAAe,CAAC,QAAQ,YAAY,SAAS,QAAQ,GAAG;AACnE;AAAA,QACF;AAEA,mBAAW,CAAC,IAAI,cAAc,KAAK,OAAO,QAAQ,OAAO,GAAG;AAE1D,cAAI,SAAS,iBAAiB,eAAe,OAAO,QAAQ,eAAe;AACzE;AAAA,UACF;AAEA,cAAI;AACF,kBAAM,gBAAgB,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,UAAU,EAAE;AAEnE,gBAAI,CAAC,eAAe;AAElB,oBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,UAAU,IAAI,cAAc;AAC7D;AAAA,YACF,WAAW,eAAe,KAAK,cAAc,IAAI;AAE/C,oBAAM,UAAU,KAAK,MAAM,KAAK,OAAK,EAAE,eAAe,YAAY,EAAE,OAAO,EAAE;AAC7E,kBAAI,SAAS;AAEX,sBAAM,EAAE,SAAS,SAAS,IAAI,MAAM,KAAK;AAAA,kBACvC;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,0BAAU,KAAK,QAAQ;AACvB,oBAAI,YAAY,UAAU;AACxB,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,UAAU,IAAI,SAAS,MAAM;AAC9D,uBAAK,QAAQ,KAAK,MAAM,OAAO,OAAK,EAAE,EAAE,eAAe,YAAY,EAAE,OAAO,GAAG;AAC/E;AAAA,gBACF,WAAW,YAAY,YAAY,SAAS,UAAU,eAAe;AACnE,wBAAM,SAAS,SAAS;AACxB,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,UAAU,IAAI,MAAM;AACrD,uBAAK,QAAQ,KAAK,MAAM,OAAO,OAAK,EAAE,EAAE,eAAe,YAAY,EAAE,OAAO,GAAG;AAC/E;AAAA,gBACF;AAAA,cAEF,OAAO;AAEL,sBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,UAAU,IAAI,cAAc;AAC7D;AAAA,cACF;AAAA,YACF;AAAA,UAEF,SAAS,KAAK;AACZ,mBAAO,KAAK,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,UACjE;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,KAAK;AACZ,aAAO,KAAK,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,IACjE;AAEA,SAAK,YAAW,oBAAI,KAAK,GAAE,YAAY;AACvC,UAAM,KAAK,YAAY;AAEvB,UAAM,SAAqB,EAAE,QAAQ,WAAW,OAAO;AACvD,SAAK,QAAQ,KAAK,aAAa,MAAM;AACrC,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,KAAK,SAAuG;AAChH,UAAM,aAAa,MAAM,KAAK,KAAK,SAAS,IAAI;AAChD,UAAM,aAAa,MAAM,KAAK,KAAK,SAAS,IAAI;AAChD,WAAO,EAAE,MAAM,YAAY,MAAM,WAAW;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,aAAa,WAAgE;AACjF,UAAM,KAAK,aAAa;AAExB,QAAI,SAAS;AACb,UAAM,YAAwB,CAAC;AAC/B,UAAM,SAAkB,CAAC;AACzB,UAAM,YAAsB,CAAC;AAE7B,aAAS,IAAI,GAAG,IAAI,KAAK,MAAM,QAAQ,KAAK;AAC1C,YAAM,QAAQ,KAAK,MAAM,CAAC;AAC1B,UAAI,CAAC,UAAU,KAAK,EAAG;AAEvB,UAAI;AACF,YAAI,MAAM,WAAW,UAAU;AAC7B,gBAAM,KAAK,OAAO,OAAO,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AAC/D,oBAAU,KAAK,CAAC;AAChB;AAAA,QACF,OAAO;AACL,gBAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AAC5E,cAAI,CAAC,UAAU;AACb,sBAAU,KAAK,CAAC;AAChB;AAAA,UACF;AAEA,cAAI;AACF,kBAAM,KAAK,OAAO;AAAA,cAChB,KAAK;AAAA,cACL,MAAM;AAAA,cACN,MAAM;AAAA,cACN;AAAA,cACA,MAAM,UAAU;AAAA,YAClB;AACA,sBAAU,KAAK,CAAC;AAChB;AAAA,UACF,SAAS,KAAK;AACZ,gBAAI,eAAe,eAAe;AAChC,oBAAM,iBAAiB,MAAM,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AACnF,kBAAI,gBAAgB;AAClB,sBAAM,EAAE,SAAS,SAAS,IAAI,MAAM,KAAK;AAAA,kBACvC,MAAM;AAAA,kBACN,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,0BAAU,KAAK,QAAQ;AACvB,oBAAI,YAAY,SAAS;AACvB,wBAA
M,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,SAAS,KAAK;AAC5E,4BAAU,KAAK,CAAC;AAChB;AAAA,gBACF,WAAW,YAAY,UAAU;AAC/B,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,SAAS,MAAM;AAC5E,4BAAU,KAAK,CAAC;AAAA,gBAClB,WAAW,YAAY,YAAY,SAAS,UAAU,UAAU;AAC9D,wBAAM,SAAS,SAAS;AACxB,wBAAM,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,MAAM;AACpE,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,MAAM;AACnE,4BAAU,KAAK,CAAC;AAChB;AAAA,gBACF;AAAA,cACF;AAAA,YACF,OAAO;AACL,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,MACjE;AAAA,IACF;AAEA,eAAW,KAAK,UAAU,KAAK,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG;AAC/C,WAAK,MAAM,OAAO,GAAG,CAAC;AAAA,IACxB;AAEA,SAAK,YAAW,oBAAI,KAAK,GAAE,YAAY;AACvC,UAAM,KAAK,YAAY;AAEvB,UAAM,SAAqB,EAAE,QAAQ,WAAW,OAAO;AACvD,SAAK,QAAQ,KAAK,aAAa,MAAM;AACrC,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,SAAqB;AACnB,WAAO;AAAA,MACL,OAAO,KAAK,MAAM;AAAA,MAClB,UAAU,KAAK;AAAA,MACf,UAAU,KAAK;AAAA,MACf,QAAQ,KAAK;AAAA,IACf;AAAA,EACF;AAAA;AAAA;AAAA,EAKA,cAAc,YAA2B;AAEvC,QAAI,OAAO,WAAW,qBAAqB,YAAY;AACrD,iBAAW,iBAAiB,UAAU,KAAK,YAAY;AACvD,iBAAW,iBAAiB,WAAW,KAAK,aAAa;AAAA,IAC3D;AAGA,QAAI,cAAc,aAAa,GAAG;AAChC,WAAK,mBAAmB,YAAY,MAAM;AACxC,YAAI,KAAK,UAAU;AACjB,eAAK,KAAK,KAAK;AAAA,QACjB;AAAA,MACF,GAAG,UAAU;AAAA,IACf;AAAA,EACF;AAAA;AAAA,EAGA,eAAqB;AACnB,SAAK,cAAc;AACnB,QAAI,OAAO,WAAW,wBAAwB,YAAY;AACxD,iBAAW,oBAAoB,UAAU,KAAK,YAAY;AAC1D,iBAAW,oBAAoB,WAAW,KAAK,aAAa;AAAA,IAC9D;AACA,QAAI,KAAK,kBAAkB;AACzB,oBAAc,KAAK,gBAAgB;AACnC,WAAK,mBAAmB;AAAA,IAC1B;AAAA,EACF;AAAA,EAEQ,eAAe,MAAY;AACjC,SAAK,WAAW;AAChB,SAAK,QAAQ,KAAK,eAAe,MAAkB;AACnD,SAAK,KAAK,KAAK;AAAA,EACjB;AAAA,EAEQ,gBAAgB,MAAY;AAClC,SAAK,WAAW;AAChB,SAAK,QAAQ,KAAK,gBAAgB,MAAkB;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAc,eACZ,YACA,IACA,OACA,QACA,QACsF;AACtF,UAAM,WAAW,KAAK,kBAAkB,IAAI,UAAU;AAEtD,QAAI,UAAU;AAGZ,YAAM,SAAS,MAAM,SAAS,IAAI,OAAO,MAAM;AAC/C,YAAM,OAAiB;AAAA,QACrB,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,cAAc,MAAM;AAAA,QACpB,eAAe,OAAO;AAAA,MACxB;AACA,UAAI,WAAW,KAAM,QAAO,EAAE,SAAS,YAAY,UAAU,KAAK;AAClE,UAAI,WAAW,MAAO,QAAO,EAAE,SAAS,SAAS,UAAU,KAAK;AAChE,UAAI,WAAW,OAAQ,QAAO,EAAE,SAAS,UAAU,UAAU,KAAK;AAElE,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,EAAE,GAAG,MAAM,OAAO,QAAQ,cAAc,OAAO,GAAG;AAAA,MAC9D;AAAA,IACF;AAGA,UAAM,eAAyB;AAAA,MAC7B,OAAO,KAAK;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,cAAc,MAAM;AAAA,MACpB,eAAe,OAAO;AAAA,IACxB;AACA,SAAK,QAAQ,KAAK,iBAAiB,YAAY;AAC/C,UAAM,OAAO,KAAK,cAAc,YAAY;AAC5C,WAAO,EAAE,SAAS,MAAM,UAAU,aAAa;AAAA,EACjD;AAAA;AAAA,EAGQ,cAAc,UAAwC;AAC5D,QAAI,OAAO,KAAK,aAAa,YAAY;AACvC,aAAO,KAAK,SAAS,QAAQ;AAAA,IAC/B;AACA,YAAQ,KAAK,UAAU;AAAA,MACrB,KAAK;AAAc,eAAO;AAAA,MAC1B,KAAK;AAAe,eAAO;AAAA,MAC3B,KAAK;AAAA,MACL;AACE,eAAO,SAAS,gBAAgB,SAAS,gBAAgB,UAAU;AAAA,IACvE;AAAA,EACF;AAAA;AAAA,EAIA,MAAc,eAA8B;AAC1C,QAAI,KAAK,OAAQ;AAEjB,UAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,SAAS,MAAM;AACjE,QAAI,UAAU;AACZ,YAAM,OAAO,KAAK,MAAM,SAAS,KAAK;AACtC,WAAK,QAAQ,CAAC,GAAG,KAAK,KAAK;AAC3B,WAAK,WAAW,KAAK;AACrB,WAAK,WAAW,KAAK;AAAA,IACvB;AAEA,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,MAAc,cAA6B;AACzC,UAAM,OAAqB;AAAA,MACzB,aAAa;AAAA,MACb,WAAW,KAAK;AAAA,MAChB,WAAW,KAAK;AAAA,MAChB,OAAO,KAAK;AAAA,IACd;AAEA,UAAM,WAA8B;AAAA,MAClC,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,MAC5B,KAAK;AAAA,MACL,OAAO,KAAK,UAAU,IAAI;AAAA,IAC5B;AAEA,UAAM,KAAK,MAAM,IAAI,KAAK,OAAO,SAAS,QAAQ,QAAQ;AAAA,EAC5D;AACF;;;AC5gBO,IAAM,kBAAN,MAAsB;AAAA,EACV;AAAA,EACA;AAAA,EACA,MAAc,CAAC;AAAA;AAAA,EAGhC,YAAY,MAAa,QAAoB;AAC3C,SAAK,OAAO;AACZ,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA,EAGA,IAAI,YAAoB,IAAY,QAAuB;AACzD,SAAK,IAAI,KAAK,EAAE,
MAAM,OAAO,YAAY,IAAI,OAAO,CAAC;AACrD,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,OAAO,YAAoB,IAAkB;AAC3C,SAAK,IAAI,KAAK,EAAE,MAAM,UAAU,YAAY,GAAG,CAAC;AAChD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,SAAyC;AAE7C,eAAW,MAAM,KAAK,KAAK;AACzB,UAAI,GAAG,SAAS,OAAO;AAErB,cAAO,KAAK,KAAK,WAAgB,GAAG,UAAU,EAAG,IAAI,GAAG,IAAI,GAAG,MAAa;AAAA,MAC9E,OAAO;AACL,cAAM,KAAK,KAAK,WAAW,GAAG,UAAU,EAAE,OAAO,GAAG,EAAE;AAAA,MACxD;AAAA,IACF;AAGA,UAAM,QAAQ,oBAAI,IAAY;AAC9B,eAAW,MAAM,KAAK,KAAK;AACzB,YAAM,IAAI,GAAG,GAAG,UAAU,KAAK,GAAG,EAAE,EAAE;AAAA,IACxC;AAEA,UAAM,aAAa,MAAM,KAAK,OAAO;AAAA,MACnC,CAAC,UAAU,MAAM,IAAI,GAAG,MAAM,UAAU,KAAK,MAAM,EAAE,EAAE;AAAA,IACzD;AAEA,WAAO;AAAA,MACL,QAAQ,WAAW,UAAU,SAAS,IAAI,aAAa;AAAA,MACvD,QAAQ,WAAW;AAAA,MACnB,WAAW,WAAW;AAAA,IACxB;AAAA,EACF;AACF;;;ACtBA;AAiBO,IAAM,YAAN,MAAgB;AAAA;AAAA,EAEZ,OAAmB,CAAC;AAAA;AAAA,EAEpB;AAAA;AAAA,EAGT,YAAY,IAAW;AACrB,SAAK,MAAM;AAAA,EACb;AAAA;AAAA,EAGA,MAAM,MAAuB;AAC3B,UAAM,IAAI,KAAK,IAAI,MAAM,IAAI;AAC7B,WAAO,IAAI,QAAQ,MAAM,CAAC;AAAA,EAC5B;AACF;AAGO,IAAM,UAAN,MAAc;AAAA;AAAA,EAEV;AAAA;AAAA,EAEA;AAAA;AAAA,EAGT,YAAY,KAAgB,OAAc;AACxC,SAAK,OAAO;AACZ,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA,EAGA,WAAc,MAA+B;AAC3C,UAAM,IAAI,KAAK,OAAO,WAAc,IAAI;AACxC,WAAO,IAAI,aAAgB,KAAK,MAAM,KAAK,QAAQ,GAAG,IAAI;AAAA,EAC5D;AACF;AAGO,IAAM,eAAN,MAAsB;AAAA;AAAA,EAElB;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AAAA;AAAA,EAGT,YAAY,KAAgB,OAAc,MAAqB,MAAc;AAC3E,SAAK,OAAO;AACZ,SAAK,SAAS;AACd,SAAK,QAAQ;AACb,SAAK,QAAQ;AAAA,EACf;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,IAAI,IAA+B;AACvC,aAAS,IAAI,KAAK,KAAK,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK;AACnD,YAAM,KAAK,KAAK,KAAK,KAAK,CAAC;AAC3B,UACE,GAAG,cAAc,KAAK,OAAO,QAC7B,GAAG,mBAAmB,KAAK,SAC3B,GAAG,OAAO,IACV;AACA,YAAI,GAAG,SAAS,SAAU,QAAO;AACjC,eAAO,GAAG;AAAA,MACZ;AAAA,IACF;AACA,WAAO,KAAK,MAAM,IAAI,EAAE;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAI,IAAY,QAAW,SAA8C;AACvE,UAAM,KAAe;AAAA,MACnB,MAAM;AAAA,MACN,WAAW,KAAK,OAAO;AAAA,MACvB,gBAAgB,KAAK;AAAA,MACrB;AAAA,MACA;AAAA,IACF;AACA,QAAI,SAAS,oBAAoB,OAAW,IAAG,kBAAkB,QAAQ;AACzE,SAAK,KAAK,KAAK,KAAK,EAAE;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,IAAY,SAA8C;AAC/D,UAAM,KAAe;AAAA,MACnB,MAAM;AAAA,MACN,WAAW,KAAK,OAAO;AAAA,MACvB,gBAAgB,KAAK;AAAA,MACrB;AAAA,IACF;AACA,QAAI,SAAS,oBAAoB,OAAW,IAAG,kBAAkB,QAAQ;AACzE,SAAK,KAAK,KAAK,KAAK,EAAE;AAAA,EACxB;AACF;AAUA,eAAsB,eACpB,IACA,IACY;AACZ,QAAM,MAAM,IAAI,UAAU,EAAE;AAC5B,QAAM,aAAa,MAAM,GAAG,GAAG;AAE/B,MAAI,IAAI,KAAK,WAAW,EAAG,QAAO;AAMlC,QAAM,iBAAiB,oBAAI,IAAsC;AACjE,QAAM,QAAQ,GAAG;AACjB,aAAW,MAAM,IAAI,MAAM;AACzB,UAAM,MAAM,MAAM,EAAE;AACpB,QAAI,CAAC,eAAe,IAAI,GAAG,GAAG;AAC5B,YAAM,MAAM,MAAM,MAAM,IAAI,GAAG,WAAW,GAAG,gBAAgB,GAAG,EAAE;AAClE,qBAAe,IAAI,KAAK,GAAG;AAAA,IAC7B;AACA,QAAI,GAAG,oBAAoB,QAAW;AACpC,YAAM,MAAM,eAAe,IAAI,GAAG,KAAK;AACvC,YAAM,SAAS,KAAK,MAAM;AAC1B,UAAI,WAAW,GAAG,iBAAiB;AACjC,cAAM,IAAI;AAAA,UACR;AAAA,UACA,2BAA2B,GAAG,SAAS,IAAI,GAAG,cAAc,IAAI,GAAG,EAAE,cACtD,GAAG,eAAe,YAAY,MAAM;AAAA,QACrD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAKA,QAAM,WAA6E,CAAC;AACpF,MAAI;AACF,eAAW,MAAM,IAAI,MAAM;AACzB,YAAM,OAAO,GAAG,MAAM,GAAG,SAAS,EAAE,WAAW,GAAG,cAAc;AAChE,YAAM,MAAM,MAAM,EAAE;AACpB,YAAM,QAAQ,eAAe,IAAI,GAAG,KAAK;AACzC,UAAI,GAAG,SAAS,OAAO;AAErB,cAAM,KAAK,IAAI,GAAG,IAAI,GAAG,MAAa;AAAA,MACxC,OAAO;AACL,cAAM,KAAK,OAAO,GAAG,EAAE;AAAA,MACzB;AACA,eAAS,KAAK,EAAE,IAAI,eAAe,MAAM,CAAC;AAAA,IAC5C;AACA,WAAO;AAAA,EACT,SAAS,KAAK;AAWZ,eAAW,EAAE,IAAI,cAAc,KAAK,SAAS,MAAM,EAAE,QAAQ,GAAG;AAC9D,UAAI;AACF,YAAI,eAAe;AACjB,gBAAM,MAAM,IAAI,GAAG,WAAW,GAAG,gBAAgB,GAAG,IAAI,aAAa;AAAA,QACvE,OAAO;AACL,gBAAM,MAAM,OAAO,GAAG,WAAW,GAAG,gBAAgB,GAAG,EAAE;AAAA,QAC3D;AAAA,MACF,QAAQ;AAAA,MAGR;AAAA,I
ACF;AACA,UAAM;AAAA,EACR;AACF;AAEA,SAAS,MAAM,IAAsB;AACnC,SAAO,GAAG,GAAG,SAAS,KAAO,GAAG,cAAc,KAAO,GAAG,EAAE;AAC5D;;;AtDuEA;;;AuDpTA;AA8DO,SAAS,SAAS,SAA8C;AACrE,SAAO,EAAE,gBAAgB,MAAM,QAAQ;AACzC;AAGO,SAAS,qBAAqB,GAAqC;AACxE,SACE,OAAO,MAAM,YACb,MAAM,QACL,EAAmC,mBAAmB;AAE3D;AAeO,SAAS,sBACd,OACA,OACA,YACM;AACN,QAAM,EAAE,QAAQ,IAAI;AAGpB,MAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,MAAM,QAAQ,KAAK,GAAG;AACvE,UAAM,IAAI;AAAA,MACR;AAAA,MACA,QAAQ;AAAA,MACR,UAAU,KAAK,6CAA6C,OAAO,KAAK;AAAA,IAC1E;AAAA,EACF;AAEA,QAAM,MAAM;AAGZ,aAAW,CAAC,QAAQ,CAAC,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC7C,QAAI,OAAO,MAAM,UAAU;AACzB,YAAM,IAAI;AAAA,QACR;AAAA,QACA,CAAC,MAAM;AAAA,QACP,UAAU,KAAK,cAAc,MAAM,2BAA2B,OAAO,CAAC;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAGA,QAAM,EAAE,SAAS,IAAI;AACrB,MAAI,aAAa,OAAO;AACtB,UAAM,UAAU,QAAQ,UAAU;AAAA,MAChC,CAAC,SAAS,EAAE,QAAQ,QAAQ,IAAI,IAAI,MAAM;AAAA,IAC5C;AACA,QAAI,QAAQ,SAAS,GAAG;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,QACA,UAAU,KAAK,+CAA+C,QAAQ,KAAK,IAAI,CAAC;AAAA,MAClF;AAAA,IACF;AAAA,EACF,WAAW,aAAa,OAAO;AAC7B,UAAM,UAAU,QAAQ,UAAU;AAAA,MAChC,CAAC,SAAS,QAAQ,OAAO,IAAI,IAAI,MAAM;AAAA,IACzC;AACA,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI;AAAA,QACR;AAAA,QACA,QAAQ;AAAA,QACR,UAAU,KAAK;AAAA,MACjB;AAAA,IACF;AAAA,EACF,OAAO;AAEL,UAAM,eAAe;AACrB,UAAM,UAAU,aAAa;AAAA,MAC3B,CAAC,SAAS,EAAE,QAAQ,QAAQ,IAAI,IAAI,MAAM;AAAA,IAC5C;AACA,QAAI,QAAQ,SAAS,GAAG;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,QACA,UAAU,KAAK,eAAe,aAAa,KAAK,IAAI,CAAC,cAAc,QAAQ,KAAK,IAAI,CAAC;AAAA,MACvF;AAAA,IACF;AAAA,EACF;AACF;AAeO,SAAS,gBACd,OACA,QACA,UACA,OACiC;AACjC,MAAI,WAAW,OAAO;AACpB,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,wBAAwB,SAAS,WAAW;AAAA,EACxD;AAGA,MAAI,MAAM,MAAM,MAAM,UAAa,MAAM,MAAM,MAAM,IAAI;AACvD,WAAO,MAAM,MAAM;AAAA,EACrB;AAGA,QAAM,QAA2B,MAAM,QAAQ,QAAQ,IACnD,WACA,WACE,CAAC,QAAQ,IACT,CAAC;AAEP,aAAW,MAAM,OAAO;AACtB,QAAI,OAAO,OAAO;AAChB,YAAM,MAAM,OAAO,OAAO,KAAK,EAAE,KAAK,CAAC,MAAM,MAAM,EAAE;AACrD,UAAI,QAAQ,OAAW,QAAO;AAAA,IAChC,WAAW,MAAM,EAAE,MAAM,UAAa,MAAM,EAAE,MAAM,IAAI;AACtD,aAAO,MAAM,EAAE;AAAA,IACjB;AAAA,EACF;AAEA,QAAM,IAAI;AAAA,IACR,SAAS;AAAA,IACT,wCAAwC,MAAM,OAC3C,MAAM,SAAS,IAAI,uBAAuB,MAAM,KAAK,IAAI,CAAC,MAAM,MACjE;AAAA,EACJ;AACF;AAiBO,SAAS,gBACd,QACA,YACA,QACA,UACyB;AACzB,QAAM,aAAa,OAAO,KAAK,UAAU;AACzC,MAAI,WAAW,WAAW,EAAG,QAAO;AAEpC,QAAM,SAAS,EAAE,GAAG,OAAO;AAE3B,aAAW,SAAS,YAAY;AAC9B,UAAM,MAAM,OAAO,KAAK;AACxB,QAAI,QAAQ,UAAa,QAAQ,KAAM;AACvC,QAAI,OAAO,QAAQ,YAAY,MAAM,QAAQ,GAAG,EAAG;AAEnD,WAAO,KAAK,IAAI;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AvD2FA;;;AwD1VA;AAEA;AAEA;AAGO,IAAM,8BAA8B;AA8B3C,SAAS,mBAAmB,SAAgC;AAC1D,MAAI,QAAQ,SAAS,WAAW,QAAQ,SAAS,SAAS;AACxD,UAAM,IAAI;AAAA,MACR,gEAAgE,QAAQ,IAAI;AAAA,IAC9E;AAAA,EACF;AACF;AAYA,eAAsB,cACpB,SACA,OACA,SACA,YACe;AACf,qBAAmB,OAAO;AAE1B,QAAM,SAAS,MAAM,oBAAoB,SAAS,OAAO,OAAO;AAChE,QAAM,MAAM,MAAM,OAAO,2BAA2B;AAEpD,QAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,KAAK,UAAU,UAAU,GAAG,GAAG;AAElE,QAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,6BAA6B,WAAW,SAAS;AAC3F,QAAM,UAAU,WAAW,SAAS,KAAK,IAAI;AAE7C,QAAM,WAA8B;AAAA,IAClC,QAAQ;AAAA,IACR,IAAI;AAAA,IACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,IAC5B,KAAK;AAAA,IACL,OAAO;AAAA,IACP,KAAK,QAAQ;AAAA,EACf;AAEA,QAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA,WAAW;AAAA,IACX;AAAA,IACA,WAAW,SAAS,KAAK;AAAA,EAC3B;AACF;AAQA,eAAsB,cACpB,SACA,OACA,SACA,WACgC;AAChC,qBAAmB,OAAO;AAE1B,QAAM,SAAS,MAAM,oBAAoB,SAAS,OAAO,OAAO;AAChE,QAAM,MAAM,MAAM,OAAO,2BAA2B;AAEpD,QAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,6BAA6B,SAAS;AAChF,MAAI,CAAC,SAAU,QAAO;AAEtB,QAAM,YAAY,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AACjE,SAAO,KAAK,MAAM,SAAS;AAC7B;AAOA,eAAsB,iBACpB,SACA,OACA,SACA,WACe;AACf,qBAAmB,OAAO;AAC1B,QAAM,QAAQ,OAAO,OAAO,6BAA6B,SAAS;AACpE;AASA,eAAsB,gBAC
pB,SACA,OACA,SACmB;AACnB,qBAAmB,OAAO;AAC1B,SAAO,QAAQ,KAAK,OAAO,2BAA2B;AACxD;AASA,eAAsB,iBACpB,SACA,OACA,SACA,WACiE;AACjE,QAAM,aAAa,MAAM,cAAc,SAAS,OAAO,SAAS,SAAS;AACzE,MAAI,CAAC,WAAY,QAAO,EAAE,QAAQ,MAAM;AAExC,QAAM,UAAU,WAAW,YACvB,KAAK,IAAI,IAAI,IAAI,KAAK,WAAW,SAAS,EAAE,QAAQ,IACpD;AAEJ,SAAO,EAAE,QAAQ,MAAM,QAAQ;AACjC;;;ACpLA;;;ACkBA;AACA;AAGA;AAEA,IAAMC,UAAS,WAAW,OAAO;AAGjC,IAAM,iBAAiB,KAAK,KAAK;AAGjC,IAAM,kBAAkB,oBAAI,IAAuB;AAwDnD,eAAsB,cACpB,SACA,OACA,UAAgC,CAAC,GACH;AAC9B,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,YAAY,aAAa;AAC/B,QAAM,YAAY,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,EAAE,YAAY;AAK3D,QAAM,aAAa,MAAMA,QAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,QAAQ,IAAI;AAAA,IAC/B;AAAA;AAAA,IACA,CAAC,WAAW,SAAS;AAAA,EACvB;AAqBA,QAAM,SAAiC,CAAC;AACxC,aAAW,CAAC,UAAU,GAAG,KAAK,QAAQ,MAAM;AAC1C,UAAM,MAAM,MAAMA,QAAO,UAAU,OAAO,GAAG;AAC7C,WAAO,QAAQ,IAAI,eAAe,GAAG;AAAA,EACvC;AAEA,QAAM,UAAU,KAAK,UAAU;AAAA,IAC7B,QAAQ,QAAQ;AAAA,IAChB,aAAa,QAAQ;AAAA,IACrB,MAAM,QAAQ;AAAA,IACd,aAAa,QAAQ;AAAA,IACrB,MAAM;AAAA,IACN,MAAM,eAAe,QAAQ,IAAI;AAAA,EACnC,CAAC;AAED,QAAM,KAAK,WAAW,OAAO,gBAAgB,IAAI,WAAW,EAAE,CAAC;AAC/D,QAAM,YAAY,MAAMA,QAAO;AAAA,IAC7B,EAAE,MAAM,WAAW,GAAG;AAAA,IACtB;AAAA,IACA,IAAI,YAAY,EAAE,OAAO,OAAO;AAAA,EAClC;AAEA,QAAM,QAAsB;AAAA,IAC1B,gBAAgB;AAAA,IAChB;AAAA,IACA,QAAQ,QAAQ;AAAA,IAChB;AAAA,IACA,MAAM,QAAQ;AAAA,IACd;AAAA,IACA,YAAY,eAAe,SAAS;AAAA,IACpC,OAAO,eAAe,EAAE;AAAA,EAC1B;AAEA,kBAAgB,IAAI,WAAW,UAAU;AACzC,SAAO,EAAE,OAAO,UAAU;AAC5B;AAcA,eAAsB,eAAe,OAA+C;AAElF,MAAI,KAAK,IAAI,IAAI,IAAI,KAAK,MAAM,SAAS,EAAE,QAAQ,GAAG;AACpD,oBAAgB,OAAO,MAAM,SAAS;AACtC,UAAM,IAAI,oBAAoB,MAAM,SAAS;AAAA,EAC/C;AAEA,QAAM,aAAa,gBAAgB,IAAI,MAAM,SAAS;AACtD,MAAI,CAAC,YAAY;AACf,UAAM,IAAI,qBAAqB,MAAM,SAAS;AAAA,EAChD;AAEA,QAAM,KAAK,eAAe,MAAM,KAAK;AACrC,QAAM,aAAa,eAAe,MAAM,UAAU;AAElD,MAAI;AACJ,MAAI;AACF,gBAAY,MAAMA,QAAO;AAAA,MACvB,EAAE,MAAM,WAAW,GAAG;AAAA,MACtB;AAAA,MACA;AAAA,IACF;AAAA,EACF,QAAQ;AACN,UAAM,IAAI,qBAAqB,MAAM,SAAS;AAAA,EAChD;AAEA,QAAM,UAAU,KAAK,MAAM,IAAI,YAAY,EAAE,OAAO,SAAS,CAAC;AAS9D,QAAM,OAAO,oBAAI,IAAuB;AACxC,aAAW,CAAC,UAAU,SAAS,KAAK,OAAO,QAAQ,QAAQ,IAAI,GAAG;AAChE,UAAM,MAAM,MAAMA,QAAO;AAAA,MACvB;AAAA,MACA,eAAe,SAAS;AAAA,MACxB,EAAE,MAAM,WAAW,QAAQ,IAAI;AAAA,MAC/B;AAAA,MACA,CAAC,WAAW,SAAS;AAAA,IACvB;AACA,SAAK,IAAI,UAAU,GAAG;AAAA,EACxB;AAEA,SAAO;AAAA,IACL,QAAQ,QAAQ;AAAA,IAChB,aAAa,QAAQ;AAAA,IACrB,MAAM,QAAQ;AAAA,IACd,aAAa,QAAQ;AAAA,IACrB;AAAA,IACA,KAAK;AAAA;AAAA,IACL,MAAM,eAAe,QAAQ,IAAI;AAAA,EACnC;AACF;AAWO,SAAS,cAAc,WAAyB;AACrD,kBAAgB,OAAO,SAAS;AAClC;AAMO,SAAS,eAAe,OAA8B;AAC3D,MAAI,KAAK,IAAI,IAAI,IAAI,KAAK,MAAM,SAAS,EAAE,QAAQ,EAAG,QAAO;AAC7D,SAAO,gBAAgB,IAAI,MAAM,SAAS;AAC5C;AAOO,SAAS,oBAA0B;AACxC,kBAAgB,MAAM;AACxB;AAMO,SAAS,qBAA6B;AAC3C,SAAO,gBAAgB;AACzB;;;ADlPO,IAAM,iBAAN,MAAqB;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT;AAAA,EACA,YAAkD;AAAA,EAClD,gBAAsD;AAAA,EACtD,oBAAyC;AAAA,EAEjD,YAAY,MAA6B;AACvC,SAAK,SAAS,KAAK;AACnB,SAAK,YAAY,KAAK;AACtB,SAAK,WAAW,KAAK;AACrB,SAAK,YAAY,KAAK,IAAI;AAC1B,SAAK,iBAAiB,KAAK,IAAI;AAE/B,SAAK,kBAAkB;AACvB,SAAK,sBAAsB;AAC3B,SAAK,uBAAuB;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,QAAc;AACZ,SAAK,iBAAiB,KAAK,IAAI;AAC/B,SAAK,kBAAkB;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,eAAe,IAA2B;AAExC,UAAM,EAAE,kBAAkB,IAAI,KAAK;AACnC,QAAI,sBAAsB,UAAa,KAAK,IAAI,IAAI,KAAK,aAAa,mBAAmB;AACvF,WAAK,OAAO,UAAU;AACtB,YAAM,IAAI,oBAAoB,KAAK,SAAS;AAAA,IAC9C;AAEA,UAAM,WAAW,KAAK,OAAO,oBAAoB,CAAC;AAClD,QAAI,SAAS,SAAS,EAAE,GAAG;AACzB,YAAM,IAAI,mBAAmB,EAAE;AAAA,IACjC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,UAAgB;AACd,QAAI,KAAK,WAAW;AAClB,mBAAa,KAAK,SAAS;AAC3B,WAAK,YAAY;AAAA,IACnB;AACA,QAAI,KAAK,eAAe;AACtB,mBAAa,KAAK,aAAa;AAC/B,WAAK,gBAAgB;AAAA,IACvB;AAC
A,QAAI,KAAK,qBAAqB,OAAO,aAAa,aAAa;AAC7D,eAAS,oBAAoB,oBAAoB,KAAK,iBAAiB;AACvE,WAAK,oBAAoB;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,IAAI,SAAiB;AACnB,WAAO,KAAK,IAAI,IAAI,KAAK;AAAA,EAC3B;AAAA;AAAA,EAGA,IAAI,QAAgB;AAClB,WAAO,KAAK,IAAI,IAAI,KAAK;AAAA,EAC3B;AAAA;AAAA,EAIQ,oBAA0B;AAChC,UAAM,EAAE,cAAc,IAAI,KAAK;AAC/B,QAAI,CAAC,cAAe;AAEpB,QAAI,KAAK,UAAW,cAAa,KAAK,SAAS;AAC/C,SAAK,YAAY,WAAW,MAAM;AAChC,WAAK,OAAO,MAAM;AAAA,IACpB,GAAG,aAAa;AAAA,EAClB;AAAA,EAEQ,wBAA8B;AACpC,UAAM,EAAE,kBAAkB,IAAI,KAAK;AACnC,QAAI,CAAC,kBAAmB;AAExB,QAAI,KAAK,cAAe,cAAa,KAAK,aAAa;AACvD,SAAK,gBAAgB,WAAW,MAAM;AACpC,WAAK,OAAO,UAAU;AAAA,IACxB,GAAG,iBAAiB;AAAA,EACtB;AAAA,EAEQ,yBAA+B;AACrC,QAAI,CAAC,KAAK,OAAO,iBAAkB;AACnC,QAAI,OAAO,aAAa,YAAa;AAErC,SAAK,oBAAoB,MAAM;AAC7B,UAAI,SAAS,QAAQ;AACnB,aAAK,OAAO,YAAY;AAAA,MAC1B;AAAA,IACF;AACA,aAAS,iBAAiB,oBAAoB,KAAK,iBAAiB;AAAA,EACtE;AAAA,EAEQ,OAAO,QAAkD;AAC/D,SAAK,QAAQ;AACb,kBAAc,KAAK,SAAS;AAC5B,SAAK,SAAS,MAAM;AAAA,EACtB;AACF;AAQO,SAAS,eAAe,MAA6C;AAC1E,SAAO,IAAI,eAAe,IAAI;AAChC;AAOO,SAAS,sBAAsB,QAA6B;AACjE,QAAM,EAAE,eAAe,kBAAkB,IAAI;AAC7C,MAAI,kBAAkB,WAAc,OAAO,kBAAkB,YAAY,iBAAiB,IAAI;AAC5F,UAAM,IAAI,MAAM,8DAA8D,aAAa,EAAE;AAAA,EAC/F;AACA,MAAI,sBAAsB,WAAc,OAAO,sBAAsB,YAAY,qBAAqB,IAAI;AACxG,UAAM,IAAI,MAAM,kEAAkE,iBAAiB,EAAE;AAAA,EACvG;AACA,MAAI,kBAAkB,UAAa,sBAAsB,UAAa,iBAAiB,mBAAmB;AACxG,UAAM,IAAI;AAAA,MACR,gCAAgC,aAAa,4CAA4C,iBAAiB;AAAA,IAC5G;AAAA,EACF;AACF;;;AEtIA;AACA;AAKA,IAAM,uBAAuB;AAE7B,IAAM,iBAAiB;AAmBvB,SAAS,uBAA6B;AAEpC,MACE,OAAO,YAAY,eACnB,QAAQ,IAAI,aAAa,cACzB;AACA,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF;AAGA,MACE,OAAO,eAAe,eACrB,WAAuC,2BAA2B,MACnE;AACA,UAAM,IAAI,gBAAgB,kDAAkD;AAAA,EAC9E;AAGA,MACE,OAAO,WAAW,eAClB,OAAO,OAAO,aAAa,aAC3B;AACA,UAAM,OAAO,OAAO,SAAS;AAC7B,QAAI,SAAS,eAAe,SAAS,eAAe,SAAS,SAAS,CAAC,KAAK,SAAS,QAAQ,GAAG;AAC9F,YAAM,IAAI;AAAA,QACR,gEAAgE,IAAI;AAAA,MAEtE;AAAA,IACF;AAAA,EACF;AACF;AAIA,SAAS,WAAW,OAAe,QAAwB;AACzD,SAAO,GAAG,cAAc,GAAG,KAAK,IAAI,MAAM;AAC5C;AAEA,SAAS,eAAe,mBAAsC;AAC5D,MAAI,OAAO,WAAW,aAAa;AACjC,UAAM,IAAI,gBAAgB,yFAAyF;AAAA,EACrH;AACA,SAAO,oBAAoB,OAAO,eAAe,OAAO;AAC1D;AAmBA,eAAsB,gBACpB,OACA,QACA,SACA,SACe;AACf,MAAI,QAAQ,gBAAgB,sBAAsB;AAChD,UAAM,IAAI;AAAA,MACR,oCAAoC,oBAAoB,YAC/C,QAAQ,WAAW;AAAA,IAC9B;AAAA,EACF;AAEA,uBAAqB;AAErB,QAAM,UAAU,eAAe,QAAQ,iBAAiB;AAExD,QAAM,SAAiC,CAAC;AACxC,aAAW,CAAC,UAAU,GAAG,KAAK,QAAQ,MAAM;AAC1C,UAAM,MAAM,MAAM,WAAW,OAAO,OAAO,UAAU,OAAO,GAAG;AAC/D,WAAO,QAAQ,IAAI,eAAe,GAAG;AAAA,EACvC;AAEA,QAAM,UAAU,KAAK,UAAU;AAAA,IAC7B,mBAAmB;AAAA,IACnB,QAAQ,QAAQ;AAAA,IAChB,aAAa,QAAQ;AAAA,IACrB,MAAM,QAAQ;AAAA,IACd,aAAa,QAAQ;AAAA,IACrB,MAAM;AAAA,IACN,MAAM,eAAe,QAAQ,IAAI;AAAA,EACnC,CAAC;AAED,UAAQ,QAAQ,WAAW,OAAO,MAAM,GAAG,OAAO;AAGlD,UAAQ;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA;AAAA,eAAoB,KAAK,WAAW,MAAM,kBACvC,QAAQ,oBAAoB,iBAAiB,gBAAgB;AAAA;AAAA;AAAA,EAGlE;AACF;AAgBA,eAAsB,cACpB,OACA,QACA,UAA2C,CAAC,GACX;AACjC,MAAI,OAAO,WAAW,YAAa,QAAO;AAE1C,QAAM,UAAU,eAAe,QAAQ,iBAAiB;AACxD,QAAM,MAAM,QAAQ,QAAQ,WAAW,OAAO,MAAM,CAAC;AACrD,MAAI,CAAC,IAAK,QAAO;AAEjB,MAAI;AASJ,MAAI;AACF,aAAS,KAAK,MAAM,GAAG;AAAA,EACzB,QAAQ;AACN,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,sBAAsB,EAAG,QAAO;AAE3C,QAAM,OAAO,oBAAI,IAAuB;AACxC,aAAW,CAAC,UAAU,SAAS,KAAK,OAAO,QAAQ,OAAO,IAAI,GAAG;AAC/D,UAAM,MAAM,MAAM,WAAW,OAAO,OAAO;AAAA,MACzC;AAAA,MACA,eAAe,SAAS;AAAA,MACxB,EAAE,MAAM,WAAW,QAAQ,IAAI;AAAA,MAC/B;AAAA,MACA,CAAC,WAAW,SAAS;AAAA,IACvB;AACA,SAAK,IAAI,UAAU,GAAG;AAAA,EACxB;AAEA,SAAO;AAAA,IACL,QAAQ,OAAO;AAAA,IACf,aAAa,OAAO;AAAA,IACpB,MAAM,OAAO;AAAA,IACb,aAAa,OAAO;AAAA,IACpB;AAAA,IACA,KAAK;AAAA,IACL,MAAM,eAAe,OAAO,IAAI;AAAA,EAClC;AACF;AAOO,SAAS,eACd,OACA,QACA,UAA2C,CAAC,GACtC;AACN,MAAI,OAAO,WAAW,YAAa;AACnC,QAAM,UAAU,eAAe,QAAQ,iBAA
iB;AACxD,UAAQ,WAAW,WAAW,OAAO,MAAM,CAAC;AAC9C;AAOO,SAAS,kBACd,OACA,QACA,UAA2C,CAAC,GACnC;AACT,MAAI,OAAO,WAAW,YAAa,QAAO;AAC1C,QAAM,UAAU,eAAe,QAAQ,iBAAiB;AACxD,SAAO,QAAQ,QAAQ,WAAW,OAAO,MAAM,CAAC,MAAM;AACxD;;;A3D8IA;AACA;AAIA;AAGA;AACA;AAEA;;;A4D5aO,SAASC,MAAK,QAAiB,QAAiB,WAAW,IAAiB;AACjF,QAAM,UAAuB,CAAC;AAG9B,MAAI,WAAW,OAAQ,QAAO;AAG9B,MAAI,UAAU,QAAQ,UAAU,MAAM;AACpC,WAAO,CAAC,EAAE,MAAM,YAAY,UAAU,MAAM,SAAS,IAAI,OAAO,CAAC;AAAA,EACnE;AACA,MAAI,UAAU,QAAQ,UAAU,MAAM;AACpC,WAAO,CAAC,EAAE,MAAM,YAAY,UAAU,MAAM,WAAW,MAAM,OAAO,CAAC;AAAA,EACvE;AAGA,MAAI,OAAO,WAAW,OAAO,QAAQ;AACnC,WAAO,CAAC,EAAE,MAAM,YAAY,UAAU,MAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,CAAC;AAAA,EACnF;AAGA,MAAI,OAAO,WAAW,UAAU;AAC9B,WAAO,CAAC,EAAE,MAAM,YAAY,UAAU,MAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,CAAC;AAAA,EACnF;AAGA,MAAI,MAAM,QAAQ,MAAM,KAAK,MAAM,QAAQ,MAAM,GAAG;AAClD,UAAM,SAAS,KAAK,IAAI,OAAO,QAAQ,OAAO,MAAM;AACpD,aAAS,IAAI,GAAG,IAAI,QAAQ,KAAK;AAC/B,YAAM,IAAI,WAAW,GAAG,QAAQ,IAAI,CAAC,MAAM,IAAI,CAAC;AAChD,UAAI,KAAK,OAAO,QAAQ;AACtB,gBAAQ,KAAK,EAAE,MAAM,GAAG,MAAM,SAAS,IAAI,OAAO,CAAC,EAAE,CAAC;AAAA,MACxD,WAAW,KAAK,OAAO,QAAQ;AAC7B,gBAAQ,KAAK,EAAE,MAAM,GAAG,MAAM,WAAW,MAAM,OAAO,CAAC,EAAE,CAAC;AAAA,MAC5D,OAAO;AACL,gBAAQ,KAAK,GAAGA,MAAK,OAAO,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;AAAA,MAC/C;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAGA,QAAM,YAAY;AAClB,QAAM,YAAY;AAClB,QAAM,UAAU,oBAAI,IAAI,CAAC,GAAG,OAAO,KAAK,SAAS,GAAG,GAAG,OAAO,KAAK,SAAS,CAAC,CAAC;AAE9E,aAAW,OAAO,SAAS;AACzB,UAAM,IAAI,WAAW,GAAG,QAAQ,IAAI,GAAG,KAAK;AAC5C,QAAI,EAAE,OAAO,YAAY;AACvB,cAAQ,KAAK,EAAE,MAAM,GAAG,MAAM,SAAS,IAAI,UAAU,GAAG,EAAE,CAAC;AAAA,IAC7D,WAAW,EAAE,OAAO,YAAY;AAC9B,cAAQ,KAAK,EAAE,MAAM,GAAG,MAAM,WAAW,MAAM,UAAU,GAAG,EAAE,CAAC;AAAA,IACjE,OAAO;AACL,cAAQ,KAAK,GAAGA,MAAK,UAAU,GAAG,GAAG,UAAU,GAAG,GAAG,CAAC,CAAC;AAAA,IACzD;AAAA,EACF;AAEA,SAAO;AACT;AAGO,SAAS,WAAW,SAA8B;AACvD,MAAI,QAAQ,WAAW,EAAG,QAAO;AACjC,SAAO,QAAQ,IAAI,OAAK;AACtB,YAAQ,EAAE,MAAM;AAAA,MACd,KAAK;AACH,eAAO,KAAK,EAAE,IAAI,KAAK,KAAK,UAAU,EAAE,EAAE,CAAC;AAAA,MAC7C,KAAK;AACH,eAAO,KAAK,EAAE,IAAI,KAAK,KAAK,UAAU,EAAE,IAAI,CAAC;AAAA,MAC/C,KAAK;AACH,eAAO,KAAK,EAAE,IAAI,KAAK,KAAK,UAAU,EAAE,IAAI,CAAC,WAAM,KAAK,UAAU,EAAE,EAAE,CAAC;AAAA,IAC3E;AAAA,EACF,CAAC,EAAE,KAAK,IAAI;AACd;;;AC8BA,eAAsB,UACpB,OACA,WACA,UAAuB,CAAC,GACD;AACvB,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,SAAS,QAAQ,cAAc,IAAI,IAAI,QAAQ,WAAW,IAAI;AACpE,QAAM,YACJ,QAAQ,cAAc,CAAC,GAAY,MAAeC,MAAU,GAAG,CAAC,EAAE,WAAW;AAO/E,QAAM,OAAO,oBAAI,IAA4B;AAC7C,mBAAiB,SAAS,MAAM,aAAa,EAAE,aAAa,aAAa,CAAC,GAAG;AAC3E,QAAI,UAAU,CAAC,OAAO,IAAI,MAAM,UAAU,EAAG;AAC7C,UAAM,aAAa,KAAK,IAAI,MAAM,UAAU,KAAK,oBAAI,IAAe;AACpE,eAAW,UAAU,MAAM,SAAS;AAClC,YAAM,KAAK,YAAY,QAAQ,KAAK;AACpC,UAAI,CAAC,GAAI;AACT,iBAAW,IAAI,IAAI,MAAW;AAAA,IAChC;AACA,SAAK,IAAI,MAAM,YAAY,UAAU;AAAA,EACvC;AAGA,QAAM,OAAO,MAAM,mBAAsB,WAAW,OAAO,MAAM;AAGjE,QAAM,QAA6B,CAAC;AACpC,QAAM,WAAwC,CAAC;AAC/C,QAAM,UAA+B,CAAC;AACtC,QAAM,YAA6C,QAAQ,mBAAmB,CAAC,IAAI;AAEnF,QAAM,kBAAkB,oBAAI,IAAI,CAAC,GAAG,KAAK,KAAK,GAAG,GAAG,KAAK,KAAK,CAAC,CAAC;AAChE,aAAW,cAAc,CAAC,GAAG,eAAe,EAAE,KAAK,GAAG;AACpD,UAAM,WAAW,KAAK,IAAI,UAAU,KAAK,oBAAI,IAAe;AAC5D,UAAM,WAAW,KAAK,IAAI,UAAU,KAAK,oBAAI,IAAe;AAC5D,UAAM,SAAS,oBAAI,IAAI,CAAC,GAAG,SAAS,KAAK,GAAG,GAAG,SAAS,KAAK,CAAC,CAAC;AAE/D,eAAW,MAAM,CAAC,GAAG,MAAM,EAAE,KAAK,GAAG;AACnC,YAAM,SAAS,SAAS,IAAI,EAAE;AAC9B,YAAM,QAAQ,SAAS,IAAI,EAAE;AAE7B,UAAI,WAAW,UAAa,UAAU,QAAW;AAC/C,cAAM,KAAK,EAAE,YAAY,IAAI,QAAQ,MAAM,CAAC;AAAA,MAC9C,WAAW,WAAW,UAAa,UAAU,QAAW;AACtD,gBAAQ,KAAK,EAAE,YAAY,IAAI,QAAQ,OAAO,CAAC;AAAA,MACjD,WAAW,WAAW,UAAa,UAAU,QAAW;AACtD,YAAI,UAAU,QAAQ,KAAK,GAAG;AAC5B,qBAAW,KAAK,EAAE,YAAY,IAAI,QAAQ,MAAM,CAAC;AAAA,QACnD,OAAO;AACL,gBAAM,aAAaA,MAAU,QAAQ,KAAK;AAC1C,gBAAM,gBAAgB,mBAA
mB,UAAU;AACnD,mBAAS,KAAK;AAAA,YACZ;AAAA,YACA;AAAA,YACA,QAAQ;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAAU;AAAA,IACd,KAAK,MAAM;AAAA,IACX,QAAQ,SAAS;AAAA,IACjB,QAAQ,QAAQ;AAAA,IAChB,OAAO,MAAM,SAAS,SAAS,SAAS,QAAQ;AAAA,EAClD;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,MAAM;AACX,aAAOC,YAAW,MAAM,UAAU,QAAQ,EAAE,OAAO,UAAU,SAAS,QAAQ,CAAC;AAAA,IACjF;AAAA,EACF;AACF;AAIA,eAAe,mBACb,WACA,OACA,QACsC;AACtC,QAAM,MAAM,oBAAI,IAA4B;AAK5C,MACE,OAAO,cAAc,YACrB,cAAc,QACd,kBAAkB,aAClB,OAAQ,UAAoB,iBAAiB,YAC7C;AACA,qBAAiB,SAAU,UAAoB,aAAa,EAAE,aAAa,aAAa,CAAC,GAAG;AAC1F,UAAI,UAAU,CAAC,OAAO,IAAI,MAAM,UAAU,EAAG;AAC7C,YAAM,aAAa,IAAI,IAAI,MAAM,UAAU,KAAK,oBAAI,IAAe;AACnE,iBAAW,UAAU,MAAM,SAAS;AAClC,cAAM,KAAK,YAAY,QAAQ,KAAK;AACpC,YAAI,CAAC,GAAI;AACT,mBAAW,IAAI,IAAI,MAAW;AAAA,MAChC;AACA,UAAI,IAAI,MAAM,YAAY,UAAU;AAAA,IACtC;AACA,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,cAAc,UAAU;AACjC,QAAI;AACJ,QAAI;AACF,eAAS,KAAK,MAAM,SAAS;AAAA,IAC/B,SAAS,KAAK;AACZ,YAAM,IAAI;AAAA,QACR,kDAAmD,IAAc,OAAO;AAAA,MAC1E;AAAA,IACF;AACA,WAAO,sBAAyB,QAAQ,OAAO,MAAM;AAAA,EACvD;AAGA,SAAO,sBAAyB,WAAW,OAAO,MAAM;AAC1D;AAEA,SAAS,sBACP,KACA,OACA,QAC6B;AAC7B,QAAM,MAAM,oBAAI,IAA4B;AAC5C,MAAI,QAAQ,QAAQ,OAAO,QAAQ,UAAU;AAC3C,UAAM,IAAI,MAAM,mEAAmE;AAAA,EACrF;AAGA,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC9C,QAAI,IAAI,WAAW,GAAG,EAAG;AACzB,QAAI,UAAU,CAAC,OAAO,IAAI,GAAG,EAAG;AAChC,QAAI,CAAC,MAAM,QAAQ,KAAK,EAAG;AAC3B,UAAM,aAAa,oBAAI,IAAe;AACtC,eAAW,UAAU,OAAuB;AAC1C,UAAI,WAAW,QAAQ,OAAO,WAAW,SAAU;AACnD,YAAM,KAAK,YAAY,QAAQ,KAAK;AACpC,UAAI,CAAC,GAAI;AACT,iBAAW,IAAI,IAAI,MAAM;AAAA,IAC3B;AACA,QAAI,IAAI,KAAK,UAAU;AAAA,EACzB;AACA,SAAO;AACT;AAEA,SAAS,mBAAmB,OAAqD;AAC/E,QAAM,OAAO,oBAAI,IAAY;AAC7B,aAAW,KAAK,OAAO;AAGrB,UAAM,IAAI,UAAU,KAAK,EAAE,IAAI;AAC/B,QAAI,EAAG,MAAK,IAAI,EAAE,CAAC,CAAC;AAAA,EACtB;AACA,SAAO,CAAC,GAAG,IAAI;AACjB;AASA,SAAS,YAAY,QAAiB,OAAuB;AAC3D,MAAI,WAAW,QAAQ,OAAO,WAAW,SAAU,QAAO;AAC1D,QAAM,IAAK,OAAmC,KAAK;AACnD,MAAI,OAAO,MAAM,SAAU,QAAO;AAClC,MAAI,OAAO,MAAM,YAAY,OAAO,SAAS,CAAC,EAAG,QAAO,OAAO,CAAC;AAChE,SAAO;AACT;AASA,SAASA,YACP,QACA,GACQ;AACR,QAAM,OAAO,GAAG,EAAE,QAAQ,GAAG,eAAY,EAAE,QAAQ,MAAM,kBAAe,EAAE,QAAQ,MAAM;AACxF,MAAI,WAAW,QAAS,QAAO;AAC/B,MAAI,EAAE,QAAQ,UAAU,EAAG,QAAO,OAAO;AACzC,MAAI,WAAW,WAAY,QAAO;AAElC,QAAM,OAAiB,CAAC,MAAM,EAAE;AAChC,aAAW,KAAK,EAAE,MAAO,MAAK,KAAK,GAAG,EAAE,UAAU,IAAI,EAAE,EAAE,QAAS;AACnE,aAAW,KAAK,EAAE,UAAU;AAC1B,UAAM,SAAS,EAAE,WACd,IAAI,CAAC,MAAM,GAAG,EAAE,IAAI,KAAK,UAAU,EAAE,IAAI,CAAC,WAAM,UAAU,EAAE,EAAE,CAAC,EAAE,EACjE,KAAK,IAAI;AACZ,SAAK,KAAK,GAAG,EAAE,UAAU,IAAI,EAAE,EAAE,aAAe,MAAM,EAAE;AAAA,EAC1D;AACA,aAAW,KAAK,EAAE,QAAS,MAAK,KAAK,GAAG,EAAE,UAAU,IAAI,EAAE,EAAE,UAAW;AACvE,SAAO,KAAK,KAAK,IAAI;AACvB;AAEA,SAAS,UAAU,OAAwB;AACzC,MAAI,UAAU,OAAW,QAAO;AAChC,QAAM,IAAI,KAAK,UAAU,KAAK;AAK9B,MAAI,OAAO,MAAM,SAAU,QAAO;AAClC,SAAO,EAAE,SAAS,KAAK,EAAE,MAAM,GAAG,EAAE,IAAI,QAAQ;AAClD;;;AC/VA;AAOO,SAAS,mBAAmB,YAA0B;AAC3D,MAAI,WAAW,SAAS,GAAG;AACzB,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF;AAEA,QAAM,UAAU,gBAAgB,UAAU;AAC1C,MAAI,UAAU,IAAI;AAChB,UAAM,IAAI;AAAA,MACR;AAAA,IAGF;AAAA,EACF;AACF;AAMO,SAAS,gBAAgB,YAA4B;AAC1D,MAAI,cAAc;AAElB,MAAI,QAAQ,KAAK,UAAU,EAAG,gBAAe;AAC7C,MAAI,QAAQ,KAAK,UAAU,EAAG,gBAAe;AAC7C,MAAI,QAAQ,KAAK,UAAU,EAAG,gBAAe;AAC7C,MAAI,eAAe,KAAK,UAAU,EAAG,gBAAe;AAEpD,MAAI,gBAAgB,EAAG,eAAc;AAErC,SAAO,KAAK,MAAM,WAAW,SAAS,KAAK,KAAK,WAAW,CAAC;AAC9D;","names":["count","sha256Hex","hex","max","sha256Hex","sha256Hex","max","diff","sha256Hex","revoke","store","s","mergeSnapshots","sha256Hex","hex","notEnabled","NOT_ENABLED","coerceRefKey","EMPTY_PLAN","sortRecords","compareValues","not
Enabled","version","envelope","previousEnvelope","NOT_ENABLED","NOT_ENABLED","subtle","issueDelegation","DELEGATIONS_COLLECTION","revokeDelegation","record","handle","generateULID","sha256Hex","NOT_ENABLED","notEnabled","keyring","comp","subtle","diff","diff","formatDiff"]}