@noy-db/hub 0.1.0-pre.4 → 0.1.0-pre.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/blobs/index.cjs.map +1 -1
- package/dist/blobs/index.d.cts +3 -3
- package/dist/blobs/index.d.ts +3 -3
- package/dist/blobs/index.js +2 -2
- package/dist/bundle/index.cjs +26 -3
- package/dist/bundle/index.cjs.map +1 -1
- package/dist/bundle/index.d.cts +3 -3
- package/dist/bundle/index.d.ts +3 -3
- package/dist/bundle/index.js +3 -1
- package/dist/{chunk-LSZHBNDG.js → chunk-3WCRU7TI.js} +2 -2
- package/dist/{chunk-PSHTHSIX.js → chunk-6IJQ27XN.js} +213 -10
- package/dist/chunk-6IJQ27XN.js.map +1 -0
- package/dist/{chunk-O5GK62FJ.js → chunk-B6HF6NTZ.js} +1 -1
- package/dist/chunk-B6HF6NTZ.js.map +1 -0
- package/dist/{chunk-AVWFLPNR.js → chunk-CL37QSND.js} +2 -2
- package/dist/chunk-EMIGCR7X.js +39 -0
- package/dist/chunk-EMIGCR7X.js.map +1 -0
- package/dist/{chunk-GJILMRPO.js → chunk-FAAWLVTF.js} +42 -4
- package/dist/chunk-FAAWLVTF.js.map +1 -0
- package/dist/chunk-GILMPJXB.js +155 -0
- package/dist/chunk-GILMPJXB.js.map +1 -0
- package/dist/{chunk-L77MEFCH.js → chunk-INSJBB5W.js} +3 -3
- package/dist/{chunk-QZIACZZU.js → chunk-KPF2HHPI.js} +2 -2
- package/dist/{chunk-NK2NSXXK.js → chunk-N2LMZKLR.js} +2 -2
- package/dist/{chunk-EARQCIL7.js → chunk-NZ4XCIKS.js} +3 -3
- package/dist/{chunk-E445ICYI.js → chunk-UFL4DUEV.js} +5 -3
- package/dist/chunk-UFL4DUEV.js.map +1 -0
- package/dist/consent/index.d.cts +3 -3
- package/dist/consent/index.d.ts +3 -3
- package/dist/{dev-unlock-XOUecfQ9.d.ts → dev-unlock-CcJ1qIi7.d.ts} +1 -1
- package/dist/{dev-unlock-5SmCVGyx.d.cts → dev-unlock-Dk14V6lX.d.cts} +1 -1
- package/dist/{hash-Bxud16vM.d.ts → hash-1Xsqx1jl.d.ts} +1 -1
- package/dist/{hash-CvuKN2gH.d.cts → hash-h_2U3TFb.d.cts} +1 -1
- package/dist/history/index.cjs.map +1 -1
- package/dist/history/index.d.cts +4 -4
- package/dist/history/index.d.ts +4 -4
- package/dist/history/index.js +2 -2
- package/dist/i18n/index.cjs +3 -1
- package/dist/i18n/index.cjs.map +1 -1
- package/dist/i18n/index.d.cts +3 -3
- package/dist/i18n/index.d.ts +3 -3
- package/dist/i18n/index.js +3 -3
- package/dist/{index-DN-J-5wT.d.cts → index-6xNpPsxR.d.cts} +1 -1
- package/dist/{index-Cy-MKrdK.d.ts → index-Cvb0efA_.d.cts} +39 -5
- package/dist/{index-BRHBCmLt.d.ts → index-DJTf9yxn.d.ts} +1 -1
- package/dist/{index-BvUiM47h.d.cts → index-DZn6Yick.d.ts} +39 -5
- package/dist/index.cjs +2001 -58
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +315 -19
- package/dist/index.d.ts +315 -19
- package/dist/index.js +1503 -41
- package/dist/index.js.map +1 -1
- package/dist/{ledger-HWXYGUIQ.js → ledger-5V67MAIL.js} +3 -3
- package/dist/periods/index.cjs.map +1 -1
- package/dist/periods/index.d.cts +3 -3
- package/dist/periods/index.d.ts +3 -3
- package/dist/periods/index.js +3 -3
- package/dist/public-envelope-DFJZHXVH.js +31 -0
- package/dist/public-envelope-DFJZHXVH.js.map +1 -0
- package/dist/query/index.d.cts +1 -1
- package/dist/query/index.d.ts +1 -1
- package/dist/session/index.cjs +4 -2
- package/dist/session/index.cjs.map +1 -1
- package/dist/session/index.d.cts +4 -4
- package/dist/session/index.d.ts +4 -4
- package/dist/session/index.js +1 -1
- package/dist/shadow/index.d.cts +3 -3
- package/dist/shadow/index.d.ts +3 -3
- package/dist/store/index.d.cts +3 -3
- package/dist/store/index.d.ts +3 -3
- package/dist/sync/index.cjs.map +1 -1
- package/dist/sync/index.d.cts +2 -2
- package/dist/sync/index.d.ts +2 -2
- package/dist/sync/index.js +2 -2
- package/dist/team/index.cjs +3 -1
- package/dist/team/index.cjs.map +1 -1
- package/dist/team/index.d.cts +3 -3
- package/dist/team/index.d.ts +3 -3
- package/dist/team/index.js +4 -4
- package/dist/tx/index.d.cts +3 -3
- package/dist/tx/index.d.ts +3 -3
- package/dist/{types-Dmi7nrC9.d.ts → types-D-6bmD2c.d.ts} +1271 -3
- package/dist/{types-BVSfkYg6.d.cts → types-D3QLmhlk.d.cts} +1271 -3
- package/package.json +1 -1
- package/dist/chunk-E445ICYI.js.map +0 -1
- package/dist/chunk-GJILMRPO.js.map +0 -1
- package/dist/chunk-O5GK62FJ.js.map +0 -1
- package/dist/chunk-PSHTHSIX.js.map +0 -1
- /package/dist/{chunk-LSZHBNDG.js.map → chunk-3WCRU7TI.js.map} +0 -0
- /package/dist/{chunk-AVWFLPNR.js.map → chunk-CL37QSND.js.map} +0 -0
- /package/dist/{chunk-L77MEFCH.js.map → chunk-INSJBB5W.js.map} +0 -0
- /package/dist/{chunk-QZIACZZU.js.map → chunk-KPF2HHPI.js.map} +0 -0
- /package/dist/{chunk-NK2NSXXK.js.map → chunk-N2LMZKLR.js.map} +0 -0
- /package/dist/{chunk-EARQCIL7.js.map → chunk-NZ4XCIKS.js.map} +0 -0
- /package/dist/{ledger-HWXYGUIQ.js.map → ledger-5V67MAIL.js.map} +0 -0
package/dist/blobs/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/errors.ts","../../src/crypto.ts","../../src/blobs/index.ts","../../src/types.ts","../../src/blobs/blob-set.ts","../../src/blobs/mime-magic.ts","../../src/blobs/active.ts","../../src/blobs/blob-compaction.ts","../../src/blobs/export-blobs.ts"],"sourcesContent":["/**\n * All NOYDB error classes — a single import surface for `catch` blocks and\n * `instanceof` checks.\n *\n * ## Class hierarchy\n *\n * ```\n * Error\n * └─ NoydbError (code: string)\n * ├─ Crypto errors\n * │ ├─ DecryptionError — AES-GCM tag failure\n * │ ├─ TamperedError — ciphertext modified after write\n * │ └─ InvalidKeyError — wrong passphrase / corrupt keyring\n * ├─ Access errors\n * │ ├─ NoAccessError — no DEK for this collection\n * │ ├─ ReadOnlyError — ro permission, write attempted\n * │ ├─ PermissionDeniedError — role too low for operation\n * │ ├─ PrivilegeEscalationError — grant wider than grantor holds\n * │ └─ StoreCapabilityError — optional store method missing\n * ├─ Sync errors\n * │ ├─ ConflictError — optimistic-lock version mismatch\n * │ ├─ BundleVersionConflictError — bundle push rejected by remote\n * │ └─ NetworkError — push/pull network failure\n * ├─ Data errors\n * │ ├─ NotFoundError — get(id) on missing record\n * │ ├─ ValidationError — application-level guard failed\n * │ └─ SchemaValidationError — Standard Schema v1 rejection\n * ├─ Query errors\n * │ ├─ JoinTooLargeError — join row ceiling exceeded\n * │ ├─ DanglingReferenceError — strict ref() points at nothing\n * │ ├─ GroupCardinalityError — groupBy bucket cap exceeded\n * │ ├─ IndexRequiredError — lazy-mode query touches unindexed field\n * │ └─ IndexWriteFailureError — index side-car put/delete failed post-main\n * ├─ i18n / Dictionary errors\n * │ ├─ ReservedCollectionNameError\n * │ ├─ DictKeyMissingError\n * │ ├─ DictKeyInUseError\n * │ ├─ MissingTranslationError\n * │ ├─ LocaleNotSpecifiedError\n * │ └─ TranslatorNotConfiguredError\n * ├─ Backup errors\n * │ ├─ BackupLedgerError — hash-chain verification failed\n * │ └─ BackupCorruptedError — envelope hash mismatch in dump\n * ├─ Bundle errors\n * │ └─ BundleIntegrityError — .noydb body sha256 mismatch\n * └─ Session errors\n * ├─ SessionExpiredError\n * ├─ SessionNotFoundError\n * └─ SessionPolicyError\n * ```\n *\n * ## Catching all NOYDB errors\n *\n * ```ts\n * import { NoydbError, InvalidKeyError, ConflictError } from '@noy-db/hub'\n *\n * try {\n * await vault.unlock(passphrase)\n * } catch (e) {\n * if (e instanceof InvalidKeyError) { showBadPassphraseUI(); return }\n * if (e instanceof NoydbError) { logToSentry(e.code, e); return }\n * throw e // unexpected — re-throw\n * }\n * ```\n *\n * @module\n */\n\n/**\n * Base class for all NOYDB errors.\n *\n * Every error thrown by `@noy-db/hub` extends this class, so consumers can\n * catch all NOYDB errors in a single `catch (e) { if (e instanceof NoydbError) ... }`\n * block. The `code` field is a machine-readable string (e.g. `'DECRYPTION_FAILED'`)\n * suitable for `switch` statements and logging pipelines.\n */\nexport class NoydbError extends Error {\n /** Machine-readable error code. Stable across library versions. 
*/\n readonly code: string\n\n constructor(code: string, message: string) {\n super(message)\n this.name = 'NoydbError'\n this.code = code\n }\n}\n\n// ─── Crypto Errors ─────────────────────────────────────────────────────\n\n/**\n * Thrown when AES-GCM decryption fails.\n *\n * The most common cause is a wrong passphrase or a corrupted ciphertext.\n * A `DecryptionError` at the wrong passphrase level is caught internally\n * and re-thrown as `InvalidKeyError` — so in practice this surfaces for\n * per-record corruption rather than authentication failures.\n */\nexport class DecryptionError extends NoydbError {\n constructor(message = 'Decryption failed') {\n super('DECRYPTION_FAILED', message)\n this.name = 'DecryptionError'\n }\n}\n\n/**\n * Thrown when GCM tag verification fails, indicating the ciphertext was\n * modified after encryption.\n *\n * AES-256-GCM is authenticated encryption — the tag over the ciphertext\n * is checked on every decrypt. If any byte was flipped (accidental\n * corruption or deliberate tampering), decryption throws this error.\n * Treat it as a security alert: the stored bytes are not what NOYDB wrote.\n */\nexport class TamperedError extends NoydbError {\n constructor(message = 'Data integrity check failed — record may have been tampered with') {\n super('TAMPERED', message)\n this.name = 'TamperedError'\n }\n}\n\n/**\n * Thrown when key unwrapping fails, typically because the passphrase is wrong\n * or the keyring file is corrupted.\n *\n * NOYDB uses AES-KW (RFC 3394) to wrap DEKs with the KEK. If AES-KW\n * unwrapping fails, it means either the KEK was derived from the wrong\n * passphrase (PBKDF2 with 600K iterations) or the keyring bytes are\n * corrupted. This is the error shown to the user on a failed unlock attempt.\n */\nexport class InvalidKeyError extends NoydbError {\n constructor(message = 'Invalid key — wrong passphrase or corrupted keyring') {\n super('INVALID_KEY', message)\n this.name = 'InvalidKeyError'\n }\n}\n\n// ─── Access Errors ─────────────────────────────────────────────────────\n\n/**\n * Thrown when the authenticated user does not have a DEK for the requested\n * collection — i.e. the collection is not in their keyring at all.\n *\n * This is the \"no key for this door\" error. It is different from\n * `ReadOnlyError` (user has a key but it only grants ro) and from\n * `PermissionDeniedError` (user's role doesn't allow the operation).\n */\nexport class NoAccessError extends NoydbError {\n constructor(message = 'No access — user does not have a key for this collection') {\n super('NO_ACCESS', message)\n this.name = 'NoAccessError'\n }\n}\n\n/**\n * Thrown when a user with read-only (`ro`) permission attempts a write\n * operation (`put` or `delete`) on a collection.\n *\n * The user has a DEK for the collection (they can decrypt and read), but\n * their keyring grants only `ro`. To fix: re-grant the user with `rw`\n * permission, or do not attempt writes as a viewer/client role.\n */\nexport class ReadOnlyError extends NoydbError {\n constructor(message = 'Read-only — user has ro permission on this collection') {\n super('READ_ONLY', message)\n this.name = 'ReadOnlyError'\n }\n}\n\n/**\n * Thrown when a write is attempted against a historical view produced\n * by `vault.at(timestamp)`. 
Time-machine views are read-only by\n * contract — mutating the past would require either the shadow-vault\n * mechanism or a ledger-history rewrite (which breaks\n * the tamper-evidence guarantee).\n *\n * Distinct from {@link ReadOnlyError} (keyring-level) and\n * {@link PermissionDeniedError} (role-level): this error is about the\n * *view* being historical, independent of the caller's permissions.\n */\nexport class ReadOnlyAtInstantError extends NoydbError {\n constructor(operation: string, timestamp: string) {\n super(\n 'READ_ONLY_AT_INSTANT',\n `Cannot ${operation}() on a vault view anchored at ${timestamp} — time-machine views are read-only`,\n )\n this.name = 'ReadOnlyAtInstantError'\n }\n}\n\n/**\n * Thrown when a write is attempted against a shadow-vault frame\n * produced by `vault.frame()`. Frames are read-only by contract —\n * the use case is screen-sharing / demos / compliance review where\n * the operator wants to prevent accidental edits.\n *\n * Behavioural enforcement only — the underlying keyring still holds\n * write-capable DEKs. See {@link VaultFrame} for the full caveat.\n */\nexport class ReadOnlyFrameError extends NoydbError {\n constructor(operation: string) {\n super(\n 'READ_ONLY_FRAME',\n `Cannot ${operation}() on a vault frame — frames are read-only presentations of the current vault`,\n )\n this.name = 'ReadOnlyFrameError'\n }\n}\n\n/**\n * Thrown when the authenticated user's role does not permit the requested\n * operation — e.g. a `viewer` calling `grantAccess()`, or an `operator`\n * calling `rotateKeys()`.\n *\n * This is a role-level check (what the user's role allows), distinct from\n * `NoAccessError` (collection not in keyring) and `ReadOnlyError` (in\n * keyring, but write not allowed).\n */\nexport class PermissionDeniedError extends NoydbError {\n constructor(message = 'Permission denied — insufficient role for this operation') {\n super('PERMISSION_DENIED', message)\n this.name = 'PermissionDeniedError'\n }\n}\n\n/**\n * Thrown when an `@noy-db/as-*` export is attempted without the\n * required capability bit on the invoking keyring.\n *\n * Two sub-cases discriminated by the `tier` field:\n *\n * - `tier: 'plaintext'` — a plaintext-tier export (`as-xlsx`,\n * `as-csv`, `as-blob`, `as-zip`, …) was attempted but the\n * keyring's `exportCapability.plaintext` does not include the\n * requested `format` (nor the `'*'` wildcard). Default for every\n * role is `plaintext: []` — the owner must positively grant.\n * - `tier: 'bundle'` — an encrypted `as-noydb` bundle export was\n * attempted but the keyring's `exportCapability.bundle` is\n * `false`. Default for `owner`/`admin` is `true`; for\n * `operator`/`viewer`/`client` it is `false`.\n *\n * Distinct from `PermissionDeniedError` (role-level check) and\n * `NoAccessError` (collection not readable). Surfaces separately so\n * UI layers can show a \"request the export capability from your\n * admin\" flow rather than a generic permission error.\n */\nexport class ExportCapabilityError extends NoydbError {\n readonly tier: 'plaintext' | 'bundle'\n readonly format?: string\n readonly userId: string\n\n constructor(opts: {\n tier: 'plaintext' | 'bundle'\n userId: string\n format?: string\n message?: string\n }) {\n const msg =\n opts.message ??\n (opts.tier === 'plaintext'\n ? `Export capability denied — keyring \"${opts.userId}\" is not granted plaintext-export capability for format \"${opts.format ?? '<unknown>'}\". 
Ask a vault owner or admin to grant it via vault.grant({ exportCapability: { plaintext: ['${opts.format ?? '<format>'}'] } }).`\n : `Export capability denied — keyring \"${opts.userId}\" is not granted encrypted-bundle export capability. Ask a vault owner or admin to grant it via vault.grant({ exportCapability: { bundle: true } }).`)\n super('EXPORT_CAPABILITY', msg)\n this.name = 'ExportCapabilityError'\n this.tier = opts.tier\n this.userId = opts.userId\n if (opts.format !== undefined) this.format = opts.format\n }\n}\n\n/**\n * Thrown when a keyring file's `expires_at` cutoff has passed.\n * Surfaced by `loadKeyring` before any DEK unwrap is attempted —\n * past the cutoff the slot refuses to open even with the right\n * passphrase. Distinct from PBKDF2 / unwrap errors so consumer code\n * can show a precise \"this bundle slot has expired\" message instead\n * of the generic decryption-failure UX.\n *\n * Used predominantly on `BundleRecipient` slots produced by\n * `writeNoydbBundle({ recipients: [...] })` to time-box audit access.\n */\nexport class KeyringExpiredError extends NoydbError {\n readonly userId: string\n readonly expiresAt: string\n constructor(opts: { userId: string; expiresAt: string }) {\n super(\n 'KEYRING_EXPIRED',\n `Keyring \"${opts.userId}\" expired at ${opts.expiresAt}. ` +\n 'The slot refuses to unlock past its expiry timestamp.',\n )\n this.name = 'KeyringExpiredError'\n this.userId = opts.userId\n this.expiresAt = opts.expiresAt\n }\n}\n\n/**\n * Thrown when an `@noy-db/as-*` import is attempted but the invoking\n * keyring lacks the required import-capability bit (issue ).\n *\n * - `tier: 'plaintext'` — a plaintext-tier import (`as-csv`, `as-json`,\n * `as-ndjson`, `as-zip`, …) was attempted but the keyring's\n * `importCapability.plaintext` does not include the requested\n * `format` (nor the `'*'` wildcard).\n * - `tier: 'bundle'` — a `.noydb` bundle import was attempted but the\n * keyring's `importCapability.bundle` is not `true`.\n *\n * Default for every role on every dimension is closed — owners and\n * admins must positively grant the capability. Distinct from\n * `PermissionDeniedError` and `NoAccessError` so UI layers can show a\n * specific \"request the import capability\" flow.\n */\nexport class ImportCapabilityError extends NoydbError {\n readonly tier: 'plaintext' | 'bundle'\n readonly format?: string\n readonly userId: string\n\n constructor(opts: {\n tier: 'plaintext' | 'bundle'\n userId: string\n format?: string\n message?: string\n }) {\n const msg =\n opts.message ??\n (opts.tier === 'plaintext'\n ? `Import capability denied — keyring \"${opts.userId}\" is not granted plaintext-import capability for format \"${opts.format ?? '<unknown>'}\". Ask a vault owner or admin to grant it via vault.grant({ importCapability: { plaintext: ['${opts.format ?? '<format>'}'] } }).`\n : `Import capability denied — keyring \"${opts.userId}\" is not granted encrypted-bundle import capability. 
Ask a vault owner or admin to grant it via vault.grant({ importCapability: { bundle: true } }).`)\n super('IMPORT_CAPABILITY', msg)\n this.name = 'ImportCapabilityError'\n this.tier = opts.tier\n this.userId = opts.userId\n if (opts.format !== undefined) this.format = opts.format\n }\n}\n\n/**\n * Thrown when a grant would give the grantee a permission the grantor\n * does not themselves hold — the \"admin cannot grant what admin cannot\n * do\" rule from the admin-delegation work.\n *\n * Distinct from `PermissionDeniedError` so callers can tell the two\n * cases apart in logs and tests:\n *\n * - `PermissionDeniedError` — \"you are not allowed to perform this\n * operation at all\" (wrong role).\n * - `PrivilegeEscalationError` — \"you are allowed to grant, but not\n * with these specific permissions\" (widening attempt).\n *\n * Under the admin model the grantee of an admin-grants-admin call\n * inherits the caller's entire DEK set by construction, so this error\n * is structurally unreachable in typical flows. The check and error\n * class exist so that future per-collection admin scoping cannot\n * accidentally bypass the subset rule — the guard is already wired in.\n *\n * `offendingCollection` carries the first collection name that failed\n * the subset check, to make the violation actionable in error output.\n */\n/**\n * Thrown when a caller invokes an API that requires an optional\n * store capability the active store does not implement.\n *\n * Today the only call site is `Noydb.listAccessibleVaults()`,\n * which depends on the optional `NoydbStore.listVaults()`\n * method. The error message names the missing method and the calling\n * API so consumers know exactly which combination is unsupported,\n * and the `capability` field is machine-readable so library code can\n * pattern-match in catch blocks (e.g. fall back to a candidate-list\n * shape).\n *\n * The class lives in `errors.ts` rather than as a generic\n * `ValidationError` because the diagnostic shape is different: a\n * `ValidationError` says \"the inputs you passed are wrong\"; this\n * error says \"the inputs are fine, but the store you wired up\n * doesn't support what you're asking for.\" Different fix, different\n * documentation.\n */\nexport class StoreCapabilityError extends NoydbError {\n /** The store method/capability that was missing. */\n readonly capability: string\n\n constructor(capability: string, callerApi: string, storeName?: string) {\n super(\n 'STORE_CAPABILITY',\n `${callerApi} requires the optional store capability \"${capability}\" ` +\n `but the active store${storeName ? ` (${storeName})` : ''} does not implement it. 
` +\n `Use a store that supports \"${capability}\" (store-memory, store-file) or pass an explicit ` +\n `vault list to bypass enumeration.`,\n )\n this.name = 'StoreCapabilityError'\n this.capability = capability\n }\n}\n\nexport class PrivilegeEscalationError extends NoydbError {\n readonly offendingCollection: string\n\n constructor(offendingCollection: string, message?: string) {\n super(\n 'PRIVILEGE_ESCALATION',\n message ??\n `Privilege escalation: grantor has no DEK for collection \"${offendingCollection}\" and cannot grant access to it.`,\n )\n this.name = 'PrivilegeEscalationError'\n this.offendingCollection = offendingCollection\n }\n}\n\n/**\n * Thrown by `Collection.put` / `.delete` when the target record's\n * envelope `_ts` falls within a closed accounting period.\n *\n * Distinct from `ReadOnlyError` (keyring-level), `ReadOnlyAtInstantError`\n * (historical view), and `ReadOnlyFrameError` (shadow vault): this\n * error is about the STORED RECORD being sealed by an operator call\n * to `vault.closePeriod()`, independent of caller permissions or\n * view type. The `periodName` and `endDate` fields name the sealing\n * period so audit UIs can surface a \"this record is locked in\n * FY2026-Q1 (closed 2026-03-31)\" message without parsing the error\n * string.\n *\n * To apply a correction after close, book a compensating entry in a\n * new period rather than unlocking the old one. Re-opening a closed\n * period is deliberately unsupported.\n */\nexport class PeriodClosedError extends NoydbError {\n readonly periodName: string\n readonly endDate: string\n readonly recordTs: string\n\n constructor(periodName: string, endDate: string, recordTs: string) {\n super(\n 'PERIOD_CLOSED',\n `Cannot modify record (last written ${recordTs}) — sealed by closed period ` +\n `\"${periodName}\" (endDate: ${endDate}). Post a compensating entry in a ` +\n `new period instead.`,\n )\n this.name = 'PeriodClosedError'\n this.periodName = periodName\n this.endDate = endDate\n this.recordTs = recordTs\n }\n}\n\n// ─── Hierarchical Access Errors ─────────────────────\n\n/**\n * Thrown when a user tries to act at a tier they are not cleared for.\n *\n * This is the umbrella error for tier write refusals:\n * - `put({ tier: N })` when the user's keyring lacks tier-N DEK.\n * - `elevate(id, N)` when the caller cannot reach tier N.\n *\n * Distinct from `TierAccessDeniedError` which covers *read* refusals on\n * the invisibility/ghost path.\n */\nexport class TierNotGrantedError extends NoydbError {\n readonly tier: number\n readonly collection: string\n\n constructor(collection: string, tier: number) {\n super(\n 'TIER_NOT_GRANTED',\n `User has no DEK for tier ${tier} in collection \"${collection}\"`,\n )\n this.name = 'TierNotGrantedError'\n this.collection = collection\n this.tier = tier\n }\n}\n\n/**\n * Thrown when an elevated-handle operation runs after the elevation's\n * TTL expired. Reads continue at the original tier; only writes\n * through the scoped handle flip to throwing once expired.\n */\nexport class ElevationExpiredError extends NoydbError {\n readonly tier: number\n readonly expiresAt: number\n\n constructor(opts: { tier: number; expiresAt: number }) {\n super(\n 'ELEVATION_EXPIRED',\n `Elevation to tier ${opts.tier} expired at ${new Date(opts.expiresAt).toISOString()}`,\n )\n this.name = 'ElevationExpiredError'\n this.tier = opts.tier\n this.expiresAt = opts.expiresAt\n }\n}\n\n/**\n * Thrown by `vault.elevate(...)` when an elevation is already active\n * on the vault. 
Adopters must `release()` the existing handle before\n * starting a new elevation.\n */\nexport class AlreadyElevatedError extends NoydbError {\n readonly activeTier: number\n\n constructor(activeTier: number) {\n super(\n 'ALREADY_ELEVATED',\n `Vault is already elevated to tier ${activeTier}; release the existing handle first`,\n )\n this.name = 'AlreadyElevatedError'\n this.activeTier = activeTier\n }\n}\n\n/**\n * Thrown when `demote()` is called by someone who is not the original\n * elevator and not an owner.\n */\nexport class TierDemoteDeniedError extends NoydbError {\n constructor(id: string, tier: number) {\n super(\n 'TIER_DEMOTE_DENIED',\n `Only the original elevator or an owner can demote record \"${id}\" from tier ${tier}`,\n )\n this.name = 'TierDemoteDeniedError'\n }\n}\n\n/**\n * Thrown when `db.delegate()` is called against a user that has no\n * keyring in the target vault — the delegation token cannot be\n * constructed without the target user's KEK wrap.\n */\nexport class DelegationTargetMissingError extends NoydbError {\n readonly toUser: string\n\n constructor(toUser: string) {\n super(\n 'DELEGATION_TARGET_MISSING',\n `Delegation target user \"${toUser}\" has no keyring in this vault`,\n )\n this.name = 'DelegationTargetMissingError'\n this.toUser = toUser\n }\n}\n\n// ─── Sync Errors ───────────────────────────────────────────────────────\n\n/**\n * Thrown when a `put()` detects an optimistic concurrency conflict.\n *\n * NOYDB uses version numbers (`_v`) for optimistic locking. If a `put()`\n * is called with `expectedVersion: N` but the stored record is at version\n * `M ≠ N`, the write is rejected and the caller must re-read, re-apply their\n * change, and retry. The `version` field carries the actual stored version\n * so callers can decide whether to retry or surface the conflict to the user.\n */\nexport class ConflictError extends NoydbError {\n /** The actual stored version at the time of conflict. */\n readonly version: number\n\n constructor(version: number, message = 'Version conflict') {\n super('CONFLICT', message)\n this.name = 'ConflictError'\n this.version = version\n }\n}\n\n/**\n * Thrown by `LedgerStore.append()` after exhausting its CAS retry\n * budget under multi-writer contention. Two browser tabs, a\n * web app + an offline mobile peer, or a server worker pool all\n * producing ledger entries against the same vault can race on the\n * \"read head, write head+1\" cycle; the optimistic-CAS retry loop\n * resolves the race for `casAtomic: true` stores, but pathological\n * contention (or a buggy peer) can still exhaust the budget. When\n * that happens, the chain is intact — the failed writer simply\n * couldn't claim a slot. Caller's choice whether to retry, queue,\n * or surface the failure to the user.\n */\nexport class LedgerContentionError extends NoydbError {\n readonly attempts: number\n\n constructor(attempts: number) {\n super(\n 'LEDGER_CONTENTION',\n `LedgerStore.append: failed to claim a chain slot after ${attempts} optimistic-CAS retries`,\n )\n this.name = 'LedgerContentionError'\n this.attempts = attempts\n }\n}\n\n/**\n * Thrown when a bundle push is rejected because the remote has been updated\n * since the local bundle was last pulled.\n *\n * Unlike `ConflictError` (per-record), this is a whole-bundle conflict —\n * the remote's bundle handle has changed. The caller must pull the new\n * bundle, merge, and re-push. 
`remoteVersion` is the handle of the newer\n * remote bundle for use in diagnostics.\n */\nexport class BundleVersionConflictError extends NoydbError {\n /** The bundle handle of the newer remote version that rejected the push. */\n readonly remoteVersion: string\n\n constructor(remoteVersion: string, message = 'Bundle version conflict — remote has been updated') {\n super('BUNDLE_VERSION_CONFLICT', message)\n this.name = 'BundleVersionConflictError'\n this.remoteVersion = remoteVersion\n }\n}\n\n/**\n * Thrown when a sync operation (push or pull) fails due to a network error.\n *\n * NOYDB's offline-first design means network errors are expected during sync.\n * Callers should catch `NetworkError`, surface connectivity status in the UI,\n * and rely on the `SyncScheduler` to retry when connectivity is restored.\n */\nexport class NetworkError extends NoydbError {\n constructor(message = 'Network error') {\n super('NETWORK_ERROR', message)\n this.name = 'NetworkError'\n }\n}\n\n// ─── Data Errors ───────────────────────────────────────────────────────\n\n/**\n * Thrown when `collection.get(id)` is called with an ID that does not exist.\n *\n * NOYDB collections are memory-first, so this error is synchronous and cheap —\n * it does not make a network round-trip. Callers that expect the record to be\n * absent should use `collection.getOrNull(id)` instead.\n */\nexport class NotFoundError extends NoydbError {\n constructor(message = 'Record not found') {\n super('NOT_FOUND', message)\n this.name = 'NotFoundError'\n }\n}\n\n/**\n * Thrown when application-level validation fails before encryption.\n *\n * Distinct from `SchemaValidationError` (Standard Schema v1 validator)\n * and `MissingTranslationError` (i18nText). `ValidationError` is the\n * general-purpose validation base — use it for custom guards in `put()`\n * hooks or store middleware.\n */\nexport class ValidationError extends NoydbError {\n constructor(message = 'Validation error') {\n super('VALIDATION_ERROR', message)\n this.name = 'ValidationError'\n }\n}\n\n/**\n * Thrown when a Standard Schema v1 validator rejects a record on\n * `put()` (input validation) or on read (output validation). Carries\n * the raw issue list so callers can render field-level errors.\n *\n * `direction` distinguishes the two cases:\n * - `'input'`: the user passed bad data into `put()`. This is a\n * normal error case that application code should handle — typically\n * by showing validation messages in the UI.\n * - `'output'`: stored data does not match the current schema. This\n * indicates a schema drift (the schema was changed without\n * migrating the existing records) and should be treated as a bug\n * — the application should not swallow it silently.\n *\n * The `issues` type is deliberately `readonly unknown[]` on this class\n * so that `errors.ts` doesn't need to import from `schema.ts` (and\n * create a dependency cycle). 
Callers who know they're holding a\n * `SchemaValidationError` can cast to the more precise\n * `readonly StandardSchemaV1Issue[]` from `schema.ts`.\n */\nexport class SchemaValidationError extends NoydbError {\n readonly issues: readonly unknown[]\n readonly direction: 'input' | 'output'\n\n constructor(\n message: string,\n issues: readonly unknown[],\n direction: 'input' | 'output',\n ) {\n super('SCHEMA_VALIDATION_FAILED', message)\n this.name = 'SchemaValidationError'\n this.issues = issues\n this.direction = direction\n }\n}\n\n// ─── Query DSL Errors ─────────────────────────────────────────────────\n\n/**\n * Thrown when `.groupBy().aggregate()` produces more than the hard\n * cardinality cap (default 100_000 groups)..\n *\n * The cap exists because `.groupBy()` materializes one bucket per\n * distinct key value in memory, and runaway cardinality — a groupBy\n * on a high-uniqueness field like `id` or `createdAt` — is almost\n * always a query mistake rather than legitimate use. A hard error is\n * better than silent OOM: the consumer sees an actionable message\n * naming the field and the observed cardinality, with guidance to\n * either narrow the query with `.where()` or accept the ceiling\n * override.\n *\n * A separate one-shot warning fires at 10% of the cap (10_000\n * groups) so consumers get a heads-up before the hard error — same\n * pattern as `JoinTooLargeError` and the `.join()` row ceiling.\n *\n * **Not overridable in.** The 100k cap is a fixed constant so\n * the failure mode is consistent across the codebase; a\n * `{ maxGroups }` override can be added later without a break if a\n * real consumer asks.\n */\nexport class GroupCardinalityError extends NoydbError {\n /** The field being grouped on. */\n readonly field: string\n /** Observed number of distinct groups at the moment the cap tripped. */\n readonly cardinality: number\n /** The cap that was exceeded. */\n readonly maxGroups: number\n\n constructor(field: string, cardinality: number, maxGroups: number) {\n super(\n 'GROUP_CARDINALITY',\n `.groupBy(\"${field}\") produced ${cardinality} distinct groups, ` +\n `exceeding the ${maxGroups}-group ceiling. This is almost always a ` +\n `query mistake — grouping on a high-uniqueness field like \"id\" or ` +\n `\"createdAt\" produces one bucket per record. Narrow the query with ` +\n `.where() before grouping, or group on a lower-cardinality field ` +\n `(status, category, clientId). 
If you genuinely need high-cardinality ` +\n `grouping, file an issue with your use case.`,\n )\n this.name = 'GroupCardinalityError'\n this.field = field\n this.cardinality = cardinality\n this.maxGroups = maxGroups\n }\n}\n\n/**\n * Thrown in lazy mode when a `.query()` / `.where()` / `.orderBy()` clause\n * references a field that does not have a declared index.\n *\n * Lazy-mode queries only work when every touched field is indexed.\n * This is deliberate — silent scan-fallback would hide the performance\n * cliff that lazy-mode indexes exist to prevent.\n *\n * Payload:\n * - `collection` — name of the collection queried\n * - `touchedFields` — every field referenced by the query (filter + order)\n * - `missingFields` — subset of `touchedFields` that have no declared index\n */\nexport class IndexRequiredError extends NoydbError {\n readonly collection: string\n readonly touchedFields: readonly string[]\n readonly missingFields: readonly string[]\n\n constructor(args: { collection: string; touchedFields: readonly string[]; missingFields: readonly string[] }) {\n super(\n 'INDEX_REQUIRED',\n `Collection \"${args.collection}\": query references unindexed fields in lazy mode ` +\n `(missing: ${args.missingFields.join(', ')}). ` +\n `Declare an index on each field, or use collection.scan() for non-indexed iteration.`,\n )\n this.name = 'IndexRequiredError'\n this.collection = args.collection\n this.touchedFields = [...args.touchedFields]\n this.missingFields = [...args.missingFields]\n }\n}\n\n/**\n * Thrown (or surfaced via the `index:write-partial` event) when one or more\n * per-indexed-field side-car writes fail after the main record write has\n * already succeeded.\n *\n * Not thrown out of `.put()` / `.delete()` directly — those succeed when the\n * main record succeeds. Instead, `IndexWriteFailureError` instances are collected\n * into the session-scoped reconcile queue and emitted on the Collection\n * emitter as `index:write-partial`.\n *\n * Payload:\n * - `recordId` — the id of the main record whose side-car writes failed\n * - `field` — the indexed field whose side-car write failed\n * - `op` — `'put'` or `'delete'`, indicating which mutation was in flight\n * - `cause` — the underlying error from the store\n */\nexport class IndexWriteFailureError extends NoydbError {\n readonly recordId: string\n readonly field: string\n readonly op: 'put' | 'delete'\n override readonly cause: unknown\n\n constructor(args: { recordId: string; field: string; op: 'put' | 'delete'; cause: unknown }) {\n super(\n 'INDEX_WRITE_FAILURE',\n `Index side-car ${args.op} failed for field \"${args.field}\" on record \"${args.recordId}\"`,\n )\n this.name = 'IndexWriteFailureError'\n this.recordId = args.recordId\n this.field = args.field\n this.op = args.op\n this.cause = args.cause\n }\n}\n\n// ─── Bundle Format Errors ─────────────────────────────────\n\n/**\n * Thrown by `readNoydbBundle()` when the body bytes don't match\n * the integrity hash declared in the bundle header — i.e. someone\n * modified the bytes between write and read.\n *\n * Distinct from a generic `Error` (which would be thrown for\n * format violations like a missing magic prefix or malformed\n * header JSON) so consumers can pattern-match the corruption case\n * and handle it differently from a producer bug. 
A\n * `BundleIntegrityError` indicates \"the bytes you got are not\n * what was written\"; a plain `Error` from `parsePrefixAndHeader`\n * indicates \"what was written wasn't a valid bundle in the first\n * place.\"\n *\n * Also thrown when decompression fails after the integrity hash\n * passed — that's a producer bug (the wrong algorithm byte was\n * written) but it surfaces with the same error class because the\n * end result is \"the body cannot be turned back into a dump.\"\n */\nexport class BundleIntegrityError extends NoydbError {\n constructor(message: string) {\n super('BUNDLE_INTEGRITY', `.noydb bundle integrity check failed: ${message}`)\n this.name = 'BundleIntegrityError'\n }\n}\n\n// ─── i18n / Dictionary Errors ──────────────────────────\n\n/**\n * Thrown when `vault.collection()` is called with a name that is\n * reserved for NOYDB internal use (any name starting with `_dict_`).\n *\n * Dictionary collections are accessed exclusively via\n * `vault.dictionary(name)` — attempting to open one as a regular\n * collection would bypass the dictionary invariants (ACL, rename\n * tracking, reserved-name policy).\n */\nexport class ReservedCollectionNameError extends NoydbError {\n /** The rejected collection name. */\n readonly collectionName: string\n\n constructor(collectionName: string) {\n super(\n 'RESERVED_COLLECTION_NAME',\n `\"${collectionName}\" is a reserved collection name. ` +\n `Use vault.dictionary(\"${collectionName.replace(/^_dict_/, '')}\") ` +\n `to access dictionary collections.`,\n )\n this.name = 'ReservedCollectionNameError'\n this.collectionName = collectionName\n }\n}\n\n/**\n * Thrown by `DictionaryHandle.get()` and `DictionaryHandle.delete()` when\n * the requested key does not exist in the dictionary.\n *\n * Distinct from `NotFoundError` (which is for data records) so callers\n * can distinguish \"data record missing\" from \"dictionary key missing\"\n * without inspecting error messages.\n */\nexport class DictKeyMissingError extends NoydbError {\n /** The dictionary name. */\n readonly dictionaryName: string\n /** The key that was not found. */\n readonly key: string\n\n constructor(dictionaryName: string, key: string) {\n super(\n 'DICT_KEY_MISSING',\n `Dictionary \"${dictionaryName}\" has no entry for key \"${key}\".`,\n )\n this.name = 'DictKeyMissingError'\n this.dictionaryName = dictionaryName\n this.key = key\n }\n}\n\n/**\n * Thrown by `DictionaryHandle.delete()` in strict mode when the key to\n * be deleted is still referenced by one or more records.\n *\n * The caller must either rename the key first (the only sanctioned\n * mass-mutation path) or pass `{ mode: 'warn' }` to skip the check\n * (development only).\n */\nexport class DictKeyInUseError extends NoydbError {\n /** The dictionary name. */\n readonly dictionaryName: string\n /** The key that is still referenced. */\n readonly key: string\n /** Name of the first collection found to reference this key. */\n readonly usedBy: string\n /** Number of records in `usedBy` that reference this key. */\n readonly count: number\n\n constructor(\n dictionaryName: string,\n key: string,\n usedBy: string,\n count: number,\n ) {\n super(\n 'DICT_KEY_IN_USE',\n `Cannot delete key \"${key}\" from dictionary \"${dictionaryName}\": ` +\n `${count} record(s) in \"${usedBy}\" still reference it. 
` +\n `Use dictionary.rename(\"${key}\", newKey) to rewrite references first.`,\n )\n this.name = 'DictKeyInUseError'\n this.dictionaryName = dictionaryName\n this.key = key\n this.usedBy = usedBy\n this.count = count\n }\n}\n\n/**\n * Thrown by `Collection.put()` when an `i18nText` field is missing one\n * or more required translations.\n *\n * The `missing` array names each locale code that was absent from the\n * field value. The `field` property names the field so callers can\n * render a field-level error message without parsing the string.\n */\nexport class MissingTranslationError extends NoydbError {\n /** The field name whose translation(s) are missing. */\n readonly field: string\n /** Locale codes that were required but absent. */\n readonly missing: readonly string[]\n\n constructor(field: string, missing: readonly string[], message?: string) {\n super(\n 'MISSING_TRANSLATION',\n message ??\n `Field \"${field}\": missing required translation(s): ${missing.join(', ')}.`,\n )\n this.name = 'MissingTranslationError'\n this.field = field\n this.missing = missing\n }\n}\n\n/**\n * Thrown when reading an `i18nText` field without specifying a locale —\n * either at the call site (`get(id, { locale })`) or on the vault\n * (`openVault(name, { locale })`).\n *\n * Also thrown when `resolveI18nText()` exhausts the fallback chain and\n * no translation is available for the requested locale.\n *\n * The `field` property names the field that triggered the error so the\n * caller can surface it in the UI.\n */\nexport class LocaleNotSpecifiedError extends NoydbError {\n /** The field name that required a locale. */\n readonly field: string\n\n constructor(field: string, message?: string) {\n super(\n 'LOCALE_NOT_SPECIFIED',\n message ??\n `Cannot read i18nText field \"${field}\" without a locale. ` +\n `Pass { locale } to get()/list()/query() or set a default via ` +\n `openVault(name, { locale }).`,\n )\n this.name = 'LocaleNotSpecifiedError'\n this.field = field\n }\n}\n\n// ─── Translator Errors ─────────────────────────────────────\n\n/**\n * Thrown when a collection has an `i18nText` field with\n * `autoTranslate: true` but no `plaintextTranslator` was configured\n * on `createNoydb()`.\n *\n * The error is raised at `put()` time (not at schema construction) so\n * the mis-configuration is surfaced by the first write rather than\n * silently at startup.\n */\nexport class TranslatorNotConfiguredError extends NoydbError {\n /** The field that requested auto-translation. */\n readonly field: string\n /** The collection the put was targeting. */\n readonly collection: string\n\n constructor(field: string, collection: string) {\n super(\n 'TRANSLATOR_NOT_CONFIGURED',\n `Field \"${field}\" in collection \"${collection}\" has autoTranslate: true, ` +\n `but no plaintextTranslator was configured on createNoydb(). 
` +\n `Either configure a plaintextTranslator or remove autoTranslate from the schema.`,\n )\n this.name = 'TranslatorNotConfiguredError'\n this.field = field\n this.collection = collection\n }\n}\n\n// ─── Backup Errors ─────────────────────────────────────────\n\n/**\n * Thrown when `Vault.load()` finds that a backup's hash chain\n * doesn't verify, or that its embedded `ledgerHead.hash` doesn't\n * match the chain head reconstructed from the loaded entries.\n *\n * Distinct from `BackupCorruptedError` so callers can choose to\n * recover from one but not the other (e.g., a corrupted JSON file is\n * unrecoverable; a chain mismatch might mean the backup is from an\n * incompatible noy-db version).\n */\nexport class BackupLedgerError extends NoydbError {\n /** First-broken-entry index, if known. */\n readonly divergedAt?: number\n\n constructor(message: string, divergedAt?: number) {\n super('BACKUP_LEDGER', message)\n this.name = 'BackupLedgerError'\n if (divergedAt !== undefined) this.divergedAt = divergedAt\n }\n}\n\n/**\n * Thrown when `Vault.load()` finds that the backup's data\n * collection content doesn't match the ledger's recorded\n * `payloadHash`es. This is the \"envelope was tampered with after\n * dump\" detection — the chain itself can be intact, but if any\n * encrypted record bytes were swapped, this check catches it.\n */\nexport class BackupCorruptedError extends NoydbError {\n /** The (collection, id) pair whose envelope failed the hash check. */\n readonly collection: string\n readonly id: string\n\n constructor(collection: string, id: string, message: string) {\n super('BACKUP_CORRUPTED', message)\n this.name = 'BackupCorruptedError'\n this.collection = collection\n this.id = id\n }\n}\n\n// ─── Session Errors ───────────────────────────────────────\n\n/**\n * Thrown by `resolveSession()` when the session token's `expiresAt`\n * timestamp is in the past. The session key is also removed from the\n * in-memory store when this is thrown, so retrying with the same sessionId\n * will produce `SessionNotFoundError`.\n *\n * Separate from `SessionNotFoundError` so callers can distinguish between\n * \"session is gone\" (key store cleared, tab reloaded) and \"session is\n * still in the store but has exceeded its lifetime\" (idle timeout, absolute\n * timeout, policy-driven expiry). The remediation differs: expired sessions\n * should prompt a fresh unlock; not-found sessions may indicate a bug or a\n * cross-tab scenario where the session was never established.\n */\nexport class SessionExpiredError extends NoydbError {\n readonly sessionId: string\n\n constructor(sessionId: string) {\n super('SESSION_EXPIRED', `Session \"${sessionId}\" has expired. Re-unlock to continue.`)\n this.name = 'SessionExpiredError'\n this.sessionId = sessionId\n }\n}\n\n/**\n * Thrown by `resolveSession()` when the session key cannot be found in\n * the module-level store. 
This happens when:\n * - The session was explicitly revoked via `revokeSession()`.\n * - The JS context was reloaded (tab navigation, page refresh, worker restart).\n * - `Noydb.close()` was called (which calls `revokeAllSessions()`).\n * - The sessionId is wrong or was generated by a different JS context.\n *\n * The session token (if the caller holds it) is permanently useless after\n * this error — the key is gone and cannot be recovered.\n */\nexport class SessionNotFoundError extends NoydbError {\n readonly sessionId: string\n\n constructor(sessionId: string) {\n super('SESSION_NOT_FOUND', `Session key for \"${sessionId}\" not found. The session may have been revoked or the page reloaded.`)\n this.name = 'SessionNotFoundError'\n this.sessionId = sessionId\n }\n}\n\n/**\n * Thrown when a session policy blocks an operation — for example,\n * `requireReAuthFor: ['export']` is set and the caller attempts to\n * call `exportStream()` without re-authenticating for this session.\n *\n * The `operation` field names the specific operation that was blocked\n * (e.g. `'export'`, `'grant'`, `'rotate'`) so the caller can surface\n * a targeted prompt (\"Please re-enter your passphrase to export data\").\n */\nexport class SessionPolicyError extends NoydbError {\n readonly operation: string\n\n constructor(operation: string, message?: string) {\n super(\n 'SESSION_POLICY',\n message ?? `Operation \"${operation}\" requires re-authentication per the active session policy.`,\n )\n this.name = 'SessionPolicyError'\n this.operation = operation\n }\n}\n\n// ─── Query / Join Errors ────────────────────────────────────\n\n/**\n * Thrown when a `.join()` would exceed its configured row ceiling on\n * either side. The ceiling defaults to 50,000 per side and can be\n * overridden via the `{ maxRows }` option on `.join()`.\n *\n * Carries both row counts so the error message can show which side\n * tripped the limit (e.g. \"left had 60,000 rows, right had 1,200,\n * max was 50,000\"). The `side` field is machine-readable so test\n * code and devtools can match on it without regex-parsing the\n * message.\n *\n * The row ceiling exists because joins are bounded in-memory\n * operations over materialized record sets. Consumers whose\n * collections genuinely exceed the ceiling should track \n * (streaming joins over `scan()`) or filter the left side further\n * with `where()` / `limit()` before joining.\n */\nexport class JoinTooLargeError extends NoydbError {\n readonly leftRows: number\n readonly rightRows: number\n readonly maxRows: number\n readonly side: 'left' | 'right'\n\n constructor(opts: {\n leftRows: number\n rightRows: number\n maxRows: number\n side: 'left' | 'right'\n message: string\n }) {\n super('JOIN_TOO_LARGE', opts.message)\n this.name = 'JoinTooLargeError'\n this.leftRows = opts.leftRows\n this.rightRows = opts.rightRows\n this.maxRows = opts.maxRows\n this.side = opts.side\n }\n}\n\n/**\n * Thrown by `.join()` in strict `ref()` mode when a left-side record\n * points at a right-side id that does not exist in the target\n * collection.\n *\n * Distinct from `RefIntegrityError` so test code can pattern-match\n * on the *read-time* dangling case without catching *write-time*\n * integrity violations. 
Both indicate \"ref points at nothing\" but\n * happen at different lifecycle phases and deserve different\n * remediation in documentation: a RefIntegrityError on `put()`\n * means the input is invalid; a DanglingReferenceError on `.join()`\n * means stored data has drifted and `vault.checkIntegrity()`\n * is the right tool to find the full set of orphans.\n */\nexport class DanglingReferenceError extends NoydbError {\n readonly field: string\n readonly target: string\n readonly refId: string\n\n constructor(opts: {\n field: string\n target: string\n refId: string\n message: string\n }) {\n super('DANGLING_REFERENCE', opts.message)\n this.name = 'DanglingReferenceError'\n this.field = opts.field\n this.target = opts.target\n this.refId = opts.refId\n }\n}\n\n/**\n * Thrown by {@link sanitizeFilename} when an input filename cannot be\n * made safe — NUL byte, empty after normalization, missing\n * `opaqueId` for the opaque profile, `..` segment, or a `maxBytes`\n * cap too small to hold a single code point.\n */\nexport class FilenameSanitizationError extends NoydbError {\n constructor(message: string) {\n super('FILENAME_SANITIZATION', message)\n this.name = 'FilenameSanitizationError'\n }\n}\n\n/**\n * Thrown when a write target resolves OUTSIDE the requested\n * directory after sanitization — the canonical Zip-Slip class. The\n * sanitizer's job is to strip path-traversal segments; this error\n * is the defense-in-depth fallback at the FS write site.\n */\nexport class PathEscapeError extends NoydbError {\n readonly attempted: string\n readonly targetDir: string\n\n constructor(opts: { attempted: string; targetDir: string }) {\n super(\n 'PATH_ESCAPE',\n `Sanitized filename \"${opts.attempted}\" resolves outside target dir \"${opts.targetDir}\"`,\n )\n this.name = 'PathEscapeError'\n this.attempted = opts.attempted\n this.targetDir = opts.targetDir\n }\n}\n","/**\n * Cryptographic primitives — thin wrappers around the Web Crypto API.\n *\n * ## Design principle\n *\n * **Zero npm crypto dependencies.** Every operation uses `globalThis.crypto.subtle`,\n * which is available natively in Node.js ≥ 18, all modern browsers, and\n * Deno/Bun. 
This avoids supply-chain risk from third-party crypto packages and\n * ensures the library stays auditable.\n *\n * ## Algorithms\n *\n * | Use case | Algorithm | Parameters |\n * |----------|-----------|------------|\n * | Key derivation | PBKDF2-SHA256 | 600,000 iterations, 32-byte salt |\n * | Record encryption | AES-256-GCM | 12-byte random IV per operation |\n * | DEK wrapping | AES-KW (RFC 3394) | 256-bit KEK |\n * | Binary encrypt | AES-256-GCM | same as record encryption |\n * | Integrity | HMAC-SHA256 | for presence channels |\n * | Content hash | SHA-256 | for ledger and bundle integrity |\n *\n * ## Key lifecycle\n *\n * ```\n * passphrase + salt\n * └─► deriveKey() → KEK (CryptoKey, extractable: false)\n * └─► wrapKey() → wrapped DEK bytes [stored in keyring]\n * └─► unwrapKey() → DEK (CryptoKey) [memory only during session]\n * └─► encrypt() / decrypt() → ciphertext / plaintext\n * ```\n *\n * IVs are generated fresh by {@link generateIV} on every encrypt call.\n * Reusing an IV with the same key would break GCM's authentication guarantee —\n * this function should be the only place IVs are produced.\n *\n * @module\n */\n\nimport { DecryptionError, InvalidKeyError, TamperedError } from './errors.js'\n\nconst PBKDF2_ITERATIONS = 600_000\nconst SALT_BYTES = 32\nconst IV_BYTES = 12\nconst KEY_BITS = 256\n\nconst subtle = globalThis.crypto.subtle\n\n// ─── Key Derivation ────────────────────────────────────────────────────\n\n/** Derive a KEK from a passphrase and salt using PBKDF2-SHA256. */\nexport async function deriveKey(\n passphrase: string,\n salt: Uint8Array,\n): Promise<CryptoKey> {\n const keyMaterial = await subtle.importKey(\n 'raw',\n new TextEncoder().encode(passphrase),\n 'PBKDF2',\n false,\n ['deriveKey'],\n )\n\n return subtle.deriveKey(\n {\n name: 'PBKDF2',\n salt: salt as BufferSource,\n iterations: PBKDF2_ITERATIONS,\n hash: 'SHA-256',\n },\n keyMaterial,\n { name: 'AES-KW', length: KEY_BITS },\n false,\n ['wrapKey', 'unwrapKey'],\n )\n}\n\n// ─── DEK Generation ────────────────────────────────────────────────────\n\n/** Generate a random AES-256-GCM data encryption key. */\nexport async function generateDEK(): Promise<CryptoKey> {\n return subtle.generateKey(\n { name: 'AES-GCM', length: KEY_BITS },\n true, // extractable — needed for AES-KW wrapping\n ['encrypt', 'decrypt'],\n )\n}\n\n// ─── Key Wrapping ──────────────────────────────────────────────────────\n\n/** Wrap (encrypt) a DEK with a KEK using AES-KW. Returns base64 string. */\nexport async function wrapKey(dek: CryptoKey, kek: CryptoKey): Promise<string> {\n const wrapped = await subtle.wrapKey('raw', dek, kek, 'AES-KW')\n return bufferToBase64(wrapped)\n}\n\n/** Unwrap (decrypt) a DEK from base64 string using a KEK. */\nexport async function unwrapKey(\n wrappedBase64: string,\n kek: CryptoKey,\n): Promise<CryptoKey> {\n try {\n return await subtle.unwrapKey(\n 'raw',\n base64ToBuffer(wrappedBase64) as BufferSource,\n kek,\n 'AES-KW',\n { name: 'AES-GCM', length: KEY_BITS },\n true,\n ['encrypt', 'decrypt'],\n )\n } catch {\n throw new InvalidKeyError()\n }\n}\n\n// ─── Encrypt / Decrypt ─────────────────────────────────────────────────\n\nexport interface EncryptResult {\n iv: string // base64\n data: string // base64\n}\n\n/** Encrypt plaintext JSON string with AES-256-GCM. Fresh IV per call. 
*/\nexport async function encrypt(\n plaintext: string,\n dek: CryptoKey,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const encoded = new TextEncoder().encode(plaintext)\n\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n encoded,\n )\n\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/** Decrypt AES-256-GCM ciphertext. Throws on wrong key or tampered data. */\nexport async function decrypt(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<string> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n\n try {\n const plaintext = await subtle.decrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n ciphertext as BufferSource,\n )\n return new TextDecoder().decode(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? err.message : 'Decryption failed',\n )\n }\n}\n\n// ─── Binary Encrypt / Decrypt ────────\n\n/**\n * Encrypt raw bytes with AES-256-GCM using a fresh random IV.\n * Used by the attachment store so binary blobs avoid double base64 encoding\n * (the existing `encrypt()` function calls `TextEncoder` on a string — here\n * we pass the `Uint8Array` directly to `subtle.encrypt`).\n */\nexport async function encryptBytes(\n data: Uint8Array,\n dek: CryptoKey,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n data as unknown as BufferSource,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Decrypt AES-256-GCM ciphertext back to raw bytes.\n * Counterpart to `encryptBytes`. Throws `TamperedError` on auth-tag failure.\n */\nexport async function decryptBytes(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<Uint8Array> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n try {\n const plaintext = await subtle.decrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n ciphertext as BufferSource,\n )\n return new Uint8Array(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? err.message : 'Decryption failed',\n )\n }\n}\n\n/**\n * SHA-256 hex digest of raw bytes. Used to derive content-addressed\n * eTags for blob deduplication. 
Computed on plaintext bytes\n * before compression and encryption so the eTag identifies content, not\n * ciphertext, and survives re-encryption (key rotation, re-upload).\n */\nexport async function sha256Hex(data: Uint8Array): Promise<string> {\n const hash = await subtle.digest('SHA-256', data as unknown as BufferSource)\n return Array.from(new Uint8Array(hash))\n .map((b) => b.toString(16).padStart(2, '0'))\n .join('')\n}\n\n// ─── HMAC-SHA-256 ─────────────────────────────\n\n/**\n * Compute HMAC-SHA-256(key, data) and return hex string.\n *\n * Used to derive content-addressed eTags that are opaque to the store:\n * ```\n * eTag = hmacSha256Hex(blobDEK, plaintext)\n * ```\n *\n * Unlike a plain SHA-256, the HMAC is keyed by the vault-shared `_blob` DEK,\n * so an attacker with store access cannot pre-compute eTags for known files.\n * Deduplication still works within a vault (same key + same content = same eTag).\n */\nexport async function hmacSha256Hex(key: CryptoKey, data: Uint8Array): Promise<string> {\n // Export AES-GCM DEK raw bytes → import as HMAC key\n const rawKey = await subtle.exportKey('raw', key)\n const hmacKey = await subtle.importKey(\n 'raw',\n rawKey,\n { name: 'HMAC', hash: 'SHA-256' },\n false,\n ['sign'],\n )\n const sig = await subtle.sign('HMAC', hmacKey, data as unknown as BufferSource)\n return Array.from(new Uint8Array(sig))\n .map((b) => b.toString(16).padStart(2, '0'))\n .join('')\n}\n\n// ─── AAD-aware Binary Encrypt / Decrypt ──\n\n/**\n * Encrypt raw bytes with AES-256-GCM using Additional Authenticated Data.\n *\n * The AAD binds each chunk to its parent blob and position, preventing\n * chunk reorder, substitution, and truncation attacks:\n * ```\n * AAD = UTF-8(\"{eTag}:{chunkIndex}:{chunkCount}\")\n * ```\n *\n * The AAD is NOT stored — the reader reconstructs it from `BlobObject`\n * metadata and passes it to `decryptBytesWithAAD`.\n */\nexport async function encryptBytesWithAAD(\n data: Uint8Array,\n dek: CryptoKey,\n aad: Uint8Array,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const ciphertext = await subtle.encrypt(\n {\n name: 'AES-GCM',\n iv: iv as BufferSource,\n additionalData: aad as BufferSource,\n },\n dek,\n data as unknown as BufferSource,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Decrypt AES-256-GCM ciphertext with AAD verification.\n *\n * If the AAD does not match the one used at encryption time (e.g. because\n * a chunk was reordered or substituted from another blob), the GCM auth\n * tag fails and this throws `TamperedError`.\n */\nexport async function decryptBytesWithAAD(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n aad: Uint8Array,\n): Promise<Uint8Array> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n try {\n const plaintext = await subtle.decrypt(\n {\n name: 'AES-GCM',\n iv: iv as BufferSource,\n additionalData: aad as BufferSource,\n },\n dek,\n ciphertext as BufferSource,\n )\n return new Uint8Array(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? 
err.message : 'Decryption failed',\n )\n }\n}\n\n// ─── Presence Key Derivation ──────────────────────────────\n\n/**\n * Derive an AES-256-GCM presence key from a collection DEK using HKDF-SHA256.\n *\n * The presence key is domain-separated from the data DEK by the fixed salt\n * `'noydb-presence'` and the `info` = collection name. This means:\n * - The adapter never sees the presence key.\n * - Presence payloads rotate automatically when the collection DEK is rotated.\n * - Revoked users cannot derive the new presence key after a DEK rotation.\n *\n * @param dek The collection's AES-256-GCM DEK (extractable).\n * @param collectionName Used as the HKDF `info` parameter for domain separation.\n * @returns A non-extractable AES-256-GCM key suitable for presence payload encryption.\n */\nexport async function derivePresenceKey(dek: CryptoKey, collectionName: string): Promise<CryptoKey> {\n // Step 1: export DEK raw bytes\n const rawDek = await subtle.exportKey('raw', dek)\n\n // Step 2: import as HKDF key material\n const hkdfKey = await subtle.importKey(\n 'raw',\n rawDek,\n 'HKDF',\n false,\n ['deriveBits'],\n )\n\n // Step 3: derive 256 bits with salt='noydb-presence' and info=collectionName\n const salt = new TextEncoder().encode('noydb-presence')\n const info = new TextEncoder().encode(collectionName)\n const bits = await subtle.deriveBits(\n { name: 'HKDF', hash: 'SHA-256', salt, info },\n hkdfKey,\n KEY_BITS,\n )\n\n // Step 4: import derived bits as AES-GCM key\n return subtle.importKey(\n 'raw',\n bits,\n { name: 'AES-GCM', length: KEY_BITS },\n false,\n ['encrypt', 'decrypt'],\n )\n}\n\n// ─── Deterministic Encryption ────────────────────────────\n\n/**\n * Derive a deterministic 12-byte IV from `{ DEK, context, plaintext }`\n * via HKDF-SHA256. Given the same three inputs, the IV is identical, so\n * `encryptDeterministic` produces the same ciphertext on every call —\n * which is precisely what enables blind equality search on encrypted\n * fields.\n *\n * **The side channel this opens.** Two records whose field value is the\n * same produce the same ciphertext. An observer with store access can\n * therefore tell which records share a value — not *what* the value is,\n * but the equivalence class. 
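For example (an\n * illustrative sketch; `dek` stands in for any AES-256-GCM collection DEK):\n * ```ts\n * const a = await encryptDeterministic('alice@example.com', dek, 'users/email')\n * const b = await encryptDeterministic('alice@example.com', dek, 'users/email')\n * // a.data === b.data: an observer learns the two records share a value\n * ```\n * 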
This is the well-known trade-off of\n * deterministic encryption and is why the feature is strictly opt-in\n * per field, guarded by `acknowledgeDeterministicRisk: true` at\n * collection creation.\n *\n * The context string MUST include the collection name and field name,\n * so:\n * - The same plaintext in two different fields encrypts differently\n * (no cross-field equality leak).\n * - The same plaintext in two different collections (different DEKs)\n * encrypts differently by virtue of the key, even before HKDF\n * domain separation kicks in.\n */\nasync function deriveDeterministicIV(\n dek: CryptoKey,\n context: string,\n plaintext: string,\n): Promise<Uint8Array> {\n const rawDek = await subtle.exportKey('raw', dek)\n const hkdfKey = await subtle.importKey('raw', rawDek, 'HKDF', false, ['deriveBits'])\n const salt = new TextEncoder().encode('noydb-deterministic-v1')\n const info = new TextEncoder().encode(`${context}\\x00${plaintext}`)\n const bits = await subtle.deriveBits(\n { name: 'HKDF', hash: 'SHA-256', salt, info },\n hkdfKey,\n IV_BYTES * 8,\n )\n return new Uint8Array(bits)\n}\n\n/**\n * Encrypt a plaintext string with AES-256-GCM and a deterministic,\n * HKDF-derived IV.\n *\n * The same `{ dek, context, plaintext }` triple always produces the\n * same `{ iv, data }` — call this twice and you can string-compare the\n * ciphertexts to check equality of the inputs without decrypting them.\n *\n * @param context Domain-separation string — by convention\n * `'<collection>/<field>'`. Different contexts encrypt\n * the same plaintext to different ciphertexts, so\n * `email` in collection `users` does not collide with\n * `email` in collection `customers`.\n */\nexport async function encryptDeterministic(\n plaintext: string,\n dek: CryptoKey,\n context: string,\n): Promise<EncryptResult> {\n const iv = await deriveDeterministicIV(dek, context, plaintext)\n const encoded = new TextEncoder().encode(plaintext)\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n encoded,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Counterpart to {@link encryptDeterministic}. The IV is stored\n * alongside the ciphertext (exactly like the randomized path), so\n * decrypt uses the stored IV and verifies the GCM auth tag — a tampered\n * ciphertext throws `TamperedError` just like randomized AES-GCM.\n */\nexport async function decryptDeterministic(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<string> {\n return decrypt(ivBase64, dataBase64, dek)\n}\n\n// ─── Random Generation ─────────────────────────────────────────────────\n\n/** Generate a random 12-byte IV for AES-GCM. */\nexport function generateIV(): Uint8Array {\n return globalThis.crypto.getRandomValues(new Uint8Array(IV_BYTES))\n}\n\n/** Generate a random 32-byte salt for PBKDF2. */\nexport function generateSalt(): Uint8Array {\n return globalThis.crypto.getRandomValues(new Uint8Array(SALT_BYTES))\n}\n\n// ─── Base64 Helpers ────────────────────────────────────────────────────\n\nexport function bufferToBase64(buffer: ArrayBuffer | Uint8Array): string {\n const bytes = buffer instanceof Uint8Array ? 
buffer : new Uint8Array(buffer)\n let binary = ''\n for (let i = 0; i < bytes.length; i++) {\n binary += String.fromCharCode(bytes[i]!)\n }\n return btoa(binary)\n}\n\nexport function base64ToBuffer(base64: string): Uint8Array<ArrayBuffer> {\n const binary = atob(base64)\n const bytes = new Uint8Array(binary.length)\n for (let i = 0; i < binary.length; i++) {\n bytes[i] = binary.charCodeAt(i)\n }\n return bytes\n}\n","/**\n * @noy-db/hub/blobs — opt-in blob / document subsystem.\n *\n * @category capability\n *\n * This subpath groups every file whose reason-for-existing is blob\n * storage: `BlobSet` (slot-based attachments with chunked encryption\n * and dedup), `mime-magic` (MIME detection from magic bytes),\n * `blob-compaction` (TTL eviction via `blobFields`), `export-blobs`\n * (bulk export primitive), and the legacy `attachments` API.\n *\n * Hub's root barrel (`@noy-db/hub`) still re-exports `BlobSet` + the\n * MIME helpers for backward compatibility with `@noy-db/as-blob`,\n * `@noy-db/as-zip`, and any consumer written before this split. New\n * code should prefer this subpath so the import boundary is explicit.\n */\n\nexport { withBlobs } from './active.js'\nexport type { BlobStrategy, BlobStrategyOpenArgs } from './strategy.js'\n\nexport { BlobSet } from './blob-set.js'\nexport {\n BLOB_COLLECTION,\n BLOB_INDEX_COLLECTION,\n BLOB_CHUNKS_COLLECTION,\n BLOB_SLOTS_PREFIX,\n BLOB_VERSIONS_PREFIX,\n DEFAULT_CHUNK_SIZE,\n} from './blob-set.js'\nexport type {\n BlobObject,\n SlotRecord,\n SlotInfo,\n VersionRecord,\n BlobPutOptions,\n BlobResponseOptions,\n} from '../types.js'\n\nexport { detectMimeType, detectMagic, isPreCompressed } from './mime-magic.js'\n\nexport { runCompaction, BLOB_EVICTION_AUDIT_COLLECTION } from './blob-compaction.js'\nexport type {\n BlobFieldsConfig,\n BlobFieldPolicy,\n BlobEvictionEntry,\n CompactRunOptions,\n CompactionResult,\n CompactionContext,\n} from './blob-compaction.js'\n\nexport {\n createExportBlobsHandle,\n ExportBlobsAbortedError,\n EXPORT_AUDIT_COLLECTION,\n} from './export-blobs.js'\nexport type {\n ExportBlobsOptions,\n ExportedBlob,\n ExportBlobsHandle,\n ExportBlobsAuditEntry,\n} from './export-blobs.js'\n","/**\n * Core types — the {@link NoydbStore} interface, envelope format, roles, and\n * all configuration shapes consumed by {@link createNoydb}.\n *\n * ## What lives here\n *\n * - **{@link NoydbStore}** — the 6-method contract every backend must implement\n * (`get`, `put`, `delete`, `list`, `loadAll`, `saveAll`).\n * - **{@link EncryptedEnvelope}** — the wire format stored by backends:\n * `{ _noydb, _v, _ts, _iv, _data }`. Backends only ever see this shape.\n * - **{@link Role} / {@link Permission}** — the access-control vocabulary\n * (`owner`, `admin`, `operator`, `viewer`, `client`).\n * - **{@link NoydbOptions}** — the full configuration object passed to\n * {@link createNoydb}.\n *\n * ## Extending the store interface\n *\n * All optional store capabilities (`ping`, `listPage`, `listSince`,\n * `presencePublish`, `presenceSubscribe`, `listVaults`) are additive extensions\n * discovered via `'method' in store`. 
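A sketch of the\n * discovery pattern (illustrative; the guard core actually uses may differ):\n * ```ts\n * if ('listPage' in store && store.listPage) {\n *   const page = await store.listPage(vault, collection, undefined, 100)\n *   // page.items and page.nextCursor as described by ListPageResult\n * }\n * ```\n * 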
Implementing them unlocks features but\n * is never required — core always falls back to the 6-method baseline.\n *\n * @module\n */\n\nimport type { StandardSchemaV1 } from './schema.js'\nimport type { SyncPolicy } from './store/sync-policy.js'\nimport type { BlobStrategy } from './blobs/strategy.js'\nimport type { IndexStrategy } from './indexing/strategy.js'\nimport type { AggregateStrategy } from './aggregate/strategy.js'\nimport type { CrdtStrategy } from './crdt/strategy.js'\nimport type { ConsentStrategy } from './consent/strategy.js'\nimport type { PeriodsStrategy } from './periods/strategy.js'\nimport type { ShadowStrategy } from './shadow/strategy.js'\nimport type { TxStrategy } from './tx/strategy.js'\nimport type { HistoryStrategy } from './history/strategy.js'\nimport type { I18nStrategy } from './i18n/strategy.js'\nimport type { SessionStrategy } from './session/strategy.js'\nimport type { SyncStrategy } from './team/sync-strategy.js'\nimport type { UnlockedKeyring } from './team/keyring.js'\n\n/** Format version for encrypted record envelopes. */\nexport const NOYDB_FORMAT_VERSION = 1 as const\n\n/** Format version for keyring files. */\nexport const NOYDB_KEYRING_VERSION = 1 as const\n\n/** Format version for backup files. */\nexport const NOYDB_BACKUP_VERSION = 1 as const\n\n/** Format version for sync metadata. */\nexport const NOYDB_SYNC_VERSION = 1 as const\n\n// ─── Roles & Permissions ───────────────────────────────────────────────\n\n/**\n * Access role assigned to a user within a vault.\n *\n * Roles control both the operations a user can perform and which DEKs\n * they receive in their keyring:\n *\n * | Role | Collections | Can grant/revoke | Can export |\n * |------------|-----------------|:----------------:|:----------:|\n * | `owner` | all (rw) | Yes (all roles) | Yes |\n * | `admin` | all (rw) | Yes (≤ admin) | Yes |\n * | `operator` | explicit (rw) | No | ACL-scoped |\n * | `viewer` | all (ro) | No | Yes |\n * | `client` | explicit (ro) | No | ACL-scoped |\n */\nexport type Role = 'owner' | 'admin' | 'operator' | 'viewer' | 'client'\n\n/**\n * Read-write or read-only access on a collection.\n * Stored per-collection in the user's keyring.\n */\nexport type Permission = 'rw' | 'ro'\n\n/**\n * Map of collection name → permission level for a user's keyring entry.\n * `'*'` is the wildcard collection matching all collections in the vault.\n */\nexport type Permissions = Record<string, Permission>\n\n// ─── Encrypted Envelope ────────────────────────────────────────────────\n\n/** The encrypted wrapper stored by adapters. Adapters only ever see this. */\nexport interface EncryptedEnvelope {\n readonly _noydb: typeof NOYDB_FORMAT_VERSION\n readonly _v: number\n readonly _ts: string\n readonly _iv: string\n readonly _data: string\n /** User who created this version (unencrypted metadata). */\n readonly _by?: string\n /**\n * Hierarchical access tier. Omitted → tier 0.\n *\n * Unencrypted on purpose — the store reads it to route the envelope\n * to the right DEK slot without having to try-decrypt against every\n * tier. Only leaks the tier of each record, not any value\n * equivalence.\n */\n readonly _tier?: number\n /**\n * User id who last elevated this record. Used by\n * `demote()` to gate the reverse operation: only the original\n * elevator or an owner can demote a record back down. 
Cleared on\n * every successful demote so a later re-elevate requires the new\n * actor to own the demotion right.\n */\n readonly _elevatedBy?: string\n /**\n * Deterministic-encryption index. Map of field name →\n * base64 deterministic ciphertext. Present only when the collection\n * declares `deterministicFields` and the feature is acknowledged. The\n * field names are unencrypted (they're the index keys); the values\n * are AES-GCM ciphertext with an HKDF-derived deterministic IV.\n *\n * Enables blind equality search (`collection.findByDet(field,\n * value)`) without decrypting every record. Leaks equality as a known\n * side channel.\n */\n readonly _det?: Record<string, string>\n}\n\n/**\n * Placeholder returned by `getAtTier()` in `'ghost'` mode when a\n * record is at a tier the caller cannot decrypt. Record existence is\n * advertised — the id and tier are visible — but contents are\n * withheld. `canElevateFrom` lists user ids authorized to elevate\n * access for this caller when known; absent when the workflow is\n * not configured.\n */\nexport interface GhostRecord {\n readonly _ghost: true\n readonly _tier: number\n readonly canElevateFrom?: readonly string[]\n}\n\n/** Control what lower-tier reads see above their clearance. */\nexport type TierMode = 'invisibility' | 'ghost'\n\n/**\n * Event emitted when a record at a tier above the caller's inherent\n * clearance is read or written successfully (via elevation or\n * delegation). Always written to the ledger; subscribers get a\n * real-time feed.\n */\nexport interface CrossTierAccessEvent {\n readonly actor: string\n readonly collection: string\n readonly id: string\n readonly tier: number\n /** How the caller gained tier access: they elevated it, or a delegation is active. */\n readonly authorization: 'elevation' | 'delegation' | 'inherent'\n readonly op: 'get' | 'put' | 'elevate' | 'demote'\n readonly ts: string\n /**\n * When `authorization === 'elevation'`, the audit reason string the\n * caller passed to `vault.elevate(...)`. Empty for inherent /\n * delegation paths.\n */\n readonly reason?: string\n /**\n * When `authorization === 'elevation'`, the tier the caller's\n * keyring effectively held BEFORE elevation. Useful for audit\n * dashboards distinguishing \"operator elevating to 2\" from\n * \"inherent tier-2 write.\"\n */\n readonly elevatedFrom?: number\n}\n\n/**\n * A single deterministic-ciphertext index slot on an envelope. Stored\n * as `iv:data` (both base64, colon-separated) so a single string per\n * field keeps the envelope compact.\n */\nexport type DeterministicCipher = string\n\n// ─── Vault Snapshot ──────────────────────────────────────────────\n\n/** All records across all collections for a compartment. */\nexport type VaultSnapshot = Record<string, Record<string, EncryptedEnvelope>>\n\n/**\n * Result of a single page fetch via the optional `listPage` adapter extension.\n *\n * `items` carries the actual encrypted envelopes (not just ids) so the\n * caller can decrypt and emit a single record without an extra `get()`\n * round-trip per id. `nextCursor` is `null` on the final page.\n */\nexport interface ListPageResult {\n /** Encrypted envelopes for this page, in adapter-defined order. */\n items: Array<{ id: string; envelope: EncryptedEnvelope }>\n /** Opaque cursor for the next page, or `null` if this was the last page. 
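Callers typically\n * loop until it is `null`; a hedged sketch (assumes the adapter implements\n * `listPage`; `handle` is a placeholder):\n * ```ts\n * let cursor: string | undefined\n * do {\n *   const page = await store.listPage!(vault, collection, cursor, 100)\n *   for (const { id, envelope } of page.items) handle(id, envelope)\n *   cursor = page.nextCursor ?? undefined\n * } while (cursor)\n * ```\n 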
*/\n nextCursor: string | null\n}\n\n// ─── Store Interface ───────────────────────────────────────────────────\n\nexport interface NoydbStore {\n /**\n * Optional human-readable adapter name (e.g. 'memory', 'file', 'dynamo').\n * Used in diagnostic messages and the listPage fallback warning. Adapters\n * are encouraged to set this so logs are clearer about which backend is\n * involved when something goes wrong.\n */\n name?: string\n\n /** Get a single record. Returns null if not found. */\n get(vault: string, collection: string, id: string): Promise<EncryptedEnvelope | null>\n\n /** Put a record. Throws ConflictError if expectedVersion doesn't match. */\n put(\n vault: string,\n collection: string,\n id: string,\n envelope: EncryptedEnvelope,\n expectedVersion?: number,\n ): Promise<void>\n\n /** Delete a record. */\n delete(vault: string, collection: string, id: string): Promise<void>\n\n /** List all record IDs in a collection. */\n list(vault: string, collection: string): Promise<string[]>\n\n /** Load all records for a vault (initial hydration). */\n loadAll(vault: string): Promise<VaultSnapshot>\n\n /** Save all records for a vault (bulk write / restore). */\n saveAll(vault: string, data: VaultSnapshot): Promise<void>\n\n /** Optional connectivity check for sync engine. */\n ping?(): Promise<boolean>\n\n /**\n * Optional: list record IDs in a collection that have `_ts` after `since`.\n * Used by partial sync (`pull({ modifiedSince })`). Adapters that omit this\n * fall back to a full `loadAll` + client-side timestamp filter.\n */\n listSince?(vault: string, collection: string, since: string): Promise<string[]>\n\n /**\n * Optional pagination extension. Adapters that implement `listPage` get\n * the streaming `Collection.scan()` fast path; adapters that don't are\n * silently fallen back to a full `loadAll()` + slice (with a one-time\n * console.warn).\n *\n * `cursor` is opaque to the core — each adapter encodes its own paging\n * state (DynamoDB: base64 LastEvaluatedKey JSON; S3: ContinuationToken;\n * memory/file/browser: numeric offset of a sorted id list). Pass\n * `undefined` to start from the beginning.\n *\n * `limit` is a soft upper bound on `items.length`. Adapters MAY return\n * fewer items even when more exist (e.g. if the underlying store has\n * its own page size cap), and MUST signal \"no more pages\" by returning\n * `nextCursor: null`.\n *\n * The 6-method core contract is unchanged — this is an additive\n * extension discovered via `'listPage' in adapter`.\n */\n listPage?(\n vault: string,\n collection: string,\n cursor?: string,\n limit?: number,\n ): Promise<ListPageResult>\n\n /**\n * Optional pub/sub for real-time presence.\n * Publish an encrypted payload to a presence channel.\n * Falls back to storage-based polling when absent.\n */\n presencePublish?(channel: string, payload: string): Promise<void>\n\n /**\n * Optional pub/sub for real-time presence.\n * Subscribe to a presence channel. Returns an unsubscribe function.\n * Falls back to storage-based polling when absent.\n */\n presenceSubscribe?(channel: string, callback: (payload: string) => void): () => void\n\n /**\n * Optional cross-vault enumeration extension.\n *\n * Returns the names of every top-level vault the store\n * currently stores. Used by `Noydb.listAccessibleVaults()` to\n * enumerate the universe of vaults before filtering down to\n * the ones the calling principal can actually unwrap.\n *\n * **Why this is optional:** the storage shape of compartments\n * differs across backends. 
Memory and file stores store\n * vaults as top-level keys / directories and can enumerate\n * them in O(1) calls. DynamoDB stores everything in a single table\n * keyed by `(compartment#collection, id)` — enumerating compartments\n * requires either a Scan (expensive, eventually consistent, leaks\n * ciphertext metadata) or a dedicated GSI that the consumer\n * provisioned. S3 needs a prefix list (cheap if enabled, ACL-sensitive\n * otherwise). Browser localStorage can scan keys by prefix.\n *\n * Stores that cannot implement `listVaults` cheaply or\n * cleanly should omit it. Core surfaces a `StoreCapabilityError`\n * with a clear message when a caller invokes\n * `listAccessibleVaults()` against a store that doesn't\n * provide this method, so consumers know to either upgrade their\n * store, provide a candidate list explicitly to `queryAcross()`,\n * or fall back to maintaining the compartment index out of band.\n *\n * **Privacy note:** `listVaults` returns *every* compartment\n * the store has, not just the ones the caller can access. The\n * existence-leak filtering (returning only compartments whose\n * keyring the caller can unwrap) happens in core, not in the\n * store. The store is trusted to know its own contents — that\n * is not a leak in the threat model. The leak the API guards\n * against is the *return value* of `listAccessibleVaults()`\n * exposing existence to a downstream observer who only sees that\n * function's output.\n *\n * The 6-method core contract is unchanged — this is an additive\n * extension discovered via `'listVaults' in store`.\n */\n listVaults?(): Promise<string[]>\n\n /**\n * Optional: generate a presigned URL for direct client download.\n * Only meaningful for object stores (S3, GCS) that support URL signing.\n * Returns a time-limited URL that fetches the encrypted envelope directly.\n * The caller must decrypt client-side (the URL returns ciphertext).\n */\n presignUrl?(vault: string, collection: string, id: string, expiresInSeconds?: number): Promise<string>\n\n /**\n * Optional: estimate current storage usage.\n * Returns `{ usedBytes, quotaBytes }` or null if the store cannot estimate.\n * Used by quota-aware routing to detect overflow conditions.\n */\n estimateUsage?(): Promise<{ usedBytes: number; quotaBytes: number } | null>\n\n /**\n * Optional multi-record atomic write.\n *\n * When present, `db.transaction(async (tx) => { ... })` uses this to\n * commit every staged op in one storage-layer transaction — either\n * all ops land or none do, regardless of which records they touch.\n * Every `TxOp.expectedVersion` (when set) must be honored atomically\n * alongside the write; any violation throws `ConflictError` and the\n * whole batch fails.\n *\n * Stores that omit this fall through to the hub's per-record OCC\n * fallback: pre-flight CAS check, then sequential `put`/`delete`\n * with best-effort unwind on mid-batch failure (see\n * `runTransaction` for the exact semantics and crash window).\n *\n * Native implementations: `to-memory` (single Map mutation),\n * `to-dynamo` (`TransactWriteItems`), `to-browser-idb` (one\n * `readwrite` transaction). File / S3 cannot implement this\n * atomically and should omit the method.\n */\n tx?(ops: readonly TxOp[]): Promise<void>\n}\n\n/**\n * A single staged operation inside a `db.transaction(fn)` commit. 
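A\n * representative op (illustrative values; `envelope` is an `EncryptedEnvelope`\n * the hub produced):\n * ```ts\n * const op: TxOp = {\n *   type: 'put',\n *   vault: 'acme',\n *   collection: 'invoices',\n *   id: 'inv-001',\n *   envelope,\n *   expectedVersion: 3,\n * }\n * ```\n * 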
The\n * hub assembles `TxOp[]` from the user's `tx.collection().put/delete`\n * calls, encrypts any `record` values into `envelope`, and hands the\n * array to `NoydbStore.tx()` when the store supports atomic batch\n * writes. Stores that implement `tx()` MUST honor every\n * `expectedVersion` atomically against the stored envelope version.\n */\nexport interface TxOp {\n readonly type: 'put' | 'delete'\n readonly vault: string\n readonly collection: string\n readonly id: string\n /** Populated for `type: 'put'` — the encrypted envelope to write. */\n readonly envelope?: EncryptedEnvelope\n /** Optional per-record CAS. Mismatch must throw `ConflictError`. */\n readonly expectedVersion?: number\n}\n\n// ─── Store Factory Helper ──────────────────────────────────────────────\n\n/** Type-safe helper for creating store factories. */\nexport function createStore<TOptions>(\n factory: (options: TOptions) => NoydbStore,\n): (options: TOptions) => NoydbStore {\n return factory\n}\n\n// ─── Keyring ───────────────────────────────────────────────────────────\n\n/**\n * Interchange formats `@noy-db/as-*` packages can produce. `'*'` is a\n * wildcard granting every current + future plaintext format.\n */\nexport type ExportFormat =\n | 'xlsx'\n | 'csv'\n | 'json'\n | 'ndjson'\n | 'xml'\n | 'sql'\n | 'pdf'\n | 'blob'\n | 'zip'\n | '*'\n\n/**\n * Owner-granted export capability on a keyring.\n *\n * Two independent dimensions:\n *\n * - `plaintext` — per-format allowlist for record formatters + blob\n * extractors that emit plaintext bytes (`as-xlsx`, `as-csv`,\n * `as-blob`, `as-zip`, …). **Defaults to empty** for every role;\n * the owner/admin must positively grant per-format (or `'*'`).\n * - `bundle` — boolean for `.noydb` encrypted container export\n * (`as-noydb`). **Default policy: on for owner/admin, off for\n * operator/viewer/client** — applied when the field is absent or\n * undefined (see `hasExportCapability`).\n */\nexport interface ExportCapability {\n readonly plaintext?: readonly ExportFormat[]\n readonly bundle?: boolean\n}\n\n/**\n * Owner-granted import capability on a keyring (sibling of\n * `ExportCapability`, issue ).\n *\n * Two independent dimensions:\n *\n * - `plaintext` — per-format allowlist for `as-*` readers that ingest\n * plaintext bytes (`as-csv`, `as-json`, `as-ndjson`, `as-zip`, …).\n * Defaults to empty for every role; the owner/admin must positively\n * grant per-format (or `'*'`).\n * - `bundle` — boolean gate for `.noydb` bundle import. **Defaults to\n * `false` for every role**, including owner/admin. Import is more\n * dangerous than export (corrupts vs leaks), so the policy is\n * default-closed across the board — the owner explicitly opts a\n * keyring in via `db.grant({ importCapability: { bundle: true } })`.\n */\nexport interface ImportCapability {\n readonly plaintext?: readonly ExportFormat[]\n readonly bundle?: boolean\n}\n\nexport interface KeyringFile {\n readonly _noydb_keyring: typeof NOYDB_KEYRING_VERSION\n readonly user_id: string\n readonly display_name: string\n readonly role: Role\n readonly permissions: Permissions\n readonly deks: Record<string, string>\n readonly salt: string\n readonly created_at: string\n readonly granted_by: string\n /**\n * Optional — authorization spec capability bits. Absent on keyrings written\n * before the RFC implementation. Loading falls back to role-based\n * defaults (owner/admin get bundle-on, everyone else off).\n */\n readonly export_capability?: ExportCapability\n /**\n * Optional bundle-slot expiry. 
ISO-8601 timestamp; past\n * the cutoff `loadKeyring` throws `KeyringExpiredError` before any\n * DEK unwrap is attempted. Useful for time-boxed audit access:\n * \"this slot works for 30 days then becomes opaque to its holder.\"\n *\n * Absent on live keyrings written via `db.grant()` — the field is\n * meaningful for `BundleRecipient` slots produced by\n * `writeNoydbBundle({ recipients: [...] })`. Setting it on a live\n * keyring is allowed but unusual.\n */\n readonly expires_at?: string\n /**\n * Optional — issue import-capability bits. Absent on keyrings\n * written before landed. Loading falls back to default-closed\n * for every role and every format.\n */\n readonly import_capability?: ImportCapability\n /**\n * hierarchical access clearance. Absent → 0 (advisory;\n * the real check is whether the DEK map carries a `collection#tier`\n * entry for the requested tier). Owners and admins default to the\n * highest tier they have DEKs for at grant time.\n */\n readonly clearance?: number\n}\n\n// ─── Backup ────────────────────────────────────────────────────────────\n\nexport interface VaultBackup {\n readonly _noydb_backup: typeof NOYDB_BACKUP_VERSION\n readonly _compartment: string\n readonly _exported_at: string\n readonly _exported_by: string\n readonly keyrings: Record<string, KeyringFile>\n readonly collections: VaultSnapshot\n /**\n * Internal collections (`_ledger`, `_ledger_deltas`, `_history`, `_sync`, …)\n * captured alongside the data collections. Optional for backwards\n * compat with backups, which only stored data collections —\n * loading a backup leaves the ledger empty (and `verifyBackupIntegrity`\n * skips the chain check, surfacing only a console warning).\n */\n readonly _internal?: VaultSnapshot\n /**\n * Verifiable-backup metadata. Embeds the ledger head at\n * dump time so `load()` can cross-check that the loaded chain matches\n * exactly what was exported. A backup whose chain has been tampered\n * with — either by modifying ledger entries or by modifying data\n * envelopes that the chain references — fails this check.\n *\n * Optional for backwards compat with backups; missing means\n * \"legacy backup, load with a warning, no integrity check\".\n */\n readonly ledgerHead?: {\n /** Hex sha256 of the canonical JSON of the last ledger entry. */\n readonly hash: string\n /** Sequential index of the last ledger entry. */\n readonly index: number\n /** ISO timestamp captured at dump time. */\n readonly ts: string\n }\n}\n\n// ─── Export ────────────────────────────────────────────────────────────\n\n/**\n * Options for `Vault.exportStream()` and `Vault.exportJSON()`.\n *\n * The defaults match the most common consumer pattern: one chunk per\n * collection, no ledger metadata. Per-record streaming and ledger-head\n * inclusion are opt-in because both add structure most consumers don't\n * need.\n */\nexport interface ExportStreamOptions {\n /**\n * `'collection'` (default) yields one chunk per collection with all\n * records bundled in `chunk.records`. `'record'` yields one chunk per\n * record, useful for arbitrarily large collections that should never\n * be materialized as a single array.\n */\n readonly granularity?: 'collection' | 'record'\n\n /**\n * When `true`, every chunk includes the current compartment ledger\n * head under `chunk.ledgerHead`. The value is identical across every\n * chunk in a single export (one ledger per compartment). Forward-\n * compatible with future partition work where the head would become\n * per-partition. 
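An illustrative\n * call shape: `vault.exportStream({ withLedgerHead: true })` yields chunks\n * whose `chunk.ledgerHead` carries `{ hash, index, ts }`.\n * 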
Default: `false`.\n */\n readonly withLedgerHead?: boolean\n /**\n * When set to a BCP 47 locale string (e.g. `'th'`), `exportJSON()`\n * resolves all `dictKey` labels to that locale and omits the raw\n * `dictionaries` snapshot from the output. Has no effect\n * on `exportStream()` — format packages use the `chunk.dictionaries`\n * snapshot directly and apply their own locale strategy.\n *\n * Default: `undefined` — embed the raw snapshot under `_dictionaries`.\n */\n readonly resolveLabels?: string\n}\n\n/**\n * One chunk yielded by `Vault.exportStream()`.\n *\n * `granularity: 'collection'` yields one chunk per collection with the\n * full record array in `records`. `granularity: 'record'` yields one\n * chunk per record with `records` containing exactly one element — the\n * `schema` and `refs` metadata is repeated on every chunk so consumers\n * doing per-record streaming don't have to thread state across yields.\n */\nexport interface ExportChunk<T = unknown> {\n /** Collection name (no leading underscore — internal collections are filtered out). */\n readonly collection: string\n\n /**\n * Standard Schema validator attached to the collection at `collection()`\n * construction time, or `null` if no schema was provided. Surfaced so\n * downstream serializers (`@noy-db/as-*` packages, custom\n * exporters) can produce schema-aware output (typed CSV headers, XSD\n * generation, etc.) without poking at collection internals.\n */\n readonly schema: StandardSchemaV1<unknown, T> | null\n\n /**\n * Foreign-key references declared on the collection via the `refs`\n * option, as the `{ field → { target, mode } }` map produced by\n * `RefRegistry.getOutbound`. Empty object when no refs were declared.\n */\n readonly refs: Record<string, { readonly target: string; readonly mode: 'strict' | 'warn' | 'cascade' }>\n\n /**\n * Decrypted, ACL-scoped, schema-validated records. Length 1 in\n * `granularity: 'record'` mode, full collection in `granularity: 'collection'`\n * mode. Records are returned by reference from the collection's eager\n * cache where applicable — consumers must treat them as immutable.\n */\n readonly records: T[]\n\n /**\n * Dictionary snapshots for every `dictKey` field declared on this\n * collection. Captured once at stream-start and held\n * constant across all chunks within the same export — a rename\n * mid-export does not change the snapshot. `undefined` when the\n * collection has no `dictKeyFields`.\n *\n * Shape: `{ [fieldName]: { [stableKey]: { [locale]: label } } }`\n *\n * @example\n * ```ts\n * chunk.dictionaries?.status?.paid?.th // → 'ชำระแล้ว'\n * ```\n */\n readonly dictionaries?: Record<\n string, // field name\n Record<string, Record<string, string>> // stable key → locale → label\n >\n\n /**\n * Vault ledger head at export time. Present only when\n * `exportStream({ withLedgerHead: true })` was called. 
Identical\n * across every chunk in the same export — included on every chunk\n * for forward-compatibility with future per-partition ledgers, where\n * the value will differ per chunk.\n */\n readonly ledgerHead?: {\n readonly hash: string\n readonly index: number\n readonly ts: string\n }\n}\n\n// ─── Sync ──────────────────────────────────────────────────────────────\n\nexport interface DirtyEntry {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly action: 'put' | 'delete'\n readonly version: number\n readonly timestamp: string\n}\n\nexport interface SyncMetadata {\n readonly _noydb_sync: typeof NOYDB_SYNC_VERSION\n readonly last_push: string | null\n readonly last_pull: string | null\n readonly dirty: DirtyEntry[]\n}\n\nexport interface Conflict {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly local: EncryptedEnvelope\n readonly remote: EncryptedEnvelope\n readonly localVersion: number\n readonly remoteVersion: number\n /**\n * Present only when the collection uses `conflictPolicy: 'manual'`.\n * Call `resolve(winner)` to commit the winning envelope, or\n * `resolve(null)` to defer (conflict stays queued for the next sync).\n * Called synchronously inside the `sync:conflict` event handler.\n */\n readonly resolve?: (winner: EncryptedEnvelope | null) => void\n}\n\nexport type ConflictStrategy =\n | 'local-wins'\n | 'remote-wins'\n | 'version'\n | ((conflict: Conflict) => 'local' | 'remote')\n\n/**\n * Collection-level conflict policy.\n * Overrides the db-level `conflict` option for the specific collection.\n *\n * - `'last-writer-wins'` — higher `_ts` wins (timestamp LWW).\n * - `'first-writer-wins'` — lower `_v` wins (earlier version is preserved).\n * - `'manual'` — emits `sync:conflict` with a `resolve` callback. Call\n * `resolve(winner)` synchronously to commit or `resolve(null)` to defer.\n * - Custom fn — synchronous `(local: T, remote: T) => T`. Must be pure.\n */\nexport type ConflictPolicy<T> =\n | 'last-writer-wins'\n | 'first-writer-wins'\n | 'manual'\n | ((local: T, remote: T) => T)\n\n/**\n * Envelope-level resolver registered per collection with the SyncEngine.\n * Receives the `id` of the conflicting record and both envelopes.\n * Returns the winning envelope, or `null` to defer resolution.\n * @internal\n */\nexport type CollectionConflictResolver = (\n id: string,\n local: EncryptedEnvelope,\n remote: EncryptedEnvelope,\n) => Promise<EncryptedEnvelope | null>\n\n/** Options for targeted push operations. */\nexport interface PushOptions {\n /** Only push records belonging to these collections. Omit to push all dirty. */\n collections?: string[]\n}\n\n/** Options for targeted pull operations. */\nexport interface PullOptions {\n /** Only pull these collections. Omit to pull all. */\n collections?: string[]\n /**\n * Only pull records with `_ts` strictly after this ISO timestamp.\n * Adapters that implement `listSince` use it directly; others fall back\n * to a full scan with client-side filtering.\n */\n modifiedSince?: string\n}\n\nexport interface PushResult {\n readonly pushed: number\n readonly conflicts: Conflict[]\n readonly errors: Error[]\n}\n\nexport interface PullResult {\n readonly pulled: number\n readonly conflicts: Conflict[]\n readonly errors: Error[]\n}\n\n/** Result of a sync transaction commit. 
*/\nexport interface SyncTransactionResult {\n readonly status: 'committed' | 'conflict'\n readonly pushed: number\n readonly conflicts: Conflict[]\n}\n\nexport interface SyncStatus {\n readonly dirty: number\n readonly lastPush: string | null\n readonly lastPull: string | null\n readonly online: boolean\n}\n\n// ─── Sync Target ─────────────────────────────────────────\n\nexport type SyncTargetRole = 'sync-peer' | 'backup' | 'archive'\n\n/**\n * A sync target with role and optional per-target policy.\n *\n * | Role | Direction | Conflict resolution | Typical use |\n * |-------------|---------------|---------------------|--------------------------|\n * | `sync-peer` | Bidirectional | ConflictStrategy | DynamoDB live sync |\n * | `backup` | Push-only | N/A (receives merged)| S3 dump, Google Drive |\n * | `archive` | Push-only | N/A | IPFS, Git tags, S3 Lock |\n */\nexport interface SyncTarget {\n /** The store to sync with. */\n readonly store: NoydbStore\n /** Role determines sync direction and conflict handling. */\n readonly role: SyncTargetRole\n /** Per-target sync policy. Inherits store-category default when absent. */\n readonly policy?: SyncPolicy\n /** Human-readable label for DevTools and audit logs. */\n readonly label?: string\n}\n\n// ─── Events ────────────────────────────────────────────────────────────\n\nexport interface ChangeEvent {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly action: 'put' | 'delete'\n}\n\nexport interface NoydbEventMap {\n 'change': ChangeEvent\n 'error': Error\n 'sync:push': PushResult\n 'sync:pull': PullResult\n 'sync:conflict': Conflict\n 'sync:online': void\n 'sync:offline': void\n 'sync:backup-error': { vault: string; target: string; error: Error }\n 'history:save': { vault: string; collection: string; id: string; version: number }\n 'history:prune': { vault: string; collection: string; id: string; pruned: number }\n /**\n * Emitted when a persisted-index side-car put/delete fails after the\n * main record write already succeeded. The main record is durable; the\n * index mirror may have drifted. Operators reconcile via\n * `collection.reconcileIndex(field)`.\n */\n 'index:write-partial': {\n vault: string\n collection: string\n id: string\n action: 'put' | 'delete'\n error: Error\n }\n /**\n * emitted by `Collection.ensurePersistedIndexesLoaded()`\n * once per field on first lazy-mode query when\n * `reconcileOnOpen: 'auto' | 'dry-run'` is configured. `applied` is\n * `0` in `'dry-run'` mode. `skipped` is reserved for a future\n * drift-stamp optimization that short-circuits the reconcile when\n * the mirror version matches what's on disk — currently always\n * `false` (the full reconcile runs every session).\n */\n 'index:reconciled': {\n vault: string\n collection: string\n field: string\n missing: readonly string[]\n stale: readonly string[]\n applied: number\n skipped: boolean\n }\n}\n\n// ─── Grant / Revoke ────────────────────────────────────────────────────\n\nexport interface GrantOptions {\n readonly userId: string\n readonly displayName: string\n readonly role: Role\n readonly passphrase: string\n readonly permissions?: Permissions\n /**\n * Optional `@noy-db/as-*` export capability. Omit or\n * leave undefined to apply role-based defaults (see\n * `hasExportCapability` and `ExportCapability`).\n */\n readonly exportCapability?: ExportCapability\n /**\n * Optional `@noy-db/as-*` import capability (issue ). 
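Illustrative:\n * `db.grant({ userId, displayName, role: 'operator', passphrase, importCapability: { plaintext: ['csv'] } })`.\n * 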
Omit or\n * leave undefined for default-closed semantics — no plaintext format\n * is grantable until positively listed; bundle import is denied.\n */\n readonly importCapability?: ImportCapability\n}\n\nexport interface RevokeOptions {\n readonly userId: string\n readonly rotateKeys?: boolean\n\n /**\n * Cascade behavior when the revoked user is an admin who has granted\n * other admins.\n *\n * - `'strict'` (default) — recursively revoke every admin that the\n * target (transitively) granted. The cascade walks the\n * `granted_by` field on each keyring file and stops at non-admin\n * leaves. All affected collections are accumulated and rotated in\n * a single pass at the end, so cascade cost is O(records in\n * affected collections), not O(records × cascade depth).\n *\n * - `'warn'` — leave the descendant admins in place but emit a\n * `console.warn` listing them. Useful for diagnostic dry runs and\n * for environments where the operator wants to clean up the\n * delegation tree manually.\n *\n * No effect when the target is not an admin (operators, viewers, and\n * clients cannot grant other users, so they have no delegation\n * subtree to cascade through). Defaults to `'strict'`.\n */\n readonly cascade?: 'strict' | 'warn'\n}\n\n// ─── Cross-vault queries ──────────────────────────────\n\n/**\n * One entry returned by `Noydb.listAccessibleVaults()`. Carries\n * the compartment id and the role the calling principal holds in it,\n * so the consumer can decide how to fan out without re-checking\n * permissions per vault.\n */\nexport interface AccessibleVault {\n readonly id: string\n readonly role: Role\n}\n\n/**\n * Options for `Noydb.listAccessibleVaults()`.\n */\nexport interface ListAccessibleVaultsOptions {\n /**\n * Minimum role the caller must hold to include a compartment in the\n * result. Compartments where the caller's role is strictly *below*\n * this threshold are silently excluded. Defaults to `'client'`,\n * which means \"every vault I can unwrap is returned.\" Set to\n * `'admin'` for \"vaults where I can grant/revoke,\" or\n * `'owner'` for \"vaults I own.\"\n *\n * The privilege ordering used:\n * `client (1) < viewer (2) < operator (3) < admin (4) < owner (5)`\n *\n * Note: `viewer` and `client` are conceptually peers in the ACL\n * (neither can grant), but `viewer` has read-all access while\n * `client` has only explicit-collection read. The numeric order\n * reflects \"how much can this principal see,\" not \"how much can\n * this principal modify.\"\n */\n readonly minRole?: Role\n}\n\n/**\n * Options for `Noydb.queryAcross()`.\n */\nexport interface QueryAcrossOptions {\n /**\n * Maximum number of compartments to process in parallel. Defaults\n * to `1` (sequential) — conservative because the per-compartment\n * callback typically does its own I/O and an unbounded fan-out can\n * exhaust adapter connections (DynamoDB throughput, S3 socket\n * limits, browser fetch concurrency).\n *\n * Set to `4` or `8` for cloud-backed compartments where parallelism\n * is the whole point of fanning out. Set to `1` (default) for local\n * adapters where the disk I/O serializes anyway.\n */\n readonly concurrency?: number\n}\n\n/**\n * One entry in the array returned by `Noydb.queryAcross()`. 
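A hedged consumption\n * sketch (`results` being the array `queryAcross()` returned):\n * ```ts\n * for (const r of results) {\n *   if (r.error) console.warn(r.vault, r.error.message)\n *   else console.log(r.vault, r.result)\n * }\n * ```\n * 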
Either\n * `result` is set (callback succeeded for this compartment) or\n * `error` is set (callback threw, or compartment failed to open).\n *\n * Per-compartment errors do **not** abort the overall fan-out — every\n * compartment is given a chance to run its callback, and the\n * partition between success and failure is exposed in the return\n * value. Consumers that want fail-fast semantics can check\n * `r.error !== undefined` and short-circuit themselves.\n */\nexport type QueryAcrossResult<T> =\n | { readonly vault: string; readonly result: T; readonly error?: undefined }\n | { readonly vault: string; readonly result?: undefined; readonly error: Error }\n\n// ─── User Info ─────────────────────────────────────────────────────────\n\nexport interface UserInfo {\n readonly userId: string\n readonly displayName: string\n readonly role: Role\n readonly permissions: Permissions\n readonly createdAt: string\n readonly grantedBy: string\n}\n\n// ─── Session ───────────────────────────────────────────────\n\n/**\n * Operations that a session policy can require re-authentication for.\n * Passed as the `requireReAuthFor` array in `SessionPolicy`.\n */\nexport type ReAuthOperation = 'export' | 'grant' | 'revoke' | 'rotate' | 'changeSecret'\n\n/**\n * Session policy controlling lifetime, re-auth requirements, and\n * background-lock behavior.\n *\n * All timeout values are in milliseconds. `undefined` means \"no limit.\"\n * The policy is evaluated lazily — it does not start timers itself;\n * enforcement happens at the Noydb call site.\n */\nexport interface SessionPolicy {\n /**\n * Idle timeout in ms. If no NOYDB operation is performed for this\n * duration, the session is revoked on the next operation attempt\n * (which will throw `SessionExpiredError`). The idle clock resets\n * on every successful operation.\n *\n * Default: `undefined` (no idle timeout).\n */\n readonly idleTimeoutMs?: number\n\n /**\n * Absolute timeout in ms from session creation. After this duration\n * the session is unconditionally revoked regardless of activity.\n *\n * Default: `undefined` (no absolute timeout).\n */\n readonly absoluteTimeoutMs?: number\n\n /**\n * Operations that require the user to re-authenticate (re-enter their\n * passphrase or perform a fresh WebAuthn assertion) before proceeding,\n * even if the session is still alive.\n *\n * Common pattern: `requireReAuthFor: ['export', 'grant']` — allow\n * read/write operations in the background but demand a fresh credential\n * for high-risk mutations.\n *\n * Default: `[]` (no extra re-auth requirements).\n */\n readonly requireReAuthFor?: readonly ReAuthOperation[]\n\n /**\n * If `true`, the session is revoked when the page goes to the background\n * (visibilitychange event, `document.hidden === true`). Useful for\n * high-sensitivity deployments where leaving the tab is treated as\n * a session boundary.\n *\n * No-op in non-browser environments (Node.js, workers without document).\n * Default: `false`.\n */\n readonly lockOnBackground?: boolean\n}\n\n// ─── i18n / Locale ─────────────────────────────────────\n\n/**\n * Locale-aware read options. Pass to `Collection.get()`, `list()`,\n * `query()`, and `scan()` to trigger per-record locale resolution for\n * `dictKey` and `i18nText` fields.\n *\n * - **`locale: 'raw'`** — skip resolution for `i18nText` fields and\n * return the full `{ [locale]: string }` map. Dict key fields still\n * return the stable key (no `<field>Label` added).\n * - **`fallback`** — single locale code or ordered list. 
Use `'any'` as\n * the last element to fall back to any present translation.\n *\n * When neither the call-level locale nor the compartment's default locale\n * is set, reading a record with `i18nText` fields throws\n * `LocaleNotSpecifiedError`.\n */\nexport interface LocaleReadOptions {\n /**\n * The target locale code (e.g. `'th'`), or `'raw'` to return the full\n * language map without resolution.\n */\n readonly locale?: string\n /**\n * Fallback locale or ordered fallback chain. Use `'any'` as the last\n * element to fall back to any present translation.\n */\n readonly fallback?: string | readonly string[]\n}\n\n// ─── plaintextTranslator hook ──────────────────────────────\n\n/**\n * Context passed to the consumer-supplied `plaintextTranslator` function.\n * The hook receives the source text plus enough metadata to route it to the\n * right translation service and record what it did.\n */\nexport interface PlaintextTranslatorContext {\n /** The plaintext string to translate. */\n readonly text: string\n /** BCP 47 source locale (the locale the text is written in). */\n readonly from: string\n /** BCP 47 target locale to translate into. */\n readonly to: string\n /** The schema field name that triggered the translation. */\n readonly field: string\n /** The collection the record is being put into. */\n readonly collection: string\n}\n\n/**\n * A consumer-supplied async function that translates a single string\n * from one locale to another. noy-db ships no built-in translator.\n *\n * **Security:** this function receives plaintext. The consumer is\n * responsible for the data policy of whatever service it calls. See\n * `NOYDB_SPEC.md § Zero-Knowledge Storage` and the `plaintextTranslator`\n * JSDoc on `NoydbOptions` for the full invariant statement.\n */\nexport type PlaintextTranslatorFn = (\n ctx: PlaintextTranslatorContext,\n) => Promise<string>\n\n/**\n * One entry in the in-process translator audit log. Cleared when\n * `db.close()` is called — same lifetime as the KEK and DEKs.\n *\n * Deliberately omits any content hash or translated-text fingerprint\n * to prevent correlation attacks on the audit trail.\n */\nexport interface TranslatorAuditEntry {\n readonly type: 'translator-invocation'\n /** Schema field name that was translated. */\n readonly field: string\n /** Collection the record belongs to. */\n readonly collection: string\n /** Source locale. */\n readonly fromLocale: string\n /** Target locale. */\n readonly toLocale: string\n /**\n * Consumer-provided translator name from\n * `NoydbOptions.plaintextTranslatorName`. Defaults to `'anonymous'`\n * when not supplied.\n */\n readonly translatorName: string\n /** ISO 8601 timestamp of the invocation. */\n readonly timestamp: string\n /**\n * `true` when the result was served from the in-process cache rather\n * than by calling the translator function. Present only on cache hits\n * so the absence of the field also communicates a cache miss.\n */\n readonly cached?: true\n}\n\n// ─── Presence ─────────────────────────────────────────────\n\n/**\n * A presence peer entry. `lastSeen` is an ISO timestamp set by core on each\n * `update()` call. 
Stale entries (lastSeen older than `staleMs`) are filtered\n * before delivering to the subscriber callback.\n */\nexport interface PresencePeer<P> {\n readonly userId: string\n readonly payload: P\n readonly lastSeen: string\n}\n\n// ─── CRDT ─────────────────────────────────────────────────\n\n// Re-exported from crdt.ts so consumers only need one import path.\nexport type { CrdtMode, CrdtState, LwwMapState, RgaState, YjsState } from './crdt/crdt.js'\n\n// ─── Blob / Attachment Store ────────────────────────\n\n/**\n * Second store shape for blob-store backends (Drive, WebDAV, Git, iCloud)\n * that operate on whole-vault bundles rather than per-record KV.\n *\n * Implement `readBundle` / `writeBundle` instead of the six-method KV\n * contract. Use `wrapBundleStore()` from `@noy-db/hub` to convert to a\n * `NoydbStore` that the rest of the API consumes transparently.\n *\n * Named `NoydbBundleStore` (not `NoydbBundleAdapter`) for consistency\n * with the hub / to-* / in-* rename. Concrete implementations ship\n * in `@noy-db/to-*` packages starting in.\n */\nexport interface NoydbBundleStore {\n /** Discriminant for engine auto-detection of store shape. */\n readonly kind: 'bundle'\n /** Human-readable name for diagnostics (e.g. `'drive'`, `'webdav'`). */\n readonly name?: string\n /**\n * Read the entire vault as raw bytes. Returns `null` if no bundle exists\n * yet (first open of a brand-new vault).\n */\n readBundle(vaultId: string): Promise<{ bytes: Uint8Array; version: string } | null>\n /**\n * Write the entire vault as raw bytes. `expectedVersion` is the version\n * token from the last `readBundle` (or `null` for a first write).\n * Implementations MUST reject the write if the stored version has advanced\n * past `expectedVersion` — throw `BundleVersionConflictError`.\n * Returns the new version token on success.\n */\n writeBundle(\n vaultId: string,\n bytes: Uint8Array,\n expectedVersion: string | null,\n ): Promise<{ version: string }>\n /** Delete a vault bundle. Idempotent — no-op if the bundle does not exist. */\n deleteBundle(vaultId: string): Promise<void>\n /** List all vault bundles managed by this store. */\n listBundles(): Promise<Array<{ vaultId: string; version: string; size: number }>>\n}\n\n/**\n * Content-addressed blob object stored in the vault-level blob index.\n * Identified by HMAC-SHA-256(blobDEK, plaintext) — opaque to the store.\n *\n * Shared across all collections within a vault for deduplication: two\n * records that attach identical byte content reference the same `eTag`\n * and share a single set of encrypted chunks in `_blob_chunks`.\n */\nexport interface BlobObject {\n /** HMAC-SHA-256 hex of the original plaintext bytes, keyed by `_blob` DEK. */\n readonly eTag: string\n /** Original uncompressed size in bytes. */\n readonly size: number\n /** Compressed size in bytes (the payload that is actually encrypted and chunked). */\n readonly compressedSize: number\n /** Compression algorithm applied before encryption. */\n readonly compression: 'gzip' | 'none'\n /** Raw chunk size in bytes used at write time. Readers MUST use this value. */\n readonly chunkSize: number\n /** Total number of chunks written. Reader expects exactly this many. */\n readonly chunkCount: number\n /** MIME type if provided or auto-detected at upload time. */\n readonly mimeType?: string\n /** ISO timestamp of first upload. */\n readonly createdAt: string\n /** Live reference count — slots + published versions pointing to this blob. 
*/\n readonly refCount: number\n /**\n * Hint indicating which store holds the chunk data.\n * Used by `routeStore` size-tiered routing: `'default'` for small blobs\n * stored inline (e.g. DynamoDB), `'blobs'` for large blobs in the overflow\n * store (e.g. S3). Absent when no routing is configured.\n */\n readonly storeHint?: 'default' | 'blobs'\n}\n\n// ─── Attachment types ─────────────────────────────────────────\n\n/** Single attachment metadata entry stored inside a record's attachment envelope. */\nexport interface AttachmentEntry {\n /** Content-addressed identifier (HMAC-SHA-256 of plaintext). */\n readonly eTag: string\n /** User-visible filename for the slot. */\n readonly filename: string\n /** Original uncompressed size in bytes. */\n readonly size: number\n /** MIME type, if provided or auto-detected at upload time. */\n readonly mimeType?: string\n /** ISO timestamp of the upload. */\n readonly uploadedAt: string\n /** User ID of the uploader, if available. */\n readonly uploadedBy?: string\n}\n\n/** Attachment entry annotated with its slot name, as returned by `AttachmentHandle.list()`. */\nexport type AttachmentInfo = AttachmentEntry & { readonly name: string }\n\n/** Options for `AttachmentHandle.put()`. */\nexport interface AttachmentPutOptions {\n /** Compress the attachment with gzip before encryption. Default: `true`. */\n compress?: boolean\n /** Chunk size in bytes. Default: `DEFAULT_CHUNK_SIZE` (256 KB). */\n chunkSize?: number\n /** MIME type to store with the attachment. Auto-detected from magic bytes if omitted. */\n mimeType?: string\n /** User ID to record as the uploader. Falls back to the active user's ID. */\n uploadedBy?: string\n}\n\n/** Options for `AttachmentHandle.response()`. */\nexport interface AttachmentResponseOptions {\n /**\n * Set `Content-Disposition: inline` so the browser renders the file\n * instead of downloading it. Default: `false` (attachment disposition).\n */\n inline?: boolean\n}\n\n/**\n * Slot record — mutable metadata linking a named slot on a record\n * to a `BlobObject` via its eTag.\n *\n * Multiple slots (even across different records) may reference the same\n * `eTag` — the underlying chunks are shared. Updating metadata creates\n * a new envelope version (`_v++`) while the blob data is unchanged.\n */\nexport interface SlotRecord {\n /** Reference to the `BlobObject` in `_blob_index`. */\n readonly eTag: string\n /** User-visible filename for the slot. */\n readonly filename: string\n /** Original uncompressed size in bytes (denormalized from `BlobObject`). */\n readonly size: number\n /** MIME type. Takes precedence over the MIME type stored in `BlobObject`. */\n readonly mimeType?: string\n /** ISO timestamp of the upload that set this slot. */\n readonly uploadedAt: string\n /** User ID of the uploader, if available. */\n readonly uploadedBy?: string\n}\n\n/** Result of `BlobSet.list()` — slot record plus its named slot key. */\nexport interface SlotInfo extends SlotRecord {\n /** The slot name (key in the record's slot map). */\n readonly name: string\n}\n\n/**\n * Explicitly published version snapshot — an independent reference to a\n * blob at a specific point in time.\n */\nexport interface VersionRecord {\n /** User-defined label (e.g. `'issued-2025-01'`, `'amendment-2025-02'`). */\n readonly label: string\n /** eTag of the blob snapshot at publish time — independent of the current slot. */\n readonly eTag: string\n /** ISO timestamp when the version was published. 
*/\n readonly publishedAt: string\n /** User ID of the publisher, if available. */\n readonly publishedBy?: string\n}\n\n/** Options for `BlobSet.put()`. */\nexport interface BlobPutOptions {\n /** MIME type hint. If omitted, auto-detected from magic bytes. */\n mimeType?: string\n /**\n * Raw chunk size in bytes. Priority: this value > store.maxBlobBytes > 256 KB.\n */\n chunkSize?: number\n /**\n * Whether to gzip-compress bytes before encrypting. Default: `true`.\n * Auto-set to `false` for pre-compressed MIME types (JPEG, PNG, ZIP, etc.).\n */\n compress?: boolean\n /** User ID to record as `uploadedBy`. Defaults to the Noydb session user. */\n uploadedBy?: string\n}\n\n/** Options for `BlobSet.response()` and `BlobSet.responseVersion()`. */\nexport interface BlobResponseOptions {\n /**\n * When `true`, sets `Content-Disposition: inline; filename=\"...\"` so\n * the browser renders the file in the tab. Default (`false`) sets\n * `attachment; filename=\"...\"` which triggers a download.\n */\n inline?: boolean\n /** Override the filename in the Content-Disposition header. */\n filename?: string\n}\n\n// ─── Store Capabilities ─────────────────────────────\n\nexport type StoreAuthKind =\n | 'none'\n | 'filesystem'\n | 'api-key'\n | 'iam'\n | 'oauth'\n | 'kerberos'\n | 'browser-origin'\n\nexport interface StoreAuth {\n kind: StoreAuthKind | StoreAuthKind[]\n required: boolean\n flow: 'static' | 'oauth' | 'kerberos' | 'implicit'\n}\n\nexport interface StoreCapabilities {\n /**\n * true — the store's expectedVersion check and write are atomic at the\n * storage layer. Two concurrent puts with the same expectedVersion will\n * produce exactly one success and one ConflictError.\n * false — check and write are separate operations with a race window.\n */\n casAtomic: boolean\n auth: StoreAuth\n /**\n * true — the store implements {@link NoydbStore.tx} and commits\n * every op atomically at the storage layer. The hub's\n * `db.transaction(fn)` will delegate to `tx(ops)` and surface a\n * single pass/fail outcome. false (or absent) — no native\n * multi-record atomicity; the hub falls back to per-record OCC\n * with best-effort unwind on partial failure.\n */\n txAtomic?: boolean\n /**\n * Maximum raw bytes per blob chunk record.\n * `undefined` — no limit (S3, file, IDB); blob stored as single chunk.\n * `256 * 1024` — DynamoDB (400 KB item limit minus envelope overhead).\n * `5 * 1024 * 1024` — localStorage quota safety.\n */\n maxBlobBytes?: number\n}\n\n// ─── Factory Options ───────────────────────────────────────────────────\n\nexport interface NoydbOptions {\n /** Primary store (local storage). */\n readonly store: NoydbStore\n /**\n * tree-shake seam — optional blob strategy. Pass `withBlobs()`\n * from `@noy-db/hub/blobs` to enable `collection.blob(id)` storage.\n * When omitted, hub's blob machinery stays out of the bundle (ESM\n * tree-shaking) and `collection.blob(id)` throws with a pointer at\n * the subpath. `BlobStrategy` is `@internal` — users only construct\n * it via the subpath factory.\n *\n * @internal\n */\n readonly blobStrategy?: BlobStrategy\n /**\n * tree-shake seam — optional indexing strategy. Pass\n * `withIndexing()` from `@noy-db/hub/indexing` to enable eager-mode\n * `==/in` fast-paths, lazy-mode `.lazyQuery()`, rebuild/reconcile,\n * and auto-reconcile. 
When omitted, indexing code never reaches the\n * bundle; `.lazyQuery()` throws with a pointer at the subpath, and\n * eager-mode collections fall back to linear scans regardless of\n * `indexes: [...]` declarations. `IndexStrategy` is `@internal` —\n * users only construct it via the subpath factory.\n *\n * @internal\n */\n readonly indexStrategy?: IndexStrategy\n /**\n * tree-shake seam — optional aggregate strategy. Pass\n * `withAggregate()` from `@noy-db/hub/aggregate` to enable\n * `.aggregate()` and `.groupBy()` on Query. When omitted, those\n * methods throw with a pointer at the subpath; the ~886 LOC of\n * Aggregation + GroupedQuery machinery never reaches the bundle.\n * Streaming `scan().aggregate()` works independently of this\n * strategy — it doesn't use the `Aggregation` class.\n *\n * @internal\n */\n readonly aggregateStrategy?: AggregateStrategy\n /**\n * tree-shake seam — optional CRDT strategy. Required when\n * any collection is declared with `crdt: 'lww-map' | 'rga' | 'yjs'`;\n * otherwise the first put/sync-merge hitting the CRDT path throws.\n * When omitted, ~221 LOC of LWW-Map / RGA / merge helpers never\n * reach the bundle.\n *\n * @internal\n */\n readonly crdtStrategy?: CrdtStrategy\n /**\n * tree-shake seam — optional consent-audit strategy. Pass\n * `withConsent()` from `@noy-db/hub/consent` to enable per-op audit\n * writes into `_consent_audit` when a consent scope is active.\n * When omitted, `vault.consentAudit()` returns `[]` and writes are\n * no-ops; the consent module's ~194 LOC never reaches the bundle.\n *\n * @internal\n */\n readonly consentStrategy?: ConsentStrategy\n /**\n * tree-shake seam — optional periods strategy. Pass\n * `withPeriods()` from `@noy-db/hub/periods` to enable\n * `vault.closePeriod()` / `.openPeriod()` / write-guard on closed\n * periods. When omitted, `vault.listPeriods()` returns `[]` and\n * the write-guard is a no-op; the ~363 LOC of period validation +\n * ledger appending stay out of the bundle.\n *\n * @internal\n */\n readonly periodsStrategy?: PeriodsStrategy\n /**\n * tree-shake seam — optional VaultFrame strategy. Pass\n * `withShadow()` from `@noy-db/hub/shadow` to enable\n * `vault.frame()`. Without it, calling `vault.frame()` throws.\n *\n * @internal\n */\n readonly shadowStrategy?: ShadowStrategy\n /**\n * tree-shake seam — optional multi-record transactions. Pass\n * `withTransactions()` from `@noy-db/hub/tx` to enable\n * `db.transaction(fn)`. Without it, calling the method throws.\n *\n * @internal\n */\n readonly txStrategy?: TxStrategy\n /**\n * tree-shake seam — optional history + ledger + time-machine.\n * Pass `withHistory()` from `@noy-db/hub/history` to enable\n * per-record version snapshots, the hash-chained audit ledger, JSON\n * Patch deltas, `vault.ledger()`, `vault.at()`, and the\n * `collection.history()` / `getVersion()` / `revert()` / `diff()` /\n * `clearHistory()` / `pruneRecordHistory()` read APIs. When omitted,\n * snapshots/prune/clear are silent no-ops, the read APIs throw with\n * a pointer at the subpath, and ~1,880 LOC stay out of the bundle.\n *\n * @internal\n */\n readonly historyStrategy?: HistoryStrategy\n /**\n * tree-shake seam — optional i18n strategy. Pass `withI18n()`\n * from `@noy-db/hub/i18n` to enable `i18nText`/`dictKey` field\n * resolution on reads, `i18nText` validation on writes, and\n * `vault.dictionary(name)`. 
When omitted, locale resolution is the\n * identity (raw values returned), the validators throw with a\n * pointer to the subpath, and ~854 LOC of dictionary + locale\n * machinery stay out of the bundle.\n *\n * @internal\n */\n readonly i18nStrategy?: I18nStrategy\n /**\n * tree-shake seam — optional session-policy strategy. Pass\n * `withSession()` from `@noy-db/hub/session` to enable\n * `sessionPolicy` validation, `PolicyEnforcer` lifecycle (idle /\n * absolute timeouts, lockOnBackground), and global session-token\n * revocation. When omitted, setting `sessionPolicy` throws at\n * `createNoydb()` time, and ~495 LOC of policy + token machinery\n * stay out of the bundle.\n *\n * @internal\n */\n readonly sessionStrategy?: SessionStrategy\n /**\n * tree-shake seam — optional sync engine + presence strategy.\n * Pass `withSync()` from `@noy-db/hub/sync` to enable\n * `db.push()` / `pull()` / replication, `db.transaction(vault)`\n * for sync-aware transactions, and `collection.presence()`. When\n * omitted, configuring `sync` / calling these surfaces throws with\n * a pointer at the subpath, and ~856 LOC of replication + presence\n * machinery stay out of the bundle. Keyring stays core; grant/\n * revoke/magic-link/delegation tree-shake via direct imports.\n *\n * @internal\n */\n readonly syncStrategy?: SyncStrategy\n /** Optional remote store(s) for sync. Accepts a single store, a SyncTarget, or an array. */\n readonly sync?: NoydbStore | SyncTarget | SyncTarget[]\n /** User identifier. */\n readonly user: string\n /** Passphrase for key derivation. Required unless encrypt is false or `getKeyring` is provided. */\n readonly secret?: string\n /**\n * Optional callback that returns an unlocked keyring for a given vault.\n * Use this to plug in WebAuthn / OIDC / Shamir / any unlock path that\n * produces an `UnlockedKeyring` outside the passphrase model.\n *\n * When set, `secret` MUST NOT also be set — `createNoydb` throws if both\n * are supplied. When neither is set (and `encrypt !== false`), `createNoydb`\n * also throws.\n *\n * The callback is called lazily, on the first operation that needs the\n * keyring for a given vault. Noydb caches the returned keyring per-vault\n * for the lifetime of the instance, so the callback is invoked at most\n * once per `(instance, vault)` pair (assuming the callback resolves\n * successfully). If the callback rejects, the rejection surfaces from the\n * first vault operation that triggered the unlock; subsequent operations\n * will retry the callback.\n *\n * @example\n * ```ts\n * import { createNoydb } from '@noy-db/hub'\n * import { unlockWebAuthn } from '@noy-db/on-webauthn'\n *\n * const enrollment = await loadEnrollment()\n * const db = await createNoydb({\n * store,\n * user: 'alice',\n * getKeyring: (vault) => unlockWebAuthn(enrollment),\n * })\n * ```\n *\n * Note: this callback is responsible for both the \"open existing vault\"\n * and the \"create new vault\" cases. Unlike the passphrase path, there is\n * no automatic `NoAccessError` → `createOwnerKeyring` fallback, because\n * the callback owner has the UI context to decide which path to run.\n * For first-time bootstrap, use a passphrase or recovery code, enroll\n * WebAuthn from the unlocked keyring, then swap to `getKeyring` on\n * subsequent sessions.\n */\n readonly getKeyring?: (vault: string) => Promise<UnlockedKeyring>\n /** Auth method. Default: 'passphrase'. */\n readonly auth?: 'passphrase' | 'biometric'\n /** Enable encryption. Default: true. 
*/\n readonly encrypt?: boolean\n /** Conflict resolution strategy. Default: 'version'. */\n readonly conflict?: ConflictStrategy\n /**\n * Sync scheduling policy. Controls when push/pull fire.\n * Default inferred from store category: per-record → `on-change`,\n * bundle → `debounce 30s`.\n */\n readonly syncPolicy?: SyncPolicy\n /**\n * @deprecated Use `syncPolicy` instead. Kept for backward compatibility.\n * When both are supplied, `syncPolicy` takes precedence.\n */\n readonly autoSync?: boolean\n /**\n * @deprecated Use `syncPolicy` instead. Kept for backward compatibility.\n */\n readonly syncInterval?: number\n /**\n * Session timeout in ms. Clears keys after inactivity. Default: none.\n * @deprecated Use `sessionPolicy.idleTimeoutMs` instead. This field is\n * still honored for backwards compatibility but `sessionPolicy` takes\n * precedence when both are supplied.\n */\n readonly sessionTimeout?: number\n /**\n * Session policy controlling lifetime, re-auth requirements, and\n * background-lock behavior. When supplied, replaces the\n * legacy `sessionTimeout` field.\n */\n readonly sessionPolicy?: SessionPolicy\n /** Validate passphrase strength on creation. Default: true. */\n readonly validatePassphrase?: boolean\n /** Audit history configuration. */\n readonly history?: HistoryConfig\n /**\n * Consumer-supplied translation function for `i18nText` fields with\n * `autoTranslate: true`.\n *\n * ⚠ **`plaintextTranslator` receives unencrypted text.** Configuring\n * this hook causes plaintext to leave noy-db's zero-knowledge boundary\n * over whatever channel the consumer's implementation uses. noy-db ships\n * no built-in translator and adds no translator SDKs as dependencies.\n * The consumer chooses and owns the data policy of the external service.\n *\n * Per-field opt-in via `autoTranslate: true` on `i18nText()`. Calling\n * `put()` on a collection with `autoTranslate: true` fields while this\n * option is absent throws `TranslatorNotConfiguredError`.\n *\n * See `NOYDB_SPEC.md § Zero-Knowledge Storage` for the invariant text.\n */\n readonly plaintextTranslator?: PlaintextTranslatorFn\n /**\n * Human-readable name for the translator, recorded in the in-process\n * audit log (e.g. `'deepl-pro-with-dpa'`, `'self-hosted-llama-7b'`).\n * Defaults to `'anonymous'` when not supplied.\n */\n readonly plaintextTranslatorName?: string\n}\n\n// ─── History / Audit Trail ─────────────────────────────────────────────\n\n/** History configuration. */\nexport interface HistoryConfig {\n /** Enable history tracking. Default: true. */\n readonly enabled?: boolean\n /** Maximum history entries per record. Oldest pruned on overflow. Default: unlimited. */\n readonly maxVersions?: number\n}\n\n/** Options for querying history. */\nexport interface HistoryOptions {\n /** Start date (inclusive), ISO 8601. */\n readonly from?: string\n /** End date (inclusive), ISO 8601. */\n readonly to?: string\n /** Maximum entries to return. */\n readonly limit?: number\n}\n\n/** Options for pruning history. */\nexport interface PruneOptions {\n /** Keep only the N most recent versions. */\n readonly keepVersions?: number\n /** Delete versions older than this date, ISO 8601. */\n readonly beforeDate?: string\n}\n\n/** A decrypted history entry. */\nexport interface HistoryEntry<T> {\n readonly version: number\n readonly timestamp: string\n readonly userId: string\n readonly record: T\n}\n\n// ─── Bulk operations ──────────────────────────────────────\n\n/** Per-item options for `Collection.putMany()`. 
*/\nexport interface PutManyItemOptions {\n /**\n * Optimistic-concurrency check: fail this item if the stored version\n * is not `expectedVersion`. Honored only in `atomic: true` mode;\n * ignored in the default best-effort loop.\n */\n readonly expectedVersion?: number\n}\n\n/**\n * Batch-level options for `Collection.putMany()` and `deleteMany()`.\n *\n * `atomic: true` switches the call from best-effort loop\n * to all-or-nothing: a pre-flight CAS check runs first, then every op\n * is executed; any mid-batch failure triggers a best-effort revert.\n * On failure in atomic mode the whole call throws — you won't get a\n * partial `PutManyResult`. On success the result mirrors the default\n * loop's shape.\n */\nexport interface PutManyOptions {\n readonly atomic?: boolean\n}\n\n/** Result of `Collection.putMany()`. */\nexport interface PutManyResult {\n /** `true` iff every entry succeeded. */\n readonly ok: boolean\n /** IDs that were successfully written. */\n readonly success: readonly string[]\n /** Entries that failed, with the error that prevented each write. */\n readonly failures: ReadonlyArray<{ readonly id: string; readonly error: Error }>\n}\n\n/** Result of `Collection.deleteMany()`. Same shape as `PutManyResult`. */\nexport interface DeleteManyResult {\n readonly ok: boolean\n readonly success: readonly string[]\n readonly failures: ReadonlyArray<{ readonly id: string; readonly error: Error }>\n}\n","import type {\n NoydbStore,\n EncryptedEnvelope,\n BlobObject,\n SlotRecord,\n SlotInfo,\n VersionRecord,\n BlobPutOptions,\n BlobResponseOptions,\n} from '../types.js'\nimport { NOYDB_FORMAT_VERSION } from '../types.js'\nimport {\n encrypt,\n decrypt,\n hmacSha256Hex,\n encryptBytesWithAAD,\n decryptBytesWithAAD,\n bufferToBase64,\n base64ToBuffer,\n} from '../crypto.js'\nimport { ConflictError, NotFoundError } from '../errors.js'\nimport { detectMagic, isPreCompressed } from './mime-magic.js'\n\n// ─── Internal collection names ─────────────────────────────────────────\n\n/**\n * DEK slot name for vault-shared blob data. Calling `getDEK('_blob')`\n * auto-creates a blob DEK the first time — same lazy-creation mechanism\n * used for any user-defined collection.\n */\nexport const BLOB_COLLECTION = '_blob'\n\n/** Stores `BlobObject` metadata envelopes, keyed by eTag. */\nexport const BLOB_INDEX_COLLECTION = '_blob_index'\n\n/**\n * Stores encrypted chunk envelopes, keyed by `{eTag}/{chunkIndex}`.\n * NOT loaded into the in-memory query layer. Fetched on demand by\n * `BlobSet.get()` / `BlobSet.response()`.\n */\nexport const BLOB_CHUNKS_COLLECTION = '_blob_chunks'\n\n/** Prefix for per-collection slot metadata collections. */\nexport const BLOB_SLOTS_PREFIX = '_blob_slots_'\n\n/** Prefix for per-collection version records. */\nexport const BLOB_VERSIONS_PREFIX = '_blob_versions_'\n\n/**\n * Default chunk size: 256 KB raw bytes.\n * After AES-GCM (same size) + base64 (~33% inflation) → ~342 KB per\n * envelope, safely within DynamoDB's 400 KB item limit.\n */\nexport const DEFAULT_CHUNK_SIZE = 256 * 1024\n\n/** Maximum CAS retry attempts for refCount and slot metadata updates. 
*/\nconst MAX_CAS_RETRIES = 5\n\n// ─── Compression helpers ───────────────────────────────────────────────\n\nasync function compressBytes(\n data: Uint8Array,\n): Promise<{ bytes: Uint8Array; algorithm: 'gzip' | 'none' }> {\n if (typeof CompressionStream === 'undefined') {\n return { bytes: data, algorithm: 'none' }\n }\n const cs = new CompressionStream('gzip')\n const writer = cs.writable.getWriter()\n await writer.write(data as Uint8Array<ArrayBuffer>)\n await writer.close()\n const buf = await new Response(cs.readable).arrayBuffer()\n return { bytes: new Uint8Array(buf), algorithm: 'gzip' }\n}\n\nasync function decompressBytes(data: Uint8Array): Promise<Uint8Array> {\n if (typeof DecompressionStream === 'undefined') {\n throw new Error(\n '[noy-db] DecompressionStream not available — cannot decompress blob chunk',\n )\n }\n const ds = new DecompressionStream('gzip')\n const writer = ds.writable.getWriter()\n await writer.write(data as Uint8Array<ArrayBuffer>)\n await writer.close()\n const buf = await new Response(ds.readable).arrayBuffer()\n return new Uint8Array(buf)\n}\n\nfunction concatChunks(chunks: Uint8Array[]): Uint8Array {\n const total = chunks.reduce((s, c) => s + c.byteLength, 0)\n const out = new Uint8Array(total)\n let offset = 0\n for (const c of chunks) {\n out.set(c, offset)\n offset += c.byteLength\n }\n return out\n}\n\n/** Build the AAD binding for chunk integrity: \"{eTag}:{chunkIndex}:{chunkCount}\" */\nfunction chunkAAD(eTag: string, chunkIndex: number, chunkCount: number): Uint8Array {\n return new TextEncoder().encode(`${eTag}:${chunkIndex}:${chunkCount}`)\n}\n\n// ─── BlobSet ──────────────────────────────────────────────────────────\n\n/**\n * Handle for reading, writing, versioning, and deleting binary blobs\n * on a specific record.\n *\n * Obtained via `collection.blob(id)`. No I/O is performed until you\n * call a method.\n *\n * ## Storage layout\n *\n * ```\n * _blob_index/{eTag} BlobObject metadata (vault-shared DEK)\n * _blob_chunks/{eTag}/{chunkIndex} Encrypted chunk data (vault-shared DEK + AAD)\n * _blob_slots_{collection}/{recordId} Slot map (parent collection DEK)\n * _blob_versions_{collection}/{recordId}/{slot}/{label} Published versions (parent collection DEK)\n * ```\n *\n * ## Deduplication\n *\n * `put()` computes `eTag = HMAC-SHA-256(blobDEK, plaintext)` — keyed so the\n * store cannot predict eTags for known content. 
If another record already\n * uploaded the same bytes, the chunks are reused and `refCount` is incremented.\n *\n * ## Chunk integrity\n *\n * Each chunk is encrypted with AES-256-GCM using AAD = `{eTag}:{index}:{count}`,\n * preventing chunk reorder, substitution, and truncation attacks.\n */\nexport class BlobSet {\n private readonly store: NoydbStore\n private readonly vault: string\n private readonly collection: string\n private readonly recordId: string\n private readonly getDEK: (name: string) => Promise<CryptoKey>\n private readonly encrypted: boolean\n private readonly userId: string | undefined\n private readonly maxBlobBytes: number | undefined\n\n constructor(opts: {\n store: NoydbStore\n vault: string\n collection: string\n recordId: string\n getDEK: (name: string) => Promise<CryptoKey>\n encrypted: boolean\n userId?: string\n maxBlobBytes?: number\n }) {\n this.store = opts.store\n this.vault = opts.vault\n this.collection = opts.collection\n this.recordId = opts.recordId\n this.getDEK = opts.getDEK\n this.encrypted = opts.encrypted\n this.userId = opts.userId\n this.maxBlobBytes = opts.maxBlobBytes\n }\n\n /** The internal collection that holds slot metadata for this collection's blobs. */\n private get slotsCollection(): string {\n return `${BLOB_SLOTS_PREFIX}${this.collection}`\n }\n\n /** The internal collection that holds published versions for this collection's blobs. */\n private get versionsCollection(): string {\n return `${BLOB_VERSIONS_PREFIX}${this.collection}`\n }\n\n // ─── Slot Metadata I/O (CAS-protected) ─────────────────────────────\n\n private async loadSlots(): Promise<{\n slots: Record<string, SlotRecord>\n version: number\n }> {\n const envelope = await this.store.get(this.vault, this.slotsCollection, this.recordId)\n if (!envelope) return { slots: {}, version: 0 }\n\n if (!this.encrypted) {\n return {\n slots: JSON.parse(envelope._data) as Record<string, SlotRecord>,\n version: envelope._v,\n }\n }\n\n const dek = await this.getDEK(this.collection)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return {\n slots: JSON.parse(json) as Record<string, SlotRecord>,\n version: envelope._v,\n }\n }\n\n private async saveSlots(\n slots: Record<string, SlotRecord>,\n currentVersion: number,\n ): Promise<void> {\n const json = JSON.stringify(slots)\n const now = new Date().toISOString()\n let envelope: EncryptedEnvelope\n\n if (this.encrypted) {\n const dek = await this.getDEK(this.collection)\n const { iv, data } = await encrypt(json, dek)\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: currentVersion + 1,\n _ts: now,\n _iv: iv,\n _data: data,\n }\n } else {\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: currentVersion + 1,\n _ts: now,\n _iv: '',\n _data: json,\n }\n }\n\n await this.store.put(\n this.vault,\n this.slotsCollection,\n this.recordId,\n envelope,\n currentVersion > 0 ? currentVersion : undefined,\n )\n }\n\n /**\n * CAS retry loop for slot metadata updates. 
Re-reads slots on conflict\n * and re-applies the mutation function.\n */\n private async casUpdateSlots(\n mutate: (slots: Record<string, SlotRecord>) => Record<string, SlotRecord> | null,\n ): Promise<void> {\n for (let attempt = 0; attempt < MAX_CAS_RETRIES; attempt++) {\n const { slots, version } = await this.loadSlots()\n const updated = mutate(slots)\n if (updated === null) return // no-op\n try {\n await this.saveSlots(updated, version)\n return\n } catch (err) {\n if (err instanceof ConflictError && attempt < MAX_CAS_RETRIES - 1) continue\n throw err\n }\n }\n }\n\n // ─── Blob Index I/O (versioned for CAS refCount) ──────────────────\n\n private async loadBlobObject(eTag: string): Promise<{ blob: BlobObject; version: number } | null> {\n const envelope = await this.store.get(this.vault, BLOB_INDEX_COLLECTION, eTag)\n if (!envelope) return null\n\n if (!this.encrypted) {\n return { blob: JSON.parse(envelope._data) as BlobObject, version: envelope._v }\n }\n\n const dek = await this.getDEK(BLOB_COLLECTION)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return { blob: JSON.parse(json) as BlobObject, version: envelope._v }\n }\n\n private async writeBlobObject(blob: BlobObject, expectedVersion?: number): Promise<void> {\n const json = JSON.stringify(blob)\n const now = new Date().toISOString()\n const newVersion = (expectedVersion ?? 0) + 1\n let envelope: EncryptedEnvelope\n\n if (this.encrypted) {\n const dek = await this.getDEK(BLOB_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: newVersion, _ts: now, _iv: iv, _data: data }\n } else {\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: newVersion, _ts: now, _iv: '', _data: json }\n }\n\n await this.store.put(\n this.vault,\n BLOB_INDEX_COLLECTION,\n blob.eTag,\n envelope,\n expectedVersion,\n )\n }\n\n /**\n * CAS retry loop for refCount changes on a BlobObject.\n */\n private async casUpdateRefCount(eTag: string, delta: number): Promise<void> {\n for (let attempt = 0; attempt < MAX_CAS_RETRIES; attempt++) {\n const result = await this.loadBlobObject(eTag)\n if (!result) throw new NotFoundError(`BlobObject ${eTag} not found`)\n const { blob, version } = result\n const updated: BlobObject = { ...blob, refCount: blob.refCount + delta }\n try {\n await this.writeBlobObject(updated, version)\n return\n } catch (err) {\n if (err instanceof ConflictError && attempt < MAX_CAS_RETRIES - 1) continue\n throw err\n }\n }\n }\n\n // ─── Chunk I/O (with AAD binding) ─────────────────────────────────\n\n private async writeChunk(\n eTag: string,\n index: number,\n chunkCount: number,\n chunk: Uint8Array,\n dek: CryptoKey | null,\n ): Promise<void> {\n const id = `${eTag}_${index}`\n const now = new Date().toISOString()\n let envelope: EncryptedEnvelope\n\n if (dek) {\n const aad = chunkAAD(eTag, index, chunkCount)\n const { iv, data } = await encryptBytesWithAAD(chunk, dek, aad)\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: now, _iv: iv, _data: data }\n } else {\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: now,\n _iv: '',\n _data: bufferToBase64(chunk),\n }\n }\n\n await this.store.put(this.vault, BLOB_CHUNKS_COLLECTION, id, envelope)\n }\n\n private async readChunk(\n eTag: string,\n index: number,\n chunkCount: number,\n dek: CryptoKey | null,\n ): Promise<Uint8Array | null> {\n const envelope = await this.store.get(this.vault, BLOB_CHUNKS_COLLECTION, `${eTag}_${index}`)\n if (!envelope) return null\n\n if (dek) {\n const aad = 
chunkAAD(eTag, index, chunkCount)\n return await decryptBytesWithAAD(envelope._iv, envelope._data, dek, aad)\n }\n\n return base64ToBuffer(envelope._data)\n }\n\n // ─── Version record I/O ───────────────────────────────────────────\n\n private versionKey(slotName: string, label: string): string {\n return `${this.recordId}::${slotName}::${label}`\n }\n\n private async loadVersionRecord(slotName: string, label: string): Promise<VersionRecord | null> {\n const key = this.versionKey(slotName, label)\n const envelope = await this.store.get(this.vault, this.versionsCollection, key)\n if (!envelope) return null\n\n if (!this.encrypted) {\n return JSON.parse(envelope._data) as VersionRecord\n }\n\n const dek = await this.getDEK(this.collection)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return JSON.parse(json) as VersionRecord\n }\n\n private async writeVersionRecord(slotName: string, record: VersionRecord): Promise<void> {\n const key = this.versionKey(slotName, record.label)\n const json = JSON.stringify(record)\n const now = new Date().toISOString()\n let envelope: EncryptedEnvelope\n\n if (this.encrypted) {\n const dek = await this.getDEK(this.collection)\n const { iv, data } = await encrypt(json, dek)\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: now, _iv: iv, _data: data }\n } else {\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: now, _iv: '', _data: json }\n }\n\n await this.store.put(this.vault, this.versionsCollection, key, envelope)\n }\n\n private async deleteVersionRecord(slotName: string, label: string): Promise<void> {\n const key = this.versionKey(slotName, label)\n await this.store.delete(this.vault, this.versionsCollection, key)\n }\n\n // ─── Effective chunk size ─────────────────────────────────────────\n\n private effectiveChunkSize(opts?: BlobPutOptions): number {\n if (opts?.chunkSize) return opts.chunkSize\n if (this.maxBlobBytes) return this.maxBlobBytes\n return DEFAULT_CHUNK_SIZE\n }\n\n // ─── Fetch all chunks for a blob ──────────────────────────────────\n\n private async fetchAllChunks(blob: BlobObject): Promise<Uint8Array> {\n const blobDEK = this.encrypted ? await this.getDEK(BLOB_COLLECTION) : null\n const chunks: Uint8Array[] = []\n\n for (let i = 0; i < blob.chunkCount; i++) {\n const chunk = await this.readChunk(blob.eTag, i, blob.chunkCount, blobDEK)\n if (!chunk) {\n throw new NotFoundError(\n `Blob chunk ${i}/${blob.chunkCount} missing for eTag \"${blob.eTag}\" on record \"${this.recordId}\"`,\n )\n }\n chunks.push(chunk)\n }\n\n const assembled = concatChunks(chunks)\n return blob.compression === 'gzip' ? await decompressBytes(assembled) : assembled\n }\n\n // ─── Public API: Slot management ──────────────────────────────────\n\n /**\n * Upload bytes and attach them to this record under `slotName`.\n *\n * 1. Computes `eTag = HMAC-SHA-256(blobDEK, plaintext)` for keyed content-addressing.\n * 2. Auto-detects MIME type from magic bytes if not provided.\n * 3. If a blob with this eTag already exists, skips chunk upload (deduplication)\n * and CAS-increments refCount.\n * 4. Otherwise: compresses → splits into chunks → encrypts each chunk with\n * AAD binding → writes `_blob_chunks` → writes `BlobObject` to `_blob_index`.\n * 5. 
CAS-updates the slot metadata in `_blob_slots_{collection}`.\n * If overwriting an existing slot, decrements the old eTag's refCount.\n */\n async put(slotName: string, data: Uint8Array, opts?: BlobPutOptions): Promise<void> {\n // Step 1 — keyed content-hash (plaintext, before compression)\n const blobDEK = this.encrypted ? await this.getDEK(BLOB_COLLECTION) : null\n const eTag = blobDEK\n ? await hmacSha256Hex(blobDEK, data)\n : await plainSha256Hex(data)\n\n // Step 2 — MIME detection\n let mimeType = opts?.mimeType\n if (!mimeType) {\n const detected = detectMagic(data.subarray(0, 16))\n if (detected) mimeType = detected.mime\n }\n\n // Determine compression: explicit opt > auto-detect > default true\n let shouldCompress: boolean\n if (opts?.compress !== undefined) {\n shouldCompress = opts.compress\n } else if (mimeType && isPreCompressed(mimeType)) {\n shouldCompress = false\n } else {\n shouldCompress = true\n }\n\n // Step 3 — deduplication check\n const existingBlob = await this.loadBlobObject(eTag)\n\n if (existingBlob) {\n // eTag already exists — just increment refCount (CAS retry)\n await this.casUpdateRefCount(eTag, +1)\n } else {\n // Step 4 — compress\n const { bytes: compressed, algorithm } = shouldCompress\n ? await compressBytes(data)\n : { bytes: data, algorithm: 'none' as const }\n\n const chunkSize = this.effectiveChunkSize(opts)\n const chunkCount = Math.max(1, Math.ceil(compressed.byteLength / chunkSize))\n\n // Step 5 — write chunks FIRST with AAD binding (safe failure order)\n for (let i = 0; i < chunkCount; i++) {\n const start = i * chunkSize\n await this.writeChunk(\n eTag, i, chunkCount,\n compressed.subarray(start, start + chunkSize),\n blobDEK,\n )\n }\n\n // Step 6 — write blob index entry after all chunks succeed\n await this.writeBlobObject({\n eTag,\n size: data.byteLength,\n compressedSize: compressed.byteLength,\n compression: algorithm,\n chunkSize,\n chunkCount,\n ...(mimeType !== undefined ? { mimeType } : {}),\n createdAt: new Date().toISOString(),\n refCount: 1,\n })\n }\n\n // Step 7 — CAS-update slot metadata\n const uploaderUserId = opts?.uploadedBy ?? this.userId\n await this.casUpdateSlots((slots) => {\n const oldETag = slots[slotName]?.eTag\n slots[slotName] = {\n eTag,\n filename: slotName,\n size: data.byteLength,\n ...(mimeType !== undefined ? { mimeType } : {}),\n uploadedAt: new Date().toISOString(),\n ...(uploaderUserId !== undefined ? 
{ uploadedBy: uploaderUserId } : {}),\n }\n // Schedule old eTag refCount decrement (non-blocking best-effort)\n if (oldETag && oldETag !== eTag) {\n this._deferredRefDecrement = oldETag\n }\n return slots\n })\n\n // Decrement old eTag refCount outside the CAS loop\n if (this._deferredRefDecrement) {\n const oldETag = this._deferredRefDecrement\n this._deferredRefDecrement = undefined\n await this.casUpdateRefCount(oldETag, -1).catch(() => {\n // Best-effort — blobGC will reconcile\n })\n }\n }\n\n private _deferredRefDecrement: string | undefined\n\n /**\n * Fetch all bytes for the named slot.\n * Returns `null` if the slot does not exist.\n * Throws `NotFoundError` if the index entry exists but a chunk is missing.\n */\n async get(slotName: string): Promise<Uint8Array | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n return this.fetchAllChunks(result.blob)\n }\n\n /**\n * List all slot entries for this record.\n * Returns metadata only — no chunk data is loaded.\n */\n async list(): Promise<SlotInfo[]> {\n const { slots } = await this.loadSlots()\n return Object.entries(slots).map(([name, slot]) => ({ name, ...slot }))\n }\n\n /**\n * Delete the named slot from this record.\n * Decrements refCount on the blob. Chunks are GC'd by `vault.blobGC()`.\n */\n async delete(slotName: string): Promise<void> {\n let eTagToDecrement: string | undefined\n\n await this.casUpdateSlots((slots) => {\n if (!(slotName in slots)) return null\n eTagToDecrement = slots[slotName]!.eTag\n delete slots[slotName]\n return slots\n })\n\n if (eTagToDecrement) {\n await this.casUpdateRefCount(eTagToDecrement, -1).catch(() => {\n // Best-effort — blobGC will reconcile\n })\n }\n }\n\n /**\n * Return a native `Response` whose body streams the decrypted,\n * decompressed blob bytes with full HTTP metadata headers.\n *\n * Note: the implementation is buffered — all chunks are loaded into\n * memory before being enqueued. True streaming is deferred.\n *\n * Returns `null` if the slot does not exist.\n */\n async response(slotName: string, opts?: BlobResponseOptions): Promise<Response | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n return this.buildResponse(slot, result.blob, opts)\n }\n\n /**\n * Decrypt the slot and wrap the bytes in a browser ObjectURL ready\n * to feed into `<img src>`, `<a href>`, etc. The caller MUST call\n * `revoke()` when the URL is no longer needed — otherwise the URL\n * (and the underlying decrypted Blob) are pinned for the lifetime\n * of the document, which leaks memory in long-lived pages.\n *\n * Returns `null` when the slot does not exist.\n *\n * Throws when `URL.createObjectURL` is unavailable in the host\n * environment (Node without DOM, restricted workers). Framework\n * adapters — `useBlobURL` in `@noy-db/in-vue`, etc. — guard against\n * this for SSR contexts and stay at `null` instead of propagating\n * the error.\n */\n async objectURL(\n slotName: string,\n opts?: { mimeType?: string },\n ): Promise<{ url: string; revoke: () => void } | null> {\n if (typeof URL === 'undefined' || typeof URL.createObjectURL !== 'function') {\n throw new Error(\n 'BlobSet.objectURL: URL.createObjectURL is unavailable in this environment. 
' +\n 'Call this from the browser, or use BlobSet.get() and create the URL yourself.',\n )\n }\n const bytes = await this.get(slotName)\n if (!bytes) return null\n\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n const type = opts?.mimeType ?? slot?.mimeType ?? 'application/octet-stream'\n\n // Pinning the underlying ArrayBuffer in a Blob is what backs the\n // ObjectURL — once we createObjectURL the URL holds a strong ref\n // to the Blob, so the local `blob` variable can fall out of scope.\n // Copy through a fresh ArrayBuffer so TS narrows away the\n // SharedArrayBuffer branch of `ArrayBufferLike` (Uint8Array is\n // generic over the backing buffer type since TS 5.7).\n const buffer = bytes.buffer.slice(bytes.byteOffset, bytes.byteOffset + bytes.byteLength) as ArrayBuffer\n const blob = new Blob([buffer], { type })\n const url = URL.createObjectURL(blob)\n let revoked = false\n const revoke = (): void => {\n if (revoked) return\n revoked = true\n URL.revokeObjectURL(url)\n }\n return { url, revoke }\n }\n\n // ─── Public API: Published versions (UC-3 amendment versioning) ───\n\n /**\n * Publish the current slot content as a named version snapshot.\n *\n * The published version holds an independent refCount reference to\n * the blob. Even if the slot is later overwritten or deleted, the\n * published version keeps the blob data alive.\n *\n * Publishing with an existing label overwrites it — if the eTags differ,\n * refCounts are adjusted accordingly.\n */\n async publish(slotName: string, label: string): Promise<void> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) throw new NotFoundError(`Slot \"${slotName}\" not found on record \"${this.recordId}\"`)\n\n // Check for existing version with this label\n const existing = await this.loadVersionRecord(slotName, label)\n if (existing && existing.eTag === slot.eTag) return // no-op: same blob\n\n // Write the version record\n const record: VersionRecord = {\n label,\n eTag: slot.eTag,\n publishedAt: new Date().toISOString(),\n ...(this.userId !== undefined ? 
{ publishedBy: this.userId } : {}),\n }\n await this.writeVersionRecord(slotName, record)\n\n // Increment refCount for the new version's eTag\n await this.casUpdateRefCount(slot.eTag, +1)\n\n // If overwriting an existing version with a different eTag, decrement the old one\n if (existing && existing.eTag !== slot.eTag) {\n await this.casUpdateRefCount(existing.eTag, -1).catch(() => {})\n }\n }\n\n /**\n * Fetch bytes for a published version.\n * Returns `null` if the version does not exist.\n */\n async getVersion(slotName: string, label: string): Promise<Uint8Array | null> {\n const record = await this.loadVersionRecord(slotName, label)\n if (!record) return null\n\n const result = await this.loadBlobObject(record.eTag)\n if (!result) return null\n\n return this.fetchAllChunks(result.blob)\n }\n\n /**\n * List all published versions for a slot.\n */\n async listVersions(slotName: string): Promise<VersionRecord[]> {\n const prefix = `${this.recordId}::${slotName}::`\n const allKeys = await this.store.list(this.vault, this.versionsCollection)\n const matchingKeys = allKeys.filter((k) => k.startsWith(prefix))\n\n const versions: VersionRecord[] = []\n for (const key of matchingKeys) {\n const envelope = await this.store.get(this.vault, this.versionsCollection, key)\n if (!envelope) continue\n\n if (!this.encrypted) {\n versions.push(JSON.parse(envelope._data) as VersionRecord)\n } else {\n const dek = await this.getDEK(this.collection)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n versions.push(JSON.parse(json) as VersionRecord)\n }\n }\n\n return versions\n }\n\n /**\n * Delete a published version. Decrements refCount on its blob.\n */\n async deleteVersion(slotName: string, label: string): Promise<void> {\n const record = await this.loadVersionRecord(slotName, label)\n if (!record) return\n\n await this.deleteVersionRecord(slotName, label)\n await this.casUpdateRefCount(record.eTag, -1).catch(() => {})\n }\n\n /**\n * Return a `Response` for a published version — same as `response()`\n * but reads from the version record's eTag instead of the current slot.\n */\n async responseVersion(\n slotName: string,\n label: string,\n opts?: BlobResponseOptions,\n ): Promise<Response | null> {\n const record = await this.loadVersionRecord(slotName, label)\n if (!record) return null\n\n const result = await this.loadBlobObject(record.eTag)\n if (!result) return null\n\n // Build a synthetic SlotRecord from the version + blob data\n const slotLike: SlotRecord = {\n eTag: record.eTag,\n filename: opts?.filename ?? `${slotName}-${label}`,\n size: result.blob.size,\n ...(result.blob.mimeType !== undefined ? { mimeType: result.blob.mimeType } : {}),\n uploadedAt: record.publishedAt,\n ...(record.publishedBy !== undefined ? { uploadedBy: record.publishedBy } : {}),\n }\n\n return this.buildResponse(slotLike, result.blob, opts)\n }\n\n // ─── Diagnostics ──────────────────────────────────────────────────\n\n /**\n * Return the `BlobObject` metadata for the named slot.\n * Returns `null` if the slot or blob does not exist.\n */\n async blobInfo(slotName: string): Promise<BlobObject | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n const result = await this.loadBlobObject(slot.eTag)\n return result?.blob ?? null\n }\n\n // ─── Presigned URL (E5) ────────────────────────────────────────────\n\n /**\n * Generate a presigned URL for direct client download of the blob's\n * ciphertext. 
Only works when the blob store supports `presignUrl`.\n *\n * **Important:** The URL returns encrypted data. The caller must\n * decrypt client-side using `decryptResponse()` or a service worker.\n *\n * Returns `null` if the slot doesn't exist or the store doesn't support presigning.\n */\n async presignedUrl(slotName: string, expiresInSeconds = 3600): Promise<string | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n // Only works for single-chunk blobs where the store supports presigning\n if (result.blob.chunkCount !== 1) return null\n if (!this.store.presignUrl) return null\n\n const chunkId = `${slot.eTag}_0`\n return this.store.presignUrl(this.vault, '_blob_chunks', chunkId, expiresInSeconds)\n }\n\n /**\n * Decrypt a ciphertext Response (e.g. from a presigned URL fetch)\n * back into a plaintext Response with correct headers.\n *\n * Usage with service worker or client-side fetch:\n * ```ts\n * const url = await blobs.presignedUrl('invoice.pdf')\n * const cipherResponse = await fetch(url)\n * const plainResponse = await blobs.decryptResponse('invoice.pdf', cipherResponse)\n * ```\n */\n async decryptResponse(slotName: string, cipherResponse: Response): Promise<Response | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n // Parse the envelope from the ciphertext response\n const text = await cipherResponse.text()\n const envelope = JSON.parse(text) as { _iv: string; _data: string }\n\n const blobDEK = this.encrypted ? await this.getDEK('_blob') : null\n if (!blobDEK) {\n return this.buildResponse(slot, result.blob, { inline: true })\n }\n\n // Decrypt the single chunk\n const aad = chunkAAD(slot.eTag, 0, result.blob.chunkCount)\n const { decryptBytesWithAAD: decryptAAD } = await import('../crypto.js')\n const decrypted = await decryptAAD(envelope._iv, envelope._data, blobDEK, aad)\n const plaintext = result.blob.compression === 'gzip'\n ? await decompressBytes(decrypted)\n : decrypted\n\n const body = new ReadableStream<Uint8Array>({\n start(controller) {\n controller.enqueue(plaintext)\n controller.close()\n },\n })\n\n const filename = slot.filename\n return new Response(body, {\n headers: {\n 'Content-Type': slot.mimeType ?? 'application/octet-stream',\n 'Content-Length': String(slot.size),\n 'ETag': `\"${slot.eTag}\"`,\n 'Content-Disposition': `inline; filename=\"${filename}\"`,\n 'Last-Modified': new Date(slot.uploadedAt).toUTCString(),\n },\n })\n }\n\n // ─── Internal: build Response from slot + blob ────────────────────\n\n private async buildResponse(\n slot: SlotRecord,\n blob: BlobObject,\n opts?: BlobResponseOptions,\n ): Promise<Response> {\n const fetchAllChunks = this.fetchAllChunks.bind(this)\n\n // buffered — all chunks loaded into memory then enqueued.\n const body = new ReadableStream<Uint8Array>({\n async start(controller) {\n try {\n const output = await fetchAllChunks(blob)\n controller.enqueue(output)\n controller.close()\n } catch (err) {\n controller.error(err)\n }\n },\n })\n\n const filename = opts?.filename ?? slot.filename\n const disposition = opts?.inline\n ? `inline; filename=\"${filename}\"`\n : `attachment; filename=\"${filename}\"`\n\n return new Response(body, {\n headers: {\n 'Content-Type': slot.mimeType ?? 
'application/octet-stream',\n 'Content-Length': String(slot.size),\n 'ETag': `\"${slot.eTag}\"`,\n 'Content-Disposition': disposition,\n 'Last-Modified': new Date(slot.uploadedAt).toUTCString(),\n },\n })\n }\n}\n\n// ─── Fallback for unencrypted mode ──────────────────────────────────────\n\nimport { sha256Hex } from '../crypto.js'\n\nasync function plainSha256Hex(data: Uint8Array): Promise<string> {\n return sha256Hex(data)\n}\n","/**\n * Lightweight MIME type detection from magic bytes (file signatures).\n *\n * Designed for the blob store's auto-detection feature. Operates on the first 16 bytes of\n * plaintext — no filesystem access, no filename guessing.\n *\n * ## Detection strategies\n *\n * 1. **Prefix match** — magic bytes at offset 0 (most formats).\n * 2. **Offset match** — magic bytes at a fixed offset > 0 (ISOBMFF: offset 4).\n * 3. **Compound match** — two separate byte sequences at different offsets\n * (RIFF-based: bytes 0-3 + bytes 8-11).\n *\n * ## Formats excluded (require offset > 16 bytes)\n *\n * - TAR (`ustar` at offset 257)\n * - ISO 9660 (`CD001` at offset 32769)\n *\n * @module\n */\n\n// ─── Types ───────────────────────────────────────────────────────────────\n\ninterface MagicRule {\n /** IANA MIME type (or widely-used x- type). */\n readonly mime: string\n /** Human-readable format name for diagnostics. */\n readonly format: string\n /** Magic bytes to match, as a Uint8Array. */\n readonly bytes: Uint8Array\n /** Byte offset where the magic starts. Default 0. */\n readonly offset?: number\n /**\n * For compound checks (RIFF, FORM): a second byte sequence that must\n * also match at `secondaryOffset`.\n */\n readonly secondaryBytes?: Uint8Array\n /** Offset of the secondary match. */\n readonly secondaryOffset?: number\n /** If true, the format is already compressed — skip gzip in blob.put(). */\n readonly preCompressed?: true\n}\n\n// ─── Helpers ─────────────────────────────────────────────────────────────\n\n/** Convert a hex string like `'FF D8 FF'` to Uint8Array. */\nfunction hex(s: string): Uint8Array {\n return new Uint8Array(s.split(' ').map((b) => parseInt(b, 16)))\n}\n\n// ─── Magic rules ─────────────────────────────────────────────────────────\n//\n// Ordered by detection priority: more specific (longer) signatures first\n// within the same offset group, so that e.g. 
RAR v5 (8 bytes) is tested\n// before RAR v4 (7 bytes).\n//\n// Sources verified against:\n// - Gary Kessler's File Signatures Table\n// - Wikipedia \"List of file signatures\"\n// - IANA MIME type registry\n// - Individual format specifications (PNG RFC 2083, PDF ISO 32000, etc.)\n//\n// Each entry includes the original CSV row number for traceability.\n\nconst MAGIC_RULES: readonly MagicRule[] = [\n // ── Images ───────────────────────────────────────────────────────────\n\n // #2 PNG — full 8-byte signature (RFC 2083)\n { mime: 'image/png', format: 'PNG', bytes: hex('89 50 4E 47 0D 0A 1A 0A'), preCompressed: true },\n\n // #1 JPEG — FF D8 FF (third byte is start of APP marker, always FF)\n { mime: 'image/jpeg', format: 'JPEG', bytes: hex('FF D8 FF'), preCompressed: true },\n\n // #7 WebP — RIFF compound: bytes 0-3 = RIFF, bytes 8-11 = WEBP\n {\n mime: 'image/webp',\n format: 'WebP',\n bytes: hex('52 49 46 46'),\n secondaryBytes: hex('57 45 42 50'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // #5 TIFF (little-endian) — II + version 42\n { mime: 'image/tiff', format: 'TIFF', bytes: hex('49 49 2A 00') },\n\n // #6 TIFF (big-endian) — MM + version 42\n { mime: 'image/tiff', format: 'TIFF', bytes: hex('4D 4D 00 2A') },\n\n // #3 GIF — GIF8 (covers GIF87a and GIF89a)\n { mime: 'image/gif', format: 'GIF', bytes: hex('47 49 46 38'), preCompressed: true },\n\n // #4 BMP — BM\n { mime: 'image/bmp', format: 'BMP', bytes: hex('42 4D') },\n\n // PSD — 8BPS\n { mime: 'image/vnd.adobe.photoshop', format: 'PSD', bytes: hex('38 42 50 53') },\n\n // #8 ICO — 00 00 01 00 (note: 00 00 02 00 is CUR cursor format)\n { mime: 'image/x-icon', format: 'ICO', bytes: hex('00 00 01 00') },\n\n // #9 HEIC — ISOBMFF: ftyp at offset 4, brand \"heic\" at offset 8\n {\n mime: 'image/heic',\n format: 'HEIC',\n bytes: hex('66 74 79 70'),\n offset: 4,\n secondaryBytes: hex('68 65 69 63'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // ── Documents ────────────────────────────────────────────────────────\n\n // PDF — %PDF\n { mime: 'application/pdf', format: 'PDF', bytes: hex('25 50 44 46') },\n\n // RTF — {\\rtf\n { mime: 'application/rtf', format: 'RTF', bytes: hex('7B 5C 72 74 66') },\n\n // ── Archives & compression ───────────────────────────────────────────\n\n // RAR v5 — 8-byte signature (test before RAR v4)\n { mime: 'application/vnd.rar', format: 'RAR v5', bytes: hex('52 61 72 21 1A 07 01 00'), preCompressed: true },\n\n // RAR v4 — 7-byte signature\n { mime: 'application/vnd.rar', format: 'RAR v4', bytes: hex('52 61 72 21 1A 07 00'), preCompressed: true },\n\n // 7-Zip — 6-byte signature\n { mime: 'application/x-7z-compressed', format: '7Z', bytes: hex('37 7A BC AF 27 1C'), preCompressed: true },\n\n // XZ — 6-byte stream header\n { mime: 'application/x-xz', format: 'XZ', bytes: hex('FD 37 7A 58 5A 00'), preCompressed: true },\n\n // ZIP — PK\\x03\\x04 (local file header)\n { mime: 'application/zip', format: 'ZIP', bytes: hex('50 4B 03 04'), preCompressed: true },\n\n // GZIP — 1F 8B\n { mime: 'application/gzip', format: 'GZIP', bytes: hex('1F 8B'), preCompressed: true },\n\n // BZIP2 — BZh\n { mime: 'application/x-bzip2', format: 'BZIP2', bytes: hex('42 5A 68'), preCompressed: true },\n\n // LZIP — LZIP\n { mime: 'application/x-lzip', format: 'LZIP', bytes: hex('4C 5A 49 50'), preCompressed: true },\n\n // ── Audio ────────────────────────────────────────────────────────────\n\n // WAV — RIFF compound: bytes 0-3 = RIFF, bytes 8-11 = WAVE\n {\n mime: 'audio/wav',\n format: 'WAV',\n bytes: 
hex('52 49 46 46'),\n secondaryBytes: hex('57 41 56 45'),\n secondaryOffset: 8,\n },\n\n // AIFF — FORM compound: bytes 0-3 = FORM, bytes 8-11 = AIFF\n {\n mime: 'audio/aiff',\n format: 'AIFF',\n bytes: hex('46 4F 52 4D'),\n secondaryBytes: hex('41 49 46 46'),\n secondaryOffset: 8,\n },\n\n // FLAC — fLaC\n { mime: 'audio/flac', format: 'FLAC', bytes: hex('66 4C 61 43') },\n\n // OGG — OggS (container — may hold Vorbis, Opus, Theora, etc.)\n { mime: 'application/ogg', format: 'OGG', bytes: hex('4F 67 67 53') },\n\n // MIDI — MThd\n { mime: 'audio/midi', format: 'MIDI', bytes: hex('4D 54 68 64') },\n\n // MP3 (ID3-tagged) — ID3\n { mime: 'audio/mpeg', format: 'MP3', bytes: hex('49 44 33'), preCompressed: true },\n\n // ── Video ────────────────────────────────────────────────────────────\n\n // AVI — RIFF compound: bytes 0-3 = RIFF, bytes 8-11 = AVI\x20\n {\n mime: 'video/x-msvideo',\n format: 'AVI',\n bytes: hex('52 49 46 46'),\n secondaryBytes: hex('41 56 49 20'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // WMV/ASF — 8-byte ASF header GUID prefix\n { mime: 'video/x-ms-wmv', format: 'WMV', bytes: hex('30 26 B2 75 8E 66 CF 11'), preCompressed: true },\n\n // MKV/WebM — EBML header (Matroska container)\n { mime: 'video/x-matroska', format: 'MKV', bytes: hex('1A 45 DF A3'), preCompressed: true },\n\n // FLV — FLV\n { mime: 'video/x-flv', format: 'FLV', bytes: hex('46 4C 56'), preCompressed: true },\n\n // MOV — ISOBMFF: ftyp at offset 4, brand \"qt \" at offset 8\n {\n mime: 'video/quicktime',\n format: 'MOV',\n bytes: hex('66 74 79 70'),\n offset: 4,\n secondaryBytes: hex('71 74 20 20'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // MP4 — ISOBMFF: ftyp at offset 4 (brands vary: isom, mp41, mp42, etc.)\n // Tested AFTER MOV and HEIC so their specific brands match first.\n { mime: 'video/mp4', format: 'MP4', bytes: hex('66 74 79 70'), offset: 4, preCompressed: true },\n\n // ── Executables & binaries ───────────────────────────────────────────\n\n // SQLite — \"SQLite f\" (first 8 bytes of the 16-byte \"SQLite format 3\" header)\n { mime: 'application/vnd.sqlite3', format: 'SQLite', bytes: hex('53 51 4C 69 74 65 20 66') },\n\n // WASM — \0asm\n { mime: 'application/wasm', format: 'WASM', bytes: hex('00 61 73 6D') },\n\n // ELF — \x7FELF\n { mime: 'application/x-elf', format: 'ELF', bytes: hex('7F 45 4C 46') },\n\n // PE (EXE/DLL) — MZ\n { mime: 'application/vnd.microsoft.portable-executable', format: 'PE', bytes: hex('4D 5A') },\n\n // Mach-O — all four single-arch variants\n { mime: 'application/x-mach-binary', format: 'Mach-O 64 LE', bytes: hex('CF FA ED FE') },\n { mime: 'application/x-mach-binary', format: 'Mach-O 64 BE', bytes: hex('FE ED FA CF') },\n { mime: 'application/x-mach-binary', format: 'Mach-O 32 LE', bytes: hex('CE FA ED FE') },\n { mime: 'application/x-mach-binary', format: 'Mach-O 32 BE', bytes: hex('FE ED FA CE') },\n\n // Java Class — CA FE BA BE\n // Note: collides with Mach-O Universal Binary. Disambiguated by checking\n // bytes 4-7: Java class version is >= 0x002D (45), while fat binary\n // arch count is a small number (typically 0x00000002).\n // We place Java after Mach-O single-arch entries so the more common\n // Mach-O variants match first. 
The CA FE BA BE collision between Java\n // and Mach-O fat binary is resolved by the caller if needed.\n { mime: 'application/java-vm', format: 'Java Class', bytes: hex('CA FE BA BE') },\n\n // DEX — dex\\n (Android Dalvik Executable)\n { mime: 'application/vnd.android.dex', format: 'DEX', bytes: hex('64 65 78 0A') },\n\n // ── Package formats ──────────────────────────────────────────────────\n\n // DEB — !<arch> (ar archive; DEB-specific member follows)\n { mime: 'application/vnd.debian.binary-package', format: 'DEB', bytes: hex('21 3C 61 72 63 68 3E') },\n\n // RPM — ED AB EE DB\n { mime: 'application/x-rpm', format: 'RPM', bytes: hex('ED AB EE DB') },\n\n // CAB — MSCF\n { mime: 'application/vnd.ms-cab-compressed', format: 'CAB', bytes: hex('4D 53 43 46'), preCompressed: true },\n\n // ── Capture & Flash ──────────────────────────────────────────────────\n\n // PCAP (little-endian) — D4 C3 B2 A1\n { mime: 'application/vnd.tcpdump.pcap', format: 'PCAP', bytes: hex('D4 C3 B2 A1') },\n\n // PCAP (big-endian) — A1 B2 C3 D4\n { mime: 'application/vnd.tcpdump.pcap', format: 'PCAP BE', bytes: hex('A1 B2 C3 D4') },\n\n // PCAPNG — Section Header Block\n { mime: 'application/x-pcapng', format: 'PCAPNG', bytes: hex('0A 0D 0D 0A') },\n\n // SWF — all three variants (uncompressed, zlib, LZMA)\n { mime: 'application/x-shockwave-flash', format: 'SWF', bytes: hex('46 57 53') },\n { mime: 'application/x-shockwave-flash', format: 'SWF zlib', bytes: hex('43 57 53'), preCompressed: true },\n { mime: 'application/x-shockwave-flash', format: 'SWF LZMA', bytes: hex('5A 57 53'), preCompressed: true },\n\n // ── Data formats ─────────────────────────────────────────────────────\n\n // Parquet — PAR1 (no registered IANA MIME; using Apache's informal type)\n { mime: 'application/vnd.apache.parquet', format: 'Parquet', bytes: hex('50 41 52 31') },\n\n // Avro Object Container — Obj\\x01\n { mime: 'application/avro', format: 'Avro', bytes: hex('4F 62 6A 01') },\n\n // NES ROM — NES\\x1A (iNES header)\n { mime: 'application/x-nintendo-nes-rom', format: 'NES ROM', bytes: hex('4E 45 53 1A') },\n] as const\n\n// ─── MP3 sync word ───────────────────────────────────────────────────────\n//\n// MP3 files without an ID3 tag start with a frame sync word where the top\n// 11 bits are set: 0xFFE0 mask. The ID3 signature (49 44 33) is handled\n// as a normal rule above. The sync-word check is a fallback tested in\n// `detectMimeType` after all rules.\n\nfunction isMp3SyncWord(byte0: number, byte1: number): boolean {\n return byte0 === 0xff && (byte1 & 0xe0) === 0xe0\n}\n\n// ─── Detection ───────────────────────────────────────────────────────────\n\n/**\n * Detect MIME type from the first bytes of a file.\n *\n * @param header - The first 16 bytes (or more) of the plaintext. Passing\n * fewer than 16 bytes may miss compound and offset-based matches.\n * @returns Detected MIME type, or `'application/octet-stream'` if unknown.\n */\nexport function detectMimeType(header: Uint8Array): string {\n const result = detectMagic(header)\n return result?.mime ?? 
'application/octet-stream'\n}\n\n/**\n * Detect MIME type and whether the format is already compressed.\n *\n * Used by `BlobSet.put()` to decide whether to skip gzip compression.\n *\n * @param header - The first 16 bytes (or more) of the plaintext.\n * @returns `{ mime, preCompressed }` or `null` if no match.\n */\nexport function detectMagic(\n header: Uint8Array,\n): { mime: string; format: string; preCompressed: boolean } | null {\n for (const rule of MAGIC_RULES) {\n if (matchRule(header, rule)) {\n return {\n mime: rule.mime,\n format: rule.format,\n preCompressed: rule.preCompressed ?? false,\n }\n }\n }\n\n // Fallback: MP3 sync word (no ID3 tag)\n if (header.length >= 2 && isMp3SyncWord(header[0]!, header[1]!)) {\n return { mime: 'audio/mpeg', format: 'MP3', preCompressed: true }\n }\n\n return null\n}\n\n/**\n * Check whether a format is already compressed (should skip gzip).\n *\n * @param mimeType - A MIME type string.\n * @returns `true` if the format is known to be pre-compressed.\n */\nexport function isPreCompressed(mimeType: string): boolean {\n return PRE_COMPRESSED_MIMES.has(mimeType)\n}\n\n// ─── Internal matching ───────────────────────────────────────────────────\n\nfunction matchRule(header: Uint8Array, rule: MagicRule): boolean {\n const offset = rule.offset ?? 0\n const end = offset + rule.bytes.length\n\n // Not enough data for the primary match\n if (header.length < end) return false\n\n // Primary byte sequence\n for (let i = 0; i < rule.bytes.length; i++) {\n if (header[offset + i] !== rule.bytes[i]) return false\n }\n\n // Secondary byte sequence (compound check)\n if (rule.secondaryBytes && rule.secondaryOffset !== undefined) {\n const sEnd = rule.secondaryOffset + rule.secondaryBytes.length\n if (header.length < sEnd) return false\n for (let i = 0; i < rule.secondaryBytes.length; i++) {\n if (header[rule.secondaryOffset + i] !== rule.secondaryBytes[i]) return false\n }\n }\n\n return true\n}\n\n// ─── Pre-compressed MIME set ─────────────────────────────────────────────\n//\n// Built from the rules above. Used by `isPreCompressed()` for callers who\n// already know the MIME type (e.g. from a Content-Type header) and want to\n// skip the magic-byte detection step.\n\nconst PRE_COMPRESSED_MIMES = new Set<string>(\n MAGIC_RULES.filter((r) => r.preCompressed).map((r) => r.mime),\n)\n","/**\n * Active blob strategy factory. Calling `blobs()` returns a\n * `BlobStrategy` whose `openSlot` constructs a real `BlobSet` bound\n * to the caller's record. 
The returned strategy is passed into\n * `createNoydb({ blobStrategy: blobs() })` to light up the\n * `collection.blob(id)` path.\n *\n * This module is only reachable through the `@noy-db/hub/blobs`\n * subpath — a consumer that never imports the subpath ships none of\n * this (ESM tree-shaking + hub's `\"sideEffects\": false`).\n */\n\nimport { BlobSet } from './blob-set.js'\nimport type { BlobStrategy } from './strategy.js'\n\n/**\n * Build a default `BlobStrategy` ready to pass into `createNoydb`.\n *\n * Named `withBlobs` (plugin-pattern canonical) rather than `blobs` to\n * avoid shadowing the very common local idiom\n * `const blobs = invoices.blob(id)` in user code.\n *\n * @example\n * ```ts\n * import { createNoydb } from '@noy-db/hub'\n * import { withBlobs } from '@noy-db/hub/blobs'\n *\n * const db = await createNoydb({\n * store, user, secret,\n * blobStrategy: withBlobs(),\n * })\n *\n * // Now live — delegates to BlobSet.\n * await db.vault('acme').collection('invoices').blob('inv-1').put('receipt.pdf', bytes)\n * ```\n */\nexport function withBlobs(): BlobStrategy {\n return {\n openSlot(args) {\n return new BlobSet(args)\n },\n }\n}\n","/**\n * Blob retention + compaction.\n *\n * Declarative per-collection / per-slot eviction policy. Two\n * triggers:\n *\n * - **`retainDays`** — age-based TTL. A slot uploaded more than N\n * days ago is evicted.\n * - **`evictWhen(record)`** — predicate over the **decrypted**\n * record. Lets consumers express \"the image is safe to drop once\n * the structured invoice has been reviewed and confirmed.\"\n *\n * Either trigger (or both) causes the slot to evict. Eviction removes\n * the slot entry from `_blob_slots_{collection}`, decrements the\n * blob's refCount (so unreferenced chunks can be GC'd by the next\n * sweep), and writes one entry to the `_blob_eviction_audit`\n * collection for tamper-evident record-keeping.\n *\n * The audit entry carries the eTag of the evicted blob (opaque HMAC\n * of plaintext under the vault's `_blob` DEK) — no plaintext leakage,\n * per the SPEC non-correlation invariant. Consumers reconstructing\n * \"what used to be attached\" can look up the audit entry by record\n * id.\n *\n * Compaction is **consumer-scheduled** — noy-db never runs a\n * background daemon. Call `vault.compact()` whenever your workflow\n * allows (cron, manual \"tidy\" button, cold-storage export prep, …).\n *\n * @module\n */\n\nimport type { NoydbStore, EncryptedEnvelope, SlotInfo } from '../types.js'\nimport { NOYDB_FORMAT_VERSION } from '../types.js'\nimport { encrypt } from '../crypto.js'\n\n// ─── Config types ───────────────────────────────────────────────────────\n\nexport interface BlobFieldPolicy<T = unknown> {\n /**\n * Age-based TTL in days. A slot whose `uploadedAt` is older than\n * `now - retainDays × 86400s` evicts on the next `vault.compact()`.\n * Omit to disable age-based eviction.\n */\n readonly retainDays?: number\n /**\n * Predicate evaluated against the decrypted record. When it returns\n * `true`, every matching slot on that record evicts. 
Omit to\n * disable predicate-based eviction.\n */\n readonly evictWhen?: (record: T) => boolean\n}\n\nexport type BlobFieldsConfig<T = unknown> = Record<string, BlobFieldPolicy<T>>\n\n// ─── Audit collection ──────────────────────────────────────────────────\n\nexport const BLOB_EVICTION_AUDIT_COLLECTION = '_blob_eviction_audit'\n\nexport interface BlobEvictionEntry {\n readonly id: string\n readonly collection: string\n readonly recordId: string\n readonly slotName: string\n readonly blobHash: string\n readonly reason: 'ttl' | 'predicate' | 'both'\n readonly evictedAt: string\n readonly actor: string\n}\n\n// ─── Compaction result ──────────────────────────────────────────────────\n\nexport interface CompactionResult {\n /** Number of blob slots evicted across all collections. */\n readonly evicted: number\n /** Number of records touched (iterated + policy checked). */\n readonly records: number\n /** Number of collections with `blobFields` configured. */\n readonly collections: number\n /** Number of audit entries written. Equal to `evicted`. */\n readonly auditEntries: number\n /** Per-collection breakdown for diagnostics. */\n readonly byCollection: Record<string, { records: number; evicted: number }>\n}\n\n// ─── Core ──────────────────────────────────────────────────────────────\n\nexport interface CompactRunOptions {\n /** Override \"now\" for deterministic testing. */\n readonly now?: Date\n /**\n * Stop after this many evictions. Useful for capped batches / cron\n * jobs that need to fit in a time window. `undefined` = unbounded.\n */\n readonly maxEvictions?: number\n /**\n * Dry-run — evaluate policies and return the counts, but do NOT\n * delete slots or write audit entries. Lets a consumer preview\n * what would happen.\n */\n readonly dryRun?: boolean\n}\n\nexport interface CompactionContext {\n readonly adapter: NoydbStore\n readonly vault: string\n readonly actor: string\n readonly encrypted: boolean\n readonly getDEK: (collection: string) => Promise<CryptoKey>\n /**\n * Resolve a collection's declared `blobFields` config. Returns an\n * empty map for collections without the config — the walk skips\n * those.\n */\n readonly getBlobFields: <T>(collection: string) => BlobFieldsConfig<T> | null\n /** List collection names in the vault. */\n readonly listCollections: () => Promise<string[]>\n /** List record ids in a collection. */\n readonly listRecords: (collection: string) => Promise<string[]>\n /** Decrypt and return the record. Null when absent. */\n readonly getRecord: <T>(collection: string, id: string) => Promise<T | null>\n /** Return the BlobSet-like handle for a record's slots. */\n readonly listSlots: (collection: string, id: string) => Promise<SlotInfo[]>\n /** Delete a slot and decrement its blob's refCount. */\n readonly deleteSlot: (collection: string, id: string, slotName: string) => Promise<void>\n}\n\nexport async function runCompaction(\n ctx: CompactionContext,\n options: CompactRunOptions = {},\n): Promise<CompactionResult> {\n const now = options.now ?? new Date()\n const maxEvictions = options.maxEvictions ?? 
Infinity\n const dryRun = options.dryRun === true\n\n const allCollections = await ctx.listCollections()\n const byCollection: Record<string, { records: number; evicted: number }> = {}\n let evicted = 0\n let records = 0\n let auditEntries = 0\n let collectionsWithPolicy = 0\n\n outer: for (const collectionName of allCollections) {\n if (collectionName.startsWith('_')) continue\n const config = ctx.getBlobFields(collectionName)\n if (!config) continue\n const configuredSlots = Object.keys(config)\n if (configuredSlots.length === 0) continue\n collectionsWithPolicy += 1\n byCollection[collectionName] = { records: 0, evicted: 0 }\n\n const ids = await ctx.listRecords(collectionName)\n for (const recordId of ids) {\n if (evicted >= maxEvictions) break outer\n\n const record = await ctx.getRecord(collectionName, recordId).catch(() => null)\n if (record === null) continue\n records += 1\n byCollection[collectionName].records += 1\n\n const slots = await ctx.listSlots(collectionName, recordId).catch(() => [])\n for (const slot of slots) {\n if (evicted >= maxEvictions) break outer\n const policy = config[slot.name]\n if (!policy) continue\n\n const reason = evaluatePolicy(policy, record, slot, now)\n if (!reason) continue\n\n if (!dryRun) {\n await ctx.deleteSlot(collectionName, recordId, slot.name)\n await writeAuditEntry(ctx, {\n id: generateEvictionId(collectionName, recordId, slot.name),\n collection: collectionName,\n recordId,\n slotName: slot.name,\n blobHash: slot.eTag,\n reason,\n evictedAt: now.toISOString(),\n actor: ctx.actor,\n })\n auditEntries += 1\n }\n evicted += 1\n byCollection[collectionName].evicted += 1\n }\n }\n }\n\n return {\n evicted,\n records,\n collections: collectionsWithPolicy,\n auditEntries,\n byCollection,\n }\n}\n\nfunction evaluatePolicy<T>(\n policy: BlobFieldPolicy<T>,\n record: T,\n slot: SlotInfo,\n now: Date,\n): 'ttl' | 'predicate' | 'both' | null {\n let ttlTriggered = false\n let predicateTriggered = false\n\n if (policy.retainDays !== undefined && policy.retainDays > 0) {\n const uploadedAt = Date.parse(slot.uploadedAt)\n if (Number.isFinite(uploadedAt)) {\n const ageMs = now.getTime() - uploadedAt\n const limitMs = policy.retainDays * 86_400_000\n if (ageMs > limitMs) ttlTriggered = true\n }\n }\n\n if (policy.evictWhen) {\n try {\n if (policy.evictWhen(record)) predicateTriggered = true\n } catch {\n // Predicate error → do NOT evict. 
Fail closed.\n }\n }\n\n if (ttlTriggered && predicateTriggered) return 'both'\n if (ttlTriggered) return 'ttl'\n if (predicateTriggered) return 'predicate'\n return null\n}\n\nfunction generateEvictionId(collection: string, recordId: string, slotName: string): string {\n const rand = globalThis.crypto.getRandomValues(new Uint8Array(8))\n let suffix = ''\n for (const b of rand) suffix += b.toString(16).padStart(2, '0')\n return `${collection}__${recordId}__${slotName}__${suffix}`\n}\n\nasync function writeAuditEntry(ctx: CompactionContext, entry: BlobEvictionEntry): Promise<void> {\n const json = JSON.stringify(entry)\n let envelope: EncryptedEnvelope\n if (ctx.encrypted) {\n const dek = await ctx.getDEK(BLOB_EVICTION_AUDIT_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: entry.evictedAt,\n _iv: iv,\n _data: data,\n _by: entry.actor,\n }\n } else {\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: entry.evictedAt,\n _iv: '',\n _data: json,\n _by: entry.actor,\n }\n }\n await ctx.adapter.put(ctx.vault, BLOB_EVICTION_AUDIT_COLLECTION, entry.id, envelope)\n}\n","/**\n * `vault.exportBlobs()` — bulk blob extraction primitive.\n *\n * Async-iterable handle over every blob attached to records in a\n * vault, optionally filtered by collection allowlist and per-record\n * predicate. Emits tuples of `{ blobId, recordRef, bytes, meta }` so\n * the consumer can pipe into any sink (zip stream, S3 multipart, USB\n * copy, cold-storage tape) without pulling the whole export into\n * memory.\n *\n * ## Auth + audit\n *\n * - Capability check runs **once** at handle creation via\n * `Vault.assertCanExport('plaintext', 'blob')`. An operator whose\n * keyring lacks that bit fails before a single byte of ciphertext\n * is decrypted.\n * - Audit entry lands in `_export_audit` at handle creation: the\n * actor, start timestamp, target collections, predicate presence,\n * and batch mechanism. **No content hashes** — per the spec\n * non-correlation invariant.\n *\n * ## Abort + resume\n *\n * - `handle.abort()` flips the internal signal; the next iteration\n * boundary throws `AbortError`. Consumers already in `for await`\n * can catch and exit cleanly.\n * - Restart after a partial failure with `{ afterBlobId }` — the\n * iterator skips tuples up to (and including) that blob id before\n * yielding again. Combined with a blob-count ceiling it supports\n * idempotent batch re-runs.\n *\n * @module\n */\n\nimport type { Collection } from '../collection.js'\nimport type { SlotInfo } from '../types.js'\n\n// ─── Types ──────────────────────────────────────────────────────────────\n\nexport interface ExportBlobsOptions {\n /**\n * Collection allowlist. Omit to export blobs from every collection\n * the caller has read access to.\n */\n readonly collections?: readonly string[]\n /**\n * Per-record predicate. Called on the decrypted record BEFORE any\n * blob bytes are read for that record — returning false skips the\n * record and all its slots without touching their chunks.\n */\n readonly where?: (record: unknown, context: { collection: string; id: string }) => boolean\n /**\n * Resume after a specific blob id. The iterator skips tuples up to\n * and including this id, then yields. Format of the id is the same\n * as `ExportedBlob.blobId` (the HMAC-keyed eTag).\n */\n readonly afterBlobId?: string\n /**\n * External abort signal. When fired, the next iterator tick throws\n * `ExportBlobsAbortedError`. 
Honored alongside `handle.abort()`.\n */\n readonly signal?: AbortSignal\n}\n\nexport interface ExportedBlob {\n /** Opaque blob identifier — HMAC-keyed eTag, stable across vaults. */\n readonly blobId: string\n /** Where this blob came from in the vault. */\n readonly recordRef: {\n readonly collection: string\n readonly id: string\n readonly slot: string\n }\n /** Decrypted plaintext bytes. */\n readonly bytes: Uint8Array\n /** Best-effort metadata (from the blob slot record). */\n readonly meta: {\n readonly size: number\n /**\n * User-visible filename stored on the slot. Often equal to the\n * slot name; differs when the caller supplied an explicit\n * `filename` to `BlobSet.put()`.\n */\n readonly filename: string\n readonly mimeType?: string\n readonly createdAt?: string\n }\n}\n\nexport interface ExportBlobsHandle extends AsyncIterable<ExportedBlob> {\n /** Abort the export. Safe to call multiple times. */\n abort(): void\n /** True once `abort()` has fired or the external signal aborted. */\n readonly aborted: boolean\n}\n\nexport class ExportBlobsAbortedError extends Error {\n constructor(reason: string) {\n super(`exportBlobs aborted: ${reason}`)\n this.name = 'ExportBlobsAbortedError'\n }\n}\n\n// ─── Audit ──────────────────────────────────────────────────────────────\n\nexport const EXPORT_AUDIT_COLLECTION = '_export_audit'\n\nexport interface ExportBlobsAuditEntry {\n readonly id: string\n readonly mechanism: 'exportBlobs'\n readonly actor: string\n readonly startedAt: string\n readonly collections: readonly string[] | null\n readonly predicate: boolean\n readonly afterBlobId: string | null\n}\n\n// ─── Implementation ─────────────────────────────────────────────────────\n\n/**\n * Build the handle. Factored out of `Vault.exportBlobs` so the\n * implementation can be unit-tested without going through the\n * compartment lifecycle.\n */\nexport function createExportBlobsHandle(\n actor: string,\n listAccessibleCollections: () => Promise<string[]>,\n getCollection: <T>(name: string) => Collection<T>,\n writeAudit: (entry: ExportBlobsAuditEntry) => Promise<void>,\n options: ExportBlobsOptions,\n): ExportBlobsHandle {\n let aborted = false\n\n const abort = (): void => {\n aborted = true\n }\n\n if (options.signal) {\n if (options.signal.aborted) aborted = true\n options.signal.addEventListener('abort', () => { aborted = true })\n }\n\n function assertLive(): void {\n if (aborted) throw new ExportBlobsAbortedError('aborted by caller')\n }\n\n const allowlist = options.collections ? new Set(options.collections) : null\n\n // Write the audit entry BEFORE the first yield so a blocked\n // iteration still leaves an audit trail that the export started.\n let auditPromise: Promise<void> | null = null\n function writeAuditOnce(): Promise<void> {\n if (!auditPromise) {\n auditPromise = writeAudit({\n id: generateBatchId(),\n mechanism: 'exportBlobs',\n actor,\n startedAt: new Date().toISOString(),\n collections: options.collections ?? null,\n predicate: Boolean(options.where),\n afterBlobId: options.afterBlobId ?? 
null,\n })\n }\n return auditPromise\n }\n\n async function* generate(): AsyncGenerator<ExportedBlob> {\n await writeAuditOnce()\n assertLive()\n\n // Resolve target collections lazily — also keeps the call async.\n const allCollections = await listAccessibleCollections()\n const targets = allCollections.filter(name => {\n if (name.startsWith('_')) return false\n if (allowlist && !allowlist.has(name)) return false\n return true\n })\n\n let resumeCursorHit = options.afterBlobId === undefined\n\n for (const collectionName of targets) {\n if (aborted) return\n\n const coll = getCollection<Record<string, unknown>>(collectionName)\n const records = await coll.list().catch(() => [])\n for (const record of records) {\n if (aborted) return\n assertLive()\n\n const idField = (record as { id?: unknown }).id\n if (typeof idField !== 'string') continue\n\n if (options.where && !options.where(record, { collection: collectionName, id: idField })) continue\n\n const blobSet = coll.blob(idField)\n const slots = await blobSet.list().catch(() => [] as SlotInfo[])\n for (const slot of slots) {\n if (aborted) return\n\n if (!resumeCursorHit) {\n if (slot.eTag === options.afterBlobId) {\n resumeCursorHit = true\n }\n continue\n }\n\n const bytes = await blobSet.get(slot.name)\n if (!bytes) continue\n\n const item: ExportedBlob = {\n blobId: slot.eTag,\n recordRef: { collection: collectionName, id: idField, slot: slot.name },\n bytes,\n meta: {\n size: slot.size,\n filename: slot.filename,\n ...(slot.mimeType !== undefined && { mimeType: slot.mimeType }),\n ...(slot.uploadedAt !== undefined && { createdAt: slot.uploadedAt }),\n },\n }\n yield item\n }\n }\n }\n }\n\n const handle: ExportBlobsHandle = {\n abort,\n get aborted() { return aborted },\n [Symbol.asyncIterator]: () => generate(),\n }\n return handle\n}\n\n// ─── Helpers ────────────────────────────────────────────────────────────\n\nfunction generateBatchId(): string {\n // 16 bytes of crypto randomness, URL-safe base64, no padding.\n const raw = globalThis.crypto.getRandomValues(new Uint8Array(16))\n let s = ''\n for (const b of raw) s += b.toString(16).padStart(2, '0')\n return `batch-${Date.now().toString(36)}-${s.slice(0, 
12)}`\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AAAA,IA4Ea,YAqBA,iBAgBA,eAgBA,iBAsaA,eA+EA;AAtnBb;AAAA;AAAA;AA4EO,IAAM,aAAN,cAAyB,MAAM;AAAA;AAAA,MAE3B;AAAA,MAET,YAAY,MAAc,SAAiB;AACzC,cAAM,OAAO;AACb,aAAK,OAAO;AACZ,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAYO,IAAM,kBAAN,cAA8B,WAAW;AAAA,MAC9C,YAAY,UAAU,qBAAqB;AACzC,cAAM,qBAAqB,OAAO;AAClC,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAWO,IAAM,gBAAN,cAA4B,WAAW;AAAA,MAC5C,YAAY,UAAU,yEAAoE;AACxF,cAAM,YAAY,OAAO;AACzB,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAWO,IAAM,kBAAN,cAA8B,WAAW;AAAA,MAC9C,YAAY,UAAU,4DAAuD;AAC3E,cAAM,eAAe,OAAO;AAC5B,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAiaO,IAAM,gBAAN,cAA4B,WAAW;AAAA;AAAA,MAEnC;AAAA,MAET,YAAY,SAAiB,UAAU,oBAAoB;AACzD,cAAM,YAAY,OAAO;AACzB,aAAK,OAAO;AACZ,aAAK,UAAU;AAAA,MACjB;AAAA,IACF;AAsEO,IAAM,gBAAN,cAA4B,WAAW;AAAA,MAC5C,YAAY,UAAU,oBAAoB;AACxC,cAAM,aAAa,OAAO;AAC1B,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAAA;AAAA;;;AC3nBA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkDA,eAAsB,UACpB,YACA,MACoB;AACpB,QAAM,cAAc,MAAM,OAAO;AAAA,IAC/B;AAAA,IACA,IAAI,YAAY,EAAE,OAAO,UAAU;AAAA,IACnC;AAAA,IACA;AAAA,IACA,CAAC,WAAW;AAAA,EACd;AAEA,SAAO,OAAO;AAAA,IACZ;AAAA,MACE,MAAM;AAAA,MACN;AAAA,MACA,YAAY;AAAA,MACZ,MAAM;AAAA,IACR;AAAA,IACA;AAAA,IACA,EAAE,MAAM,UAAU,QAAQ,SAAS;AAAA,IACnC;AAAA,IACA,CAAC,WAAW,WAAW;AAAA,EACzB;AACF;AAKA,eAAsB,cAAkC;AACtD,SAAO,OAAO;AAAA,IACZ,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,IACpC;AAAA;AAAA,IACA,CAAC,WAAW,SAAS;AAAA,EACvB;AACF;AAKA,eAAsB,QAAQ,KAAgB,KAAiC;AAC7E,QAAM,UAAU,MAAM,OAAO,QAAQ,OAAO,KAAK,KAAK,QAAQ;AAC9D,SAAO,eAAe,OAAO;AAC/B;AAGA,eAAsB,UACpB,eACA,KACoB;AACpB,MAAI;AACF,WAAO,MAAM,OAAO;AAAA,MAClB;AAAA,MACA,eAAe,aAAa;AAAA,MAC5B;AAAA,MACA;AAAA,MACA,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,MACpC;AAAA,MACA,CAAC,WAAW,SAAS;AAAA,IACvB;AAAA,EACF,QAAQ;AACN,UAAM,IAAI,gBAAgB;AAAA,EAC5B;AACF;AAUA,eAAsB,QACpB,WACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,UAAU,IAAI,YAAY,EAAE,OAAO,SAAS;AAElD,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAGA,eAAsB,QACpB,UACA,YACA,KACiB;AACjB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAE5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B,EAAE,MAAM,WAAW,GAAuB;AAAA,MAC1C;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,YAAY,EAAE,OAAO,SAAS;AAAA,EAC3C,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAUA,eAAsB,aACpB,MACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAMA,eAAsB,aACpB,UACA,YACA,KACqB;AACrB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAC5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B,EAAE,MAAM,WAAW,GAAuB;AAAA,MAC1C;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,WAAW,SAAS;AAAA,EACjC,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAQA,eAAsB,UAAU,MAAmC;AACjE,QAAM,OAAO,MAAM,OAAO,OAAO,WAAW,IAA+B;AAC3E,SAAO,MAAM,KAAK,IAAI,WAAW,IAAI,CAAC,EACnC,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAC1C,KAAK,EAAE;AACZ;AAgBA,eAAsB,cAAc,KAAgB,MAAmC;AAErF,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAChD,QAAM,UAAU,MAAM,OAAO;AAAA,IAC3B;AAAA,IACA;AAAA,IACA,EAAE,MAAM,QAAQ,MAAM,UAAU;AAAA,IAChC;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AACA,QAAM,MAAM,MAAM,OAAO,KAAK,QAAQ,SAAS,IAA+B;AAC9E,SAAO,MAAM,KAAK,IAAI,WAAW,GAAG,CAAC,EAClC,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAC1C,KAAK,EAAE;AACZ;A
AgBA,eAAsB,oBACpB,MACA,KACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B;AAAA,MACE,MAAM;AAAA,MACN;AAAA,MACA,gBAAgB;AAAA,IAClB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AASA,eAAsB,oBACpB,UACA,YACA,KACA,KACqB;AACrB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAC5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B;AAAA,QACE,MAAM;AAAA,QACN;AAAA,QACA,gBAAgB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,WAAW,SAAS;AAAA,EACjC,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAiBA,eAAsB,kBAAkB,KAAgB,gBAA4C;AAElG,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAGhD,QAAM,UAAU,MAAM,OAAO;AAAA,IAC3B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,CAAC,YAAY;AAAA,EACf;AAGA,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,gBAAgB;AACtD,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,cAAc;AACpD,QAAM,OAAO,MAAM,OAAO;AAAA,IACxB,EAAE,MAAM,QAAQ,MAAM,WAAW,MAAM,KAAK;AAAA,IAC5C;AAAA,IACA;AAAA,EACF;AAGA,SAAO,OAAO;AAAA,IACZ;AAAA,IACA;AAAA,IACA,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,IACpC;AAAA,IACA,CAAC,WAAW,SAAS;AAAA,EACvB;AACF;AA2BA,eAAe,sBACb,KACA,SACA,WACqB;AACrB,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAChD,QAAM,UAAU,MAAM,OAAO,UAAU,OAAO,QAAQ,QAAQ,OAAO,CAAC,YAAY,CAAC;AACnF,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,wBAAwB;AAC9D,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,GAAG,OAAO,KAAO,SAAS,EAAE;AAClE,QAAM,OAAO,MAAM,OAAO;AAAA,IACxB,EAAE,MAAM,QAAQ,MAAM,WAAW,MAAM,KAAK;AAAA,IAC5C;AAAA,IACA,WAAW;AAAA,EACb;AACA,SAAO,IAAI,WAAW,IAAI;AAC5B;AAgBA,eAAsB,qBACpB,WACA,KACA,SACwB;AACxB,QAAM,KAAK,MAAM,sBAAsB,KAAK,SAAS,SAAS;AAC9D,QAAM,UAAU,IAAI,YAAY,EAAE,OAAO,SAAS;AAClD,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAQA,eAAsB,qBACpB,UACA,YACA,KACiB;AACjB,SAAO,QAAQ,UAAU,YAAY,GAAG;AAC1C;AAKO,SAAS,aAAyB;AACvC,SAAO,WAAW,OAAO,gBAAgB,IAAI,WAAW,QAAQ,CAAC;AACnE;AAGO,SAAS,eAA2B;AACzC,SAAO,WAAW,OAAO,gBAAgB,IAAI,WAAW,UAAU,CAAC;AACrE;AAIO,SAAS,eAAe,QAA0C;AACvE,QAAM,QAAQ,kBAAkB,aAAa,SAAS,IAAI,WAAW,MAAM;AAC3E,MAAI,SAAS;AACb,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAU,OAAO,aAAa,MAAM,CAAC,CAAE;AAAA,EACzC;AACA,SAAO,KAAK,MAAM;AACpB;AAEO,SAAS,eAAe,QAAyC;AACtE,QAAM,SAAS,KAAK,MAAM;AAC1B,QAAM,QAAQ,IAAI,WAAW,OAAO,MAAM;AAC1C,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,CAAC,IAAI,OAAO,WAAW,CAAC;AAAA,EAChC;AACA,SAAO;AACT;AAnfA,IAwCM,mBACA,YACA,UACA,UAEA;AA7CN;AAAA;AAAA;AAsCA;AAEA,IAAM,oBAAoB;AAC1B,IAAM,aAAa;AACnB,IAAM,WAAW;AACjB,IAAM,WAAW;AAEjB,IAAM,SAAS,WAAW,OAAO;AAAA;AAAA;;;AC7CjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC0CO,IAAM,uBAAuB;;;AC/BpC;AASA;;;AC0BA,SAAS,IAAI,GAAuB;AAClC,SAAO,IAAI,WAAW,EAAE,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,SAAS,GAAG,EAAE,CAAC,CAAC;AAChE;AAgBA,IAAM,cAAoC;AAAA;AAAA;AAAA,EAIxC,EAAE,MAAM,aAAa,QAAQ,OAAO,OAAO,IAAI,yBAAyB,GAAG,eAAe,KAAK;AAAA;AAAA,EAG/F,EAAE,MAAM,cAAc,QAAQ,QAAQ,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA;AAAA,EAGlF;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,IAAI,aAAa;AAAA,IACxB,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,IACjB,eAAe;AAAA,EACjB;AAAA;AAAA,EAGA,EAAE,MAAM,cAAc,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGhE,EAAE,MAAM,cAAc,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGhE,EAAE,MAAM,aAAa,QAAQ,OAAO,OAAO,IAAI,aAAa,GAAG,eAAe,KAAK;AAAA;AAAA,EAGnF,EAAE,MAAM,aAAa,QAAQ,OAAO,OAAO,IAAI,OAAO,EAAE;AAAA;AAAA,EAGxD,EAAE,MAAM,6BAA6B,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAG9E,EAAE,MAAM,gBAAgB,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGjE;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,
IAAI,aAAa;AAAA,IACxB,QAAQ;AAAA,IACR,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,IACjB,eAAe;AAAA,EACjB;AAAA;AAAA;AAAA,EAKA,EAAE,MAAM,mBAAmB,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGpE,EAAE,MAAM,mBAAmB,QAAQ,OAAO,OAAO,IAAI,gBAAgB,EAAE;AAAA;AAAA;AAAA,EAKvE,EAAE,MAAM,uBAAuB,QAAQ,UAAU,OAAO,IAAI,yBAAyB,GAAG,eAAe,KAAK;AAAA;AAAA,EAG5G,EAAE,MAAM,uBAAuB,QAAQ,UAAU,OAAO,IAAI,sBAAsB,GAAG,eAAe,KAAK;AAAA;AAAA,EAGzG,EAAE,MAAM,+BAA+B,QAAQ,MAAM,OAAO,IAAI,mBAAmB,GAAG,eAAe,KAAK;AAAA;AAAA,EAG1G,EAAE,MAAM,oBAAoB,QAAQ,MAAM,OAAO,IAAI,mBAAmB,GAAG,eAAe,KAAK;AAAA;AAAA,EAG/F,EAAE,MAAM,mBAAmB,QAAQ,OAAO,OAAO,IAAI,aAAa,GAAG,eAAe,KAAK;AAAA;AAAA,EAGzF,EAAE,MAAM,oBAAoB,QAAQ,QAAQ,OAAO,IAAI,OAAO,GAAG,eAAe,KAAK;AAAA;AAAA,EAGrF,EAAE,MAAM,uBAAuB,QAAQ,SAAS,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA;AAAA,EAG5F,EAAE,MAAM,sBAAsB,QAAQ,QAAQ,OAAO,IAAI,aAAa,GAAG,eAAe,KAAK;AAAA;AAAA;AAAA,EAK7F;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,IAAI,aAAa;AAAA,IACxB,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,EACnB;AAAA;AAAA,EAGA;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,IAAI,aAAa;AAAA,IACxB,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,EACnB;AAAA;AAAA,EAGA,EAAE,MAAM,cAAc,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGhE,EAAE,MAAM,mBAAmB,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGpE,EAAE,MAAM,cAAc,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGhE,EAAE,MAAM,cAAc,QAAQ,OAAO,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA;AAAA;AAAA,EAKjF;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,IAAI,aAAa;AAAA,IACxB,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,IACjB,eAAe;AAAA,EACjB;AAAA;AAAA,EAGA,EAAE,MAAM,kBAAkB,QAAQ,OAAO,OAAO,IAAI,yBAAyB,GAAG,eAAe,KAAK;AAAA;AAAA,EAGpG,EAAE,MAAM,oBAAoB,QAAQ,OAAO,OAAO,IAAI,aAAa,GAAG,eAAe,KAAK;AAAA;AAAA,EAG1F,EAAE,MAAM,eAAe,QAAQ,OAAO,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA;AAAA,EAGlF;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,IAAI,aAAa;AAAA,IACxB,QAAQ;AAAA,IACR,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,IACjB,eAAe;AAAA,EACjB;AAAA;AAAA;AAAA,EAIA,EAAE,MAAM,aAAa,QAAQ,OAAO,OAAO,IAAI,aAAa,GAAG,QAAQ,GAAG,eAAe,KAAK;AAAA;AAAA;AAAA,EAK9F,EAAE,MAAM,2BAA2B,QAAQ,UAAU,OAAO,IAAI,yBAAyB,EAAE;AAAA;AAAA,EAG3F,EAAE,MAAM,oBAAoB,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGtE,EAAE,MAAM,qBAAqB,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGtE,EAAE,MAAM,iDAAiD,QAAQ,MAAM,OAAO,IAAI,OAAO,EAAE;AAAA;AAAA,EAG3F,EAAE,MAAM,6BAA6B,QAAQ,gBAAgB,OAAO,IAAI,aAAa,EAAE;AAAA,EACvF,EAAE,MAAM,6BAA6B,QAAQ,gBAAgB,OAAO,IAAI,aAAa,EAAE;AAAA,EACvF,EAAE,MAAM,6BAA6B,QAAQ,gBAAgB,OAAO,IAAI,aAAa,EAAE;AAAA,EACvF,EAAE,MAAM,6BAA6B,QAAQ,gBAAgB,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASvF,EAAE,MAAM,uBAAuB,QAAQ,cAAc,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAG/E,EAAE,MAAM,+BAA+B,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA;AAAA,EAKhF,EAAE,MAAM,yCAAyC,QAAQ,OAAO,OAAO,IAAI,sBAAsB,EAAE;AAAA;AAAA,EAGnG,EAAE,MAAM,qBAAqB,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGtE,EAAE,MAAM,qCAAqC,QAAQ,OAAO,OAAO,IAAI,aAAa,GAAG,eAAe,KAAK;AAAA;AAAA;AAAA,EAK3G,EAAE,MAAM,gCAAgC,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGlF,EAAE,MAAM,gCAAgC,QAAQ,WAAW,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGrF,EAAE,MAAM,wBAAwB,QAAQ,UAAU,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAG5E,EAAE,MAAM,iCAAiC,QAAQ,OAAO,OAAO,IAAI,UAAU,EAAE;AAAA,EAC/E,EAAE,MAAM,iCAAiC,QAAQ,YAAY,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA,EACzG,EAAE,MAAM,iCAAiC,QAAQ,YAAY,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA;AAAA;AAAA,EAKzG,EAAE,MAAM,kCAAkC,QAAQ,WAAW,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGvF,EAAE,MAAM,oBAAoB,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGtE,EAAE,MAAM,kCAAkC,QAAQ,WAAW,OAAO,IAAI,aAAa,EAAE;AACzF;AASA,SAAS,cAAc,OAAe,OAAwB;AAC5D,SAAO,UAAU,QAAS,QAAQ,SAAU;AAC9C;AAWO,SAAS,eAAe,QAA4B;AACzD,QAAM,SAAS,YAAY,MAAM;AACjC,SAAO,QAAQ,QAAQ;AACzB;AAUO,S
AAS,YACd,QACiE;AACjE,aAAW,QAAQ,aAAa;AAC9B,QAAI,UAAU,QAAQ,IAAI,GAAG;AAC3B,aAAO;AAAA,QACL,MAAM,KAAK;AAAA,QACX,QAAQ,KAAK;AAAA,QACb,eAAe,KAAK,iBAAiB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AAGA,MAAI,OAAO,UAAU,KAAK,cAAc,OAAO,CAAC,GAAI,OAAO,CAAC,CAAE,GAAG;AAC/D,WAAO,EAAE,MAAM,cAAc,QAAQ,OAAO,eAAe,KAAK;AAAA,EAClE;AAEA,SAAO;AACT;AAQO,SAAS,gBAAgB,UAA2B;AACzD,SAAO,qBAAqB,IAAI,QAAQ;AAC1C;AAIA,SAAS,UAAU,QAAoB,MAA0B;AAC/D,QAAM,SAAS,KAAK,UAAU;AAC9B,QAAM,MAAM,SAAS,KAAK,MAAM;AAGhC,MAAI,OAAO,SAAS,IAAK,QAAO;AAGhC,WAAS,IAAI,GAAG,IAAI,KAAK,MAAM,QAAQ,KAAK;AAC1C,QAAI,OAAO,SAAS,CAAC,MAAM,KAAK,MAAM,CAAC,EAAG,QAAO;AAAA,EACnD;AAGA,MAAI,KAAK,kBAAkB,KAAK,oBAAoB,QAAW;AAC7D,UAAM,OAAO,KAAK,kBAAkB,KAAK,eAAe;AACxD,QAAI,OAAO,SAAS,KAAM,QAAO;AACjC,aAAS,IAAI,GAAG,IAAI,KAAK,eAAe,QAAQ,KAAK;AACnD,UAAI,OAAO,KAAK,kBAAkB,CAAC,MAAM,KAAK,eAAe,CAAC,EAAG,QAAO;AAAA,IAC1E;AAAA,EACF;AAEA,SAAO;AACT;AAQA,IAAM,uBAAuB,IAAI;AAAA,EAC/B,YAAY,OAAO,CAAC,MAAM,EAAE,aAAa,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI;AAC9D;;;AD2gBA;AA52BO,IAAM,kBAAkB;AAGxB,IAAM,wBAAwB;AAO9B,IAAM,yBAAyB;AAG/B,IAAM,oBAAoB;AAG1B,IAAM,uBAAuB;AAO7B,IAAM,qBAAqB,MAAM;AAGxC,IAAM,kBAAkB;AAIxB,eAAe,cACb,MAC4D;AAC5D,MAAI,OAAO,sBAAsB,aAAa;AAC5C,WAAO,EAAE,OAAO,MAAM,WAAW,OAAO;AAAA,EAC1C;AACA,QAAM,KAAK,IAAI,kBAAkB,MAAM;AACvC,QAAM,SAAS,GAAG,SAAS,UAAU;AACrC,QAAM,OAAO,MAAM,IAA+B;AAClD,QAAM,OAAO,MAAM;AACnB,QAAM,MAAM,MAAM,IAAI,SAAS,GAAG,QAAQ,EAAE,YAAY;AACxD,SAAO,EAAE,OAAO,IAAI,WAAW,GAAG,GAAG,WAAW,OAAO;AACzD;AAEA,eAAe,gBAAgB,MAAuC;AACpE,MAAI,OAAO,wBAAwB,aAAa;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,QAAM,KAAK,IAAI,oBAAoB,MAAM;AACzC,QAAM,SAAS,GAAG,SAAS,UAAU;AACrC,QAAM,OAAO,MAAM,IAA+B;AAClD,QAAM,OAAO,MAAM;AACnB,QAAM,MAAM,MAAM,IAAI,SAAS,GAAG,QAAQ,EAAE,YAAY;AACxD,SAAO,IAAI,WAAW,GAAG;AAC3B;AAEA,SAAS,aAAa,QAAkC;AACtD,QAAM,QAAQ,OAAO,OAAO,CAAC,GAAG,MAAM,IAAI,EAAE,YAAY,CAAC;AACzD,QAAM,MAAM,IAAI,WAAW,KAAK;AAChC,MAAI,SAAS;AACb,aAAW,KAAK,QAAQ;AACtB,QAAI,IAAI,GAAG,MAAM;AACjB,cAAU,EAAE;AAAA,EACd;AACA,SAAO;AACT;AAGA,SAAS,SAAS,MAAc,YAAoB,YAAgC;AAClF,SAAO,IAAI,YAAY,EAAE,OAAO,GAAG,IAAI,IAAI,UAAU,IAAI,UAAU,EAAE;AACvE;AA+BO,IAAM,UAAN,MAAc;AAAA,EACF;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEjB,YAAY,MAST;AACD,SAAK,QAAQ,KAAK;AAClB,SAAK,QAAQ,KAAK;AAClB,SAAK,aAAa,KAAK;AACvB,SAAK,WAAW,KAAK;AACrB,SAAK,SAAS,KAAK;AACnB,SAAK,YAAY,KAAK;AACtB,SAAK,SAAS,KAAK;AACnB,SAAK,eAAe,KAAK;AAAA,EAC3B;AAAA;AAAA,EAGA,IAAY,kBAA0B;AACpC,WAAO,GAAG,iBAAiB,GAAG,KAAK,UAAU;AAAA,EAC/C;AAAA;AAAA,EAGA,IAAY,qBAA6B;AACvC,WAAO,GAAG,oBAAoB,GAAG,KAAK,UAAU;AAAA,EAClD;AAAA;AAAA,EAIA,MAAc,YAGX;AACD,UAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,KAAK,iBAAiB,KAAK,QAAQ;AACrF,QAAI,CAAC,SAAU,QAAO,EAAE,OAAO,CAAC,GAAG,SAAS,EAAE;AAE9C,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO;AAAA,QACL,OAAO,KAAK,MAAM,SAAS,KAAK;AAAA,QAChC,SAAS,SAAS;AAAA,MACpB;AAAA,IACF;AAEA,UAAM,MAAM,MAAM,KAAK,OAAO,KAAK,UAAU;AAC7C,UAAM,OAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAC5D,WAAO;AAAA,MACL,OAAO,KAAK,MAAM,IAAI;AAAA,MACtB,SAAS,SAAS;AAAA,IACpB;AAAA,EACF;AAAA,EAEA,MAAc,UACZ,OACA,gBACe;AACf,UAAM,OAAO,KAAK,UAAU,KAAK;AACjC,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAI;AAEJ,QAAI,KAAK,WAAW;AAClB,YAAM,MAAM,MAAM,KAAK,OAAO,KAAK,UAAU;AAC7C,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,iBAAW;AAAA,QACT,QAAQ;AAAA,QACR,IAAI,iBAAiB;AAAA,QACrB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,OAAO;AAAA,MACT;AAAA,IACF,OAAO;AACL,iBAAW;AAAA,QACT,QAAQ;AAAA,QACR,IAAI,iBAAiB;AAAA,QACrB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,OAAO;AAAA,MACT;AAAA,IACF;AAEA,UAAM,KAAK,MAAM;AAAA,MACf,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA,iBAAiB,IAAI,iBAAiB;AAAA,IACxC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,eACZ,QACe;AACf,aAAS,UAAU,GAAG,UAAU
,iBAAiB,WAAW;AAC1D,YAAM,EAAE,OAAO,QAAQ,IAAI,MAAM,KAAK,UAAU;AAChD,YAAM,UAAU,OAAO,KAAK;AAC5B,UAAI,YAAY,KAAM;AACtB,UAAI;AACF,cAAM,KAAK,UAAU,SAAS,OAAO;AACrC;AAAA,MACF,SAAS,KAAK;AACZ,YAAI,eAAe,iBAAiB,UAAU,kBAAkB,EAAG;AACnE,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIA,MAAc,eAAe,MAAqE;AAChG,UAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,uBAAuB,IAAI;AAC7E,QAAI,CAAC,SAAU,QAAO;AAEtB,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO,EAAE,MAAM,KAAK,MAAM,SAAS,KAAK,GAAiB,SAAS,SAAS,GAAG;AAAA,IAChF;AAEA,UAAM,MAAM,MAAM,KAAK,OAAO,eAAe;AAC7C,UAAM,OAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAC5D,WAAO,EAAE,MAAM,KAAK,MAAM,IAAI,GAAiB,SAAS,SAAS,GAAG;AAAA,EACtE;AAAA,EAEA,MAAc,gBAAgB,MAAkB,iBAAyC;AACvF,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,UAAM,cAAc,mBAAmB,KAAK;AAC5C,QAAI;AAEJ,QAAI,KAAK,WAAW;AAClB,YAAM,MAAM,MAAM,KAAK,OAAO,eAAe;AAC7C,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,iBAAW,EAAE,QAAQ,sBAAsB,IAAI,YAAY,KAAK,KAAK,KAAK,IAAI,OAAO,KAAK;AAAA,IAC5F,OAAO;AACL,iBAAW,EAAE,QAAQ,sBAAsB,IAAI,YAAY,KAAK,KAAK,KAAK,IAAI,OAAO,KAAK;AAAA,IAC5F;AAEA,UAAM,KAAK,MAAM;AAAA,MACf,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBAAkB,MAAc,OAA8B;AAC1E,aAAS,UAAU,GAAG,UAAU,iBAAiB,WAAW;AAC1D,YAAM,SAAS,MAAM,KAAK,eAAe,IAAI;AAC7C,UAAI,CAAC,OAAQ,OAAM,IAAI,cAAc,cAAc,IAAI,YAAY;AACnE,YAAM,EAAE,MAAM,QAAQ,IAAI;AAC1B,YAAM,UAAsB,EAAE,GAAG,MAAM,UAAU,KAAK,WAAW,MAAM;AACvE,UAAI;AACF,cAAM,KAAK,gBAAgB,SAAS,OAAO;AAC3C;AAAA,MACF,SAAS,KAAK;AACZ,YAAI,eAAe,iBAAiB,UAAU,kBAAkB,EAAG;AACnE,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIA,MAAc,WACZ,MACA,OACA,YACA,OACA,KACe;AACf,UAAM,KAAK,GAAG,IAAI,IAAI,KAAK;AAC3B,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAI;AAEJ,QAAI,KAAK;AACP,YAAM,MAAM,SAAS,MAAM,OAAO,UAAU;AAC5C,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,oBAAoB,OAAO,KAAK,GAAG;AAC9D,iBAAW,EAAE,QAAQ,sBAAsB,IAAI,GAAG,KAAK,KAAK,KAAK,IAAI,OAAO,KAAK;AAAA,IACnF,OAAO;AACL,iBAAW;AAAA,QACT,QAAQ;AAAA,QACR,IAAI;AAAA,QACJ,KAAK;AAAA,QACL,KAAK;AAAA,QACL,OAAO,eAAe,KAAK;AAAA,MAC7B;AAAA,IACF;AAEA,UAAM,KAAK,MAAM,IAAI,KAAK,OAAO,wBAAwB,IAAI,QAAQ;AAAA,EACvE;AAAA,EAEA,MAAc,UACZ,MACA,OACA,YACA,KAC4B;AAC5B,UAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,wBAAwB,GAAG,IAAI,IAAI,KAAK,EAAE;AAC5F,QAAI,CAAC,SAAU,QAAO;AAEtB,QAAI,KAAK;AACP,YAAM,MAAM,SAAS,MAAM,OAAO,UAAU;AAC5C,aAAO,MAAM,oBAAoB,SAAS,KAAK,SAAS,OAAO,KAAK,GAAG;AAAA,IACzE;AAEA,WAAO,eAAe,SAAS,KAAK;AAAA,EACtC;AAAA;AAAA,EAIQ,WAAW,UAAkB,OAAuB;AAC1D,WAAO,GAAG,KAAK,QAAQ,KAAK,QAAQ,KAAK,KAAK;AAAA,EAChD;AAAA,EAEA,MAAc,kBAAkB,UAAkB,OAA8C;AAC9F,UAAM,MAAM,KAAK,WAAW,UAAU,KAAK;AAC3C,UAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,KAAK,oBAAoB,GAAG;AAC9E,QAAI,CAAC,SAAU,QAAO;AAEtB,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO,KAAK,MAAM,SAAS,KAAK;AAAA,IAClC;AAEA,UAAM,MAAM,MAAM,KAAK,OAAO,KAAK,UAAU;AAC7C,UAAM,OAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAC5D,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB;AAAA,EAEA,MAAc,mBAAmB,UAAkB,QAAsC;AACvF,UAAM,MAAM,KAAK,WAAW,UAAU,OAAO,KAAK;AAClD,UAAM,OAAO,KAAK,UAAU,MAAM;AAClC,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAI;AAEJ,QAAI,KAAK,WAAW;AAClB,YAAM,MAAM,MAAM,KAAK,OAAO,KAAK,UAAU;AAC7C,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,iBAAW,EAAE,QAAQ,sBAAsB,IAAI,GAAG,KAAK,KAAK,KAAK,IAAI,OAAO,KAAK;AAAA,IACnF,OAAO;AACL,iBAAW,EAAE,QAAQ,sBAAsB,IAAI,GAAG,KAAK,KAAK,KAAK,IAAI,OAAO,KAAK;AAAA,IACnF;AAEA,UAAM,KAAK,MAAM,IAAI,KAAK,OAAO,KAAK,oBAAoB,KAAK,QAAQ;AAAA,EACzE;AAAA,EAEA,MAAc,oBAAoB,UAAkB,OAA8B;AAChF,UAAM,MAAM,KAAK,WAAW,UAAU,KAAK;AAC3C,UAAM,KAAK,MAAM,OAAO,KAAK,OAAO,KAAK,oBAAoB,GAAG;AAAA,EAClE;AAAA;AAAA,EAIQ,mBAAmB,MAA+B;AACxD,QAAI,MAAM,UAAW,QAAO,KAAK;AACjC,QAAI,KAAK,aAAc,QAAO,KAAK;AACnC,WAAO;AAAA,EACT;AAAA;AAAA,EAIA,MAAc,eAAe,MA
AuC;AAClE,UAAM,UAAU,KAAK,YAAY,MAAM,KAAK,OAAO,eAAe,IAAI;AACtE,UAAM,SAAuB,CAAC;AAE9B,aAAS,IAAI,GAAG,IAAI,KAAK,YAAY,KAAK;AACxC,YAAM,QAAQ,MAAM,KAAK,UAAU,KAAK,MAAM,GAAG,KAAK,YAAY,OAAO;AACzE,UAAI,CAAC,OAAO;AACV,cAAM,IAAI;AAAA,UACR,cAAc,CAAC,IAAI,KAAK,UAAU,sBAAsB,KAAK,IAAI,gBAAgB,KAAK,QAAQ;AAAA,QAChG;AAAA,MACF;AACA,aAAO,KAAK,KAAK;AAAA,IACnB;AAEA,UAAM,YAAY,aAAa,MAAM;AACrC,WAAO,KAAK,gBAAgB,SAAS,MAAM,gBAAgB,SAAS,IAAI;AAAA,EAC1E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,IAAI,UAAkB,MAAkB,MAAsC;AAElF,UAAM,UAAU,KAAK,YAAY,MAAM,KAAK,OAAO,eAAe,IAAI;AACtE,UAAM,OAAO,UACT,MAAM,cAAc,SAAS,IAAI,IACjC,MAAM,eAAe,IAAI;AAG7B,QAAI,WAAW,MAAM;AACrB,QAAI,CAAC,UAAU;AACb,YAAM,WAAW,YAAY,KAAK,SAAS,GAAG,EAAE,CAAC;AACjD,UAAI,SAAU,YAAW,SAAS;AAAA,IACpC;AAGA,QAAI;AACJ,QAAI,MAAM,aAAa,QAAW;AAChC,uBAAiB,KAAK;AAAA,IACxB,WAAW,YAAY,gBAAgB,QAAQ,GAAG;AAChD,uBAAiB;AAAA,IACnB,OAAO;AACL,uBAAiB;AAAA,IACnB;AAGA,UAAM,eAAe,MAAM,KAAK,eAAe,IAAI;AAEnD,QAAI,cAAc;AAEhB,YAAM,KAAK,kBAAkB,MAAM,CAAE;AAAA,IACvC,OAAO;AAEL,YAAM,EAAE,OAAO,YAAY,UAAU,IAAI,iBACrC,MAAM,cAAc,IAAI,IACxB,EAAE,OAAO,MAAM,WAAW,OAAgB;AAE9C,YAAM,YAAY,KAAK,mBAAmB,IAAI;AAC9C,YAAM,aAAa,KAAK,IAAI,GAAG,KAAK,KAAK,WAAW,aAAa,SAAS,CAAC;AAG3E,eAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,cAAM,QAAQ,IAAI;AAClB,cAAM,KAAK;AAAA,UACT;AAAA,UAAM;AAAA,UAAG;AAAA,UACT,WAAW,SAAS,OAAO,QAAQ,SAAS;AAAA,UAC5C;AAAA,QACF;AAAA,MACF;AAGA,YAAM,KAAK,gBAAgB;AAAA,QACzB;AAAA,QACA,MAAM,KAAK;AAAA,QACX,gBAAgB,WAAW;AAAA,QAC3B,aAAa;AAAA,QACb;AAAA,QACA;AAAA,QACA,GAAI,aAAa,SAAY,EAAE,SAAS,IAAI,CAAC;AAAA,QAC7C,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,UAAU;AAAA,MACZ,CAAC;AAAA,IACH;AAGA,UAAM,iBAAiB,MAAM,cAAc,KAAK;AAChD,UAAM,KAAK,eAAe,CAAC,UAAU;AACnC,YAAM,UAAU,MAAM,QAAQ,GAAG;AACjC,YAAM,QAAQ,IAAI;AAAA,QAChB;AAAA,QACA,UAAU;AAAA,QACV,MAAM,KAAK;AAAA,QACX,GAAI,aAAa,SAAY,EAAE,SAAS,IAAI,CAAC;AAAA,QAC7C,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,QACnC,GAAI,mBAAmB,SAAY,EAAE,YAAY,eAAe,IAAI,CAAC;AAAA,MACvE;AAEA,UAAI,WAAW,YAAY,MAAM;AAC/B,aAAK,wBAAwB;AAAA,MAC/B;AACA,aAAO;AAAA,IACT,CAAC;AAGD,QAAI,KAAK,uBAAuB;AAC9B,YAAM,UAAU,KAAK;AACrB,WAAK,wBAAwB;AAC7B,YAAM,KAAK,kBAAkB,SAAS,EAAE,EAAE,MAAM,MAAM;AAAA,MAEtD,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOR,MAAM,IAAI,UAA8C;AACtD,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,QAAO;AAElB,UAAM,SAAS,MAAM,KAAK,eAAe,KAAK,IAAI;AAClD,QAAI,CAAC,OAAQ,QAAO;AAEpB,WAAO,KAAK,eAAe,OAAO,IAAI;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAA4B;AAChC,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,WAAO,OAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,CAAC,MAAM,IAAI,OAAO,EAAE,MAAM,GAAG,KAAK,EAAE;AAAA,EACxE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAO,UAAiC;AAC5C,QAAI;AAEJ,UAAM,KAAK,eAAe,CAAC,UAAU;AACnC,UAAI,EAAE,YAAY,OAAQ,QAAO;AACjC,wBAAkB,MAAM,QAAQ,EAAG;AACnC,aAAO,MAAM,QAAQ;AACrB,aAAO;AAAA,IACT,CAAC;AAED,QAAI,iBAAiB;AACnB,YAAM,KAAK,kBAAkB,iBAAiB,EAAE,EAAE,MAAM,MAAM;AAAA,MAE9D,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,SAAS,UAAkB,MAAsD;AACrF,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,QAAO;AAElB,UAAM,SAAS,MAAM,KAAK,eAAe,KAAK,IAAI;AAClD,QAAI,CAAC,OAAQ,QAAO;AAEpB,WAAO,KAAK,cAAc,MAAM,OAAO,MAAM,IAAI;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,UACJ,UACA,MACqD;AACrD,QAAI,OAAO,QAAQ,eAAe,OAAO,IAAI,oBAAoB,YAAY;AAC3E,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AACA,UAAM,QAAQ,MAAM,KAAK,IAAI,QAAQ;AACrC,QAAI,CAAC,MAAO,QAAO;AAEnB,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,UAAM,OAAO,MAAM,YAAY,MAAM,YAAY;AAQjD,UAAM,SAAS,MAAM,OAAO,MAAM,MAAM,YAAY,MAAM
,aAAa,MAAM,UAAU;AACvF,UAAM,OAAO,IAAI,KAAK,CAAC,MAAM,GAAG,EAAE,KAAK,CAAC;AACxC,UAAM,MAAM,IAAI,gBAAgB,IAAI;AACpC,QAAI,UAAU;AACd,UAAM,SAAS,MAAY;AACzB,UAAI,QAAS;AACb,gBAAU;AACV,UAAI,gBAAgB,GAAG;AAAA,IACzB;AACA,WAAO,EAAE,KAAK,OAAO;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,QAAQ,UAAkB,OAA8B;AAC5D,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,OAAM,IAAI,cAAc,SAAS,QAAQ,0BAA0B,KAAK,QAAQ,GAAG;AAG9F,UAAM,WAAW,MAAM,KAAK,kBAAkB,UAAU,KAAK;AAC7D,QAAI,YAAY,SAAS,SAAS,KAAK,KAAM;AAG7C,UAAM,SAAwB;AAAA,MAC5B;AAAA,MACA,MAAM,KAAK;AAAA,MACX,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,MACpC,GAAI,KAAK,WAAW,SAAY,EAAE,aAAa,KAAK,OAAO,IAAI,CAAC;AAAA,IAClE;AACA,UAAM,KAAK,mBAAmB,UAAU,MAAM;AAG9C,UAAM,KAAK,kBAAkB,KAAK,MAAM,CAAE;AAG1C,QAAI,YAAY,SAAS,SAAS,KAAK,MAAM;AAC3C,YAAM,KAAK,kBAAkB,SAAS,MAAM,EAAE,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAAA,IAChE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAW,UAAkB,OAA2C;AAC5E,UAAM,SAAS,MAAM,KAAK,kBAAkB,UAAU,KAAK;AAC3D,QAAI,CAAC,OAAQ,QAAO;AAEpB,UAAM,SAAS,MAAM,KAAK,eAAe,OAAO,IAAI;AACpD,QAAI,CAAC,OAAQ,QAAO;AAEpB,WAAO,KAAK,eAAe,OAAO,IAAI;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,UAA4C;AAC7D,UAAM,SAAS,GAAG,KAAK,QAAQ,KAAK,QAAQ;AAC5C,UAAM,UAAU,MAAM,KAAK,MAAM,KAAK,KAAK,OAAO,KAAK,kBAAkB;AACzE,UAAM,eAAe,QAAQ,OAAO,CAAC,MAAM,EAAE,WAAW,MAAM,CAAC;AAE/D,UAAM,WAA4B,CAAC;AACnC,eAAW,OAAO,cAAc;AAC9B,YAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,KAAK,oBAAoB,GAAG;AAC9E,UAAI,CAAC,SAAU;AAEf,UAAI,CAAC,KAAK,WAAW;AACnB,iBAAS,KAAK,KAAK,MAAM,SAAS,KAAK,CAAkB;AAAA,MAC3D,OAAO;AACL,cAAM,MAAM,MAAM,KAAK,OAAO,KAAK,UAAU;AAC7C,cAAM,OAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAC5D,iBAAS,KAAK,KAAK,MAAM,IAAI,CAAkB;AAAA,MACjD;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,UAAkB,OAA8B;AAClE,UAAM,SAAS,MAAM,KAAK,kBAAkB,UAAU,KAAK;AAC3D,QAAI,CAAC,OAAQ;AAEb,UAAM,KAAK,oBAAoB,UAAU,KAAK;AAC9C,UAAM,KAAK,kBAAkB,OAAO,MAAM,EAAE,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAAA,EAC9D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBACJ,UACA,OACA,MAC0B;AAC1B,UAAM,SAAS,MAAM,KAAK,kBAAkB,UAAU,KAAK;AAC3D,QAAI,CAAC,OAAQ,QAAO;AAEpB,UAAM,SAAS,MAAM,KAAK,eAAe,OAAO,IAAI;AACpD,QAAI,CAAC,OAAQ,QAAO;AAGpB,UAAM,WAAuB;AAAA,MAC3B,MAAM,OAAO;AAAA,MACb,UAAU,MAAM,YAAY,GAAG,QAAQ,IAAI,KAAK;AAAA,MAChD,MAAM,OAAO,KAAK;AAAA,MAClB,GAAI,OAAO,KAAK,aAAa,SAAY,EAAE,UAAU,OAAO,KAAK,SAAS,IAAI,CAAC;AAAA,MAC/E,YAAY,OAAO;AAAA,MACnB,GAAI,OAAO,gBAAgB,SAAY,EAAE,YAAY,OAAO,YAAY,IAAI,CAAC;AAAA,IAC/E;AAEA,WAAO,KAAK,cAAc,UAAU,OAAO,MAAM,IAAI;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,SAAS,UAA8C;AAC3D,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,QAAO;AAClB,UAAM,SAAS,MAAM,KAAK,eAAe,KAAK,IAAI;AAClD,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,aAAa,UAAkB,mBAAmB,MAA8B;AACpF,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,QAAO;AAElB,UAAM,SAAS,MAAM,KAAK,eAAe,KAAK,IAAI;AAClD,QAAI,CAAC,OAAQ,QAAO;AAGpB,QAAI,OAAO,KAAK,eAAe,EAAG,QAAO;AACzC,QAAI,CAAC,KAAK,MAAM,WAAY,QAAO;AAEnC,UAAM,UAAU,GAAG,KAAK,IAAI;AAC5B,WAAO,KAAK,MAAM,WAAW,KAAK,OAAO,gBAAgB,SAAS,gBAAgB;AAAA,EACpF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,gBAAgB,UAAkB,gBAAoD;AAC1F,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,QAAO;AAElB,UAAM,SAAS,MAAM,KAAK,eAAe,KAAK,IAAI;AAClD,QAAI,CAAC,OAAQ,QAAO;AAGpB,UAAM,OAAO,MAAM,eAAe,KAAK;AACvC,UAAM,WAAW,KAAK,MAAM,IAAI;AAEhC,UAAM,UAAU,KAAK,YAAY,MAAM,KAAK,OAAO,OAAO,IAAI;AAC9D,QAAI,CAAC,SAAS;AACZ,aAAO,KAAK,cAAc,MAAM,OAAO,MAAM,EAAE,QAAQ,KAAK,CAAC;AAAA,IAC/D;AAGA,UAAM,MAAM,SAAS,KAAK,MAAM,GAA
G,OAAO,KAAK,UAAU;AACzD,UAAM,EAAE,qBAAqB,WAAW,IAAI,MAAM;AAClD,UAAM,YAAY,MAAM,WAAW,SAAS,KAAK,SAAS,OAAO,SAAS,GAAG;AAC7E,UAAM,YAAY,OAAO,KAAK,gBAAgB,SAC1C,MAAM,gBAAgB,SAAS,IAC/B;AAEJ,UAAM,OAAO,IAAI,eAA2B;AAAA,MAC1C,MAAM,YAAY;AAChB,mBAAW,QAAQ,SAAS;AAC5B,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AAED,UAAM,WAAW,KAAK;AACtB,WAAO,IAAI,SAAS,MAAM;AAAA,MACxB,SAAS;AAAA,QACP,gBAAgB,KAAK,YAAY;AAAA,QACjC,kBAAkB,OAAO,KAAK,IAAI;AAAA,QAClC,QAAQ,IAAI,KAAK,IAAI;AAAA,QACrB,uBAAuB,qBAAqB,QAAQ;AAAA,QACpD,iBAAiB,IAAI,KAAK,KAAK,UAAU,EAAE,YAAY;AAAA,MACzD;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA,EAIA,MAAc,cACZ,MACA,MACA,MACmB;AACnB,UAAM,iBAAiB,KAAK,eAAe,KAAK,IAAI;AAGpD,UAAM,OAAO,IAAI,eAA2B;AAAA,MAC1C,MAAM,MAAM,YAAY;AACtB,YAAI;AACF,gBAAM,SAAS,MAAM,eAAe,IAAI;AACxC,qBAAW,QAAQ,MAAM;AACzB,qBAAW,MAAM;AAAA,QACnB,SAAS,KAAK;AACZ,qBAAW,MAAM,GAAG;AAAA,QACtB;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,WAAW,MAAM,YAAY,KAAK;AACxC,UAAM,cAAc,MAAM,SACtB,qBAAqB,QAAQ,MAC7B,yBAAyB,QAAQ;AAErC,WAAO,IAAI,SAAS,MAAM;AAAA,MACxB,SAAS;AAAA,QACP,gBAAgB,KAAK,YAAY;AAAA,QACjC,kBAAkB,OAAO,KAAK,IAAI;AAAA,QAClC,QAAQ,IAAI,KAAK,IAAI;AAAA,QACrB,uBAAuB;AAAA,QACvB,iBAAiB,IAAI,KAAK,KAAK,UAAU,EAAE,YAAY;AAAA,MACzD;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAMA,eAAe,eAAe,MAAmC;AAC/D,SAAO,UAAU,IAAI;AACvB;;;AE12BO,SAAS,YAA0B;AACxC,SAAO;AAAA,IACL,SAAS,MAAM;AACb,aAAO,IAAI,QAAQ,IAAI;AAAA,IACzB;AAAA,EACF;AACF;;;ACTA;AAuBO,IAAM,iCAAiC;AAsE9C,eAAsB,cACpB,KACA,UAA6B,CAAC,GACH;AAC3B,QAAM,MAAM,QAAQ,OAAO,oBAAI,KAAK;AACpC,QAAM,eAAe,QAAQ,gBAAgB;AAC7C,QAAM,SAAS,QAAQ,WAAW;AAElC,QAAM,iBAAiB,MAAM,IAAI,gBAAgB;AACjD,QAAM,eAAqE,CAAC;AAC5E,MAAI,UAAU;AACd,MAAI,UAAU;AACd,MAAI,eAAe;AACnB,MAAI,wBAAwB;AAE5B,QAAO,YAAW,kBAAkB,gBAAgB;AAClD,QAAI,eAAe,WAAW,GAAG,EAAG;AACpC,UAAM,SAAS,IAAI,cAAc,cAAc;AAC/C,QAAI,CAAC,OAAQ;AACb,UAAM,kBAAkB,OAAO,KAAK,MAAM;AAC1C,QAAI,gBAAgB,WAAW,EAAG;AAClC,6BAAyB;AACzB,iBAAa,cAAc,IAAI,EAAE,SAAS,GAAG,SAAS,EAAE;AAExD,UAAM,MAAM,MAAM,IAAI,YAAY,cAAc;AAChD,eAAW,YAAY,KAAK;AAC1B,UAAI,WAAW,aAAc,OAAM;AAEnC,YAAM,SAAS,MAAM,IAAI,UAAU,gBAAgB,QAAQ,EAAE,MAAM,MAAM,IAAI;AAC7E,UAAI,WAAW,KAAM;AACrB,iBAAW;AACX,mBAAa,cAAc,EAAE,WAAW;AAExC,YAAM,QAAQ,MAAM,IAAI,UAAU,gBAAgB,QAAQ,EAAE,MAAM,MAAM,CAAC,CAAC;AAC1E,iBAAW,QAAQ,OAAO;AACxB,YAAI,WAAW,aAAc,OAAM;AACnC,cAAM,SAAS,OAAO,KAAK,IAAI;AAC/B,YAAI,CAAC,OAAQ;AAEb,cAAM,SAAS,eAAe,QAAQ,QAAQ,MAAM,GAAG;AACvD,YAAI,CAAC,OAAQ;AAEb,YAAI,CAAC,QAAQ;AACX,gBAAM,IAAI,WAAW,gBAAgB,UAAU,KAAK,IAAI;AACxD,gBAAM,gBAAgB,KAAK;AAAA,YACzB,IAAI,mBAAmB,gBAAgB,UAAU,KAAK,IAAI;AAAA,YAC1D,YAAY;AAAA,YACZ;AAAA,YACA,UAAU,KAAK;AAAA,YACf,UAAU,KAAK;AAAA,YACf;AAAA,YACA,WAAW,IAAI,YAAY;AAAA,YAC3B,OAAO,IAAI;AAAA,UACb,CAAC;AACD,0BAAgB;AAAA,QAClB;AACA,mBAAW;AACX,qBAAa,cAAc,EAAE,WAAW;AAAA,MAC1C;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,aAAa;AAAA,IACb;AAAA,IACA;AAAA,EACF;AACF;AAEA,SAAS,eACP,QACA,QACA,MACA,KACqC;AACrC,MAAI,eAAe;AACnB,MAAI,qBAAqB;AAEzB,MAAI,OAAO,eAAe,UAAa,OAAO,aAAa,GAAG;AAC5D,UAAM,aAAa,KAAK,MAAM,KAAK,UAAU;AAC7C,QAAI,OAAO,SAAS,UAAU,GAAG;AAC/B,YAAM,QAAQ,IAAI,QAAQ,IAAI;AAC9B,YAAM,UAAU,OAAO,aAAa;AACpC,UAAI,QAAQ,QAAS,gBAAe;AAAA,IACtC;AAAA,EACF;AAEA,MAAI,OAAO,WAAW;AACpB,QAAI;AACF,UAAI,OAAO,UAAU,MAAM,EAAG,sBAAqB;AAAA,IACrD,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,MAAI,gBAAgB,mBAAoB,QAAO;AAC/C,MAAI,aAAc,QAAO;AACzB,MAAI,mBAAoB,QAAO;AAC/B,SAAO;AACT;AAEA,SAAS,mBAAmB,YAAoB,UAAkB,UAA0B;AAC1F,QAAM,OAAO,WAAW,OAAO,gBAAgB,IAAI,WAAW,CAAC,CAAC;AAChE,MAAI,SAAS;AACb,aAAW,KAAK,KAAM,WAAU,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAC9D,SAAO,GAAG,UAAU,KAAK,QAAQ,KAAK,QAAQ,KAAK,MAAM;AAC3D;AAEA,eAAe,gBAAgB,KAAwB,OAAyC;AAC9F,QAAM,OAAO,KAAK,UAAU,KAAK;AACjC,MAAI;AACJ,MAAI,IAAI,WAAW;AACjB,UAAM,MAAM,MAAM,IAAI,OAAO,8BAA8B;AAC3D,UAAM,EAAE,IAAI,KAAK
,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,eAAW;AAAA,MACT,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,KAAK,MAAM;AAAA,MACX,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK,MAAM;AAAA,IACb;AAAA,EACF,OAAO;AACL,eAAW;AAAA,MACT,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,KAAK,MAAM;AAAA,MACX,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK,MAAM;AAAA,IACb;AAAA,EACF;AACA,QAAM,IAAI,QAAQ,IAAI,IAAI,OAAO,gCAAgC,MAAM,IAAI,QAAQ;AACrF;;;ACrKO,IAAM,0BAAN,cAAsC,MAAM;AAAA,EACjD,YAAY,QAAgB;AAC1B,UAAM,wBAAwB,MAAM,EAAE;AACtC,SAAK,OAAO;AAAA,EACd;AACF;AAIO,IAAM,0BAA0B;AAmBhC,SAAS,wBACd,OACA,2BACA,eACA,YACA,SACmB;AACnB,MAAI,UAAU;AAEd,QAAM,QAAQ,MAAY;AACxB,cAAU;AAAA,EACZ;AAEA,MAAI,QAAQ,QAAQ;AAClB,QAAI,QAAQ,OAAO,QAAS,WAAU;AACtC,YAAQ,OAAO,iBAAiB,SAAS,MAAM;AAAE,gBAAU;AAAA,IAAK,CAAC;AAAA,EACnE;AAEA,WAAS,aAAmB;AAC1B,QAAI,QAAS,OAAM,IAAI,wBAAwB,mBAAmB;AAAA,EACpE;AAEA,QAAM,YAAY,QAAQ,cAAc,IAAI,IAAI,QAAQ,WAAW,IAAI;AAIvE,MAAI,eAAqC;AACzC,WAAS,iBAAgC;AACvC,QAAI,CAAC,cAAc;AACjB,qBAAe,WAAW;AAAA,QACxB,IAAI,gBAAgB;AAAA,QACpB,WAAW;AAAA,QACX;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,aAAa,QAAQ,eAAe;AAAA,QACpC,WAAW,QAAQ,QAAQ,KAAK;AAAA,QAChC,aAAa,QAAQ,eAAe;AAAA,MACtC,CAAC;AAAA,IACH;AACA,WAAO;AAAA,EACT;AAEA,kBAAgB,WAAyC;AACvD,UAAM,eAAe;AACrB,eAAW;AAGX,UAAM,iBAAiB,MAAM,0BAA0B;AACvD,UAAM,UAAU,eAAe,OAAO,UAAQ;AAC5C,UAAI,KAAK,WAAW,GAAG,EAAG,QAAO;AACjC,UAAI,aAAa,CAAC,UAAU,IAAI,IAAI,EAAG,QAAO;AAC9C,aAAO;AAAA,IACT,CAAC;AAED,QAAI,kBAAkB,QAAQ,gBAAgB;AAE9C,eAAW,kBAAkB,SAAS;AACpC,UAAI,QAAS;AAEb,YAAM,OAAO,cAAuC,cAAc;AAClE,YAAM,UAAU,MAAM,KAAK,KAAK,EAAE,MAAM,MAAM,CAAC,CAAC;AAChD,iBAAW,UAAU,SAAS;AAC5B,YAAI,QAAS;AACb,mBAAW;AAEX,cAAM,UAAW,OAA4B;AAC7C,YAAI,OAAO,YAAY,SAAU;AAEjC,YAAI,QAAQ,SAAS,CAAC,QAAQ,MAAM,QAAQ,EAAE,YAAY,gBAAgB,IAAI,QAAQ,CAAC,EAAG;AAE1F,cAAM,UAAU,KAAK,KAAK,OAAO;AACjC,cAAM,QAAQ,MAAM,QAAQ,KAAK,EAAE,MAAM,MAAM,CAAC,CAAe;AAC/D,mBAAW,QAAQ,OAAO;AACxB,cAAI,QAAS;AAEb,cAAI,CAAC,iBAAiB;AACpB,gBAAI,KAAK,SAAS,QAAQ,aAAa;AACrC,gCAAkB;AAAA,YACpB;AACA;AAAA,UACF;AAEA,gBAAM,QAAQ,MAAM,QAAQ,IAAI,KAAK,IAAI;AACzC,cAAI,CAAC,MAAO;AAEZ,gBAAM,OAAqB;AAAA,YACzB,QAAQ,KAAK;AAAA,YACb,WAAW,EAAE,YAAY,gBAAgB,IAAI,SAAS,MAAM,KAAK,KAAK;AAAA,YACtE;AAAA,YACA,MAAM;AAAA,cACJ,MAAM,KAAK;AAAA,cACX,UAAU,KAAK;AAAA,cACf,GAAI,KAAK,aAAa,UAAa,EAAE,UAAU,KAAK,SAAS;AAAA,cAC7D,GAAI,KAAK,eAAe,UAAa,EAAE,WAAW,KAAK,WAAW;AAAA,YACpE;AAAA,UACF;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAA4B;AAAA,IAChC;AAAA,IACA,IAAI,UAAU;AAAE,aAAO;AAAA,IAAQ;AAAA,IAC/B,CAAC,OAAO,aAAa,GAAG,MAAM,SAAS;AAAA,EACzC;AACA,SAAO;AACT;AAIA,SAAS,kBAA0B;AAEjC,QAAM,MAAM,WAAW,OAAO,gBAAgB,IAAI,WAAW,EAAE,CAAC;AAChE,MAAI,IAAI;AACR,aAAW,KAAK,IAAK,MAAK,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AACxD,SAAO,SAAS,KAAK,IAAI,EAAE,SAAS,EAAE,CAAC,IAAI,EAAE,MAAM,GAAG,EAAE,CAAC;AAC3D;","names":[]}
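Taken together, the embedded sources in this chunk describe the blob surface added in this release: `withBlobs()` wires a blob strategy into `createNoydb`, per-slot retention is evaluated by the compaction walk (surfaced to consumers as `vault.compact()`), and `vault.exportBlobs()` yields an abortable, resumable async iterator over decrypted blob bytes. The sketch below is illustrative only, assembled from the docstrings above rather than from a published guide: `store`, `user`, `secret`, `sink`, and `demo` are placeholders, and passing `CompactRunOptions` (e.g. `dryRun`) through `vault.compact()` is an assumption based on the `runCompaction` signature, not a documented guarantee.

```ts
import { createNoydb } from '@noy-db/hub'
import { withBlobs } from '@noy-db/hub/blobs'

// Placeholders — whatever store adapter / identity the consumer already uses.
// None of these are defined by this diff.
declare const store: unknown
declare const user: string
declare const secret: string

// Hypothetical sink — stands in for a zip stream, S3 multipart upload, USB copy, …
declare function sink(
  ref: { collection: string; id: string; slot: string },
  bytes: Uint8Array,
): Promise<void>

async function demo(bytes: Uint8Array, signal: AbortSignal): Promise<void> {
  // Opt in to the blob path; consumers that never import the subpath ship none of it.
  const db = await createNoydb({ store, user, secret, blobStrategy: withBlobs() })
  const vault = db.vault('acme')

  // Attach a blob to a record (the path shown in the withBlobs() docstring).
  await vault.collection('invoices').blob('inv-1').put('receipt.pdf', bytes)

  // Consumer-scheduled compaction. `dryRun` mirrors CompactRunOptions;
  // whether vault.compact() forwards these options is assumed here.
  const preview = await vault.compact({ dryRun: true })
  console.log(`${preview.evicted} slot(s) would evict across ${preview.collections} collection(s)`)

  // Bulk export: abortable via the external signal, resumable by blobId (the HMAC-keyed eTag).
  const handle = vault.exportBlobs({ collections: ['invoices'], signal })
  let lastBlobId: string | undefined
  try {
    for await (const blob of handle) {
      lastBlobId = blob.blobId
      await sink(blob.recordRef, blob.bytes)
    }
  } catch (err) {
    // On abort, re-run later with { afterBlobId: lastBlobId } to resume after the last delivered blob.
    if ((err as Error).name !== 'ExportBlobsAbortedError') throw err
  }
}
```

Compaction here is consumer-scheduled — the sources are explicit that noy-db never runs a background daemon — so a `dryRun` preview is the natural first step before wiring the call into a cron job or a manual "tidy" action.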
+ 1 |
{"version":3,"sources":["../../src/errors.ts","../../src/crypto.ts","../../src/blobs/index.ts","../../src/types.ts","../../src/blobs/blob-set.ts","../../src/blobs/mime-magic.ts","../../src/blobs/active.ts","../../src/blobs/blob-compaction.ts","../../src/blobs/export-blobs.ts"],"sourcesContent":["/**\n * All NOYDB error classes — a single import surface for `catch` blocks and\n * `instanceof` checks.\n *\n * ## Class hierarchy\n *\n * ```\n * Error\n * └─ NoydbError (code: string)\n * ├─ Crypto errors\n * │ ├─ DecryptionError — AES-GCM tag failure\n * │ ├─ TamperedError — ciphertext modified after write\n * │ └─ InvalidKeyError — wrong passphrase / corrupt keyring\n * ├─ Access errors\n * │ ├─ NoAccessError — no DEK for this collection\n * │ ├─ ReadOnlyError — ro permission, write attempted\n * │ ├─ PermissionDeniedError — role too low for operation\n * │ ├─ PrivilegeEscalationError — grant wider than grantor holds\n * │ └─ StoreCapabilityError — optional store method missing\n * ├─ Sync errors\n * │ ├─ ConflictError — optimistic-lock version mismatch\n * │ ├─ BundleVersionConflictError — bundle push rejected by remote\n * │ └─ NetworkError — push/pull network failure\n * ├─ Data errors\n * │ ├─ NotFoundError — get(id) on missing record\n * │ ├─ ValidationError — application-level guard failed\n * │ └─ SchemaValidationError — Standard Schema v1 rejection\n * ├─ Query errors\n * │ ├─ JoinTooLargeError — join row ceiling exceeded\n * │ ├─ DanglingReferenceError — strict ref() points at nothing\n * │ ├─ GroupCardinalityError — groupBy bucket cap exceeded\n * │ ├─ IndexRequiredError — lazy-mode query touches unindexed field\n * │ └─ IndexWriteFailureError — index side-car put/delete failed post-main\n * ├─ i18n / Dictionary errors\n * │ ├─ ReservedCollectionNameError\n * │ ├─ DictKeyMissingError\n * │ ├─ DictKeyInUseError\n * │ ├─ MissingTranslationError\n * │ ├─ LocaleNotSpecifiedError\n * │ └─ TranslatorNotConfiguredError\n * ├─ Backup errors\n * │ ├─ BackupLedgerError — hash-chain verification failed\n * │ └─ BackupCorruptedError — envelope hash mismatch in dump\n * ├─ Bundle errors\n * │ └─ BundleIntegrityError — .noydb body sha256 mismatch\n * └─ Session errors\n * ├─ SessionExpiredError\n * ├─ SessionNotFoundError\n * └─ SessionPolicyError\n * ```\n *\n * ## Catching all NOYDB errors\n *\n * ```ts\n * import { NoydbError, InvalidKeyError, ConflictError } from '@noy-db/hub'\n *\n * try {\n * await vault.unlock(passphrase)\n * } catch (e) {\n * if (e instanceof InvalidKeyError) { showBadPassphraseUI(); return }\n * if (e instanceof NoydbError) { logToSentry(e.code, e); return }\n * throw e // unexpected — re-throw\n * }\n * ```\n *\n * @module\n */\n\n/**\n * Base class for all NOYDB errors.\n *\n * Every error thrown by `@noy-db/hub` extends this class, so consumers can\n * catch all NOYDB errors in a single `catch (e) { if (e instanceof NoydbError) ... }`\n * block. The `code` field is a machine-readable string (e.g. `'DECRYPTION_FAILED'`)\n * suitable for `switch` statements and logging pipelines.\n */\nexport class NoydbError extends Error {\n /** Machine-readable error code. Stable across library versions. 
*/\n readonly code: string\n\n constructor(code: string, message: string) {\n super(message)\n this.name = 'NoydbError'\n this.code = code\n }\n}\n\n// ─── Crypto Errors ─────────────────────────────────────────────────────\n\n/**\n * Thrown when AES-GCM decryption fails.\n *\n * The most common cause is a wrong passphrase or a corrupted ciphertext.\n * A `DecryptionError` at the wrong passphrase level is caught internally\n * and re-thrown as `InvalidKeyError` — so in practice this surfaces for\n * per-record corruption rather than authentication failures.\n */\nexport class DecryptionError extends NoydbError {\n constructor(message = 'Decryption failed') {\n super('DECRYPTION_FAILED', message)\n this.name = 'DecryptionError'\n }\n}\n\n/**\n * Thrown when GCM tag verification fails, indicating the ciphertext was\n * modified after encryption.\n *\n * AES-256-GCM is authenticated encryption — the tag over the ciphertext\n * is checked on every decrypt. If any byte was flipped (accidental\n * corruption or deliberate tampering), decryption throws this error.\n * Treat it as a security alert: the stored bytes are not what NOYDB wrote.\n */\nexport class TamperedError extends NoydbError {\n constructor(message = 'Data integrity check failed — record may have been tampered with') {\n super('TAMPERED', message)\n this.name = 'TamperedError'\n }\n}\n\n/**\n * Thrown when key unwrapping fails, typically because the passphrase is wrong\n * or the keyring file is corrupted.\n *\n * NOYDB uses AES-KW (RFC 3394) to wrap DEKs with the KEK. If AES-KW\n * unwrapping fails, it means either the KEK was derived from the wrong\n * passphrase (PBKDF2 with 600K iterations) or the keyring bytes are\n * corrupted. This is the error shown to the user on a failed unlock attempt.\n */\nexport class InvalidKeyError extends NoydbError {\n constructor(message = 'Invalid key — wrong passphrase or corrupted keyring') {\n super('INVALID_KEY', message)\n this.name = 'InvalidKeyError'\n }\n}\n\n// ─── Access Errors ─────────────────────────────────────────────────────\n\n/**\n * Thrown when the authenticated user does not have a DEK for the requested\n * collection — i.e. the collection is not in their keyring at all.\n *\n * This is the \"no key for this door\" error. It is different from\n * `ReadOnlyError` (user has a key but it only grants ro) and from\n * `PermissionDeniedError` (user's role doesn't allow the operation).\n */\nexport class NoAccessError extends NoydbError {\n constructor(message = 'No access — user does not have a key for this collection') {\n super('NO_ACCESS', message)\n this.name = 'NoAccessError'\n }\n}\n\n/**\n * Thrown when a user with read-only (`ro`) permission attempts a write\n * operation (`put` or `delete`) on a collection.\n *\n * The user has a DEK for the collection (they can decrypt and read), but\n * their keyring grants only `ro`. To fix: re-grant the user with `rw`\n * permission, or do not attempt writes as a viewer/client role.\n */\nexport class ReadOnlyError extends NoydbError {\n constructor(message = 'Read-only — user has ro permission on this collection') {\n super('READ_ONLY', message)\n this.name = 'ReadOnlyError'\n }\n}\n\n/**\n * Thrown when a write is attempted against a historical view produced\n * by `vault.at(timestamp)`. 
Time-machine views are read-only by\n * contract — mutating the past would require either the shadow-vault\n * mechanism or a ledger-history rewrite (which breaks\n * the tamper-evidence guarantee).\n *\n * Distinct from {@link ReadOnlyError} (keyring-level) and\n * {@link PermissionDeniedError} (role-level): this error is about the\n * *view* being historical, independent of the caller's permissions.\n */\nexport class ReadOnlyAtInstantError extends NoydbError {\n constructor(operation: string, timestamp: string) {\n super(\n 'READ_ONLY_AT_INSTANT',\n `Cannot ${operation}() on a vault view anchored at ${timestamp} — time-machine views are read-only`,\n )\n this.name = 'ReadOnlyAtInstantError'\n }\n}\n\n/**\n * Thrown when a write is attempted against a shadow-vault frame\n * produced by `vault.frame()`. Frames are read-only by contract —\n * the use case is screen-sharing / demos / compliance review where\n * the operator wants to prevent accidental edits.\n *\n * Behavioural enforcement only — the underlying keyring still holds\n * write-capable DEKs. See {@link VaultFrame} for the full caveat.\n */\nexport class ReadOnlyFrameError extends NoydbError {\n constructor(operation: string) {\n super(\n 'READ_ONLY_FRAME',\n `Cannot ${operation}() on a vault frame — frames are read-only presentations of the current vault`,\n )\n this.name = 'ReadOnlyFrameError'\n }\n}\n\n/**\n * Thrown when the authenticated user's role does not permit the requested\n * operation — e.g. a `viewer` calling `grantAccess()`, or an `operator`\n * calling `rotateKeys()`.\n *\n * This is a role-level check (what the user's role allows), distinct from\n * `NoAccessError` (collection not in keyring) and `ReadOnlyError` (in\n * keyring, but write not allowed).\n */\nexport class PermissionDeniedError extends NoydbError {\n constructor(message = 'Permission denied — insufficient role for this operation') {\n super('PERMISSION_DENIED', message)\n this.name = 'PermissionDeniedError'\n }\n}\n\n/**\n * Thrown when an `@noy-db/as-*` export is attempted without the\n * required capability bit on the invoking keyring.\n *\n * Two sub-cases discriminated by the `tier` field:\n *\n * - `tier: 'plaintext'` — a plaintext-tier export (`as-xlsx`,\n * `as-csv`, `as-blob`, `as-zip`, …) was attempted but the\n * keyring's `exportCapability.plaintext` does not include the\n * requested `format` (nor the `'*'` wildcard). Default for every\n * role is `plaintext: []` — the owner must positively grant.\n * - `tier: 'bundle'` — an encrypted `as-noydb` bundle export was\n * attempted but the keyring's `exportCapability.bundle` is\n * `false`. Default for `owner`/`admin` is `true`; for\n * `operator`/`viewer`/`client` it is `false`.\n *\n * Distinct from `PermissionDeniedError` (role-level check) and\n * `NoAccessError` (collection not readable). Surfaces separately so\n * UI layers can show a \"request the export capability from your\n * admin\" flow rather than a generic permission error.\n */\nexport class ExportCapabilityError extends NoydbError {\n readonly tier: 'plaintext' | 'bundle'\n readonly format?: string\n readonly userId: string\n\n constructor(opts: {\n tier: 'plaintext' | 'bundle'\n userId: string\n format?: string\n message?: string\n }) {\n const msg =\n opts.message ??\n (opts.tier === 'plaintext'\n ? `Export capability denied — keyring \"${opts.userId}\" is not granted plaintext-export capability for format \"${opts.format ?? '<unknown>'}\". 
Ask a vault owner or admin to grant it via vault.grant({ exportCapability: { plaintext: ['${opts.format ?? '<format>'}'] } }).`\n : `Export capability denied — keyring \"${opts.userId}\" is not granted encrypted-bundle export capability. Ask a vault owner or admin to grant it via vault.grant({ exportCapability: { bundle: true } }).`)\n super('EXPORT_CAPABILITY', msg)\n this.name = 'ExportCapabilityError'\n this.tier = opts.tier\n this.userId = opts.userId\n if (opts.format !== undefined) this.format = opts.format\n }\n}\n\n/**\n * Thrown when a keyring file's `expires_at` cutoff has passed.\n * Surfaced by `loadKeyring` before any DEK unwrap is attempted —\n * past the cutoff the slot refuses to open even with the right\n * passphrase. Distinct from PBKDF2 / unwrap errors so consumer code\n * can show a precise \"this bundle slot has expired\" message instead\n * of the generic decryption-failure UX.\n *\n * Used predominantly on `BundleRecipient` slots produced by\n * `writeNoydbBundle({ recipients: [...] })` to time-box audit access.\n */\nexport class KeyringExpiredError extends NoydbError {\n readonly userId: string\n readonly expiresAt: string\n constructor(opts: { userId: string; expiresAt: string }) {\n super(\n 'KEYRING_EXPIRED',\n `Keyring \"${opts.userId}\" expired at ${opts.expiresAt}. ` +\n 'The slot refuses to unlock past its expiry timestamp.',\n )\n this.name = 'KeyringExpiredError'\n this.userId = opts.userId\n this.expiresAt = opts.expiresAt\n }\n}\n\n/**\n * Thrown when an `@noy-db/as-*` import is attempted but the invoking\n * keyring lacks the required import-capability bit (issue ).\n *\n * - `tier: 'plaintext'` — a plaintext-tier import (`as-csv`, `as-json`,\n * `as-ndjson`, `as-zip`, …) was attempted but the keyring's\n * `importCapability.plaintext` does not include the requested\n * `format` (nor the `'*'` wildcard).\n * - `tier: 'bundle'` — a `.noydb` bundle import was attempted but the\n * keyring's `importCapability.bundle` is not `true`.\n *\n * Default for every role on every dimension is closed — owners and\n * admins must positively grant the capability. Distinct from\n * `PermissionDeniedError` and `NoAccessError` so UI layers can show a\n * specific \"request the import capability\" flow.\n */\nexport class ImportCapabilityError extends NoydbError {\n readonly tier: 'plaintext' | 'bundle'\n readonly format?: string\n readonly userId: string\n\n constructor(opts: {\n tier: 'plaintext' | 'bundle'\n userId: string\n format?: string\n message?: string\n }) {\n const msg =\n opts.message ??\n (opts.tier === 'plaintext'\n ? `Import capability denied — keyring \"${opts.userId}\" is not granted plaintext-import capability for format \"${opts.format ?? '<unknown>'}\". Ask a vault owner or admin to grant it via vault.grant({ importCapability: { plaintext: ['${opts.format ?? '<format>'}'] } }).`\n : `Import capability denied — keyring \"${opts.userId}\" is not granted encrypted-bundle import capability. 
Ask a vault owner or admin to grant it via vault.grant({ importCapability: { bundle: true } }).`)\n super('IMPORT_CAPABILITY', msg)\n this.name = 'ImportCapabilityError'\n this.tier = opts.tier\n this.userId = opts.userId\n if (opts.format !== undefined) this.format = opts.format\n }\n}\n\n/**\n * Thrown when a grant would give the grantee a permission the grantor\n * does not themselves hold — the \"admin cannot grant what admin cannot\n * do\" rule from the admin-delegation work.\n *\n * Distinct from `PermissionDeniedError` so callers can tell the two\n * cases apart in logs and tests:\n *\n * - `PermissionDeniedError` — \"you are not allowed to perform this\n * operation at all\" (wrong role).\n * - `PrivilegeEscalationError` — \"you are allowed to grant, but not\n * with these specific permissions\" (widening attempt).\n *\n * Under the admin model the grantee of an admin-grants-admin call\n * inherits the caller's entire DEK set by construction, so this error\n * is structurally unreachable in typical flows. The check and error\n * class exist so that future per-collection admin scoping cannot\n * accidentally bypass the subset rule — the guard is already wired in.\n *\n * `offendingCollection` carries the first collection name that failed\n * the subset check, to make the violation actionable in error output.\n */\n/**\n * Thrown when a caller invokes an API that requires an optional\n * store capability the active store does not implement.\n *\n * Today the only call site is `Noydb.listAccessibleVaults()`,\n * which depends on the optional `NoydbStore.listVaults()`\n * method. The error message names the missing method and the calling\n * API so consumers know exactly which combination is unsupported,\n * and the `capability` field is machine-readable so library code can\n * pattern-match in catch blocks (e.g. fall back to a candidate-list\n * shape).\n *\n * The class lives in `errors.ts` rather than as a generic\n * `ValidationError` because the diagnostic shape is different: a\n * `ValidationError` says \"the inputs you passed are wrong\"; this\n * error says \"the inputs are fine, but the store you wired up\n * doesn't support what you're asking for.\" Different fix, different\n * documentation.\n */\nexport class StoreCapabilityError extends NoydbError {\n /** The store method/capability that was missing. */\n readonly capability: string\n\n constructor(capability: string, callerApi: string, storeName?: string) {\n super(\n 'STORE_CAPABILITY',\n `${callerApi} requires the optional store capability \"${capability}\" ` +\n `but the active store${storeName ? ` (${storeName})` : ''} does not implement it. 
` +\n `Use a store that supports \"${capability}\" (store-memory, store-file) or pass an explicit ` +\n `vault list to bypass enumeration.`,\n )\n this.name = 'StoreCapabilityError'\n this.capability = capability\n }\n}\n\nexport class PrivilegeEscalationError extends NoydbError {\n readonly offendingCollection: string\n\n constructor(offendingCollection: string, message?: string) {\n super(\n 'PRIVILEGE_ESCALATION',\n message ??\n `Privilege escalation: grantor has no DEK for collection \"${offendingCollection}\" and cannot grant access to it.`,\n )\n this.name = 'PrivilegeEscalationError'\n this.offendingCollection = offendingCollection\n }\n}\n\n/**\n * Thrown by `Collection.put` / `.delete` when the target record's\n * envelope `_ts` falls within a closed accounting period.\n *\n * Distinct from `ReadOnlyError` (keyring-level), `ReadOnlyAtInstantError`\n * (historical view), and `ReadOnlyFrameError` (shadow vault): this\n * error is about the STORED RECORD being sealed by an operator call\n * to `vault.closePeriod()`, independent of caller permissions or\n * view type. The `periodName` and `endDate` fields name the sealing\n * period so audit UIs can surface a \"this record is locked in\n * FY2026-Q1 (closed 2026-03-31)\" message without parsing the error\n * string.\n *\n * To apply a correction after close, book a compensating entry in a\n * new period rather than unlocking the old one. Re-opening a closed\n * period is deliberately unsupported.\n */\nexport class PeriodClosedError extends NoydbError {\n readonly periodName: string\n readonly endDate: string\n readonly recordTs: string\n\n constructor(periodName: string, endDate: string, recordTs: string) {\n super(\n 'PERIOD_CLOSED',\n `Cannot modify record (last written ${recordTs}) — sealed by closed period ` +\n `\"${periodName}\" (endDate: ${endDate}). Post a compensating entry in a ` +\n `new period instead.`,\n )\n this.name = 'PeriodClosedError'\n this.periodName = periodName\n this.endDate = endDate\n this.recordTs = recordTs\n }\n}\n\n// ─── Hierarchical Access Errors ─────────────────────\n\n/**\n * Thrown when a user tries to act at a tier they are not cleared for.\n *\n * This is the umbrella error for tier write refusals:\n * - `put({ tier: N })` when the user's keyring lacks tier-N DEK.\n * - `elevate(id, N)` when the caller cannot reach tier N.\n *\n * Distinct from `TierAccessDeniedError` which covers *read* refusals on\n * the invisibility/ghost path.\n */\nexport class TierNotGrantedError extends NoydbError {\n readonly tier: number\n readonly collection: string\n\n constructor(collection: string, tier: number) {\n super(\n 'TIER_NOT_GRANTED',\n `User has no DEK for tier ${tier} in collection \"${collection}\"`,\n )\n this.name = 'TierNotGrantedError'\n this.collection = collection\n this.tier = tier\n }\n}\n\n/**\n * Thrown when an elevated-handle operation runs after the elevation's\n * TTL expired. Reads continue at the original tier; only writes\n * through the scoped handle flip to throwing once expired.\n */\nexport class ElevationExpiredError extends NoydbError {\n readonly tier: number\n readonly expiresAt: number\n\n constructor(opts: { tier: number; expiresAt: number }) {\n super(\n 'ELEVATION_EXPIRED',\n `Elevation to tier ${opts.tier} expired at ${new Date(opts.expiresAt).toISOString()}`,\n )\n this.name = 'ElevationExpiredError'\n this.tier = opts.tier\n this.expiresAt = opts.expiresAt\n }\n}\n\n/**\n * Thrown by `vault.elevate(...)` when an elevation is already active\n * on the vault. 
Adopters must `release()` the existing handle before\n * starting a new elevation.\n */\nexport class AlreadyElevatedError extends NoydbError {\n readonly activeTier: number\n\n constructor(activeTier: number) {\n super(\n 'ALREADY_ELEVATED',\n `Vault is already elevated to tier ${activeTier}; release the existing handle first`,\n )\n this.name = 'AlreadyElevatedError'\n this.activeTier = activeTier\n }\n}\n\n/**\n * Thrown when `demote()` is called by someone who is not the original\n * elevator and not an owner.\n */\nexport class TierDemoteDeniedError extends NoydbError {\n constructor(id: string, tier: number) {\n super(\n 'TIER_DEMOTE_DENIED',\n `Only the original elevator or an owner can demote record \"${id}\" from tier ${tier}`,\n )\n this.name = 'TierDemoteDeniedError'\n }\n}\n\n/**\n * Thrown when `db.delegate()` is called against a user that has no\n * keyring in the target vault — the delegation token cannot be\n * constructed without the target user's KEK wrap.\n */\nexport class DelegationTargetMissingError extends NoydbError {\n readonly toUser: string\n\n constructor(toUser: string) {\n super(\n 'DELEGATION_TARGET_MISSING',\n `Delegation target user \"${toUser}\" has no keyring in this vault`,\n )\n this.name = 'DelegationTargetMissingError'\n this.toUser = toUser\n }\n}\n\n// ─── Sync Errors ───────────────────────────────────────────────────────\n\n/**\n * Thrown when a `put()` detects an optimistic concurrency conflict.\n *\n * NOYDB uses version numbers (`_v`) for optimistic locking. If a `put()`\n * is called with `expectedVersion: N` but the stored record is at version\n * `M ≠ N`, the write is rejected and the caller must re-read, re-apply their\n * change, and retry. The `version` field carries the actual stored version\n * so callers can decide whether to retry or surface the conflict to the user.\n */\nexport class ConflictError extends NoydbError {\n /** The actual stored version at the time of conflict. */\n readonly version: number\n\n constructor(version: number, message = 'Version conflict') {\n super('CONFLICT', message)\n this.name = 'ConflictError'\n this.version = version\n }\n}\n\n/**\n * Thrown by `LedgerStore.append()` after exhausting its CAS retry\n * budget under multi-writer contention. Two browser tabs, a\n * web app + an offline mobile peer, or a server worker pool all\n * producing ledger entries against the same vault can race on the\n * \"read head, write head+1\" cycle; the optimistic-CAS retry loop\n * resolves the race for `casAtomic: true` stores, but pathological\n * contention (or a buggy peer) can still exhaust the budget. When\n * that happens, the chain is intact — the failed writer simply\n * couldn't claim a slot. Caller's choice whether to retry, queue,\n * or surface the failure to the user.\n */\nexport class LedgerContentionError extends NoydbError {\n readonly attempts: number\n\n constructor(attempts: number) {\n super(\n 'LEDGER_CONTENTION',\n `LedgerStore.append: failed to claim a chain slot after ${attempts} optimistic-CAS retries`,\n )\n this.name = 'LedgerContentionError'\n this.attempts = attempts\n }\n}\n\n/**\n * Thrown when a bundle push is rejected because the remote has been updated\n * since the local bundle was last pulled.\n *\n * Unlike `ConflictError` (per-record), this is a whole-bundle conflict —\n * the remote's bundle handle has changed. The caller must pull the new\n * bundle, merge, and re-push. 
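\n *\n * A resolution sketch; `pushBundle`, `pullBundle` and `mergeBundles` are hypothetical stand-ins for the consumer's sync surface, since only the error class is defined here:\n * ```\n * try {\n *   await pushBundle(localBundle)\n * } catch (err) {\n *   if (err instanceof BundleVersionConflictError) {\n *     const remote = await pullBundle()   // err.remoteVersion names the newer handle\n *     await pushBundle(mergeBundles(localBundle, remote))\n *   } else {\n *     throw err\n *   }\n * }\n * ```\n *\n * 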
`remoteVersion` is the handle of the newer\n * remote bundle for use in diagnostics.\n */\nexport class BundleVersionConflictError extends NoydbError {\n /** The bundle handle of the newer remote version that rejected the push. */\n readonly remoteVersion: string\n\n constructor(remoteVersion: string, message = 'Bundle version conflict — remote has been updated') {\n super('BUNDLE_VERSION_CONFLICT', message)\n this.name = 'BundleVersionConflictError'\n this.remoteVersion = remoteVersion\n }\n}\n\n/**\n * Thrown when a sync operation (push or pull) fails due to a network error.\n *\n * NOYDB's offline-first design means network errors are expected during sync.\n * Callers should catch `NetworkError`, surface connectivity status in the UI,\n * and rely on the `SyncScheduler` to retry when connectivity is restored.\n */\nexport class NetworkError extends NoydbError {\n constructor(message = 'Network error') {\n super('NETWORK_ERROR', message)\n this.name = 'NetworkError'\n }\n}\n\n// ─── Data Errors ───────────────────────────────────────────────────────\n\n/**\n * Thrown when `collection.get(id)` is called with an ID that does not exist.\n *\n * NOYDB collections are memory-first, so this error is synchronous and cheap —\n * it does not make a network round-trip. Callers that expect the record to be\n * absent should use `collection.getOrNull(id)` instead.\n */\nexport class NotFoundError extends NoydbError {\n constructor(message = 'Record not found') {\n super('NOT_FOUND', message)\n this.name = 'NotFoundError'\n }\n}\n\n/**\n * Thrown when application-level validation fails before encryption.\n *\n * Distinct from `SchemaValidationError` (Standard Schema v1 validator)\n * and `MissingTranslationError` (i18nText). `ValidationError` is the\n * general-purpose validation base — use it for custom guards in `put()`\n * hooks or store middleware.\n */\nexport class ValidationError extends NoydbError {\n constructor(message = 'Validation error') {\n super('VALIDATION_ERROR', message)\n this.name = 'ValidationError'\n }\n}\n\n/**\n * Thrown when a Standard Schema v1 validator rejects a record on\n * `put()` (input validation) or on read (output validation). Carries\n * the raw issue list so callers can render field-level errors.\n *\n * `direction` distinguishes the two cases:\n * - `'input'`: the user passed bad data into `put()`. This is a\n * normal error case that application code should handle — typically\n * by showing validation messages in the UI.\n * - `'output'`: stored data does not match the current schema. This\n * indicates a schema drift (the schema was changed without\n * migrating the existing records) and should be treated as a bug\n * — the application should not swallow it silently.\n *\n * The `issues` type is deliberately `readonly unknown[]` on this class\n * so that `errors.ts` doesn't need to import from `schema.ts` (and\n * create a dependency cycle). 
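\n *\n * A handling sketch; `invoices.put(draft)` and `renderFieldErrors` are hypothetical consumer-side names used only to show the branch on `direction`:\n * ```\n * try {\n *   await invoices.put(draft)\n * } catch (err) {\n *   if (err instanceof SchemaValidationError && err.direction === 'input') {\n *     renderFieldErrors(err.issues)   // expected case: the user passed bad data\n *   } else {\n *     throw err   // 'output' means schema drift; never swallow it\n *   }\n * }\n * ```\n *\n * 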
Callers who know they're holding a\n * `SchemaValidationError` can cast to the more precise\n * `readonly StandardSchemaV1Issue[]` from `schema.ts`.\n */\nexport class SchemaValidationError extends NoydbError {\n readonly issues: readonly unknown[]\n readonly direction: 'input' | 'output'\n\n constructor(\n message: string,\n issues: readonly unknown[],\n direction: 'input' | 'output',\n ) {\n super('SCHEMA_VALIDATION_FAILED', message)\n this.name = 'SchemaValidationError'\n this.issues = issues\n this.direction = direction\n }\n}\n\n// ─── Query DSL Errors ─────────────────────────────────────────────────\n\n/**\n * Thrown when `.groupBy().aggregate()` produces more than the hard\n * cardinality cap (default 100_000 groups).\n *\n * The cap exists because `.groupBy()` materializes one bucket per\n * distinct key value in memory, and runaway cardinality — a groupBy\n * on a high-uniqueness field like `id` or `createdAt` — is almost\n * always a query mistake rather than legitimate use. A hard error is\n * better than silent OOM: the consumer sees an actionable message\n * naming the field and the observed cardinality, with guidance to\n * either narrow the query with `.where()` or accept the ceiling\n * override.\n *\n * A separate one-shot warning fires at 10% of the cap (10_000\n * groups) so consumers get a heads-up before the hard error — same\n * pattern as `JoinTooLargeError` and the `.join()` row ceiling.\n *\n * **Not overridable.** The 100k cap is a fixed constant so\n * the failure mode is consistent across the codebase; a\n * `{ maxGroups }` override can be added later without a break if a\n * real consumer asks.\n */\nexport class GroupCardinalityError extends NoydbError {\n /** The field being grouped on. */\n readonly field: string\n /** Observed number of distinct groups at the moment the cap tripped. */\n readonly cardinality: number\n /** The cap that was exceeded. */\n readonly maxGroups: number\n\n constructor(field: string, cardinality: number, maxGroups: number) {\n super(\n 'GROUP_CARDINALITY',\n `.groupBy(\"${field}\") produced ${cardinality} distinct groups, ` +\n `exceeding the ${maxGroups}-group ceiling. This is almost always a ` +\n `query mistake — grouping on a high-uniqueness field like \"id\" or ` +\n `\"createdAt\" produces one bucket per record. Narrow the query with ` +\n `.where() before grouping, or group on a lower-cardinality field ` +\n `(status, category, clientId). 
If you genuinely need high-cardinality ` +\n `grouping, file an issue with your use case.`,\n )\n this.name = 'GroupCardinalityError'\n this.field = field\n this.cardinality = cardinality\n this.maxGroups = maxGroups\n }\n}\n\n/**\n * Thrown in lazy mode when a `.query()` / `.where()` / `.orderBy()` clause\n * references a field that does not have a declared index.\n *\n * Lazy-mode queries only work when every touched field is indexed.\n * This is deliberate — silent scan-fallback would hide the performance\n * cliff that lazy-mode indexes exist to prevent.\n *\n * Payload:\n * - `collection` — name of the collection queried\n * - `touchedFields` — every field referenced by the query (filter + order)\n * - `missingFields` — subset of `touchedFields` that have no declared index\n */\nexport class IndexRequiredError extends NoydbError {\n readonly collection: string\n readonly touchedFields: readonly string[]\n readonly missingFields: readonly string[]\n\n constructor(args: { collection: string; touchedFields: readonly string[]; missingFields: readonly string[] }) {\n super(\n 'INDEX_REQUIRED',\n `Collection \"${args.collection}\": query references unindexed fields in lazy mode ` +\n `(missing: ${args.missingFields.join(', ')}). ` +\n `Declare an index on each field, or use collection.scan() for non-indexed iteration.`,\n )\n this.name = 'IndexRequiredError'\n this.collection = args.collection\n this.touchedFields = [...args.touchedFields]\n this.missingFields = [...args.missingFields]\n }\n}\n\n/**\n * Thrown (or surfaced via the `index:write-partial` event) when one or more\n * per-indexed-field side-car writes fail after the main record write has\n * already succeeded.\n *\n * Not thrown out of `.put()` / `.delete()` directly — those succeed when the\n * main record succeeds. Instead, `IndexWriteFailureError` instances are collected\n * into the session-scoped reconcile queue and emitted on the Collection\n * emitter as `index:write-partial`.\n *\n * Payload:\n * - `recordId` — the id of the main record whose side-car writes failed\n * - `field` — the indexed field whose side-car write failed\n * - `op` — `'put'` or `'delete'`, indicating which mutation was in flight\n * - `cause` — the underlying error from the store\n */\nexport class IndexWriteFailureError extends NoydbError {\n readonly recordId: string\n readonly field: string\n readonly op: 'put' | 'delete'\n override readonly cause: unknown\n\n constructor(args: { recordId: string; field: string; op: 'put' | 'delete'; cause: unknown }) {\n super(\n 'INDEX_WRITE_FAILURE',\n `Index side-car ${args.op} failed for field \"${args.field}\" on record \"${args.recordId}\"`,\n )\n this.name = 'IndexWriteFailureError'\n this.recordId = args.recordId\n this.field = args.field\n this.op = args.op\n this.cause = args.cause\n }\n}\n\n// ─── Bundle Format Errors ─────────────────────────────────\n\n/**\n * Thrown by `readNoydbBundle()` when the body bytes don't match\n * the integrity hash declared in the bundle header — i.e. someone\n * modified the bytes between write and read.\n *\n * Distinct from a generic `Error` (which would be thrown for\n * format violations like a missing magic prefix or malformed\n * header JSON) so consumers can pattern-match the corruption case\n * and handle it differently from a producer bug. 
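\n *\n * A pattern-matching sketch (error handling is the point; the `readNoydbBundle()` call is abbreviated and `bytes` is whatever the caller loaded):\n * ```\n * try {\n *   await readNoydbBundle(bytes)\n * } catch (err) {\n *   if (err instanceof BundleIntegrityError) {\n *     // corruption: the bytes were modified, or the body cannot be restored\n *   } else {\n *     throw err   // format violation: never a valid bundle in the first place\n *   }\n * }\n * ```\n *\n * 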
A\n * `BundleIntegrityError` indicates \"the bytes you got are not\n * what was written\"; a plain `Error` from `parsePrefixAndHeader`\n * indicates \"what was written wasn't a valid bundle in the first\n * place.\"\n *\n * Also thrown when decompression fails after the integrity hash\n * passed — that's a producer bug (the wrong algorithm byte was\n * written) but it surfaces with the same error class because the\n * end result is \"the body cannot be turned back into a dump.\"\n */\nexport class BundleIntegrityError extends NoydbError {\n constructor(message: string) {\n super('BUNDLE_INTEGRITY', `.noydb bundle integrity check failed: ${message}`)\n this.name = 'BundleIntegrityError'\n }\n}\n\n// ─── i18n / Dictionary Errors ──────────────────────────\n\n/**\n * Thrown when `vault.collection()` is called with a name that is\n * reserved for NOYDB internal use (any name starting with `_dict_`).\n *\n * Dictionary collections are accessed exclusively via\n * `vault.dictionary(name)` — attempting to open one as a regular\n * collection would bypass the dictionary invariants (ACL, rename\n * tracking, reserved-name policy).\n */\nexport class ReservedCollectionNameError extends NoydbError {\n /** The rejected collection name. */\n readonly collectionName: string\n\n constructor(collectionName: string) {\n super(\n 'RESERVED_COLLECTION_NAME',\n `\"${collectionName}\" is a reserved collection name. ` +\n `Use vault.dictionary(\"${collectionName.replace(/^_dict_/, '')}\") ` +\n `to access dictionary collections.`,\n )\n this.name = 'ReservedCollectionNameError'\n this.collectionName = collectionName\n }\n}\n\n/**\n * Thrown by `DictionaryHandle.get()` and `DictionaryHandle.delete()` when\n * the requested key does not exist in the dictionary.\n *\n * Distinct from `NotFoundError` (which is for data records) so callers\n * can distinguish \"data record missing\" from \"dictionary key missing\"\n * without inspecting error messages.\n */\nexport class DictKeyMissingError extends NoydbError {\n /** The dictionary name. */\n readonly dictionaryName: string\n /** The key that was not found. */\n readonly key: string\n\n constructor(dictionaryName: string, key: string) {\n super(\n 'DICT_KEY_MISSING',\n `Dictionary \"${dictionaryName}\" has no entry for key \"${key}\".`,\n )\n this.name = 'DictKeyMissingError'\n this.dictionaryName = dictionaryName\n this.key = key\n }\n}\n\n/**\n * Thrown by `DictionaryHandle.delete()` in strict mode when the key to\n * be deleted is still referenced by one or more records.\n *\n * The caller must either rename the key first (the only sanctioned\n * mass-mutation path) or pass `{ mode: 'warn' }` to skip the check\n * (development only).\n */\nexport class DictKeyInUseError extends NoydbError {\n /** The dictionary name. */\n readonly dictionaryName: string\n /** The key that is still referenced. */\n readonly key: string\n /** Name of the first collection found to reference this key. */\n readonly usedBy: string\n /** Number of records in `usedBy` that reference this key. */\n readonly count: number\n\n constructor(\n dictionaryName: string,\n key: string,\n usedBy: string,\n count: number,\n ) {\n super(\n 'DICT_KEY_IN_USE',\n `Cannot delete key \"${key}\" from dictionary \"${dictionaryName}\": ` +\n `${count} record(s) in \"${usedBy}\" still reference it. 
` +\n `Use dictionary.rename(\"${key}\", newKey) to rewrite references first.`,\n )\n this.name = 'DictKeyInUseError'\n this.dictionaryName = dictionaryName\n this.key = key\n this.usedBy = usedBy\n this.count = count\n }\n}\n\n/**\n * Thrown by `Collection.put()` when an `i18nText` field is missing one\n * or more required translations.\n *\n * The `missing` array names each locale code that was absent from the\n * field value. The `field` property names the field so callers can\n * render a field-level error message without parsing the string.\n */\nexport class MissingTranslationError extends NoydbError {\n /** The field name whose translation(s) are missing. */\n readonly field: string\n /** Locale codes that were required but absent. */\n readonly missing: readonly string[]\n\n constructor(field: string, missing: readonly string[], message?: string) {\n super(\n 'MISSING_TRANSLATION',\n message ??\n `Field \"${field}\": missing required translation(s): ${missing.join(', ')}.`,\n )\n this.name = 'MissingTranslationError'\n this.field = field\n this.missing = missing\n }\n}\n\n/**\n * Thrown when reading an `i18nText` field without specifying a locale —\n * either at the call site (`get(id, { locale })`) or on the vault\n * (`openVault(name, { locale })`).\n *\n * Also thrown when `resolveI18nText()` exhausts the fallback chain and\n * no translation is available for the requested locale.\n *\n * The `field` property names the field that triggered the error so the\n * caller can surface it in the UI.\n */\nexport class LocaleNotSpecifiedError extends NoydbError {\n /** The field name that required a locale. */\n readonly field: string\n\n constructor(field: string, message?: string) {\n super(\n 'LOCALE_NOT_SPECIFIED',\n message ??\n `Cannot read i18nText field \"${field}\" without a locale. ` +\n `Pass { locale } to get()/list()/query() or set a default via ` +\n `openVault(name, { locale }).`,\n )\n this.name = 'LocaleNotSpecifiedError'\n this.field = field\n }\n}\n\n// ─── Translator Errors ─────────────────────────────────────\n\n/**\n * Thrown when a collection has an `i18nText` field with\n * `autoTranslate: true` but no `plaintextTranslator` was configured\n * on `createNoydb()`.\n *\n * The error is raised at `put()` time (not at schema construction) so\n * the mis-configuration is surfaced by the first write rather than\n * silently at startup.\n */\nexport class TranslatorNotConfiguredError extends NoydbError {\n /** The field that requested auto-translation. */\n readonly field: string\n /** The collection the put was targeting. */\n readonly collection: string\n\n constructor(field: string, collection: string) {\n super(\n 'TRANSLATOR_NOT_CONFIGURED',\n `Field \"${field}\" in collection \"${collection}\" has autoTranslate: true, ` +\n `but no plaintextTranslator was configured on createNoydb(). 
` +\n `Either configure a plaintextTranslator or remove autoTranslate from the schema.`,\n )\n this.name = 'TranslatorNotConfiguredError'\n this.field = field\n this.collection = collection\n }\n}\n\n// ─── Backup Errors ─────────────────────────────────────────\n\n/**\n * Thrown when `Vault.load()` finds that a backup's hash chain\n * doesn't verify, or that its embedded `ledgerHead.hash` doesn't\n * match the chain head reconstructed from the loaded entries.\n *\n * Distinct from `BackupCorruptedError` so callers can choose to\n * recover from one but not the other (e.g., a corrupted JSON file is\n * unrecoverable; a chain mismatch might mean the backup is from an\n * incompatible noy-db version).\n */\nexport class BackupLedgerError extends NoydbError {\n /** First-broken-entry index, if known. */\n readonly divergedAt?: number\n\n constructor(message: string, divergedAt?: number) {\n super('BACKUP_LEDGER', message)\n this.name = 'BackupLedgerError'\n if (divergedAt !== undefined) this.divergedAt = divergedAt\n }\n}\n\n/**\n * Thrown when `Vault.load()` finds that the backup's data\n * collection content doesn't match the ledger's recorded\n * `payloadHash`es. This is the \"envelope was tampered with after\n * dump\" detection — the chain itself can be intact, but if any\n * encrypted record bytes were swapped, this check catches it.\n */\nexport class BackupCorruptedError extends NoydbError {\n /** The (collection, id) pair whose envelope failed the hash check. */\n readonly collection: string\n readonly id: string\n\n constructor(collection: string, id: string, message: string) {\n super('BACKUP_CORRUPTED', message)\n this.name = 'BackupCorruptedError'\n this.collection = collection\n this.id = id\n }\n}\n\n// ─── Session Errors ───────────────────────────────────────\n\n/**\n * Thrown by `resolveSession()` when the session token's `expiresAt`\n * timestamp is in the past. The session key is also removed from the\n * in-memory store when this is thrown, so retrying with the same sessionId\n * will produce `SessionNotFoundError`.\n *\n * Separate from `SessionNotFoundError` so callers can distinguish between\n * \"session is gone\" (key store cleared, tab reloaded) and \"session is\n * still in the store but has exceeded its lifetime\" (idle timeout, absolute\n * timeout, policy-driven expiry). The remediation differs: expired sessions\n * should prompt a fresh unlock; not-found sessions may indicate a bug or a\n * cross-tab scenario where the session was never established.\n */\nexport class SessionExpiredError extends NoydbError {\n readonly sessionId: string\n\n constructor(sessionId: string) {\n super('SESSION_EXPIRED', `Session \"${sessionId}\" has expired. Re-unlock to continue.`)\n this.name = 'SessionExpiredError'\n this.sessionId = sessionId\n }\n}\n\n/**\n * Thrown by `resolveSession()` when the session key cannot be found in\n * the module-level store. 
This happens when:\n * - The session was explicitly revoked via `revokeSession()`.\n * - The JS context was reloaded (tab navigation, page refresh, worker restart).\n * - `Noydb.close()` was called (which calls `revokeAllSessions()`).\n * - The sessionId is wrong or was generated by a different JS context.\n *\n * The session token (if the caller holds it) is permanently useless after\n * this error — the key is gone and cannot be recovered.\n */\nexport class SessionNotFoundError extends NoydbError {\n readonly sessionId: string\n\n constructor(sessionId: string) {\n super('SESSION_NOT_FOUND', `Session key for \"${sessionId}\" not found. The session may have been revoked or the page reloaded.`)\n this.name = 'SessionNotFoundError'\n this.sessionId = sessionId\n }\n}\n\n/**\n * Thrown when a session policy blocks an operation — for example,\n * `requireReAuthFor: ['export']` is set and the caller attempts to\n * call `exportStream()` without re-authenticating for this session.\n *\n * The `operation` field names the specific operation that was blocked\n * (e.g. `'export'`, `'grant'`, `'rotate'`) so the caller can surface\n * a targeted prompt (\"Please re-enter your passphrase to export data\").\n */\nexport class SessionPolicyError extends NoydbError {\n readonly operation: string\n\n constructor(operation: string, message?: string) {\n super(\n 'SESSION_POLICY',\n message ?? `Operation \"${operation}\" requires re-authentication per the active session policy.`,\n )\n this.name = 'SessionPolicyError'\n this.operation = operation\n }\n}\n\n// ─── Query / Join Errors ────────────────────────────────────\n\n/**\n * Thrown when a `.join()` would exceed its configured row ceiling on\n * either side. The ceiling defaults to 50,000 per side and can be\n * overridden via the `{ maxRows }` option on `.join()`.\n *\n * Carries both row counts so the error message can show which side\n * tripped the limit (e.g. \"left had 60,000 rows, right had 1,200,\n * max was 50,000\"). The `side` field is machine-readable so test\n * code and devtools can match on it without regex-parsing the\n * message.\n *\n * The row ceiling exists because joins are bounded in-memory\n * operations over materialized record sets. Consumers whose\n * collections genuinely exceed the ceiling should track \n * (streaming joins over `scan()`) or filter the left side further\n * with `where()` / `limit()` before joining.\n */\nexport class JoinTooLargeError extends NoydbError {\n readonly leftRows: number\n readonly rightRows: number\n readonly maxRows: number\n readonly side: 'left' | 'right'\n\n constructor(opts: {\n leftRows: number\n rightRows: number\n maxRows: number\n side: 'left' | 'right'\n message: string\n }) {\n super('JOIN_TOO_LARGE', opts.message)\n this.name = 'JoinTooLargeError'\n this.leftRows = opts.leftRows\n this.rightRows = opts.rightRows\n this.maxRows = opts.maxRows\n this.side = opts.side\n }\n}\n\n/**\n * Thrown by `.join()` in strict `ref()` mode when a left-side record\n * points at a right-side id that does not exist in the target\n * collection.\n *\n * Distinct from `RefIntegrityError` so test code can pattern-match\n * on the *read-time* dangling case without catching *write-time*\n * integrity violations. 
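\n *\n * A read-time handling sketch; `invoices` and `clients` are hypothetical collections and the `.join()` arguments are abbreviated, since only the error class is defined in this file:\n * ```\n * try {\n *   await invoices.join(clients)\n * } catch (err) {\n *   if (err instanceof DanglingReferenceError) {\n *     console.warn('dangling ' + err.field + ' to ' + err.target + '/' + err.refId + '; run vault.checkIntegrity()')\n *   } else {\n *     throw err\n *   }\n * }\n * ```\n *\n * 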
Both indicate \"ref points at nothing\" but\n * happen at different lifecycle phases and deserve different\n * remediation in documentation: a RefIntegrityError on `put()`\n * means the input is invalid; a DanglingReferenceError on `.join()`\n * means stored data has drifted and `vault.checkIntegrity()`\n * is the right tool to find the full set of orphans.\n */\nexport class DanglingReferenceError extends NoydbError {\n readonly field: string\n readonly target: string\n readonly refId: string\n\n constructor(opts: {\n field: string\n target: string\n refId: string\n message: string\n }) {\n super('DANGLING_REFERENCE', opts.message)\n this.name = 'DanglingReferenceError'\n this.field = opts.field\n this.target = opts.target\n this.refId = opts.refId\n }\n}\n\n/**\n * Thrown by {@link sanitizeFilename} when an input filename cannot be\n * made safe — NUL byte, empty after normalization, missing\n * `opaqueId` for the opaque profile, `..` segment, or a `maxBytes`\n * cap too small to hold a single code point.\n */\nexport class FilenameSanitizationError extends NoydbError {\n constructor(message: string) {\n super('FILENAME_SANITIZATION', message)\n this.name = 'FilenameSanitizationError'\n }\n}\n\n/**\n * Thrown when a write target resolves OUTSIDE the requested\n * directory after sanitization — the canonical Zip-Slip class. The\n * sanitizer's job is to strip path-traversal segments; this error\n * is the defense-in-depth fallback at the FS write site.\n */\nexport class PathEscapeError extends NoydbError {\n readonly attempted: string\n readonly targetDir: string\n\n constructor(opts: { attempted: string; targetDir: string }) {\n super(\n 'PATH_ESCAPE',\n `Sanitized filename \"${opts.attempted}\" resolves outside target dir \"${opts.targetDir}\"`,\n )\n this.name = 'PathEscapeError'\n this.attempted = opts.attempted\n this.targetDir = opts.targetDir\n }\n}\n","/**\n * Cryptographic primitives — thin wrappers around the Web Crypto API.\n *\n * ## Design principle\n *\n * **Zero npm crypto dependencies.** Every operation uses `globalThis.crypto.subtle`,\n * which is available natively in Node.js ≥ 18, all modern browsers, and\n * Deno/Bun. 
This avoids supply-chain risk from third-party crypto packages and\n * ensures the library stays auditable.\n *\n * ## Algorithms\n *\n * | Use case | Algorithm | Parameters |\n * |----------|-----------|------------|\n * | Key derivation | PBKDF2-SHA256 | 600,000 iterations, 32-byte salt |\n * | Record encryption | AES-256-GCM | 12-byte random IV per operation |\n * | DEK wrapping | AES-KW (RFC 3394) | 256-bit KEK |\n * | Binary encrypt | AES-256-GCM | same as record encryption |\n * | Integrity | HMAC-SHA256 | for presence channels |\n * | Content hash | SHA-256 | for ledger and bundle integrity |\n *\n * ## Key lifecycle\n *\n * ```\n * passphrase + salt\n * └─► deriveKey() → KEK (CryptoKey, extractable: false)\n * └─► wrapKey() → wrapped DEK bytes [stored in keyring]\n * └─► unwrapKey() → DEK (CryptoKey) [memory only during session]\n * └─► encrypt() / decrypt() → ciphertext / plaintext\n * ```\n *\n * IVs are generated fresh by {@link generateIV} on every encrypt call.\n * Reusing an IV with the same key would break GCM's authentication guarantee —\n * this function should be the only place IVs are produced.\n *\n * @module\n */\n\nimport { DecryptionError, InvalidKeyError, TamperedError } from './errors.js'\n\nconst PBKDF2_ITERATIONS = 600_000\nconst SALT_BYTES = 32\nconst IV_BYTES = 12\nconst KEY_BITS = 256\n\nconst subtle = globalThis.crypto.subtle\n\n// ─── Key Derivation ────────────────────────────────────────────────────\n\n/** Derive a KEK from a passphrase and salt using PBKDF2-SHA256. */\nexport async function deriveKey(\n passphrase: string,\n salt: Uint8Array,\n): Promise<CryptoKey> {\n const keyMaterial = await subtle.importKey(\n 'raw',\n new TextEncoder().encode(passphrase),\n 'PBKDF2',\n false,\n ['deriveKey'],\n )\n\n return subtle.deriveKey(\n {\n name: 'PBKDF2',\n salt: salt as BufferSource,\n iterations: PBKDF2_ITERATIONS,\n hash: 'SHA-256',\n },\n keyMaterial,\n { name: 'AES-KW', length: KEY_BITS },\n false,\n ['wrapKey', 'unwrapKey'],\n )\n}\n\n// ─── DEK Generation ────────────────────────────────────────────────────\n\n/** Generate a random AES-256-GCM data encryption key. */\nexport async function generateDEK(): Promise<CryptoKey> {\n return subtle.generateKey(\n { name: 'AES-GCM', length: KEY_BITS },\n true, // extractable — needed for AES-KW wrapping\n ['encrypt', 'decrypt'],\n )\n}\n\n// ─── Key Wrapping ──────────────────────────────────────────────────────\n\n/** Wrap (encrypt) a DEK with a KEK using AES-KW. Returns base64 string. */\nexport async function wrapKey(dek: CryptoKey, kek: CryptoKey): Promise<string> {\n const wrapped = await subtle.wrapKey('raw', dek, kek, 'AES-KW')\n return bufferToBase64(wrapped)\n}\n\n/** Unwrap (decrypt) a DEK from base64 string using a KEK. */\nexport async function unwrapKey(\n wrappedBase64: string,\n kek: CryptoKey,\n): Promise<CryptoKey> {\n try {\n return await subtle.unwrapKey(\n 'raw',\n base64ToBuffer(wrappedBase64) as BufferSource,\n kek,\n 'AES-KW',\n { name: 'AES-GCM', length: KEY_BITS },\n true,\n ['encrypt', 'decrypt'],\n )\n } catch {\n throw new InvalidKeyError()\n }\n}\n\n// ─── Encrypt / Decrypt ─────────────────────────────────────────────────\n\nexport interface EncryptResult {\n iv: string // base64\n data: string // base64\n}\n\n/** Encrypt plaintext JSON string with AES-256-GCM. Fresh IV per call. 
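\n *\n * Round-trip example using the key lifecycle from the module header; every helper used here is defined in this file:\n * ```\n * const salt = generateSalt()\n * const kek = await deriveKey('correct horse battery staple', salt)\n * const dek = await generateDEK()\n * const wrapped = await wrapKey(dek, kek)          // persisted in the keyring\n * const unwrapped = await unwrapKey(wrapped, kek)  // memory only, per session\n * const { iv, data } = await encrypt(JSON.stringify({ hello: 'world' }), unwrapped)\n * const plaintext = await decrypt(iv, data, unwrapped)\n * ```\n 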
*/\nexport async function encrypt(\n plaintext: string,\n dek: CryptoKey,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const encoded = new TextEncoder().encode(plaintext)\n\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n encoded,\n )\n\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/** Decrypt AES-256-GCM ciphertext. Throws on wrong key or tampered data. */\nexport async function decrypt(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<string> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n\n try {\n const plaintext = await subtle.decrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n ciphertext as BufferSource,\n )\n return new TextDecoder().decode(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? err.message : 'Decryption failed',\n )\n }\n}\n\n// ─── Binary Encrypt / Decrypt ────────\n\n/**\n * Encrypt raw bytes with AES-256-GCM using a fresh random IV.\n * Used by the attachment store so binary blobs avoid double base64 encoding\n * (the existing `encrypt()` function calls `TextEncoder` on a string — here\n * we pass the `Uint8Array` directly to `subtle.encrypt`).\n */\nexport async function encryptBytes(\n data: Uint8Array,\n dek: CryptoKey,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n data as unknown as BufferSource,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Decrypt AES-256-GCM ciphertext back to raw bytes.\n * Counterpart to `encryptBytes`. Throws `TamperedError` on auth-tag failure.\n */\nexport async function decryptBytes(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<Uint8Array> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n try {\n const plaintext = await subtle.decrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n ciphertext as BufferSource,\n )\n return new Uint8Array(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? err.message : 'Decryption failed',\n )\n }\n}\n\n/**\n * SHA-256 hex digest of raw bytes. Used to derive content-addressed\n * eTags for blob deduplication. 
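\n *\n * For example:\n * ```\n * const bytes = new TextEncoder().encode('attachment payload')\n * const eTag = await sha256Hex(bytes)   // 64-char lowercase hex digest\n * ```\n *\n * 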
Computed on plaintext bytes\n * before compression and encryption so the eTag identifies content, not\n * ciphertext, and survives re-encryption (key rotation, re-upload).\n */\nexport async function sha256Hex(data: Uint8Array): Promise<string> {\n const hash = await subtle.digest('SHA-256', data as unknown as BufferSource)\n return Array.from(new Uint8Array(hash))\n .map((b) => b.toString(16).padStart(2, '0'))\n .join('')\n}\n\n// ─── HMAC-SHA-256 ─────────────────────────────\n\n/**\n * Compute HMAC-SHA-256(key, data) and return hex string.\n *\n * Used to derive content-addressed eTags that are opaque to the store:\n * ```\n * eTag = hmacSha256Hex(blobDEK, plaintext)\n * ```\n *\n * Unlike a plain SHA-256, the HMAC is keyed by the vault-shared `_blob` DEK,\n * so an attacker with store access cannot pre-compute eTags for known files.\n * Deduplication still works within a vault (same key + same content = same eTag).\n */\nexport async function hmacSha256Hex(key: CryptoKey, data: Uint8Array): Promise<string> {\n // Export AES-GCM DEK raw bytes → import as HMAC key\n const rawKey = await subtle.exportKey('raw', key)\n const hmacKey = await subtle.importKey(\n 'raw',\n rawKey,\n { name: 'HMAC', hash: 'SHA-256' },\n false,\n ['sign'],\n )\n const sig = await subtle.sign('HMAC', hmacKey, data as unknown as BufferSource)\n return Array.from(new Uint8Array(sig))\n .map((b) => b.toString(16).padStart(2, '0'))\n .join('')\n}\n\n// ─── AAD-aware Binary Encrypt / Decrypt ──\n\n/**\n * Encrypt raw bytes with AES-256-GCM using Additional Authenticated Data.\n *\n * The AAD binds each chunk to its parent blob and position, preventing\n * chunk reorder, substitution, and truncation attacks:\n * ```\n * AAD = UTF-8(\"{eTag}:{chunkIndex}:{chunkCount}\")\n * ```\n *\n * The AAD is NOT stored — the reader reconstructs it from `BlobObject`\n * metadata and passes it to `decryptBytesWithAAD`.\n */\nexport async function encryptBytesWithAAD(\n data: Uint8Array,\n dek: CryptoKey,\n aad: Uint8Array,\n): Promise<EncryptResult> {\n const iv = generateIV()\n const ciphertext = await subtle.encrypt(\n {\n name: 'AES-GCM',\n iv: iv as BufferSource,\n additionalData: aad as BufferSource,\n },\n dek,\n data as unknown as BufferSource,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Decrypt AES-256-GCM ciphertext with AAD verification.\n *\n * If the AAD does not match the one used at encryption time (e.g. because\n * a chunk was reordered or substituted from another blob), the GCM auth\n * tag fails and this throws `TamperedError`.\n */\nexport async function decryptBytesWithAAD(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n aad: Uint8Array,\n): Promise<Uint8Array> {\n const iv = base64ToBuffer(ivBase64)\n const ciphertext = base64ToBuffer(dataBase64)\n try {\n const plaintext = await subtle.decrypt(\n {\n name: 'AES-GCM',\n iv: iv as BufferSource,\n additionalData: aad as BufferSource,\n },\n dek,\n ciphertext as BufferSource,\n )\n return new Uint8Array(plaintext)\n } catch (err) {\n if (err instanceof Error && err.name === 'OperationError') {\n throw new TamperedError()\n }\n throw new DecryptionError(\n err instanceof Error ? 
err.message : 'Decryption failed',\n )\n }\n}\n\n// ─── Presence Key Derivation ──────────────────────────────\n\n/**\n * Derive an AES-256-GCM presence key from a collection DEK using HKDF-SHA256.\n *\n * The presence key is domain-separated from the data DEK by the fixed salt\n * `'noydb-presence'` and the `info` = collection name. This means:\n * - The adapter never sees the presence key.\n * - Presence payloads rotate automatically when the collection DEK is rotated.\n * - Revoked users cannot derive the new presence key after a DEK rotation.\n *\n * @param dek The collection's AES-256-GCM DEK (extractable).\n * @param collectionName Used as the HKDF `info` parameter for domain separation.\n * @returns A non-extractable AES-256-GCM key suitable for presence payload encryption.\n */\nexport async function derivePresenceKey(dek: CryptoKey, collectionName: string): Promise<CryptoKey> {\n // Step 1: export DEK raw bytes\n const rawDek = await subtle.exportKey('raw', dek)\n\n // Step 2: import as HKDF key material\n const hkdfKey = await subtle.importKey(\n 'raw',\n rawDek,\n 'HKDF',\n false,\n ['deriveBits'],\n )\n\n // Step 3: derive 256 bits with salt='noydb-presence' and info=collectionName\n const salt = new TextEncoder().encode('noydb-presence')\n const info = new TextEncoder().encode(collectionName)\n const bits = await subtle.deriveBits(\n { name: 'HKDF', hash: 'SHA-256', salt, info },\n hkdfKey,\n KEY_BITS,\n )\n\n // Step 4: import derived bits as AES-GCM key\n return subtle.importKey(\n 'raw',\n bits,\n { name: 'AES-GCM', length: KEY_BITS },\n false,\n ['encrypt', 'decrypt'],\n )\n}\n\n// ─── Deterministic Encryption ────────────────────────────\n\n/**\n * Derive a deterministic 12-byte IV from `{ DEK, context, plaintext }`\n * via HKDF-SHA256. Given the same three inputs, the IV is identical, so\n * `encryptDeterministic` produces the same ciphertext on every call —\n * which is precisely what enables blind equality search on encrypted\n * fields.\n *\n * **The side channel this opens.** Two records whose field value is the\n * same produce the same ciphertext. An observer with store access can\n * therefore tell which records share a value — not *what* the value is,\n * but the equivalence class. 
This is the well-known trade-off of\n * deterministic encryption and is why the feature is strictly opt-in\n * per field, guarded by `acknowledgeDeterministicRisk: true` at\n * collection creation.\n *\n * The context string MUST include the collection name and field name,\n * so:\n * - The same plaintext in two different fields encrypts differently\n * (no cross-field equality leak).\n * - The same plaintext in two different collections (different DEKs)\n * encrypts differently by virtue of the key, even before HKDF\n * domain separation kicks in.\n */\nasync function deriveDeterministicIV(\n dek: CryptoKey,\n context: string,\n plaintext: string,\n): Promise<Uint8Array> {\n const rawDek = await subtle.exportKey('raw', dek)\n const hkdfKey = await subtle.importKey('raw', rawDek, 'HKDF', false, ['deriveBits'])\n const salt = new TextEncoder().encode('noydb-deterministic-v1')\n const info = new TextEncoder().encode(`${context}\\x00${plaintext}`)\n const bits = await subtle.deriveBits(\n { name: 'HKDF', hash: 'SHA-256', salt, info },\n hkdfKey,\n IV_BYTES * 8,\n )\n return new Uint8Array(bits)\n}\n\n/**\n * Encrypt a plaintext string with AES-256-GCM and a deterministic,\n * HKDF-derived IV.\n *\n * The same `{ dek, context, plaintext }` triple always produces the\n * same `{ iv, data }` — call this twice and you can string-compare the\n * ciphertexts to check equality of the inputs without decrypting them.\n *\n * @param context Domain-separation string — by convention\n * `'<collection>/<field>'`. Different contexts encrypt\n * the same plaintext to different ciphertexts, so\n * `email` in collection `users` does not collide with\n * `email` in collection `customers`.\n */\nexport async function encryptDeterministic(\n plaintext: string,\n dek: CryptoKey,\n context: string,\n): Promise<EncryptResult> {\n const iv = await deriveDeterministicIV(dek, context, plaintext)\n const encoded = new TextEncoder().encode(plaintext)\n const ciphertext = await subtle.encrypt(\n { name: 'AES-GCM', iv: iv as BufferSource },\n dek,\n encoded,\n )\n return {\n iv: bufferToBase64(iv),\n data: bufferToBase64(ciphertext),\n }\n}\n\n/**\n * Counterpart to {@link encryptDeterministic}. The IV is stored\n * alongside the ciphertext (exactly like the randomized path), so\n * decrypt uses the stored IV and verifies the GCM auth tag — a tampered\n * ciphertext throws `TamperedError` just like randomized AES-GCM.\n */\nexport async function decryptDeterministic(\n ivBase64: string,\n dataBase64: string,\n dek: CryptoKey,\n): Promise<string> {\n return decrypt(ivBase64, dataBase64, dek)\n}\n\n// ─── Random Generation ─────────────────────────────────────────────────\n\n/** Generate a random 12-byte IV for AES-GCM. */\nexport function generateIV(): Uint8Array {\n return globalThis.crypto.getRandomValues(new Uint8Array(IV_BYTES))\n}\n\n/** Generate a random 32-byte salt for PBKDF2. */\nexport function generateSalt(): Uint8Array {\n return globalThis.crypto.getRandomValues(new Uint8Array(SALT_BYTES))\n}\n\n// ─── Base64 Helpers ────────────────────────────────────────────────────\n\nexport function bufferToBase64(buffer: ArrayBuffer | Uint8Array): string {\n const bytes = buffer instanceof Uint8Array ? 
buffer : new Uint8Array(buffer)\n let binary = ''\n for (let i = 0; i < bytes.length; i++) {\n binary += String.fromCharCode(bytes[i]!)\n }\n return btoa(binary)\n}\n\nexport function base64ToBuffer(base64: string): Uint8Array<ArrayBuffer> {\n const binary = atob(base64)\n const bytes = new Uint8Array(binary.length)\n for (let i = 0; i < binary.length; i++) {\n bytes[i] = binary.charCodeAt(i)\n }\n return bytes\n}\n","/**\n * @noy-db/hub/blobs — opt-in blob / document subsystem.\n *\n * @category capability\n *\n * This subpath groups every file whose reason-for-existing is blob\n * storage: `BlobSet` (slot-based attachments with chunked encryption\n * and dedup), `mime-magic` (MIME detection from magic bytes),\n * `blob-compaction` (TTL eviction via `blobFields`), `export-blobs`\n * (bulk export primitive), and the legacy `attachments` API.\n *\n * Hub's root barrel (`@noy-db/hub`) still re-exports `BlobSet` + the\n * MIME helpers for backward compatibility with `@noy-db/as-blob`,\n * `@noy-db/as-zip`, and any consumer written before this split. New\n * code should prefer this subpath so the import boundary is explicit.\n */\n\nexport { withBlobs } from './active.js'\nexport type { BlobStrategy, BlobStrategyOpenArgs } from './strategy.js'\n\nexport { BlobSet } from './blob-set.js'\nexport {\n BLOB_COLLECTION,\n BLOB_INDEX_COLLECTION,\n BLOB_CHUNKS_COLLECTION,\n BLOB_SLOTS_PREFIX,\n BLOB_VERSIONS_PREFIX,\n DEFAULT_CHUNK_SIZE,\n} from './blob-set.js'\nexport type {\n BlobObject,\n SlotRecord,\n SlotInfo,\n VersionRecord,\n BlobPutOptions,\n BlobResponseOptions,\n} from '../types.js'\n\nexport { detectMimeType, detectMagic, isPreCompressed } from './mime-magic.js'\n\nexport { runCompaction, BLOB_EVICTION_AUDIT_COLLECTION } from './blob-compaction.js'\nexport type {\n BlobFieldsConfig,\n BlobFieldPolicy,\n BlobEvictionEntry,\n CompactRunOptions,\n CompactionResult,\n CompactionContext,\n} from './blob-compaction.js'\n\nexport {\n createExportBlobsHandle,\n ExportBlobsAbortedError,\n EXPORT_AUDIT_COLLECTION,\n} from './export-blobs.js'\nexport type {\n ExportBlobsOptions,\n ExportedBlob,\n ExportBlobsHandle,\n ExportBlobsAuditEntry,\n} from './export-blobs.js'\n","/**\n * Core types — the {@link NoydbStore} interface, envelope format, roles, and\n * all configuration shapes consumed by {@link createNoydb}.\n *\n * ## What lives here\n *\n * - **{@link NoydbStore}** — the 6-method contract every backend must implement\n * (`get`, `put`, `delete`, `list`, `loadAll`, `saveAll`).\n * - **{@link EncryptedEnvelope}** — the wire format stored by backends:\n * `{ _noydb, _v, _ts, _iv, _data }`. Backends only ever see this shape.\n * - **{@link Role} / {@link Permission}** — the access-control vocabulary\n * (`owner`, `admin`, `operator`, `viewer`, `client`).\n * - **{@link NoydbOptions}** — the full configuration object passed to\n * {@link createNoydb}.\n *\n * ## Extending the store interface\n *\n * All optional store capabilities (`ping`, `listPage`, `listSince`,\n * `presencePublish`, `presenceSubscribe`, `listVaults`) are additive extensions\n * discovered via `'method' in store`. 
Implementing them unlocks features but\n * is never required — core always falls back to the 6-method baseline.\n *\n * @module\n */\n\nimport type { StandardSchemaV1 } from './schema.js'\nimport type { SyncPolicy } from './store/sync-policy.js'\nimport type { BlobStrategy } from './blobs/strategy.js'\nimport type { IndexStrategy } from './indexing/strategy.js'\nimport type { AggregateStrategy } from './aggregate/strategy.js'\nimport type { CrdtStrategy } from './crdt/strategy.js'\nimport type { ConsentStrategy } from './consent/strategy.js'\nimport type { PeriodsStrategy } from './periods/strategy.js'\nimport type { ShadowStrategy } from './shadow/strategy.js'\nimport type { TxStrategy } from './tx/strategy.js'\nimport type { HistoryStrategy } from './history/strategy.js'\nimport type { I18nStrategy } from './i18n/strategy.js'\nimport type { SessionStrategy } from './session/strategy.js'\nimport type { SyncStrategy } from './team/sync-strategy.js'\nimport type { UnlockedKeyring } from './team/keyring.js'\nimport type { VaultPolicy } from './policy/types.js'\nimport type { PublicEnvelopeSchema } from './meta/public-envelope/types.js'\n\n/** Format version for encrypted record envelopes. */\nexport const NOYDB_FORMAT_VERSION = 1 as const\n\n/** Format version for keyring files. */\nexport const NOYDB_KEYRING_VERSION = 1 as const\n\n/** Format version for backup files. */\nexport const NOYDB_BACKUP_VERSION = 1 as const\n\n/** Format version for sync metadata. */\nexport const NOYDB_SYNC_VERSION = 1 as const\n\n// ─── Roles & Permissions ───────────────────────────────────────────────\n\n/**\n * Access role assigned to a user within a vault.\n *\n * Roles control both the operations a user can perform and which DEKs\n * they receive in their keyring:\n *\n * | Role | Collections | Can grant/revoke | Can export |\n * |------------|-----------------|:----------------:|:----------:|\n * | `owner` | all (rw) | Yes (all roles) | Yes |\n * | `admin` | all (rw) | Yes (≤ admin) | Yes |\n * | `operator` | explicit (rw) | No | ACL-scoped |\n * | `viewer` | all (ro) | No | Yes |\n * | `client` | explicit (ro) | No | ACL-scoped |\n */\nexport type Role = 'owner' | 'admin' | 'operator' | 'viewer' | 'client'\n\n/**\n * Read-write or read-only access on a collection.\n * Stored per-collection in the user's keyring.\n */\nexport type Permission = 'rw' | 'ro'\n\n/**\n * Map of collection name → permission level for a user's keyring entry.\n * `'*'` is the wildcard collection matching all collections in the vault.\n */\nexport type Permissions = Record<string, Permission>\n\n// ─── Encrypted Envelope ────────────────────────────────────────────────\n\n/** The encrypted wrapper stored by adapters. Adapters only ever see this. */\nexport interface EncryptedEnvelope {\n readonly _noydb: typeof NOYDB_FORMAT_VERSION\n readonly _v: number\n readonly _ts: string\n readonly _iv: string\n readonly _data: string\n /** User who created this version (unencrypted metadata). */\n readonly _by?: string\n /**\n * Hierarchical access tier. Omitted → tier 0.\n *\n * Unencrypted on purpose — the store reads it to route the envelope\n * to the right DEK slot without having to try-decrypt against every\n * tier. Only leaks the tier of each record, not any value\n * equivalence.\n */\n readonly _tier?: number\n /**\n * User id who last elevated this record. Used by\n * `demote()` to gate the reverse operation: only the original\n * elevator or an owner can demote a record back down. 
Cleared on\n * every successful demote so a later re-elevate requires the new\n * actor to own the demotion right.\n */\n readonly _elevatedBy?: string\n /**\n * Deterministic-encryption index. Map of field name →\n * base64 deterministic ciphertext. Present only when the collection\n * declares `deterministicFields` and the feature is acknowledged. The\n * field names are unencrypted (they're the index keys); the values\n * are AES-GCM ciphertext with an HKDF-derived deterministic IV.\n *\n * Enables blind equality search (`collection.findByDet(field,\n * value)`) without decrypting every record. Leaks equality as a known\n * side channel.\n */\n readonly _det?: Record<string, string>\n}\n\n/**\n * Placeholder returned by `getAtTier()` in `'ghost'` mode when a\n * record is at a tier the caller cannot decrypt. Record existence is\n * advertised — the id and tier are visible — but contents are\n * withheld. `canElevateFrom` lists user ids authorized to elevate\n * access for this caller when known; absent when the workflow is\n * not configured.\n */\nexport interface GhostRecord {\n readonly _ghost: true\n readonly _tier: number\n readonly canElevateFrom?: readonly string[]\n}\n\n/** Control what lower-tier reads see above their clearance. */\nexport type TierMode = 'invisibility' | 'ghost'\n\n/**\n * Event emitted when a record at a tier above the caller's inherent\n * clearance is read or written successfully (via elevation or\n * delegation). Always written to the ledger; subscribers get a\n * real-time feed.\n */\nexport interface CrossTierAccessEvent {\n readonly actor: string\n readonly collection: string\n readonly id: string\n readonly tier: number\n /** How the caller gained tier access: they elevated it, or a delegation is active. */\n readonly authorization: 'elevation' | 'delegation' | 'inherent'\n readonly op: 'get' | 'put' | 'elevate' | 'demote'\n readonly ts: string\n /**\n * When `authorization === 'elevation'`, the audit reason string the\n * caller passed to `vault.elevate(...)`. Empty for inherent /\n * delegation paths.\n */\n readonly reason?: string\n /**\n * When `authorization === 'elevation'`, the tier the caller's\n * keyring effectively held BEFORE elevation. Useful for audit\n * dashboards distinguishing \"operator elevating to 2\" from\n * \"inherent tier-2 write.\"\n */\n readonly elevatedFrom?: number\n}\n\n/**\n * A single deterministic-ciphertext index slot on an envelope. Stored\n * as `iv:data` (both base64, colon-separated) so a single string per\n * field keeps the envelope compact.\n */\nexport type DeterministicCipher = string\n\n// ─── Vault Snapshot ──────────────────────────────────────────────\n\n/** All records across all collections for a compartment. */\nexport type VaultSnapshot = Record<string, Record<string, EncryptedEnvelope>>\n\n/**\n * Result of a single page fetch via the optional `listPage` adapter extension.\n *\n * `items` carries the actual encrypted envelopes (not just ids) so the\n * caller can decrypt and emit a single record without an extra `get()`\n * round-trip per id. `nextCursor` is `null` on the final page.\n */\nexport interface ListPageResult {\n /** Encrypted envelopes for this page, in adapter-defined order. */\n items: Array<{ id: string; envelope: EncryptedEnvelope }>\n /** Opaque cursor for the next page, or `null` if this was the last page. 
*/\n nextCursor: string | null\n}\n\n// ─── Store Interface ───────────────────────────────────────────────────\n\nexport interface NoydbStore {\n /**\n * Optional human-readable adapter name (e.g. 'memory', 'file', 'dynamo').\n * Used in diagnostic messages and the listPage fallback warning. Adapters\n * are encouraged to set this so logs are clearer about which backend is\n * involved when something goes wrong.\n */\n name?: string\n\n /** Get a single record. Returns null if not found. */\n get(vault: string, collection: string, id: string): Promise<EncryptedEnvelope | null>\n\n /** Put a record. Throws ConflictError if expectedVersion doesn't match. */\n put(\n vault: string,\n collection: string,\n id: string,\n envelope: EncryptedEnvelope,\n expectedVersion?: number,\n ): Promise<void>\n\n /** Delete a record. */\n delete(vault: string, collection: string, id: string): Promise<void>\n\n /** List all record IDs in a collection. */\n list(vault: string, collection: string): Promise<string[]>\n\n /** Load all records for a vault (initial hydration). */\n loadAll(vault: string): Promise<VaultSnapshot>\n\n /** Save all records for a vault (bulk write / restore). */\n saveAll(vault: string, data: VaultSnapshot): Promise<void>\n\n /** Optional connectivity check for sync engine. */\n ping?(): Promise<boolean>\n\n /**\n * Optional: list record IDs in a collection that have `_ts` after `since`.\n * Used by partial sync (`pull({ modifiedSince })`). Adapters that omit this\n * fall back to a full `loadAll` + client-side timestamp filter.\n */\n listSince?(vault: string, collection: string, since: string): Promise<string[]>\n\n /**\n * Optional pagination extension. Adapters that implement `listPage` get\n * the streaming `Collection.scan()` fast path; adapters that don't are\n * silently fallen back to a full `loadAll()` + slice (with a one-time\n * console.warn).\n *\n * `cursor` is opaque to the core — each adapter encodes its own paging\n * state (DynamoDB: base64 LastEvaluatedKey JSON; S3: ContinuationToken;\n * memory/file/browser: numeric offset of a sorted id list). Pass\n * `undefined` to start from the beginning.\n *\n * `limit` is a soft upper bound on `items.length`. Adapters MAY return\n * fewer items even when more exist (e.g. if the underlying store has\n * its own page size cap), and MUST signal \"no more pages\" by returning\n * `nextCursor: null`.\n *\n * The 6-method core contract is unchanged — this is an additive\n * extension discovered via `'listPage' in adapter`.\n */\n listPage?(\n vault: string,\n collection: string,\n cursor?: string,\n limit?: number,\n ): Promise<ListPageResult>\n\n /**\n * Optional pub/sub for real-time presence.\n * Publish an encrypted payload to a presence channel.\n * Falls back to storage-based polling when absent.\n */\n presencePublish?(channel: string, payload: string): Promise<void>\n\n /**\n * Optional pub/sub for real-time presence.\n * Subscribe to a presence channel. Returns an unsubscribe function.\n * Falls back to storage-based polling when absent.\n */\n presenceSubscribe?(channel: string, callback: (payload: string) => void): () => void\n\n /**\n * Optional cross-vault enumeration extension.\n *\n * Returns the names of every top-level vault the store\n * currently stores. Used by `Noydb.listAccessibleVaults()` to\n * enumerate the universe of vaults before filtering down to\n * the ones the calling principal can actually unwrap.\n *\n * **Why this is optional:** the storage shape of compartments\n * differs across backends. 
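// Illustrative sketch: driving the optional `listPage` extension directly,
// with the documented fallback to the 6-method baseline when the adapter
// omits it. `vaultId` and the page size of 100 are hypothetical; consumers
// normally reach this path through `Collection.scan()`.
async function* pageEnvelopes(
  store: NoydbStore,
  vaultId: string,
  collection: string,
): AsyncGenerator<{ id: string; envelope: EncryptedEnvelope }> {
  const listPage = store.listPage?.bind(store)
  if (!listPage) {
    // No pagination extension — hydrate everything and iterate client-side.
    const snapshot = await store.loadAll(vaultId)
    const records = snapshot[collection]
    if (records) {
      for (const [id, envelope] of Object.entries(records)) {
        yield { id, envelope }
      }
    }
    return
  }
  let cursor: string | undefined
  do {
    const page = await listPage(vaultId, collection, cursor, 100)
    yield* page.items
    cursor = page.nextCursor ?? undefined
  } while (cursor !== undefined)
}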
Memory and file stores store\n * vaults as top-level keys / directories and can enumerate\n * them in O(1) calls. DynamoDB stores everything in a single table\n * keyed by `(compartment#collection, id)` — enumerating compartments\n * requires either a Scan (expensive, eventually consistent, leaks\n * ciphertext metadata) or a dedicated GSI that the consumer\n * provisioned. S3 needs a prefix list (cheap if enabled, ACL-sensitive\n * otherwise). Browser localStorage can scan keys by prefix.\n *\n * Stores that cannot implement `listVaults` cheaply or\n * cleanly should omit it. Core surfaces a `StoreCapabilityError`\n * with a clear message when a caller invokes\n * `listAccessibleVaults()` against a store that doesn't\n * provide this method, so consumers know to either upgrade their\n * store, provide a candidate list explicitly to `queryAcross()`,\n * or fall back to maintaining the compartment index out of band.\n *\n * **Privacy note:** `listVaults` returns *every* compartment\n * the store has, not just the ones the caller can access. The\n * existence-leak filtering (returning only compartments whose\n * keyring the caller can unwrap) happens in core, not in the\n * store. The store is trusted to know its own contents — that\n * is not a leak in the threat model. The leak the API guards\n * against is the *return value* of `listAccessibleVaults()`\n * exposing existence to a downstream observer who only sees that\n * function's output.\n *\n * The 6-method core contract is unchanged — this is an additive\n * extension discovered via `'listVaults' in store`.\n */\n listVaults?(): Promise<string[]>\n\n /**\n * Optional: generate a presigned URL for direct client download.\n * Only meaningful for object stores (S3, GCS) that support URL signing.\n * Returns a time-limited URL that fetches the encrypted envelope directly.\n * The caller must decrypt client-side (the URL returns ciphertext).\n */\n presignUrl?(vault: string, collection: string, id: string, expiresInSeconds?: number): Promise<string>\n\n /**\n * Optional: estimate current storage usage.\n * Returns `{ usedBytes, quotaBytes }` or null if the store cannot estimate.\n * Used by quota-aware routing to detect overflow conditions.\n */\n estimateUsage?(): Promise<{ usedBytes: number; quotaBytes: number } | null>\n\n /**\n * Optional multi-record atomic write.\n *\n * When present, `db.transaction(async (tx) => { ... })` uses this to\n * commit every staged op in one storage-layer transaction — either\n * all ops land or none do, regardless of which records they touch.\n * Every `TxOp.expectedVersion` (when set) must be honored atomically\n * alongside the write; any violation throws `ConflictError` and the\n * whole batch fails.\n *\n * Stores that omit this fall through to the hub's per-record OCC\n * fallback: pre-flight CAS check, then sequential `put`/`delete`\n * with best-effort unwind on mid-batch failure (see\n * `runTransaction` for the exact semantics and crash window).\n *\n * Native implementations: `to-memory` (single Map mutation),\n * `to-dynamo` (`TransactWriteItems`), `to-browser-idb` (one\n * `readwrite` transaction). File / S3 cannot implement this\n * atomically and should omit the method.\n */\n tx?(ops: readonly TxOp[]): Promise<void>\n}\n\n/**\n * A single staged operation inside a `db.transaction(fn)` commit. 
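// Illustrative sketch: the `'listVaults' in store` discovery pattern from the
// comment above, with the documented fallback of supplying a candidate list
// out of band when the store omits the extension. `knownVaultIds` is a
// hypothetical application-maintained list.
async function candidateVaults(
  store: NoydbStore,
  knownVaultIds: readonly string[],
): Promise<string[]> {
  if ('listVaults' in store && store.listVaults) {
    // The store enumerates every vault it holds; core still filters this
    // down to the vaults the calling principal can actually unwrap.
    return store.listVaults()
  }
  // No enumeration support (e.g. a Dynamo table without a dedicated GSI) —
  // fall back to the application's own out-of-band compartment index.
  return [...knownVaultIds]
}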
The\n * hub assembles `TxOp[]` from the user's `tx.collection().put/delete`\n * calls, encrypts any `record` values into `envelope`, and hands the\n * array to `NoydbStore.tx()` when the store supports atomic batch\n * writes. Stores that implement `tx()` MUST honor every\n * `expectedVersion` atomically against the stored envelope version.\n */\nexport interface TxOp {\n readonly type: 'put' | 'delete'\n readonly vault: string\n readonly collection: string\n readonly id: string\n /** Populated for `type: 'put'` — the encrypted envelope to write. */\n readonly envelope?: EncryptedEnvelope\n /** Optional per-record CAS. Mismatch must throw `ConflictError`. */\n readonly expectedVersion?: number\n}\n\n// ─── Store Factory Helper ──────────────────────────────────────────────\n\n/** Type-safe helper for creating store factories. */\nexport function createStore<TOptions>(\n factory: (options: TOptions) => NoydbStore,\n): (options: TOptions) => NoydbStore {\n return factory\n}\n\n// ─── Keyring ───────────────────────────────────────────────────────────\n\n/**\n * Interchange formats `@noy-db/as-*` packages can produce. `'*'` is a\n * wildcard granting every current + future plaintext format.\n */\nexport type ExportFormat =\n | 'xlsx'\n | 'csv'\n | 'json'\n | 'ndjson'\n | 'xml'\n | 'sql'\n | 'pdf'\n | 'blob'\n | 'zip'\n | '*'\n\n/**\n * Owner-granted export capability on a keyring.\n *\n * Two independent dimensions:\n *\n * - `plaintext` — per-format allowlist for record formatters + blob\n * extractors that emit plaintext bytes (`as-xlsx`, `as-csv`,\n * `as-blob`, `as-zip`, …). **Defaults to empty** for every role;\n * the owner/admin must positively grant per-format (or `'*'`).\n * - `bundle` — boolean for `.noydb` encrypted container export\n * (`as-noydb`). **Default policy: on for owner/admin, off for\n * operator/viewer/client** — applied when the field is absent or\n * undefined (see `hasExportCapability`).\n */\nexport interface ExportCapability {\n readonly plaintext?: readonly ExportFormat[]\n readonly bundle?: boolean\n}\n\n/**\n * Owner-granted import capability on a keyring (sibling of\n * `ExportCapability`, issue ).\n *\n * Two independent dimensions:\n *\n * - `plaintext` — per-format allowlist for `as-*` readers that ingest\n * plaintext bytes (`as-csv`, `as-json`, `as-ndjson`, `as-zip`, …).\n * Defaults to empty for every role; the owner/admin must positively\n * grant per-format (or `'*'`).\n * - `bundle` — boolean gate for `.noydb` bundle import. **Defaults to\n * `false` for every role**, including owner/admin. Import is more\n * dangerous than export (corrupts vs leaks), so the policy is\n * default-closed across the board — the owner explicitly opts a\n * keyring in via `db.grant({ importCapability: { bundle: true } })`.\n */\nexport interface ImportCapability {\n readonly plaintext?: readonly ExportFormat[]\n readonly bundle?: boolean\n}\n\n/**\n * Forward-declared on-disk shape for `VaultPolicy` — the actual policy\n * model lives in `policy/types.ts` (#9). 
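// Illustrative sketch: `createStore` is a typing helper only — it returns the
// factory unchanged. Below, a toy in-memory baseline store (hypothetical, not
// the real @noy-db/to-memory adapter; real adapters throw the package's
// ConflictError on a CAS mismatch rather than a plain Error).
interface ToyMemoryOptions {
  name?: string
}

const createToyMemoryStore = createStore<ToyMemoryOptions>((options) => {
  const data: Record<string, VaultSnapshot> = {}
  const bucket = (vault: string, collection: string) => {
    const v = (data[vault] ??= {})
    return (v[collection] ??= {})
  }
  return {
    name: options.name ?? 'toy-memory',
    async get(vault, collection, id) {
      return bucket(vault, collection)[id] ?? null
    },
    async put(vault, collection, id, envelope, expectedVersion) {
      const current = bucket(vault, collection)[id]
      if (expectedVersion !== undefined && current !== undefined && current._v !== expectedVersion) {
        throw new Error(`version conflict on ${collection}/${id}`) // ConflictError in real adapters
      }
      bucket(vault, collection)[id] = envelope
    },
    async delete(vault, collection, id) {
      delete bucket(vault, collection)[id]
    },
    async list(vault, collection) {
      return Object.keys(bucket(vault, collection))
    },
    async loadAll(vault) {
      return structuredClone((data[vault] ??= {}))
    },
    async saveAll(vault, snapshot) {
      data[vault] = structuredClone(snapshot)
    },
  }
})
// Usage: const store = createToyMemoryStore({ name: 'tests' })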
Declared here as `unknown`-typed\n * map so types.ts has no dependency on the policy module while the\n * `KeyringFile.policy` field can still round-trip foreign documents.\n *\n * @internal\n */\nexport type VaultPolicyOnDisk = Record<string, unknown>\n\n/**\n * Recovery profile enrolled at vault creation (issue #10).\n *\n * - `paper` — `on-recovery` codes (the only end-to-end profile in v0.1.0-pre.5).\n * - `shamir` / `multi-channel` / `admin-mediated` — API surface ships;\n * per-profile dispatch lands in follow-up issues. Calling\n * `db.recoverPassphrase` against these throws\n * {@link RecoveryProfileNotImplementedError}.\n */\nexport type RecoveryEnrollment =\n | {\n readonly profile: 'paper'\n /** Number of single-use codes to print at enrollment. */\n readonly codes: number\n }\n | {\n readonly profile: 'shamir'\n readonly k: number\n readonly n: number\n readonly trustees: ReadonlyArray<string>\n }\n | {\n readonly profile: 'multi-channel'\n readonly email?: string\n readonly pin?: boolean\n readonly paperCodes?: number\n }\n | {\n readonly profile: 'admin-mediated'\n readonly grantorUserId: string\n }\n\n/**\n * One tier-2 authenticator slot inside a keyring file. Each slot\n * independently wraps the SAME KEK under a method-specific derived key\n * (LUKS pattern). Adding or removing a slot is a constant-time keyring\n * write — no DEK re-keying required.\n *\n * @see docs/subsystems/session-tiers.md → Tier 2 — Authenticate (multi-slot)\n */\nexport interface KeyringAuthenticator {\n /** Caller-chosen identifier — e.g. `'webauthn-yubikey-blue'`, `'oidc-google'`, `'password-daily'`. */\n readonly id: string\n /** Method family — selects which `@noy-db/on-*` package handles unlock. */\n readonly method: 'webauthn' | 'oidc' | 'password'\n /** ISO-8601 timestamp at which the slot was added. */\n readonly enrolled_at: string\n /**\n * Which session tier ENROLLED this slot. Tier 1 enrolls a fresh slot;\n * tier 2 may add a sibling slot when the active policy permits.\n */\n readonly enrolled_via_tier: 1 | 2\n /** Base64 wrapped-KEK ciphertext under the method-derived key. */\n readonly wrapped_kek: string\n /**\n * Method-specific metadata: WebAuthn cred id, OIDC issuer/sub, PBKDF2\n * salt for `on-password`, etc. The schema is open by design — the\n * `@noy-db/on-*` package owns the contents.\n */\n readonly meta: Record<string, unknown>\n}\n\nexport interface KeyringFile {\n readonly _noydb_keyring: typeof NOYDB_KEYRING_VERSION\n readonly user_id: string\n readonly display_name: string\n readonly role: Role\n readonly permissions: Permissions\n readonly deks: Record<string, string>\n readonly salt: string\n readonly created_at: string\n readonly granted_by: string\n /**\n * Tier-2 authenticator slots (multi-slot keyring extension).\n * Optional / append-only: keyring files written before the\n * extension load with an empty list. Each slot independently wraps\n * the same KEK; any one of them unlocks.\n *\n * @see KeyringAuthenticator\n */\n readonly authenticators?: readonly KeyringAuthenticator[]\n /**\n * Per-keyring policy override (reserved). The on-disk format\n * accepts the field for forward compatibility with the Option C\n * merge engine deferred to a later release; v1.0 reads only the\n * vault-level `_meta/policy` document, so this field is parsed and\n * round-tripped but never enforced.\n */\n readonly policy?: VaultPolicyOnDisk\n /**\n * Optional — authorization spec capability bits. Absent on keyrings written\n * before the RFC implementation. 
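// Illustrative sketch: a recovery enrollment list of the shape `createNoydb`
// accepts via its `recovery` option. Only the 'paper' profile is usable
// end-to-end in this release; the 'shamir' entry shows the declared shape but
// throws RecoveryProfileNotImplementedError when exercised.
const recoverySketch: ReadonlyArray<RecoveryEnrollment> = [
  { profile: 'paper', codes: 10 },
  { profile: 'shamir', k: 2, n: 3, trustees: ['bob', 'carol', 'dave'] },
]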
Loading falls back to role-based\n * defaults (owner/admin get bundle-on, everyone else off).\n */\n readonly export_capability?: ExportCapability\n /**\n * Optional bundle-slot expiry. ISO-8601 timestamp; past\n * the cutoff `loadKeyring` throws `KeyringExpiredError` before any\n * DEK unwrap is attempted. Useful for time-boxed audit access:\n * \"this slot works for 30 days then becomes opaque to its holder.\"\n *\n * Absent on live keyrings written via `db.grant()` — the field is\n * meaningful for `BundleRecipient` slots produced by\n * `writeNoydbBundle({ recipients: [...] })`. Setting it on a live\n * keyring is allowed but unusual.\n */\n readonly expires_at?: string\n /**\n * Optional — issue import-capability bits. Absent on keyrings\n * written before landed. Loading falls back to default-closed\n * for every role and every format.\n */\n readonly import_capability?: ImportCapability\n /**\n * hierarchical access clearance. Absent → 0 (advisory;\n * the real check is whether the DEK map carries a `collection#tier`\n * entry for the requested tier). Owners and admins default to the\n * highest tier they have DEKs for at grant time.\n */\n readonly clearance?: number\n}\n\n// ─── Backup ────────────────────────────────────────────────────────────\n\nexport interface VaultBackup {\n readonly _noydb_backup: typeof NOYDB_BACKUP_VERSION\n readonly _compartment: string\n readonly _exported_at: string\n readonly _exported_by: string\n readonly keyrings: Record<string, KeyringFile>\n readonly collections: VaultSnapshot\n /**\n * Internal collections (`_ledger`, `_ledger_deltas`, `_history`, `_sync`, …)\n * captured alongside the data collections. Optional for backwards\n * compat with backups, which only stored data collections —\n * loading a backup leaves the ledger empty (and `verifyBackupIntegrity`\n * skips the chain check, surfacing only a console warning).\n */\n readonly _internal?: VaultSnapshot\n /**\n * Verifiable-backup metadata. Embeds the ledger head at\n * dump time so `load()` can cross-check that the loaded chain matches\n * exactly what was exported. A backup whose chain has been tampered\n * with — either by modifying ledger entries or by modifying data\n * envelopes that the chain references — fails this check.\n *\n * Optional for backwards compat with backups; missing means\n * \"legacy backup, load with a warning, no integrity check\".\n */\n readonly ledgerHead?: {\n /** Hex sha256 of the canonical JSON of the last ledger entry. */\n readonly hash: string\n /** Sequential index of the last ledger entry. */\n readonly index: number\n /** ISO timestamp captured at dump time. */\n readonly ts: string\n }\n}\n\n// ─── Export ────────────────────────────────────────────────────────────\n\n/**\n * Options for `Vault.exportStream()` and `Vault.exportJSON()`.\n *\n * The defaults match the most common consumer pattern: one chunk per\n * collection, no ledger metadata. Per-record streaming and ledger-head\n * inclusion are opt-in because both add structure most consumers don't\n * need.\n */\nexport interface ExportStreamOptions {\n /**\n * `'collection'` (default) yields one chunk per collection with all\n * records bundled in `chunk.records`. `'record'` yields one chunk per\n * record, useful for arbitrarily large collections that should never\n * be materialized as a single array.\n */\n readonly granularity?: 'collection' | 'record'\n\n /**\n * When `true`, every chunk includes the current compartment ledger\n * head under `chunk.ledgerHead`. 
The value is identical across every\n * chunk in a single export (one ledger per compartment). Forward-\n * compatible with future partition work where the head would become\n * per-partition. Default: `false`.\n */\n readonly withLedgerHead?: boolean\n /**\n * When set to a BCP 47 locale string (e.g. `'th'`), `exportJSON()`\n * resolves all `dictKey` labels to that locale and omits the raw\n * `dictionaries` snapshot from the output. Has no effect\n * on `exportStream()` — format packages use the `chunk.dictionaries`\n * snapshot directly and apply their own locale strategy.\n *\n * Default: `undefined` — embed the raw snapshot under `_dictionaries`.\n */\n readonly resolveLabels?: string\n}\n\n/**\n * One chunk yielded by `Vault.exportStream()`.\n *\n * `granularity: 'collection'` yields one chunk per collection with the\n * full record array in `records`. `granularity: 'record'` yields one\n * chunk per record with `records` containing exactly one element — the\n * `schema` and `refs` metadata is repeated on every chunk so consumers\n * doing per-record streaming don't have to thread state across yields.\n */\nexport interface ExportChunk<T = unknown> {\n /** Collection name (no leading underscore — internal collections are filtered out). */\n readonly collection: string\n\n /**\n * Standard Schema validator attached to the collection at `collection()`\n * construction time, or `null` if no schema was provided. Surfaced so\n * downstream serializers (`@noy-db/as-*` packages, custom\n * exporters) can produce schema-aware output (typed CSV headers, XSD\n * generation, etc.) without poking at collection internals.\n */\n readonly schema: StandardSchemaV1<unknown, T> | null\n\n /**\n * Foreign-key references declared on the collection via the `refs`\n * option, as the `{ field → { target, mode } }` map produced by\n * `RefRegistry.getOutbound`. Empty object when no refs were declared.\n */\n readonly refs: Record<string, { readonly target: string; readonly mode: 'strict' | 'warn' | 'cascade' }>\n\n /**\n * Decrypted, ACL-scoped, schema-validated records. Length 1 in\n * `granularity: 'record'` mode, full collection in `granularity: 'collection'`\n * mode. Records are returned by reference from the collection's eager\n * cache where applicable — consumers must treat them as immutable.\n */\n readonly records: T[]\n\n /**\n * Dictionary snapshots for every `dictKey` field declared on this\n * collection. Captured once at stream-start and held\n * constant across all chunks within the same export — a rename\n * mid-export does not change the snapshot. `undefined` when the\n * collection has no `dictKeyFields`.\n *\n * Shape: `{ [fieldName]: { [stableKey]: { [locale]: label } } }`\n *\n * @example\n * ```ts\n * chunk.dictionaries?.status?.paid?.th // → 'ชำระแล้ว'\n * ```\n */\n readonly dictionaries?: Record<\n string, // field name\n Record<string, Record<string, string>> // stable key → locale → label\n >\n\n /**\n * Vault ledger head at export time. Present only when\n * `exportStream({ withLedgerHead: true })` was called. 
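// Illustrative sketch (assuming `Vault.exportStream()` yields these chunks as
// an async iterable, which is how the chunk type reads): count records per
// collection without materializing the whole vault. The structural `vault`
// parameter type is a stand-in for the real Vault handle.
async function countExportedRecords(vault: {
  exportStream(options?: ExportStreamOptions): AsyncIterable<ExportChunk>
}): Promise<Record<string, number>> {
  const counts: Record<string, number> = {}
  for await (const chunk of vault.exportStream({ granularity: 'record' })) {
    counts[chunk.collection] = (counts[chunk.collection] ?? 0) + chunk.records.length
  }
  return counts
}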
Identical\n * across every chunk in the same export — included on every chunk\n * for forward-compatibility with future per-partition ledgers, where\n * the value will differ per chunk.\n */\n readonly ledgerHead?: {\n readonly hash: string\n readonly index: number\n readonly ts: string\n }\n}\n\n// ─── Sync ──────────────────────────────────────────────────────────────\n\nexport interface DirtyEntry {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly action: 'put' | 'delete'\n readonly version: number\n readonly timestamp: string\n}\n\nexport interface SyncMetadata {\n readonly _noydb_sync: typeof NOYDB_SYNC_VERSION\n readonly last_push: string | null\n readonly last_pull: string | null\n readonly dirty: DirtyEntry[]\n}\n\nexport interface Conflict {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly local: EncryptedEnvelope\n readonly remote: EncryptedEnvelope\n readonly localVersion: number\n readonly remoteVersion: number\n /**\n * Present only when the collection uses `conflictPolicy: 'manual'`.\n * Call `resolve(winner)` to commit the winning envelope, or\n * `resolve(null)` to defer (conflict stays queued for the next sync).\n * Called synchronously inside the `sync:conflict` event handler.\n */\n readonly resolve?: (winner: EncryptedEnvelope | null) => void\n}\n\nexport type ConflictStrategy =\n | 'local-wins'\n | 'remote-wins'\n | 'version'\n | ((conflict: Conflict) => 'local' | 'remote')\n\n/**\n * Collection-level conflict policy.\n * Overrides the db-level `conflict` option for the specific collection.\n *\n * - `'last-writer-wins'` — higher `_ts` wins (timestamp LWW).\n * - `'first-writer-wins'` — lower `_v` wins (earlier version is preserved).\n * - `'manual'` — emits `sync:conflict` with a `resolve` callback. Call\n * `resolve(winner)` synchronously to commit or `resolve(null)` to defer.\n * - Custom fn — synchronous `(local: T, remote: T) => T`. Must be pure.\n */\nexport type ConflictPolicy<T> =\n | 'last-writer-wins'\n | 'first-writer-wins'\n | 'manual'\n | ((local: T, remote: T) => T)\n\n/**\n * Envelope-level resolver registered per collection with the SyncEngine.\n * Receives the `id` of the conflicting record and both envelopes.\n * Returns the winning envelope, or `null` to defer resolution.\n * @internal\n */\nexport type CollectionConflictResolver = (\n id: string,\n local: EncryptedEnvelope,\n remote: EncryptedEnvelope,\n) => Promise<EncryptedEnvelope | null>\n\n/** Options for targeted push operations. */\nexport interface PushOptions {\n /** Only push records belonging to these collections. Omit to push all dirty. */\n collections?: string[]\n}\n\n/** Options for targeted pull operations. */\nexport interface PullOptions {\n /** Only pull these collections. Omit to pull all. */\n collections?: string[]\n /**\n * Only pull records with `_ts` strictly after this ISO timestamp.\n * Adapters that implement `listSince` use it directly; others fall back\n * to a full scan with client-side filtering.\n */\n modifiedSince?: string\n}\n\nexport interface PushResult {\n readonly pushed: number\n readonly conflicts: Conflict[]\n readonly errors: Error[]\n}\n\nexport interface PullResult {\n readonly pulled: number\n readonly conflicts: Conflict[]\n readonly errors: Error[]\n}\n\n/** Result of a sync transaction commit. 
*/\nexport interface SyncTransactionResult {\n readonly status: 'committed' | 'conflict'\n readonly pushed: number\n readonly conflicts: Conflict[]\n}\n\nexport interface SyncStatus {\n readonly dirty: number\n readonly lastPush: string | null\n readonly lastPull: string | null\n readonly online: boolean\n}\n\n// ─── Sync Target ─────────────────────────────────────────\n\nexport type SyncTargetRole = 'sync-peer' | 'backup' | 'archive'\n\n/**\n * A sync target with role and optional per-target policy.\n *\n * | Role | Direction | Conflict resolution | Typical use |\n * |-------------|---------------|---------------------|--------------------------|\n * | `sync-peer` | Bidirectional | ConflictStrategy | DynamoDB live sync |\n * | `backup` | Push-only | N/A (receives merged)| S3 dump, Google Drive |\n * | `archive` | Push-only | N/A | IPFS, Git tags, S3 Lock |\n */\nexport interface SyncTarget {\n /** The store to sync with. */\n readonly store: NoydbStore\n /** Role determines sync direction and conflict handling. */\n readonly role: SyncTargetRole\n /** Per-target sync policy. Inherits store-category default when absent. */\n readonly policy?: SyncPolicy\n /** Human-readable label for DevTools and audit logs. */\n readonly label?: string\n}\n\n// ─── Events ────────────────────────────────────────────────────────────\n\nexport interface ChangeEvent {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly action: 'put' | 'delete'\n}\n\nexport interface NoydbEventMap {\n 'change': ChangeEvent\n 'error': Error\n 'sync:push': PushResult\n 'sync:pull': PullResult\n 'sync:conflict': Conflict\n 'sync:online': void\n 'sync:offline': void\n 'sync:backup-error': { vault: string; target: string; error: Error }\n 'history:save': { vault: string; collection: string; id: string; version: number }\n 'history:prune': { vault: string; collection: string; id: string; pruned: number }\n /**\n * Emitted when a persisted-index side-car put/delete fails after the\n * main record write already succeeded. The main record is durable; the\n * index mirror may have drifted. Operators reconcile via\n * `collection.reconcileIndex(field)`.\n */\n 'index:write-partial': {\n vault: string\n collection: string\n id: string\n action: 'put' | 'delete'\n error: Error\n }\n /**\n * emitted by `Collection.ensurePersistedIndexesLoaded()`\n * once per field on first lazy-mode query when\n * `reconcileOnOpen: 'auto' | 'dry-run'` is configured. `applied` is\n * `0` in `'dry-run'` mode. `skipped` is reserved for a future\n * drift-stamp optimization that short-circuits the reconcile when\n * the mirror version matches what's on disk — currently always\n * `false` (the full reconcile runs every session).\n */\n 'index:reconciled': {\n vault: string\n collection: string\n field: string\n missing: readonly string[]\n stale: readonly string[]\n applied: number\n skipped: boolean\n }\n}\n\n// ─── Grant / Revoke ────────────────────────────────────────────────────\n\nexport interface GrantOptions {\n readonly userId: string\n readonly displayName: string\n readonly role: Role\n readonly passphrase: string\n readonly permissions?: Permissions\n /**\n * Optional `@noy-db/as-*` export capability. Omit or\n * leave undefined to apply role-based defaults (see\n * `hasExportCapability` and `ExportCapability`).\n */\n readonly exportCapability?: ExportCapability\n /**\n * Optional `@noy-db/as-*` import capability (issue ). 
Omit or\n * leave undefined for default-closed semantics — no plaintext format\n * is grantable until positively listed; bundle import is denied.\n */\n readonly importCapability?: ImportCapability\n /**\n * Skip phrase-format strength validation (issue #7). Defaults to\n * false — `grant()` rejects phrases that don't meet the configured\n * `PassphrasePolicy`. Test fixtures and CLI scripts pass `true`.\n */\n readonly allowWeakPassphrase?: boolean\n /**\n * Initial user-envelope payload for the new principal. Sealed under\n * the same vault DEK (the reserved `_users` collection's DEK) and\n * persisted alongside the keyring during grant.\n *\n * **Bootstrap-only.** Once the new user activates and writes their\n * own envelope, the own-only write rule kicks in — admins cannot\n * edit a teammate's envelope after activation. Use this field for\n * pre-fill at invite time (e.g. \"displayName: Bob, locale: en-US\")\n * and let the user take over from there.\n *\n * Hub does not introspect the payload; it is JSON-serialized and\n * encrypted opaquely. Apps own the schema.\n *\n * @see docs/superpowers/specs/2026-05-05-user-envelope-design.md → Lifecycle\n */\n readonly initialProfile?: unknown\n}\n\nexport interface RevokeOptions {\n readonly userId: string\n readonly rotateKeys?: boolean\n\n /**\n * Cascade behavior when the revoked user is an admin who has granted\n * other admins.\n *\n * - `'strict'` (default) — recursively revoke every admin that the\n * target (transitively) granted. The cascade walks the\n * `granted_by` field on each keyring file and stops at non-admin\n * leaves. All affected collections are accumulated and rotated in\n * a single pass at the end, so cascade cost is O(records in\n * affected collections), not O(records × cascade depth).\n *\n * - `'warn'` — leave the descendant admins in place but emit a\n * `console.warn` listing them. Useful for diagnostic dry runs and\n * for environments where the operator wants to clean up the\n * delegation tree manually.\n *\n * No effect when the target is not an admin (operators, viewers, and\n * clients cannot grant other users, so they have no delegation\n * subtree to cascade through). Defaults to `'strict'`.\n */\n readonly cascade?: 'strict' | 'warn'\n}\n\n// ─── Cross-vault queries ──────────────────────────────\n\n/**\n * One entry returned by `Noydb.listAccessibleVaults()`. Carries\n * the compartment id and the role the calling principal holds in it,\n * so the consumer can decide how to fan out without re-checking\n * permissions per vault.\n */\nexport interface AccessibleVault {\n readonly id: string\n readonly role: Role\n}\n\n/**\n * Options for `Noydb.listAccessibleVaults()`.\n */\nexport interface ListAccessibleVaultsOptions {\n /**\n * Minimum role the caller must hold to include a compartment in the\n * result. Compartments where the caller's role is strictly *below*\n * this threshold are silently excluded. Defaults to `'client'`,\n * which means \"every vault I can unwrap is returned.\" Set to\n * `'admin'` for \"vaults where I can grant/revoke,\" or\n * `'owner'` for \"vaults I own.\"\n *\n * The privilege ordering used:\n * `client (1) < viewer (2) < operator (3) < admin (4) < owner (5)`\n *\n * Note: `viewer` and `client` are conceptually peers in the ACL\n * (neither can grant), but `viewer` has read-all access while\n * `client` has only explicit-collection read. 
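// Illustrative sketch of the privilege ordering quoted above — a mirror of
// the documented comparison, not the hub's internal implementation.
const ROLE_ORDER: Record<Role, number> = {
  client: 1,
  viewer: 2,
  operator: 3,
  admin: 4,
  owner: 5,
}

function meetsMinRole(entry: AccessibleVault, minRole: Role = 'client'): boolean {
  // Compartments where the caller's role is strictly below the threshold are
  // excluded from listAccessibleVaults() results.
  return ROLE_ORDER[entry.role] >= ROLE_ORDER[minRole]
}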
The numeric order\n * reflects \"how much can this principal see,\" not \"how much can\n * this principal modify.\"\n */\n readonly minRole?: Role\n}\n\n/**\n * Options for `Noydb.queryAcross()`.\n */\nexport interface QueryAcrossOptions {\n /**\n * Maximum number of compartments to process in parallel. Defaults\n * to `1` (sequential) — conservative because the per-compartment\n * callback typically does its own I/O and an unbounded fan-out can\n * exhaust adapter connections (DynamoDB throughput, S3 socket\n * limits, browser fetch concurrency).\n *\n * Set to `4` or `8` for cloud-backed compartments where parallelism\n * is the whole point of fanning out. Set to `1` (default) for local\n * adapters where the disk I/O serializes anyway.\n */\n readonly concurrency?: number\n}\n\n/**\n * One entry in the array returned by `Noydb.queryAcross()`. Either\n * `result` is set (callback succeeded for this compartment) or\n * `error` is set (callback threw, or compartment failed to open).\n *\n * Per-compartment errors do **not** abort the overall fan-out — every\n * compartment is given a chance to run its callback, and the\n * partition between success and failure is exposed in the return\n * value. Consumers that want fail-fast semantics can check\n * `r.error !== undefined` and short-circuit themselves.\n */\nexport type QueryAcrossResult<T> =\n | { readonly vault: string; readonly result: T; readonly error?: undefined }\n | { readonly vault: string; readonly result?: undefined; readonly error: Error }\n\n// ─── User Info ─────────────────────────────────────────────────────────\n\nexport interface UserInfo {\n readonly userId: string\n readonly displayName: string\n readonly role: Role\n readonly permissions: Permissions\n readonly createdAt: string\n readonly grantedBy: string\n}\n\n// ─── Session ───────────────────────────────────────────────\n\n/**\n * Operations that a session policy can require re-authentication for.\n * Passed as the `requireReAuthFor` array in `SessionPolicy`.\n */\nexport type ReAuthOperation = 'export' | 'grant' | 'revoke' | 'rotate' | 'changeSecret'\n\n/**\n * Session policy controlling lifetime, re-auth requirements, and\n * background-lock behavior.\n *\n * All timeout values are in milliseconds. `undefined` means \"no limit.\"\n * The policy is evaluated lazily — it does not start timers itself;\n * enforcement happens at the Noydb call site.\n */\nexport interface SessionPolicy {\n /**\n * Idle timeout in ms. If no NOYDB operation is performed for this\n * duration, the session is revoked on the next operation attempt\n * (which will throw `SessionExpiredError`). The idle clock resets\n * on every successful operation.\n *\n * Default: `undefined` (no idle timeout).\n */\n readonly idleTimeoutMs?: number\n\n /**\n * Absolute timeout in ms from session creation. 
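// Illustrative sketch: a session policy for a high-sensitivity deployment —
// 15-minute idle window, 8-hour absolute cap, fresh credentials for the five
// gated operations, and lock when the tab goes to the background.
const strictSessionPolicy: SessionPolicy = {
  idleTimeoutMs: 15 * 60_000,
  absoluteTimeoutMs: 8 * 60 * 60_000,
  requireReAuthFor: ['export', 'grant', 'revoke', 'rotate', 'changeSecret'],
  lockOnBackground: true,
}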
After this duration\n * the session is unconditionally revoked regardless of activity.\n *\n * Default: `undefined` (no absolute timeout).\n */\n readonly absoluteTimeoutMs?: number\n\n /**\n * Operations that require the user to re-authenticate (re-enter their\n * passphrase or perform a fresh WebAuthn assertion) before proceeding,\n * even if the session is still alive.\n *\n * Common pattern: `requireReAuthFor: ['export', 'grant']` — allow\n * read/write operations in the background but demand a fresh credential\n * for high-risk mutations.\n *\n * Default: `[]` (no extra re-auth requirements).\n */\n readonly requireReAuthFor?: readonly ReAuthOperation[]\n\n /**\n * If `true`, the session is revoked when the page goes to the background\n * (visibilitychange event, `document.hidden === true`). Useful for\n * high-sensitivity deployments where leaving the tab is treated as\n * a session boundary.\n *\n * No-op in non-browser environments (Node.js, workers without document).\n * Default: `false`.\n */\n readonly lockOnBackground?: boolean\n}\n\n// ─── i18n / Locale ─────────────────────────────────────\n\n/**\n * Locale-aware read options. Pass to `Collection.get()`, `list()`,\n * `query()`, and `scan()` to trigger per-record locale resolution for\n * `dictKey` and `i18nText` fields.\n *\n * - **`locale: 'raw'`** — skip resolution for `i18nText` fields and\n * return the full `{ [locale]: string }` map. Dict key fields still\n * return the stable key (no `<field>Label` added).\n * - **`fallback`** — single locale code or ordered list. Use `'any'` as\n * the last element to fall back to any present translation.\n *\n * When neither the call-level locale nor the compartment's default locale\n * is set, reading a record with `i18nText` fields throws\n * `LocaleNotSpecifiedError`.\n */\nexport interface LocaleReadOptions {\n /**\n * The target locale code (e.g. `'th'`), or `'raw'` to return the full\n * language map without resolution.\n */\n readonly locale?: string\n /**\n * Fallback locale or ordered fallback chain. Use `'any'` as the last\n * element to fall back to any present translation.\n */\n readonly fallback?: string | readonly string[]\n}\n\n// ─── plaintextTranslator hook ──────────────────────────────\n\n/**\n * Context passed to the consumer-supplied `plaintextTranslator` function.\n * The hook receives the source text plus enough metadata to route it to the\n * right translation service and record what it did.\n */\nexport interface PlaintextTranslatorContext {\n /** The plaintext string to translate. */\n readonly text: string\n /** BCP 47 source locale (the locale the text is written in). */\n readonly from: string\n /** BCP 47 target locale to translate into. */\n readonly to: string\n /** The schema field name that triggered the translation. */\n readonly field: string\n /** The collection the record is being put into. */\n readonly collection: string\n}\n\n/**\n * A consumer-supplied async function that translates a single string\n * from one locale to another. noy-db ships no built-in translator.\n *\n * **Security:** this function receives plaintext. The consumer is\n * responsible for the data policy of whatever service it calls. See\n * `NOYDB_SPEC.md § Zero-Knowledge Storage` and the `plaintextTranslator`\n * JSDoc on `NoydbOptions` for the full invariant statement.\n */\nexport type PlaintextTranslatorFn = (\n ctx: PlaintextTranslatorContext,\n) => Promise<string>\n\n/**\n * One entry in the in-process translator audit log. 
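// Illustrative sketch of a consumer-supplied translator. The endpoint and
// request shape are hypothetical — noy-db ships no translator; whatever
// service this function calls receives plaintext, so the consumer owns that
// data policy.
const httpTranslator: PlaintextTranslatorFn = async ({ text, from, to }) => {
  const res = await fetch('https://translate.example.internal/v1/translate', {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ text, source: from, target: to }),
  })
  if (!res.ok) throw new Error(`translator responded ${res.status}`)
  const body = (await res.json()) as { translation: string }
  return body.translation
}
// Wired up via createNoydb({ ..., plaintextTranslator: httpTranslator,
// plaintextTranslatorName: 'self-hosted-example' }).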
Cleared when\n * `db.close()` is called — same lifetime as the KEK and DEKs.\n *\n * Deliberately omits any content hash or translated-text fingerprint\n * to prevent correlation attacks on the audit trail.\n */\nexport interface TranslatorAuditEntry {\n readonly type: 'translator-invocation'\n /** Schema field name that was translated. */\n readonly field: string\n /** Collection the record belongs to. */\n readonly collection: string\n /** Source locale. */\n readonly fromLocale: string\n /** Target locale. */\n readonly toLocale: string\n /**\n * Consumer-provided translator name from\n * `NoydbOptions.plaintextTranslatorName`. Defaults to `'anonymous'`\n * when not supplied.\n */\n readonly translatorName: string\n /** ISO 8601 timestamp of the invocation. */\n readonly timestamp: string\n /**\n * `true` when the result was served from the in-process cache rather\n * than by calling the translator function. Present only on cache hits\n * so the absence of the field also communicates a cache miss.\n */\n readonly cached?: true\n}\n\n// ─── Presence ─────────────────────────────────────────────\n\n/**\n * A presence peer entry. `lastSeen` is an ISO timestamp set by core on each\n * `update()` call. Stale entries (lastSeen older than `staleMs`) are filtered\n * before delivering to the subscriber callback.\n */\nexport interface PresencePeer<P> {\n readonly userId: string\n readonly payload: P\n readonly lastSeen: string\n}\n\n// ─── CRDT ─────────────────────────────────────────────────\n\n// Re-exported from crdt.ts so consumers only need one import path.\nexport type { CrdtMode, CrdtState, LwwMapState, RgaState, YjsState } from './crdt/crdt.js'\n\n// ─── Blob / Attachment Store ────────────────────────\n\n/**\n * Second store shape for blob-store backends (Drive, WebDAV, Git, iCloud)\n * that operate on whole-vault bundles rather than per-record KV.\n *\n * Implement `readBundle` / `writeBundle` instead of the six-method KV\n * contract. Use `wrapBundleStore()` from `@noy-db/hub` to convert to a\n * `NoydbStore` that the rest of the API consumes transparently.\n *\n * Named `NoydbBundleStore` (not `NoydbBundleAdapter`) for consistency\n * with the hub / to-* / in-* rename. Concrete implementations ship\n * in `@noy-db/to-*` packages starting in.\n */\nexport interface NoydbBundleStore {\n /** Discriminant for engine auto-detection of store shape. */\n readonly kind: 'bundle'\n /** Human-readable name for diagnostics (e.g. `'drive'`, `'webdav'`). */\n readonly name?: string\n /**\n * Read the entire vault as raw bytes. Returns `null` if no bundle exists\n * yet (first open of a brand-new vault).\n */\n readBundle(vaultId: string): Promise<{ bytes: Uint8Array; version: string } | null>\n /**\n * Write the entire vault as raw bytes. `expectedVersion` is the version\n * token from the last `readBundle` (or `null` for a first write).\n * Implementations MUST reject the write if the stored version has advanced\n * past `expectedVersion` — throw `BundleVersionConflictError`.\n * Returns the new version token on success.\n */\n writeBundle(\n vaultId: string,\n bytes: Uint8Array,\n expectedVersion: string | null,\n ): Promise<{ version: string }>\n /** Delete a vault bundle. Idempotent — no-op if the bundle does not exist. */\n deleteBundle(vaultId: string): Promise<void>\n /** List all vault bundles managed by this store. 
*/\n listBundles(): Promise<Array<{ vaultId: string; version: string; size: number }>>\n}\n\n/**\n * Content-addressed blob object stored in the vault-level blob index.\n * Identified by HMAC-SHA-256(blobDEK, plaintext) — opaque to the store.\n *\n * Shared across all collections within a vault for deduplication: two\n * records that attach identical byte content reference the same `eTag`\n * and share a single set of encrypted chunks in `_blob_chunks`.\n */\nexport interface BlobObject {\n /** HMAC-SHA-256 hex of the original plaintext bytes, keyed by `_blob` DEK. */\n readonly eTag: string\n /** Original uncompressed size in bytes. */\n readonly size: number\n /** Compressed size in bytes (the payload that is actually encrypted and chunked). */\n readonly compressedSize: number\n /** Compression algorithm applied before encryption. */\n readonly compression: 'gzip' | 'none'\n /** Raw chunk size in bytes used at write time. Readers MUST use this value. */\n readonly chunkSize: number\n /** Total number of chunks written. Reader expects exactly this many. */\n readonly chunkCount: number\n /** MIME type if provided or auto-detected at upload time. */\n readonly mimeType?: string\n /** ISO timestamp of first upload. */\n readonly createdAt: string\n /** Live reference count — slots + published versions pointing to this blob. */\n readonly refCount: number\n /**\n * Hint indicating which store holds the chunk data.\n * Used by `routeStore` size-tiered routing: `'default'` for small blobs\n * stored inline (e.g. DynamoDB), `'blobs'` for large blobs in the overflow\n * store (e.g. S3). Absent when no routing is configured.\n */\n readonly storeHint?: 'default' | 'blobs'\n}\n\n// ─── Attachment types ─────────────────────────────────────────\n\n/** Single attachment metadata entry stored inside a record's attachment envelope. */\nexport interface AttachmentEntry {\n /** Content-addressed identifier (HMAC-SHA-256 of plaintext). */\n readonly eTag: string\n /** User-visible filename for the slot. */\n readonly filename: string\n /** Original uncompressed size in bytes. */\n readonly size: number\n /** MIME type, if provided or auto-detected at upload time. */\n readonly mimeType?: string\n /** ISO timestamp of the upload. */\n readonly uploadedAt: string\n /** User ID of the uploader, if available. */\n readonly uploadedBy?: string\n}\n\n/** Attachment entry annotated with its slot name, as returned by `AttachmentHandle.list()`. */\nexport type AttachmentInfo = AttachmentEntry & { readonly name: string }\n\n/** Options for `AttachmentHandle.put()`. */\nexport interface AttachmentPutOptions {\n /** Compress the attachment with gzip before encryption. Default: `true`. */\n compress?: boolean\n /** Chunk size in bytes. Default: `DEFAULT_CHUNK_SIZE` (256 KB). */\n chunkSize?: number\n /** MIME type to store with the attachment. Auto-detected from magic bytes if omitted. */\n mimeType?: string\n /** User ID to record as the uploader. Falls back to the active user's ID. */\n uploadedBy?: string\n}\n\n/** Options for `AttachmentHandle.response()`. */\nexport interface AttachmentResponseOptions {\n /**\n * Set `Content-Disposition: inline` so the browser renders the file\n * instead of downloading it. Default: `false` (attachment disposition).\n */\n inline?: boolean\n}\n\n/**\n * Slot record — mutable metadata linking a named slot on a record\n * to a `BlobObject` via its eTag.\n *\n * Multiple slots (even across different records) may reference the same\n * `eTag` — the underlying chunks are shared. 
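// Illustrative sketch: a toy in-memory NoydbBundleStore (hypothetical — real
// implementations live in @noy-db/to-* packages). Version tokens are plain
// counters; a real backend would reuse its own ETag / revision mechanism and
// throw the package's BundleVersionConflictError instead of a plain Error.
function createToyBundleStore(): NoydbBundleStore {
  const bundles = new Map<string, { bytes: Uint8Array; version: number }>()
  return {
    kind: 'bundle',
    name: 'toy-memory-bundle',
    async readBundle(vaultId) {
      const entry = bundles.get(vaultId)
      return entry ? { bytes: entry.bytes, version: String(entry.version) } : null
    },
    async writeBundle(vaultId, bytes, expectedVersion) {
      const current = bundles.get(vaultId)
      const currentVersion = current ? String(current.version) : null
      if (currentVersion !== expectedVersion) {
        throw new Error(`bundle version conflict for ${vaultId}`) // BundleVersionConflictError in real stores
      }
      const nextVersion = (current?.version ?? 0) + 1
      bundles.set(vaultId, { bytes, version: nextVersion })
      return { version: String(nextVersion) }
    },
    async deleteBundle(vaultId) {
      bundles.delete(vaultId) // idempotent by design
    },
    async listBundles() {
      return [...bundles.entries()].map(([vaultId, entry]) => ({
        vaultId,
        version: String(entry.version),
        size: entry.bytes.byteLength,
      }))
    },
  }
}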
Updating metadata creates\n * a new envelope version (`_v++`) while the blob data is unchanged.\n */\nexport interface SlotRecord {\n /** Reference to the `BlobObject` in `_blob_index`. */\n readonly eTag: string\n /** User-visible filename for the slot. */\n readonly filename: string\n /** Original uncompressed size in bytes (denormalized from `BlobObject`). */\n readonly size: number\n /** MIME type. Takes precedence over the MIME type stored in `BlobObject`. */\n readonly mimeType?: string\n /** ISO timestamp of the upload that set this slot. */\n readonly uploadedAt: string\n /** User ID of the uploader, if available. */\n readonly uploadedBy?: string\n}\n\n/** Result of `BlobSet.list()` — slot record plus its named slot key. */\nexport interface SlotInfo extends SlotRecord {\n /** The slot name (key in the record's slot map). */\n readonly name: string\n}\n\n/**\n * Explicitly published version snapshot — an independent reference to a\n * blob at a specific point in time.\n */\nexport interface VersionRecord {\n /** User-defined label (e.g. `'issued-2025-01'`, `'amendment-2025-02'`). */\n readonly label: string\n /** eTag of the blob snapshot at publish time — independent of the current slot. */\n readonly eTag: string\n /** ISO timestamp when the version was published. */\n readonly publishedAt: string\n /** User ID of the publisher, if available. */\n readonly publishedBy?: string\n}\n\n/** Options for `BlobSet.put()`. */\nexport interface BlobPutOptions {\n /** MIME type hint. If omitted, auto-detected from magic bytes. */\n mimeType?: string\n /**\n * Raw chunk size in bytes. Priority: this value > store.maxBlobBytes > 256 KB.\n */\n chunkSize?: number\n /**\n * Whether to gzip-compress bytes before encrypting. Default: `true`.\n * Auto-set to `false` for pre-compressed MIME types (JPEG, PNG, ZIP, etc.).\n */\n compress?: boolean\n /** User ID to record as `uploadedBy`. Defaults to the Noydb session user. */\n uploadedBy?: string\n}\n\n/** Options for `BlobSet.response()` and `BlobSet.responseVersion()`. */\nexport interface BlobResponseOptions {\n /**\n * When `true`, sets `Content-Disposition: inline; filename=\"...\"` so\n * the browser renders the file in the tab. Default (`false`) sets\n * `attachment; filename=\"...\"` which triggers a download.\n */\n inline?: boolean\n /** Override the filename in the Content-Disposition header. */\n filename?: string\n}\n\n// ─── Store Capabilities ─────────────────────────────\n\nexport type StoreAuthKind =\n | 'none'\n | 'filesystem'\n | 'api-key'\n | 'iam'\n | 'oauth'\n | 'kerberos'\n | 'browser-origin'\n\nexport interface StoreAuth {\n kind: StoreAuthKind | StoreAuthKind[]\n required: boolean\n flow: 'static' | 'oauth' | 'kerberos' | 'implicit'\n}\n\nexport interface StoreCapabilities {\n /**\n * true — the store's expectedVersion check and write are atomic at the\n * storage layer. Two concurrent puts with the same expectedVersion will\n * produce exactly one success and one ConflictError.\n * false — check and write are separate operations with a race window.\n */\n casAtomic: boolean\n auth: StoreAuth\n /**\n * true — the store implements {@link NoydbStore.tx} and commits\n * every op atomically at the storage layer. The hub's\n * `db.transaction(fn)` will delegate to `tx(ops)` and surface a\n * single pass/fail outcome. 
false (or absent) — no native\n * multi-record atomicity; the hub falls back to per-record OCC\n * with best-effort unwind on partial failure.\n */\n txAtomic?: boolean\n /**\n * Maximum raw bytes per blob chunk record.\n * `undefined` — no limit (S3, file, IDB); blob stored as single chunk.\n * `256 * 1024` — DynamoDB (400 KB item limit minus envelope overhead).\n * `5 * 1024 * 1024` — localStorage quota safety.\n */\n maxBlobBytes?: number\n}\n\n// ─── Factory Options ───────────────────────────────────────────────────\n\nexport interface NoydbOptions {\n /** Primary store (local storage). */\n readonly store: NoydbStore\n /**\n * tree-shake seam — optional blob strategy. Pass `withBlobs()`\n * from `@noy-db/hub/blobs` to enable `collection.blob(id)` storage.\n * When omitted, hub's blob machinery stays out of the bundle (ESM\n * tree-shaking) and `collection.blob(id)` throws with a pointer at\n * the subpath. `BlobStrategy` is `@internal` — users only construct\n * it via the subpath factory.\n *\n * @internal\n */\n readonly blobStrategy?: BlobStrategy\n /**\n * tree-shake seam — optional indexing strategy. Pass\n * `withIndexing()` from `@noy-db/hub/indexing` to enable eager-mode\n * `==/in` fast-paths, lazy-mode `.lazyQuery()`, rebuild/reconcile,\n * and auto-reconcile. When omitted, indexing code never reaches the\n * bundle; `.lazyQuery()` throws with a pointer at the subpath, and\n * eager-mode collections fall back to linear scans regardless of\n * `indexes: [...]` declarations. `IndexStrategy` is `@internal` —\n * users only construct it via the subpath factory.\n *\n * @internal\n */\n readonly indexStrategy?: IndexStrategy\n /**\n * tree-shake seam — optional aggregate strategy. Pass\n * `withAggregate()` from `@noy-db/hub/aggregate` to enable\n * `.aggregate()` and `.groupBy()` on Query. When omitted, those\n * methods throw with a pointer at the subpath; the ~886 LOC of\n * Aggregation + GroupedQuery machinery never reaches the bundle.\n * Streaming `scan().aggregate()` works independently of this\n * strategy — it doesn't use the `Aggregation` class.\n *\n * @internal\n */\n readonly aggregateStrategy?: AggregateStrategy\n /**\n * tree-shake seam — optional CRDT strategy. Required when\n * any collection is declared with `crdt: 'lww-map' | 'rga' | 'yjs'`;\n * otherwise the first put/sync-merge hitting the CRDT path throws.\n * When omitted, ~221 LOC of LWW-Map / RGA / merge helpers never\n * reach the bundle.\n *\n * @internal\n */\n readonly crdtStrategy?: CrdtStrategy\n /**\n * tree-shake seam — optional consent-audit strategy. Pass\n * `withConsent()` from `@noy-db/hub/consent` to enable per-op audit\n * writes into `_consent_audit` when a consent scope is active.\n * When omitted, `vault.consentAudit()` returns `[]` and writes are\n * no-ops; the consent module's ~194 LOC never reaches the bundle.\n *\n * @internal\n */\n readonly consentStrategy?: ConsentStrategy\n /**\n * tree-shake seam — optional periods strategy. Pass\n * `withPeriods()` from `@noy-db/hub/periods` to enable\n * `vault.closePeriod()` / `.openPeriod()` / write-guard on closed\n * periods. When omitted, `vault.listPeriods()` returns `[]` and\n * the write-guard is a no-op; the ~363 LOC of period validation +\n * ledger appending stay out of the bundle.\n *\n * @internal\n */\n readonly periodsStrategy?: PeriodsStrategy\n /**\n * tree-shake seam — optional VaultFrame strategy. Pass\n * `withShadow()` from `@noy-db/hub/shadow` to enable\n * `vault.frame()`. 
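// Illustrative sketch: the capability document a DynamoDB-style adapter might
// advertise, matching the field descriptions above (values are hypothetical —
// consult the concrete @noy-db/to-* package for real numbers).
const dynamoLikeCapabilities: StoreCapabilities = {
  casAtomic: true,          // conditional writes make the CAS check-and-write atomic
  txAtomic: true,           // TransactWriteItems backs NoydbStore.tx
  maxBlobBytes: 256 * 1024, // 400 KB item limit minus envelope overhead
  auth: { kind: 'iam', required: true, flow: 'static' },
}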
Without it, calling `vault.frame()` throws.\n *\n * @internal\n */\n readonly shadowStrategy?: ShadowStrategy\n /**\n * tree-shake seam — optional multi-record transactions. Pass\n * `withTransactions()` from `@noy-db/hub/tx` to enable\n * `db.transaction(fn)`. Without it, calling the method throws.\n *\n * @internal\n */\n readonly txStrategy?: TxStrategy\n /**\n * tree-shake seam — optional history + ledger + time-machine.\n * Pass `withHistory()` from `@noy-db/hub/history` to enable\n * per-record version snapshots, the hash-chained audit ledger, JSON\n * Patch deltas, `vault.ledger()`, `vault.at()`, and the\n * `collection.history()` / `getVersion()` / `revert()` / `diff()` /\n * `clearHistory()` / `pruneRecordHistory()` read APIs. When omitted,\n * snapshots/prune/clear are silent no-ops, the read APIs throw with\n * a pointer at the subpath, and ~1,880 LOC stay out of the bundle.\n *\n * @internal\n */\n readonly historyStrategy?: HistoryStrategy\n /**\n * tree-shake seam — optional i18n strategy. Pass `withI18n()`\n * from `@noy-db/hub/i18n` to enable `i18nText`/`dictKey` field\n * resolution on reads, `i18nText` validation on writes, and\n * `vault.dictionary(name)`. When omitted, locale resolution is the\n * identity (raw values returned), the validators throw with a\n * pointer to the subpath, and ~854 LOC of dictionary + locale\n * machinery stay out of the bundle.\n *\n * @internal\n */\n readonly i18nStrategy?: I18nStrategy\n /**\n * tree-shake seam — optional session-policy strategy. Pass\n * `withSession()` from `@noy-db/hub/session` to enable\n * `sessionPolicy` validation, `PolicyEnforcer` lifecycle (idle /\n * absolute timeouts, lockOnBackground), and global session-token\n * revocation. When omitted, setting `sessionPolicy` throws at\n * `createNoydb()` time, and ~495 LOC of policy + token machinery\n * stay out of the bundle.\n *\n * @internal\n */\n readonly sessionStrategy?: SessionStrategy\n /**\n * tree-shake seam — optional sync engine + presence strategy.\n * Pass `withSync()` from `@noy-db/hub/sync` to enable\n * `db.push()` / `pull()` / replication, `db.transaction(vault)`\n * for sync-aware transactions, and `collection.presence()`. When\n * omitted, configuring `sync` / calling these surfaces throws with\n * a pointer at the subpath, and ~856 LOC of replication + presence\n * machinery stay out of the bundle. Keyring stays core; grant/\n * revoke/magic-link/delegation tree-shake via direct imports.\n *\n * @internal\n */\n readonly syncStrategy?: SyncStrategy\n /** Optional remote store(s) for sync. Accepts a single store, a SyncTarget, or an array. */\n readonly sync?: NoydbStore | SyncTarget | SyncTarget[]\n /** User identifier. */\n readonly user: string\n /** Passphrase for key derivation. Required unless encrypt is false or `getKeyring` is provided. */\n readonly secret?: string\n /**\n * Optional callback that returns an unlocked keyring for a given vault.\n * Use this to plug in WebAuthn / OIDC / Shamir / any unlock path that\n * produces an `UnlockedKeyring` outside the passphrase model.\n *\n * When set, `secret` MUST NOT also be set — `createNoydb` throws if both\n * are supplied. When neither is set (and `encrypt !== false`), `createNoydb`\n * also throws.\n *\n * The callback is called lazily, on the first operation that needs the\n * keyring for a given vault. 
Noydb caches the returned keyring per-vault\n * for the lifetime of the instance, so the callback is invoked at most\n * once per `(instance, vault)` pair (assuming the callback resolves\n * successfully). If the callback rejects, the rejection surfaces from the\n * first vault operation that triggered the unlock; subsequent operations\n * will retry the callback.\n *\n * @example\n * ```ts\n * import { createNoydb } from '@noy-db/hub'\n * import { unlockWebAuthn } from '@noy-db/on-webauthn'\n *\n * const enrollment = await loadEnrollment()\n * const db = await createNoydb({\n * store,\n * user: 'alice',\n * getKeyring: (vault) => unlockWebAuthn(enrollment),\n * })\n * ```\n *\n * Note: this callback is responsible for both the \"open existing vault\"\n * and the \"create new vault\" cases. Unlike the passphrase path, there is\n * no automatic `NoAccessError` → `createOwnerKeyring` fallback, because\n * the callback owner has the UI context to decide which path to run.\n * For first-time bootstrap, use a passphrase or recovery code, enroll\n * WebAuthn from the unlocked keyring, then swap to `getKeyring` on\n * subsequent sessions.\n */\n readonly getKeyring?: (vault: string) => Promise<UnlockedKeyring>\n /** Auth method. Default: 'passphrase'. */\n readonly auth?: 'passphrase' | 'biometric'\n /** Enable encryption. Default: true. */\n readonly encrypt?: boolean\n /** Conflict resolution strategy. Default: 'version'. */\n readonly conflict?: ConflictStrategy\n /**\n * Sync scheduling policy. Controls when push/pull fire.\n * Default inferred from store category: per-record → `on-change`,\n * bundle → `debounce 30s`.\n */\n readonly syncPolicy?: SyncPolicy\n /**\n * @deprecated Use `syncPolicy` instead. Kept for backward compatibility.\n * When both are supplied, `syncPolicy` takes precedence.\n */\n readonly autoSync?: boolean\n /**\n * @deprecated Use `syncPolicy` instead. Kept for backward compatibility.\n */\n readonly syncInterval?: number\n /**\n * Session timeout in ms. Clears keys after inactivity. Default: none.\n * @deprecated Use `sessionPolicy.idleTimeoutMs` instead. This field is\n * still honored for backwards compatibility but `sessionPolicy` takes\n * precedence when both are supplied.\n */\n readonly sessionTimeout?: number\n /**\n * Session policy controlling lifetime, re-auth requirements, and\n * background-lock behavior. When supplied, replaces the\n * legacy `sessionTimeout` field.\n */\n readonly sessionPolicy?: SessionPolicy\n /**\n * Validate passphrase strength against the phrase format\n * (`@noy-db/hub` issue #7) on first-time keyring creation. When\n * `true`, weak phrases throw {@link WeakPassphraseError} from\n * `createNoydb()` / `db.rotatePassphrase()`. Default: `false` for\n * back-compat in v0.1.x; planned to flip to `true` at v1.0.\n */\n readonly validatePassphrase?: boolean\n /**\n * Vault-level policy gate document (issue #9). When present, the hub\n * persists the merged policy at `_meta/policy` on first-time vault\n * creation and gates sensitive operations (`db.rotatePassphrase`,\n * `db.export*`, …) against it. Omitted ⇒ the engine uses\n * {@link PERSONAL_POLICY}. Use {@link STRICT_POLICY} for regulated\n * deployments.\n *\n * The on-disk document is the source of truth — the policy field\n * is only honored at vault creation; subsequent runs read from\n * `_meta/policy`. 
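// Illustrative sketch: opting in to three of the tree-shake seams described
// above. The factory names and subpaths are the ones quoted in the field
// docs; the zero-argument calls follow that phrasing (an assumption — check
// each subpath's own docs for options). `store` and the passphrase are
// placeholders.
import { createNoydb } from '@noy-db/hub'
import { withHistory } from '@noy-db/hub/history'
import { withI18n } from '@noy-db/hub/i18n'
import { withSession } from '@noy-db/hub/session'

const db = await createNoydb({
  store,
  user: 'alice',
  secret: 'a long unique passphrase goes here',
  historyStrategy: withHistory(), // per-record versions, ledger, vault.at()
  i18nStrategy: withI18n(),       // dictKey / i18nText resolution on reads
  sessionStrategy: withSession(), // required before sessionPolicy is accepted
  sessionPolicy: { idleTimeoutMs: 15 * 60_000, requireReAuthFor: ['export', 'grant'] },
})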
Use `db.updatePolicy()` to change it deliberately.\n *\n * Imported from `@noy-db/hub` as a type-only reference; the runtime\n * import lives in `policy/index.ts`.\n */\n readonly policy?: VaultPolicy\n /**\n * Mandatory recovery profile enrollment (issue #10). Vaults with\n * `recover-passphrase` enabled MUST register at least one profile\n * before being production-ready, otherwise `createNoydb()` throws\n * {@link RecoveryNotEnrolledError}. Set\n * `policy.gates['recover-passphrase'].enabled = false` to\n * deliberately opt out of recovery (passphrase loss = data loss).\n *\n * v0.1.0-pre.5 supports the `'paper'` profile end-to-end. Other\n * profiles ship the API shape and throw\n * {@link RecoveryProfileNotImplementedError} during use.\n */\n readonly recovery?: ReadonlyArray<RecoveryEnrollment>\n /**\n * When `true`, `createNoydb` rejects vaults with no recovery\n * entries persisted (per the spec's mandatory-enrollment\n * requirement). Default `false` for v0.1.x back-compat; planned to\n * flip to `true` at v1.0. Apps in regulated environments should\n * turn this on now.\n */\n readonly requireRecovery?: boolean\n /**\n * What to do when `openVault` finds an existing keyring in the store that\n * cannot be decrypted with the supplied credentials (`InvalidKeyError`).\n *\n * - `'error'` (default) — propagate the error. The app must prompt the user\n * to supply the correct credentials or clear both the data and auth stores.\n * - `'reset'` — delete the stale keyring and re-initialise the vault from\n * scratch using the current credentials. Use this when the data store can\n * become detached from the auth store (e.g. the user cleared the IndexedDB\n * data records but not the keyring row, or a WebAuthn credential was rotated).\n * **All previously encrypted data is unrecoverable after a reset.**\n *\n * Only applies to the passphrase (`secret`) path. When `getKeyring` is used,\n * the callback is responsible for handling stale-keyring detection itself.\n */\n readonly onInvalidKey?: 'error' | 'reset'\n /**\n * Enable the public envelope subsystem (`docs/subsystems/public-envelope.md`).\n * Pass `true` for the default schema (every standard field, 256 KB\n * icon cap, 200-char text cap), or a `PublicEnvelopeSchema` to\n * narrow what the owner can set. Off by default — vaults written\n * by hubs without this option carry no envelope, full stop.\n */\n readonly publicEnvelope?: true | PublicEnvelopeSchema\n /** Audit history configuration. */\n readonly history?: HistoryConfig\n /**\n * Consumer-supplied translation function for `i18nText` fields with\n * `autoTranslate: true`.\n *\n * ⚠ **`plaintextTranslator` receives unencrypted text.** Configuring\n * this hook causes plaintext to leave noy-db's zero-knowledge boundary\n * over whatever channel the consumer's implementation uses. noy-db ships\n * no built-in translator and adds no translator SDKs as dependencies.\n * The consumer chooses and owns the data policy of the external service.\n *\n * Per-field opt-in via `autoTranslate: true` on `i18nText()`. Calling\n * `put()` on a collection with `autoTranslate: true` fields while this\n * option is absent throws `TranslatorNotConfiguredError`.\n *\n * See `NOYDB_SPEC.md § Zero-Knowledge Storage` for the invariant text.\n */\n readonly plaintextTranslator?: PlaintextTranslatorFn\n /**\n * Human-readable name for the translator, recorded in the in-process\n * audit log (e.g. 
`'deepl-pro-with-dpa'`, `'self-hosted-llama-7b'`).\n * Defaults to `'anonymous'` when not supplied.\n */\n readonly plaintextTranslatorName?: string\n}\n\n// ─── History / Audit Trail ─────────────────────────────────────────────\n\n/** History configuration. */\nexport interface HistoryConfig {\n /** Enable history tracking. Default: true. */\n readonly enabled?: boolean\n /** Maximum history entries per record. Oldest pruned on overflow. Default: unlimited. */\n readonly maxVersions?: number\n}\n\n/** Options for querying history. */\nexport interface HistoryOptions {\n /** Start date (inclusive), ISO 8601. */\n readonly from?: string\n /** End date (inclusive), ISO 8601. */\n readonly to?: string\n /** Maximum entries to return. */\n readonly limit?: number\n}\n\n/** Options for pruning history. */\nexport interface PruneOptions {\n /** Keep only the N most recent versions. */\n readonly keepVersions?: number\n /** Delete versions older than this date, ISO 8601. */\n readonly beforeDate?: string\n}\n\n/** A decrypted history entry. */\nexport interface HistoryEntry<T> {\n readonly version: number\n readonly timestamp: string\n readonly userId: string\n readonly record: T\n}\n\n// ─── Bulk operations ──────────────────────────────────────\n\n/** Per-item options for `Collection.putMany()`. */\nexport interface PutManyItemOptions {\n /**\n * Optimistic-concurrency check: fail this item if the stored version\n * is not `expectedVersion`. Honored only in `atomic: true` mode;\n * ignored in the default best-effort loop.\n */\n readonly expectedVersion?: number\n}\n\n/**\n * Batch-level options for `Collection.putMany()` and `deleteMany()`.\n *\n * `atomic: true` switches the call from best-effort loop\n * to all-or-nothing: a pre-flight CAS check runs first, then every op\n * is executed; any mid-batch failure triggers a best-effort revert.\n * On failure in atomic mode the whole call throws — you won't get a\n * partial `PutManyResult`. On success the result mirrors the default\n * loop's shape.\n */\nexport interface PutManyOptions {\n readonly atomic?: boolean\n}\n\n/** Result of `Collection.putMany()`. */\nexport interface PutManyResult {\n /** `true` iff every entry succeeded. */\n readonly ok: boolean\n /** IDs that were successfully written. */\n readonly success: readonly string[]\n /** Entries that failed, with the error that prevented each write. */\n readonly failures: ReadonlyArray<{ readonly id: string; readonly error: Error }>\n}\n\n/** Result of `Collection.deleteMany()`. Same shape as `PutManyResult`. */\nexport interface DeleteManyResult {\n readonly ok: boolean\n readonly success: readonly string[]\n readonly failures: ReadonlyArray<{ readonly id: string; readonly error: Error }>\n}\n","import type {\n NoydbStore,\n EncryptedEnvelope,\n BlobObject,\n SlotRecord,\n SlotInfo,\n VersionRecord,\n BlobPutOptions,\n BlobResponseOptions,\n} from '../types.js'\nimport { NOYDB_FORMAT_VERSION } from '../types.js'\nimport {\n encrypt,\n decrypt,\n hmacSha256Hex,\n encryptBytesWithAAD,\n decryptBytesWithAAD,\n bufferToBase64,\n base64ToBuffer,\n} from '../crypto.js'\nimport { ConflictError, NotFoundError } from '../errors.js'\nimport { detectMagic, isPreCompressed } from './mime-magic.js'\n\n// ─── Internal collection names ─────────────────────────────────────────\n\n/**\n * DEK slot name for vault-shared blob data. 
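Chunk envelopes and `_blob_index` metadata are both encrypted under\n * this single DEK, which is what lets content-addressed deduplication\n * work across records in the vault. 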
Calling `getDEK('_blob')`\n * auto-creates a blob DEK the first time — same lazy-creation mechanism\n * used for any user-defined collection.\n */\nexport const BLOB_COLLECTION = '_blob'\n\n/** Stores `BlobObject` metadata envelopes, keyed by eTag. */\nexport const BLOB_INDEX_COLLECTION = '_blob_index'\n\n/**\n * Stores encrypted chunk envelopes, keyed by `{eTag}/{chunkIndex}`.\n * NOT loaded into the in-memory query layer. Fetched on demand by\n * `BlobSet.get()` / `BlobSet.response()`.\n */\nexport const BLOB_CHUNKS_COLLECTION = '_blob_chunks'\n\n/** Prefix for per-collection slot metadata collections. */\nexport const BLOB_SLOTS_PREFIX = '_blob_slots_'\n\n/** Prefix for per-collection version records. */\nexport const BLOB_VERSIONS_PREFIX = '_blob_versions_'\n\n/**\n * Default chunk size: 256 KB raw bytes.\n * After AES-GCM (same size) + base64 (~33% inflation) → ~342 KB per\n * envelope, safely within DynamoDB's 400 KB item limit.\n */\nexport const DEFAULT_CHUNK_SIZE = 256 * 1024\n\n/** Maximum CAS retry attempts for refCount and slot metadata updates. */\nconst MAX_CAS_RETRIES = 5\n\n// ─── Compression helpers ───────────────────────────────────────────────\n\nasync function compressBytes(\n data: Uint8Array,\n): Promise<{ bytes: Uint8Array; algorithm: 'gzip' | 'none' }> {\n if (typeof CompressionStream === 'undefined') {\n return { bytes: data, algorithm: 'none' }\n }\n const cs = new CompressionStream('gzip')\n const writer = cs.writable.getWriter()\n await writer.write(data as Uint8Array<ArrayBuffer>)\n await writer.close()\n const buf = await new Response(cs.readable).arrayBuffer()\n return { bytes: new Uint8Array(buf), algorithm: 'gzip' }\n}\n\nasync function decompressBytes(data: Uint8Array): Promise<Uint8Array> {\n if (typeof DecompressionStream === 'undefined') {\n throw new Error(\n '[noy-db] DecompressionStream not available — cannot decompress blob chunk',\n )\n }\n const ds = new DecompressionStream('gzip')\n const writer = ds.writable.getWriter()\n await writer.write(data as Uint8Array<ArrayBuffer>)\n await writer.close()\n const buf = await new Response(ds.readable).arrayBuffer()\n return new Uint8Array(buf)\n}\n\nfunction concatChunks(chunks: Uint8Array[]): Uint8Array {\n const total = chunks.reduce((s, c) => s + c.byteLength, 0)\n const out = new Uint8Array(total)\n let offset = 0\n for (const c of chunks) {\n out.set(c, offset)\n offset += c.byteLength\n }\n return out\n}\n\n/** Build the AAD binding for chunk integrity: \"{eTag}:{chunkIndex}:{chunkCount}\" */\nfunction chunkAAD(eTag: string, chunkIndex: number, chunkCount: number): Uint8Array {\n return new TextEncoder().encode(`${eTag}:${chunkIndex}:${chunkCount}`)\n}\n\n// ─── BlobSet ──────────────────────────────────────────────────────────\n\n/**\n * Handle for reading, writing, versioning, and deleting binary blobs\n * on a specific record.\n *\n * Obtained via `collection.blob(id)`. No I/O is performed until you\n * call a method.\n *\n * ## Storage layout\n *\n * ```\n * _blob_index/{eTag} BlobObject metadata (vault-shared DEK)\n * _blob_chunks/{eTag}/{chunkIndex} Encrypted chunk data (vault-shared DEK + AAD)\n * _blob_slots_{collection}/{recordId} Slot map (parent collection DEK)\n * _blob_versions_{collection}/{recordId}/{slot}/{label} Published versions (parent collection DEK)\n * ```\n *\n * ## Deduplication\n *\n * `put()` computes `eTag = HMAC-SHA-256(blobDEK, plaintext)` — keyed so the\n * store cannot predict eTags for known content. 
If another record already\n * uploaded the same bytes, the chunks are reused and `refCount` is incremented.\n *\n * ## Chunk integrity\n *\n * Each chunk is encrypted with AES-256-GCM using AAD = `{eTag}:{index}:{count}`,\n * preventing chunk reorder, substitution, and truncation attacks.\n */\nexport class BlobSet {\n private readonly store: NoydbStore\n private readonly vault: string\n private readonly collection: string\n private readonly recordId: string\n private readonly getDEK: (name: string) => Promise<CryptoKey>\n private readonly encrypted: boolean\n private readonly userId: string | undefined\n private readonly maxBlobBytes: number | undefined\n\n constructor(opts: {\n store: NoydbStore\n vault: string\n collection: string\n recordId: string\n getDEK: (name: string) => Promise<CryptoKey>\n encrypted: boolean\n userId?: string\n maxBlobBytes?: number\n }) {\n this.store = opts.store\n this.vault = opts.vault\n this.collection = opts.collection\n this.recordId = opts.recordId\n this.getDEK = opts.getDEK\n this.encrypted = opts.encrypted\n this.userId = opts.userId\n this.maxBlobBytes = opts.maxBlobBytes\n }\n\n /** The internal collection that holds slot metadata for this collection's blobs. */\n private get slotsCollection(): string {\n return `${BLOB_SLOTS_PREFIX}${this.collection}`\n }\n\n /** The internal collection that holds published versions for this collection's blobs. */\n private get versionsCollection(): string {\n return `${BLOB_VERSIONS_PREFIX}${this.collection}`\n }\n\n // ─── Slot Metadata I/O (CAS-protected) ─────────────────────────────\n\n private async loadSlots(): Promise<{\n slots: Record<string, SlotRecord>\n version: number\n }> {\n const envelope = await this.store.get(this.vault, this.slotsCollection, this.recordId)\n if (!envelope) return { slots: {}, version: 0 }\n\n if (!this.encrypted) {\n return {\n slots: JSON.parse(envelope._data) as Record<string, SlotRecord>,\n version: envelope._v,\n }\n }\n\n const dek = await this.getDEK(this.collection)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return {\n slots: JSON.parse(json) as Record<string, SlotRecord>,\n version: envelope._v,\n }\n }\n\n private async saveSlots(\n slots: Record<string, SlotRecord>,\n currentVersion: number,\n ): Promise<void> {\n const json = JSON.stringify(slots)\n const now = new Date().toISOString()\n let envelope: EncryptedEnvelope\n\n if (this.encrypted) {\n const dek = await this.getDEK(this.collection)\n const { iv, data } = await encrypt(json, dek)\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: currentVersion + 1,\n _ts: now,\n _iv: iv,\n _data: data,\n }\n } else {\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: currentVersion + 1,\n _ts: now,\n _iv: '',\n _data: json,\n }\n }\n\n await this.store.put(\n this.vault,\n this.slotsCollection,\n this.recordId,\n envelope,\n currentVersion > 0 ? currentVersion : undefined,\n )\n }\n\n /**\n * CAS retry loop for slot metadata updates. 
Re-reads slots on conflict\n * and re-applies the mutation function.\n */\n private async casUpdateSlots(\n mutate: (slots: Record<string, SlotRecord>) => Record<string, SlotRecord> | null,\n ): Promise<void> {\n for (let attempt = 0; attempt < MAX_CAS_RETRIES; attempt++) {\n const { slots, version } = await this.loadSlots()\n const updated = mutate(slots)\n if (updated === null) return // no-op\n try {\n await this.saveSlots(updated, version)\n return\n } catch (err) {\n if (err instanceof ConflictError && attempt < MAX_CAS_RETRIES - 1) continue\n throw err\n }\n }\n }\n\n // ─── Blob Index I/O (versioned for CAS refCount) ──────────────────\n\n private async loadBlobObject(eTag: string): Promise<{ blob: BlobObject; version: number } | null> {\n const envelope = await this.store.get(this.vault, BLOB_INDEX_COLLECTION, eTag)\n if (!envelope) return null\n\n if (!this.encrypted) {\n return { blob: JSON.parse(envelope._data) as BlobObject, version: envelope._v }\n }\n\n const dek = await this.getDEK(BLOB_COLLECTION)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return { blob: JSON.parse(json) as BlobObject, version: envelope._v }\n }\n\n private async writeBlobObject(blob: BlobObject, expectedVersion?: number): Promise<void> {\n const json = JSON.stringify(blob)\n const now = new Date().toISOString()\n const newVersion = (expectedVersion ?? 0) + 1\n let envelope: EncryptedEnvelope\n\n if (this.encrypted) {\n const dek = await this.getDEK(BLOB_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: newVersion, _ts: now, _iv: iv, _data: data }\n } else {\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: newVersion, _ts: now, _iv: '', _data: json }\n }\n\n await this.store.put(\n this.vault,\n BLOB_INDEX_COLLECTION,\n blob.eTag,\n envelope,\n expectedVersion,\n )\n }\n\n /**\n * CAS retry loop for refCount changes on a BlobObject.\n */\n private async casUpdateRefCount(eTag: string, delta: number): Promise<void> {\n for (let attempt = 0; attempt < MAX_CAS_RETRIES; attempt++) {\n const result = await this.loadBlobObject(eTag)\n if (!result) throw new NotFoundError(`BlobObject ${eTag} not found`)\n const { blob, version } = result\n const updated: BlobObject = { ...blob, refCount: blob.refCount + delta }\n try {\n await this.writeBlobObject(updated, version)\n return\n } catch (err) {\n if (err instanceof ConflictError && attempt < MAX_CAS_RETRIES - 1) continue\n throw err\n }\n }\n }\n\n // ─── Chunk I/O (with AAD binding) ─────────────────────────────────\n\n private async writeChunk(\n eTag: string,\n index: number,\n chunkCount: number,\n chunk: Uint8Array,\n dek: CryptoKey | null,\n ): Promise<void> {\n const id = `${eTag}_${index}`\n const now = new Date().toISOString()\n let envelope: EncryptedEnvelope\n\n if (dek) {\n const aad = chunkAAD(eTag, index, chunkCount)\n const { iv, data } = await encryptBytesWithAAD(chunk, dek, aad)\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: now, _iv: iv, _data: data }\n } else {\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: now,\n _iv: '',\n _data: bufferToBase64(chunk),\n }\n }\n\n await this.store.put(this.vault, BLOB_CHUNKS_COLLECTION, id, envelope)\n }\n\n private async readChunk(\n eTag: string,\n index: number,\n chunkCount: number,\n dek: CryptoKey | null,\n ): Promise<Uint8Array | null> {\n const envelope = await this.store.get(this.vault, BLOB_CHUNKS_COLLECTION, `${eTag}_${index}`)\n if (!envelope) return null\n\n if (dek) {\n const aad = 
chunkAAD(eTag, index, chunkCount)\n return await decryptBytesWithAAD(envelope._iv, envelope._data, dek, aad)\n }\n\n return base64ToBuffer(envelope._data)\n }\n\n // ─── Version record I/O ───────────────────────────────────────────\n\n private versionKey(slotName: string, label: string): string {\n return `${this.recordId}::${slotName}::${label}`\n }\n\n private async loadVersionRecord(slotName: string, label: string): Promise<VersionRecord | null> {\n const key = this.versionKey(slotName, label)\n const envelope = await this.store.get(this.vault, this.versionsCollection, key)\n if (!envelope) return null\n\n if (!this.encrypted) {\n return JSON.parse(envelope._data) as VersionRecord\n }\n\n const dek = await this.getDEK(this.collection)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n return JSON.parse(json) as VersionRecord\n }\n\n private async writeVersionRecord(slotName: string, record: VersionRecord): Promise<void> {\n const key = this.versionKey(slotName, record.label)\n const json = JSON.stringify(record)\n const now = new Date().toISOString()\n let envelope: EncryptedEnvelope\n\n if (this.encrypted) {\n const dek = await this.getDEK(this.collection)\n const { iv, data } = await encrypt(json, dek)\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: now, _iv: iv, _data: data }\n } else {\n envelope = { _noydb: NOYDB_FORMAT_VERSION, _v: 1, _ts: now, _iv: '', _data: json }\n }\n\n await this.store.put(this.vault, this.versionsCollection, key, envelope)\n }\n\n private async deleteVersionRecord(slotName: string, label: string): Promise<void> {\n const key = this.versionKey(slotName, label)\n await this.store.delete(this.vault, this.versionsCollection, key)\n }\n\n // ─── Effective chunk size ─────────────────────────────────────────\n\n private effectiveChunkSize(opts?: BlobPutOptions): number {\n if (opts?.chunkSize) return opts.chunkSize\n if (this.maxBlobBytes) return this.maxBlobBytes\n return DEFAULT_CHUNK_SIZE\n }\n\n // ─── Fetch all chunks for a blob ──────────────────────────────────\n\n private async fetchAllChunks(blob: BlobObject): Promise<Uint8Array> {\n const blobDEK = this.encrypted ? await this.getDEK(BLOB_COLLECTION) : null\n const chunks: Uint8Array[] = []\n\n for (let i = 0; i < blob.chunkCount; i++) {\n const chunk = await this.readChunk(blob.eTag, i, blob.chunkCount, blobDEK)\n if (!chunk) {\n throw new NotFoundError(\n `Blob chunk ${i}/${blob.chunkCount} missing for eTag \"${blob.eTag}\" on record \"${this.recordId}\"`,\n )\n }\n chunks.push(chunk)\n }\n\n const assembled = concatChunks(chunks)\n return blob.compression === 'gzip' ? await decompressBytes(assembled) : assembled\n }\n\n // ─── Public API: Slot management ──────────────────────────────────\n\n /**\n * Upload bytes and attach them to this record under `slotName`.\n *\n * 1. Computes `eTag = HMAC-SHA-256(blobDEK, plaintext)` for keyed content-addressing.\n * 2. Auto-detects MIME type from magic bytes if not provided.\n * 3. If a blob with this eTag already exists, skips chunk upload (deduplication)\n * and CAS-increments refCount.\n * 4. Otherwise: compresses → splits into chunks → encrypts each chunk with\n * AAD binding → writes `_blob_chunks` → writes `BlobObject` to `_blob_index`.\n * 5. 
CAS-updates the slot metadata in `_blob_slots_{collection}`.\n * If overwriting an existing slot, decrements the old eTag's refCount.\n */\n async put(slotName: string, data: Uint8Array, opts?: BlobPutOptions): Promise<void> {\n // Step 1 — keyed content-hash (plaintext, before compression)\n const blobDEK = this.encrypted ? await this.getDEK(BLOB_COLLECTION) : null\n const eTag = blobDEK\n ? await hmacSha256Hex(blobDEK, data)\n : await plainSha256Hex(data)\n\n // Step 2 — MIME detection\n let mimeType = opts?.mimeType\n if (!mimeType) {\n const detected = detectMagic(data.subarray(0, 16))\n if (detected) mimeType = detected.mime\n }\n\n // Determine compression: explicit opt > auto-detect > default true\n let shouldCompress: boolean\n if (opts?.compress !== undefined) {\n shouldCompress = opts.compress\n } else if (mimeType && isPreCompressed(mimeType)) {\n shouldCompress = false\n } else {\n shouldCompress = true\n }\n\n // Step 3 — deduplication check\n const existingBlob = await this.loadBlobObject(eTag)\n\n if (existingBlob) {\n // eTag already exists — just increment refCount (CAS retry)\n await this.casUpdateRefCount(eTag, +1)\n } else {\n // Step 4 — compress\n const { bytes: compressed, algorithm } = shouldCompress\n ? await compressBytes(data)\n : { bytes: data, algorithm: 'none' as const }\n\n const chunkSize = this.effectiveChunkSize(opts)\n const chunkCount = Math.max(1, Math.ceil(compressed.byteLength / chunkSize))\n\n // Step 5 — write chunks FIRST with AAD binding (safe failure order)\n for (let i = 0; i < chunkCount; i++) {\n const start = i * chunkSize\n await this.writeChunk(\n eTag, i, chunkCount,\n compressed.subarray(start, start + chunkSize),\n blobDEK,\n )\n }\n\n // Step 6 — write blob index entry after all chunks succeed\n await this.writeBlobObject({\n eTag,\n size: data.byteLength,\n compressedSize: compressed.byteLength,\n compression: algorithm,\n chunkSize,\n chunkCount,\n ...(mimeType !== undefined ? { mimeType } : {}),\n createdAt: new Date().toISOString(),\n refCount: 1,\n })\n }\n\n // Step 7 — CAS-update slot metadata\n const uploaderUserId = opts?.uploadedBy ?? this.userId\n await this.casUpdateSlots((slots) => {\n const oldETag = slots[slotName]?.eTag\n slots[slotName] = {\n eTag,\n filename: slotName,\n size: data.byteLength,\n ...(mimeType !== undefined ? { mimeType } : {}),\n uploadedAt: new Date().toISOString(),\n ...(uploaderUserId !== undefined ? 
{ uploadedBy: uploaderUserId } : {}),\n }\n // Schedule old eTag refCount decrement (non-blocking best-effort)\n if (oldETag && oldETag !== eTag) {\n this._deferredRefDecrement = oldETag\n }\n return slots\n })\n\n // Decrement old eTag refCount outside the CAS loop\n if (this._deferredRefDecrement) {\n const oldETag = this._deferredRefDecrement\n this._deferredRefDecrement = undefined\n await this.casUpdateRefCount(oldETag, -1).catch(() => {\n // Best-effort — blobGC will reconcile\n })\n }\n }\n\n private _deferredRefDecrement: string | undefined\n\n /**\n * Fetch all bytes for the named slot.\n * Returns `null` if the slot does not exist.\n * Throws `NotFoundError` if the index entry exists but a chunk is missing.\n */\n async get(slotName: string): Promise<Uint8Array | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n return this.fetchAllChunks(result.blob)\n }\n\n /**\n * List all slot entries for this record.\n * Returns metadata only — no chunk data is loaded.\n */\n async list(): Promise<SlotInfo[]> {\n const { slots } = await this.loadSlots()\n return Object.entries(slots).map(([name, slot]) => ({ name, ...slot }))\n }\n\n /**\n * Delete the named slot from this record.\n * Decrements refCount on the blob. Chunks are GC'd by `vault.blobGC()`.\n */\n async delete(slotName: string): Promise<void> {\n let eTagToDecrement: string | undefined\n\n await this.casUpdateSlots((slots) => {\n if (!(slotName in slots)) return null\n eTagToDecrement = slots[slotName]!.eTag\n delete slots[slotName]\n return slots\n })\n\n if (eTagToDecrement) {\n await this.casUpdateRefCount(eTagToDecrement, -1).catch(() => {\n // Best-effort — blobGC will reconcile\n })\n }\n }\n\n /**\n * Return a native `Response` whose body streams the decrypted,\n * decompressed blob bytes with full HTTP metadata headers.\n *\n * Note: implementation is buffered — all chunks are loaded into\n * memory before being enqueued. True streaming deferred to.\n *\n * Returns `null` if the slot does not exist.\n */\n async response(slotName: string, opts?: BlobResponseOptions): Promise<Response | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n return this.buildResponse(slot, result.blob, opts)\n }\n\n /**\n * Decrypt the slot and wrap the bytes in a browser ObjectURL ready\n * to feed into `<img src>`, `<a href>`, etc. The caller MUST call\n * `revoke()` when the URL is no longer needed — otherwise the URL\n * (and the underlying decrypted Blob) are pinned for the lifetime\n * of the document, which leaks memory in long-lived pages.\n *\n * Returns `null` when the slot does not exist.\n *\n * Throws when `URL.createObjectURL` is unavailable in the host\n * environment (Node without DOM, restricted workers). Framework\n * adapters — `useBlobURL` in `@noy-db/in-vue`, etc. — guard against\n * this for SSR contexts and stay at `null` instead of propagating.\n */\n async objectURL(\n slotName: string,\n opts?: { mimeType?: string },\n ): Promise<{ url: string; revoke: () => void } | null> {\n if (typeof URL === 'undefined' || typeof URL.createObjectURL !== 'function') {\n throw new Error(\n 'BlobSet.objectURL: URL.createObjectURL is unavailable in this environment. 
' +\n 'Call this from the browser, or use BlobSet.get() and create the URL yourself.',\n )\n }\n const bytes = await this.get(slotName)\n if (!bytes) return null\n\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n const type = opts?.mimeType ?? slot?.mimeType ?? 'application/octet-stream'\n\n // Pinning the underlying ArrayBuffer in a Blob is what backs the\n // ObjectURL — once we createObjectURL the URL holds a strong ref\n // to the Blob, so the local `blob` variable can fall out of scope.\n // Copy through a fresh ArrayBuffer so TS narrows away the\n // SharedArrayBuffer branch of `ArrayBufferLike` (Uint8Array is\n // generic over the backing buffer type since TS 5.7).\n const buffer = bytes.buffer.slice(bytes.byteOffset, bytes.byteOffset + bytes.byteLength) as ArrayBuffer\n const blob = new Blob([buffer], { type })\n const url = URL.createObjectURL(blob)\n let revoked = false\n const revoke = (): void => {\n if (revoked) return\n revoked = true\n URL.revokeObjectURL(url)\n }\n return { url, revoke }\n }\n\n // ─── Public API: Published versions (UC-3 amendment versioning) ───\n\n /**\n * Publish the current slot content as a named version snapshot.\n *\n * The published version holds an independent refCount reference to\n * the blob. Even if the slot is later overwritten or deleted, the\n * published version keeps the blob data alive.\n *\n * Publishing with an existing label overwrites it — if the eTags differ,\n * refCounts are adjusted accordingly.\n */\n async publish(slotName: string, label: string): Promise<void> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) throw new NotFoundError(`Slot \"${slotName}\" not found on record \"${this.recordId}\"`)\n\n // Check for existing version with this label\n const existing = await this.loadVersionRecord(slotName, label)\n if (existing && existing.eTag === slot.eTag) return // no-op: same blob\n\n // Write the version record\n const record: VersionRecord = {\n label,\n eTag: slot.eTag,\n publishedAt: new Date().toISOString(),\n ...(this.userId !== undefined ? 
{ publishedBy: this.userId } : {}),\n }\n await this.writeVersionRecord(slotName, record)\n\n // Increment refCount for the new version's eTag\n await this.casUpdateRefCount(slot.eTag, +1)\n\n // If overwriting an existing version with a different eTag, decrement the old one\n if (existing && existing.eTag !== slot.eTag) {\n await this.casUpdateRefCount(existing.eTag, -1).catch(() => {})\n }\n }\n\n /**\n * Fetch bytes for a published version.\n * Returns `null` if the version does not exist.\n */\n async getVersion(slotName: string, label: string): Promise<Uint8Array | null> {\n const record = await this.loadVersionRecord(slotName, label)\n if (!record) return null\n\n const result = await this.loadBlobObject(record.eTag)\n if (!result) return null\n\n return this.fetchAllChunks(result.blob)\n }\n\n /**\n * List all published versions for a slot.\n */\n async listVersions(slotName: string): Promise<VersionRecord[]> {\n const prefix = `${this.recordId}::${slotName}::`\n const allKeys = await this.store.list(this.vault, this.versionsCollection)\n const matchingKeys = allKeys.filter((k) => k.startsWith(prefix))\n\n const versions: VersionRecord[] = []\n for (const key of matchingKeys) {\n const envelope = await this.store.get(this.vault, this.versionsCollection, key)\n if (!envelope) continue\n\n if (!this.encrypted) {\n versions.push(JSON.parse(envelope._data) as VersionRecord)\n } else {\n const dek = await this.getDEK(this.collection)\n const json = await decrypt(envelope._iv, envelope._data, dek)\n versions.push(JSON.parse(json) as VersionRecord)\n }\n }\n\n return versions\n }\n\n /**\n * Delete a published version. Decrements refCount on its blob.\n */\n async deleteVersion(slotName: string, label: string): Promise<void> {\n const record = await this.loadVersionRecord(slotName, label)\n if (!record) return\n\n await this.deleteVersionRecord(slotName, label)\n await this.casUpdateRefCount(record.eTag, -1).catch(() => {})\n }\n\n /**\n * Return a `Response` for a published version — same as `response()`\n * but reads from the version record's eTag instead of the current slot.\n */\n async responseVersion(\n slotName: string,\n label: string,\n opts?: BlobResponseOptions,\n ): Promise<Response | null> {\n const record = await this.loadVersionRecord(slotName, label)\n if (!record) return null\n\n const result = await this.loadBlobObject(record.eTag)\n if (!result) return null\n\n // Build a synthetic SlotRecord from the version + blob data\n const slotLike: SlotRecord = {\n eTag: record.eTag,\n filename: opts?.filename ?? `${slotName}-${label}`,\n size: result.blob.size,\n ...(result.blob.mimeType !== undefined ? { mimeType: result.blob.mimeType } : {}),\n uploadedAt: record.publishedAt,\n ...(record.publishedBy !== undefined ? { uploadedBy: record.publishedBy } : {}),\n }\n\n return this.buildResponse(slotLike, result.blob, opts)\n }\n\n // ─── Diagnostics ──────────────────────────────────────────────────\n\n /**\n * Return the `BlobObject` metadata for the named slot.\n * Returns `null` if the slot or blob does not exist.\n */\n async blobInfo(slotName: string): Promise<BlobObject | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n const result = await this.loadBlobObject(slot.eTag)\n return result?.blob ?? null\n }\n\n // ─── Presigned URL (E5) ────────────────────────────────────────────\n\n /**\n * Generate a presigned URL for direct client download of the blob's\n * ciphertext. 
Only works when the blob store supports `presignUrl`.\n *\n * **Important:** The URL returns encrypted data. The caller must\n * decrypt client-side using `decryptResponse()` or a service worker.\n *\n * Returns `null` if the slot doesn't exist or the store doesn't support presigning.\n */\n async presignedUrl(slotName: string, expiresInSeconds = 3600): Promise<string | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n // Only works for single-chunk blobs where the store supports presigning\n if (result.blob.chunkCount !== 1) return null\n if (!this.store.presignUrl) return null\n\n const chunkId = `${slot.eTag}_0`\n return this.store.presignUrl(this.vault, '_blob_chunks', chunkId, expiresInSeconds)\n }\n\n /**\n * Decrypt a ciphertext Response (e.g. from a presigned URL fetch)\n * back into a plaintext Response with correct headers.\n *\n * Usage with service worker or client-side fetch:\n * ```ts\n * const url = await blobs.presignedUrl('invoice.pdf')\n * const cipherResponse = await fetch(url)\n * const plainResponse = await blobs.decryptResponse('invoice.pdf', cipherResponse)\n * ```\n */\n async decryptResponse(slotName: string, cipherResponse: Response): Promise<Response | null> {\n const { slots } = await this.loadSlots()\n const slot = slots[slotName]\n if (!slot) return null\n\n const result = await this.loadBlobObject(slot.eTag)\n if (!result) return null\n\n // Parse the envelope from the ciphertext response\n const text = await cipherResponse.text()\n const envelope = JSON.parse(text) as { _iv: string; _data: string }\n\n const blobDEK = this.encrypted ? await this.getDEK('_blob') : null\n if (!blobDEK) {\n return this.buildResponse(slot, result.blob, { inline: true })\n }\n\n // Decrypt the single chunk\n const aad = chunkAAD(slot.eTag, 0, result.blob.chunkCount)\n const { decryptBytesWithAAD: decryptAAD } = await import('../crypto.js')\n const decrypted = await decryptAAD(envelope._iv, envelope._data, blobDEK, aad)\n const plaintext = result.blob.compression === 'gzip'\n ? await decompressBytes(decrypted)\n : decrypted\n\n const body = new ReadableStream<Uint8Array>({\n start(controller) {\n controller.enqueue(plaintext)\n controller.close()\n },\n })\n\n const filename = slot.filename\n return new Response(body, {\n headers: {\n 'Content-Type': slot.mimeType ?? 'application/octet-stream',\n 'Content-Length': String(slot.size),\n 'ETag': `\"${slot.eTag}\"`,\n 'Content-Disposition': `inline; filename=\"${filename}\"`,\n 'Last-Modified': new Date(slot.uploadedAt).toUTCString(),\n },\n })\n }\n\n // ─── Internal: build Response from slot + blob ────────────────────\n\n private async buildResponse(\n slot: SlotRecord,\n blob: BlobObject,\n opts?: BlobResponseOptions,\n ): Promise<Response> {\n const fetchAllChunks = this.fetchAllChunks.bind(this)\n\n // buffered — all chunks loaded into memory then enqueued.\n const body = new ReadableStream<Uint8Array>({\n async start(controller) {\n try {\n const output = await fetchAllChunks(blob)\n controller.enqueue(output)\n controller.close()\n } catch (err) {\n controller.error(err)\n }\n },\n })\n\n const filename = opts?.filename ?? slot.filename\n const disposition = opts?.inline\n ? `inline; filename=\"${filename}\"`\n : `attachment; filename=\"${filename}\"`\n\n return new Response(body, {\n headers: {\n 'Content-Type': slot.mimeType ?? 
'application/octet-stream',\n 'Content-Length': String(slot.size),\n 'ETag': `\"${slot.eTag}\"`,\n 'Content-Disposition': disposition,\n 'Last-Modified': new Date(slot.uploadedAt).toUTCString(),\n },\n })\n }\n}\n\n// ─── Fallback for unencrypted mode ──────────────────────────────────────\n\nimport { sha256Hex } from '../crypto.js'\n\nasync function plainSha256Hex(data: Uint8Array): Promise<string> {\n return sha256Hex(data)\n}\n","/**\n * Lightweight MIME type detection from magic bytes (file signatures).\n *\n * Designed for the blob store's auto-detection feature. Operates on the first 16 bytes of\n * plaintext — no filesystem access, no filename guessing.\n *\n * ## Detection strategies\n *\n * 1. **Prefix match** — magic bytes at offset 0 (most formats).\n * 2. **Offset match** — magic bytes at a fixed offset > 0 (ISOBMFF: offset 4).\n * 3. **Compound match** — two separate byte sequences at different offsets\n * (RIFF-based: bytes 0-3 + bytes 8-11).\n *\n * ## Formats excluded (require offset > 16 bytes)\n *\n * - TAR (`ustar` at offset 257)\n * - ISO 9660 (`CD001` at offset 32769)\n *\n * @module\n */\n\n// ─── Types ───────────────────────────────────────────────────────────────\n\ninterface MagicRule {\n /** IANA MIME type (or widely-used x- type). */\n readonly mime: string\n /** Human-readable format name for diagnostics. */\n readonly format: string\n /** Magic bytes to match, as a Uint8Array. */\n readonly bytes: Uint8Array\n /** Byte offset where the magic starts. Default 0. */\n readonly offset?: number\n /**\n * For compound checks (RIFF, FORM): a second byte sequence that must\n * also match at `secondaryOffset`.\n */\n readonly secondaryBytes?: Uint8Array\n /** Offset of the secondary match. */\n readonly secondaryOffset?: number\n /** If true, the format is already compressed — skip gzip in blob.put(). */\n readonly preCompressed?: true\n}\n\n// ─── Helpers ─────────────────────────────────────────────────────────────\n\n/** Convert a hex string like `'FF D8 FF'` to Uint8Array. */\nfunction hex(s: string): Uint8Array {\n return new Uint8Array(s.split(' ').map((b) => parseInt(b, 16)))\n}\n\n// ─── Magic rules ─────────────────────────────────────────────────────────\n//\n// Ordered by detection priority: more specific (longer) signatures first\n// within the same offset group, so that e.g. 
RAR v5 (8 bytes) is tested\n// before RAR v4 (7 bytes).\n//\n// Sources verified against:\n// - Gary Kessler's File Signatures Table\n// - Wikipedia \"List of file signatures\"\n// - IANA MIME type registry\n// - Individual format specifications (PNG RFC 2083, PDF ISO 32000, etc.)\n//\n// Each entry includes the original CSV row number for traceability.\n\nconst MAGIC_RULES: readonly MagicRule[] = [\n // ── Images ───────────────────────────────────────────────────────────\n\n // #2 PNG — full 8-byte signature (RFC 2083)\n { mime: 'image/png', format: 'PNG', bytes: hex('89 50 4E 47 0D 0A 1A 0A'), preCompressed: true },\n\n // #1 JPEG — FF D8 FF (third byte is start of APP marker, always FF)\n { mime: 'image/jpeg', format: 'JPEG', bytes: hex('FF D8 FF'), preCompressed: true },\n\n // #7 WebP — RIFF compound: bytes 0-3 = RIFF, bytes 8-11 = WEBP\n {\n mime: 'image/webp',\n format: 'WebP',\n bytes: hex('52 49 46 46'),\n secondaryBytes: hex('57 45 42 50'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // #5 TIFF (little-endian) — II + version 42\n { mime: 'image/tiff', format: 'TIFF', bytes: hex('49 49 2A 00') },\n\n // #6 TIFF (big-endian) — MM + version 42\n { mime: 'image/tiff', format: 'TIFF', bytes: hex('4D 4D 00 2A') },\n\n // #3 GIF — GIF8 (covers GIF87a and GIF89a)\n { mime: 'image/gif', format: 'GIF', bytes: hex('47 49 46 38'), preCompressed: true },\n\n // #4 BMP — BM\n { mime: 'image/bmp', format: 'BMP', bytes: hex('42 4D') },\n\n // PSD — 8BPS\n { mime: 'image/vnd.adobe.photoshop', format: 'PSD', bytes: hex('38 42 50 53') },\n\n // #8 ICO — 00 00 01 00 (note: 00 00 02 00 is CUR cursor format)\n { mime: 'image/x-icon', format: 'ICO', bytes: hex('00 00 01 00') },\n\n // #9 HEIC — ISOBMFF: ftyp at offset 4, brand \"heic\" at offset 8\n {\n mime: 'image/heic',\n format: 'HEIC',\n bytes: hex('66 74 79 70'),\n offset: 4,\n secondaryBytes: hex('68 65 69 63'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // ── Documents ────────────────────────────────────────────────────────\n\n // PDF — %PDF\n { mime: 'application/pdf', format: 'PDF', bytes: hex('25 50 44 46') },\n\n // RTF — {\\rtf\n { mime: 'application/rtf', format: 'RTF', bytes: hex('7B 5C 72 74 66') },\n\n // ── Archives & compression ───────────────────────────────────────────\n\n // RAR v5 — 8-byte signature (test before RAR v4)\n { mime: 'application/vnd.rar', format: 'RAR v5', bytes: hex('52 61 72 21 1A 07 01 00'), preCompressed: true },\n\n // RAR v4 — 7-byte signature\n { mime: 'application/vnd.rar', format: 'RAR v4', bytes: hex('52 61 72 21 1A 07 00'), preCompressed: true },\n\n // 7-Zip — 6-byte signature\n { mime: 'application/x-7z-compressed', format: '7Z', bytes: hex('37 7A BC AF 27 1C'), preCompressed: true },\n\n // XZ — 6-byte stream header\n { mime: 'application/x-xz', format: 'XZ', bytes: hex('FD 37 7A 58 5A 00'), preCompressed: true },\n\n // ZIP — PK\\x03\\x04 (local file header)\n { mime: 'application/zip', format: 'ZIP', bytes: hex('50 4B 03 04'), preCompressed: true },\n\n // GZIP — 1F 8B\n { mime: 'application/gzip', format: 'GZIP', bytes: hex('1F 8B'), preCompressed: true },\n\n // BZIP2 — BZh\n { mime: 'application/x-bzip2', format: 'BZIP2', bytes: hex('42 5A 68'), preCompressed: true },\n\n // LZIP — LZIP\n { mime: 'application/x-lzip', format: 'LZIP', bytes: hex('4C 5A 49 50'), preCompressed: true },\n\n // ── Audio ────────────────────────────────────────────────────────────\n\n // WAV — RIFF compound: bytes 0-3 = RIFF, bytes 8-11 = WAVE\n {\n mime: 'audio/wav',\n format: 'WAV',\n bytes: 
hex('52 49 46 46'),\n secondaryBytes: hex('57 41 56 45'),\n secondaryOffset: 8,\n },\n\n // AIFF — FORM compound: bytes 0-3 = FORM, bytes 8-11 = AIFF\n {\n mime: 'audio/aiff',\n format: 'AIFF',\n bytes: hex('46 4F 52 4D'),\n secondaryBytes: hex('41 49 46 46'),\n secondaryOffset: 8,\n },\n\n // FLAC — fLaC\n { mime: 'audio/flac', format: 'FLAC', bytes: hex('66 4C 61 43') },\n\n // OGG — OggS (container — may hold Vorbis, Opus, Theora, etc.)\n { mime: 'application/ogg', format: 'OGG', bytes: hex('4F 67 67 53') },\n\n // MIDI — MThd\n { mime: 'audio/midi', format: 'MIDI', bytes: hex('4D 54 68 64') },\n\n // MP3 (ID3-tagged) — ID3\n { mime: 'audio/mpeg', format: 'MP3', bytes: hex('49 44 33'), preCompressed: true },\n\n // ── Video ────────────────────────────────────────────────────────────\n\n // AVI — RIFF compound: bytes 0-3 = RIFF, bytes 8-11 = AVI\\x20\n {\n mime: 'video/x-msvideo',\n format: 'AVI',\n bytes: hex('52 49 46 46'),\n secondaryBytes: hex('41 56 49 20'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // WMV/ASF — 8-byte ASF header GUID prefix\n { mime: 'video/x-ms-wmv', format: 'WMV', bytes: hex('30 26 B2 75 8E 66 CF 11'), preCompressed: true },\n\n // MKV/WebM — EBML header (Matroska container)\n { mime: 'video/x-matroska', format: 'MKV', bytes: hex('1A 45 DF A3'), preCompressed: true },\n\n // FLV — FLV\n { mime: 'video/x-flv', format: 'FLV', bytes: hex('46 4C 56'), preCompressed: true },\n\n // MOV — ISOBMFF: ftyp at offset 4, brand \"qt \" at offset 8\n {\n mime: 'video/quicktime',\n format: 'MOV',\n bytes: hex('66 74 79 70'),\n offset: 4,\n secondaryBytes: hex('71 74 20 20'),\n secondaryOffset: 8,\n preCompressed: true,\n },\n\n // MP4 — ISOBMFF: ftyp at offset 4 (brands vary: isom, mp41, mp42, etc.)\n // Tested AFTER MOV and HEIC so their specific brands match first.\n { mime: 'video/mp4', format: 'MP4', bytes: hex('66 74 79 70'), offset: 4, preCompressed: true },\n\n // ── Executables & binaries ───────────────────────────────────────────\n\n // SQLite — \"SQLite 3\" (first 8 bytes of the 16-byte header)\n { mime: 'application/vnd.sqlite3', format: 'SQLite', bytes: hex('53 51 4C 69 74 65 20 33') },\n\n // WASM — \\0asm\n { mime: 'application/wasm', format: 'WASM', bytes: hex('00 61 73 6D') },\n\n // ELF — \\x7FELF\n { mime: 'application/x-elf', format: 'ELF', bytes: hex('7F 45 4C 46') },\n\n // PE (EXE/DLL) — MZ\n { mime: 'application/vnd.microsoft.portable-executable', format: 'PE', bytes: hex('4D 5A') },\n\n // Mach-O — all four single-arch variants\n { mime: 'application/x-mach-binary', format: 'Mach-O 64 LE', bytes: hex('CF FA ED FE') },\n { mime: 'application/x-mach-binary', format: 'Mach-O 64 BE', bytes: hex('FE ED FA CF') },\n { mime: 'application/x-mach-binary', format: 'Mach-O 32 LE', bytes: hex('CE FA ED FE') },\n { mime: 'application/x-mach-binary', format: 'Mach-O 32 BE', bytes: hex('FE ED FA CE') },\n\n // Java Class — CA FE BA BE\n // Note: collides with Mach-O Universal Binary. Disambiguated by checking\n // bytes 4-7: Java class version is >= 0x002D (45), while fat binary\n // arch count is a small number (typically 0x00000002).\n // We place Java after Mach-O single-arch entries so the more common\n // Mach-O variants match first. 
The CA FE BA BE collision between Java\n // and Mach-O fat binary is resolved by the caller if needed.\n { mime: 'application/java-vm', format: 'Java Class', bytes: hex('CA FE BA BE') },\n\n // DEX — dex\\n (Android Dalvik Executable)\n { mime: 'application/vnd.android.dex', format: 'DEX', bytes: hex('64 65 78 0A') },\n\n // ── Package formats ──────────────────────────────────────────────────\n\n // DEB — !<arch> (ar archive; DEB-specific member follows)\n { mime: 'application/vnd.debian.binary-package', format: 'DEB', bytes: hex('21 3C 61 72 63 68 3E') },\n\n // RPM — ED AB EE DB\n { mime: 'application/x-rpm', format: 'RPM', bytes: hex('ED AB EE DB') },\n\n // CAB — MSCF\n { mime: 'application/vnd.ms-cab-compressed', format: 'CAB', bytes: hex('4D 53 43 46'), preCompressed: true },\n\n // ── Capture & Flash ──────────────────────────────────────────────────\n\n // PCAP (little-endian) — D4 C3 B2 A1\n { mime: 'application/vnd.tcpdump.pcap', format: 'PCAP', bytes: hex('D4 C3 B2 A1') },\n\n // PCAP (big-endian) — A1 B2 C3 D4\n { mime: 'application/vnd.tcpdump.pcap', format: 'PCAP BE', bytes: hex('A1 B2 C3 D4') },\n\n // PCAPNG — Section Header Block\n { mime: 'application/x-pcapng', format: 'PCAPNG', bytes: hex('0A 0D 0D 0A') },\n\n // SWF — all three variants (uncompressed, zlib, LZMA)\n { mime: 'application/x-shockwave-flash', format: 'SWF', bytes: hex('46 57 53') },\n { mime: 'application/x-shockwave-flash', format: 'SWF zlib', bytes: hex('43 57 53'), preCompressed: true },\n { mime: 'application/x-shockwave-flash', format: 'SWF LZMA', bytes: hex('5A 57 53'), preCompressed: true },\n\n // ── Data formats ─────────────────────────────────────────────────────\n\n // Parquet — PAR1 (no registered IANA MIME; using Apache's informal type)\n { mime: 'application/vnd.apache.parquet', format: 'Parquet', bytes: hex('50 41 52 31') },\n\n // Avro Object Container — Obj\\x01\n { mime: 'application/avro', format: 'Avro', bytes: hex('4F 62 6A 01') },\n\n // NES ROM — NES\\x1A (iNES header)\n { mime: 'application/x-nintendo-nes-rom', format: 'NES ROM', bytes: hex('4E 45 53 1A') },\n] as const\n\n// ─── MP3 sync word ───────────────────────────────────────────────────────\n//\n// MP3 files without an ID3 tag start with a frame sync word where the top\n// 11 bits are set: 0xFFE0 mask. The ID3 signature (49 44 33) is handled\n// as a normal rule above. The sync-word check is a fallback tested in\n// `detectMimeType` after all rules.\n\nfunction isMp3SyncWord(byte0: number, byte1: number): boolean {\n return byte0 === 0xff && (byte1 & 0xe0) === 0xe0\n}\n\n// ─── Detection ───────────────────────────────────────────────────────────\n\n/**\n * Detect MIME type from the first bytes of a file.\n *\n * @param header - The first 16 bytes (or more) of the plaintext. Passing\n * fewer than 16 bytes may miss compound and offset-based matches.\n * @returns Detected MIME type, or `'application/octet-stream'` if unknown.\n */\nexport function detectMimeType(header: Uint8Array): string {\n const result = detectMagic(header)\n return result?.mime ?? 
'application/octet-stream'\n}\n\n/**\n * Detect MIME type and whether the format is already compressed.\n *\n * Used by `BlobSet.put()` to decide whether to skip gzip compression.\n *\n * @param header - The first 16 bytes (or more) of the plaintext.\n * @returns `{ mime, preCompressed }` or `null` if no match.\n */\nexport function detectMagic(\n header: Uint8Array,\n): { mime: string; format: string; preCompressed: boolean } | null {\n for (const rule of MAGIC_RULES) {\n if (matchRule(header, rule)) {\n return {\n mime: rule.mime,\n format: rule.format,\n preCompressed: rule.preCompressed ?? false,\n }\n }\n }\n\n // Fallback: MP3 sync word (no ID3 tag)\n if (header.length >= 2 && isMp3SyncWord(header[0]!, header[1]!)) {\n return { mime: 'audio/mpeg', format: 'MP3', preCompressed: true }\n }\n\n return null\n}\n\n/**\n * Check whether a format is already compressed (should skip gzip).\n *\n * @param mimeType - A MIME type string.\n * @returns `true` if the format is known to be pre-compressed.\n */\nexport function isPreCompressed(mimeType: string): boolean {\n return PRE_COMPRESSED_MIMES.has(mimeType)\n}\n\n// ─── Internal matching ───────────────────────────────────────────────────\n\nfunction matchRule(header: Uint8Array, rule: MagicRule): boolean {\n const offset = rule.offset ?? 0\n const end = offset + rule.bytes.length\n\n // Not enough data for the primary match\n if (header.length < end) return false\n\n // Primary byte sequence\n for (let i = 0; i < rule.bytes.length; i++) {\n if (header[offset + i] !== rule.bytes[i]) return false\n }\n\n // Secondary byte sequence (compound check)\n if (rule.secondaryBytes && rule.secondaryOffset !== undefined) {\n const sEnd = rule.secondaryOffset + rule.secondaryBytes.length\n if (header.length < sEnd) return false\n for (let i = 0; i < rule.secondaryBytes.length; i++) {\n if (header[rule.secondaryOffset + i] !== rule.secondaryBytes[i]) return false\n }\n }\n\n return true\n}\n\n// ─── Pre-compressed MIME set ─────────────────────────────────────────────\n//\n// Built from the rules above. Used by `isPreCompressed()` for callers who\n// already know the MIME type (e.g. from a Content-Type header) and want to\n// skip the magic-byte detection step.\n\nconst PRE_COMPRESSED_MIMES = new Set<string>(\n MAGIC_RULES.filter((r) => r.preCompressed).map((r) => r.mime),\n)\n","/**\n * Active blob strategy factory. Calling `blobs()` returns a\n * `BlobStrategy` whose `openSlot` constructs a real `BlobSet` bound\n * to the caller's record. 
The returned strategy is passed into\n * `createNoydb({ blobStrategy: blobs() })` to light up the\n * `collection.blob(id)` path.\n *\n * This module is only reachable through the `@noy-db/hub/blobs`\n * subpath — a consumer that never imports the subpath ships none of\n * this (ESM tree-shaking + hub's `\"sideEffects\": false`).\n */\n\nimport { BlobSet } from './blob-set.js'\nimport type { BlobStrategy } from './strategy.js'\n\n/**\n * Build a default `BlobStrategy` ready to pass into `createNoydb`.\n *\n * Named `withBlobs` (plugin-pattern canonical) rather than `blobs` to\n * avoid shadowing the very common local idiom\n * `const blobs = invoices.blob(id)` in user code.\n *\n * @example\n * ```ts\n * import { createNoydb } from '@noy-db/hub'\n * import { withBlobs } from '@noy-db/hub/blobs'\n *\n * const db = await createNoydb({\n * store, user, secret,\n * blobStrategy: withBlobs(),\n * })\n *\n * // Now live — delegates to BlobSet.\n * await db.vault('acme').collection('invoices').blob('inv-1').put('receipt.pdf', bytes)\n * ```\n */\nexport function withBlobs(): BlobStrategy {\n return {\n openSlot(args) {\n return new BlobSet(args)\n },\n }\n}\n","/**\n * Blob retention + compaction.\n *\n * Declarative per-collection / per-slot eviction policy. Two\n * triggers:\n *\n * - **`retainDays`** — age-based TTL. A slot uploaded more than N\n * days ago is evicted.\n * - **`evictWhen(record)`** — predicate over the **decrypted**\n * record. Lets consumers express \"the image is safe to drop once\n * the structured invoice has been reviewed and confirmed.\"\n *\n * Either trigger (or both) causes the slot to evict. Eviction removes\n * the slot entry from `_blob_slots_{collection}`, decrements the\n * blob's refCount (so unreferenced chunks can be GC'd by the next\n * sweep), and writes one entry to the `_blob_eviction_audit`\n * collection for tamper-evident record-keeping.\n *\n * The audit entry carries the eTag of the evicted blob (opaque HMAC\n * of plaintext under the vault's `_blob` DEK) — no plaintext leakage,\n * per the SPEC non-correlation invariant. Consumers reconstructing\n * \"what used to be attached\" can look up the audit entry by record\n * id.\n *\n * Compaction is **consumer-scheduled** — noy-db never runs a\n * background daemon. Call `vault.compact()` whenever your workflow\n * allows (cron, manual \"tidy\" button, cold-storage export prep, …).\n *\n * @module\n */\n\nimport type { NoydbStore, EncryptedEnvelope, SlotInfo } from '../types.js'\nimport { NOYDB_FORMAT_VERSION } from '../types.js'\nimport { encrypt } from '../crypto.js'\n\n// ─── Config types ───────────────────────────────────────────────────────\n\nexport interface BlobFieldPolicy<T = unknown> {\n /**\n * Age-based TTL in days. A slot whose `uploadedAt` is older than\n * `now - retainDays × 86400s` evicts on the next `vault.compact()`.\n * Omit to disable age-based eviction.\n */\n readonly retainDays?: number\n /**\n * Predicate evaluated against the decrypted record. When it returns\n * `true`, every matching slot on that record evicts. 
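For instance, a value of this shape (the `Invoice` type, slot name,\n * and status field are illustrative) evicts an uploaded scan on the\n * next `vault.compact()` once the structured record has been reviewed,\n * or once the upload is older than 90 days:\n *\n * ```ts\n * const blobFields: BlobFieldsConfig<Invoice> = {\n * scan: { retainDays: 90, evictWhen: (inv) => inv.status === 'reviewed' },\n * }\n * ```\n *\n * 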
Omit to\n * disable predicate-based eviction.\n */\n readonly evictWhen?: (record: T) => boolean\n}\n\nexport type BlobFieldsConfig<T = unknown> = Record<string, BlobFieldPolicy<T>>\n\n// ─── Audit collection ──────────────────────────────────────────────────\n\nexport const BLOB_EVICTION_AUDIT_COLLECTION = '_blob_eviction_audit'\n\nexport interface BlobEvictionEntry {\n readonly id: string\n readonly collection: string\n readonly recordId: string\n readonly slotName: string\n readonly blobHash: string\n readonly reason: 'ttl' | 'predicate' | 'both'\n readonly evictedAt: string\n readonly actor: string\n}\n\n// ─── Compaction result ──────────────────────────────────────────────────\n\nexport interface CompactionResult {\n /** Number of blob slots evicted across all collections. */\n readonly evicted: number\n /** Number of records touched (iterated + policy checked). */\n readonly records: number\n /** Number of collections with `blobFields` configured. */\n readonly collections: number\n /** Number of audit entries written. Equal to `evicted`. */\n readonly auditEntries: number\n /** Per-collection breakdown for diagnostics. */\n readonly byCollection: Record<string, { records: number; evicted: number }>\n}\n\n// ─── Core ──────────────────────────────────────────────────────────────\n\nexport interface CompactRunOptions {\n /** Override \"now\" for deterministic testing. */\n readonly now?: Date\n /**\n * Stop after this many evictions. Useful for capped batches / cron\n * jobs that need to fit in a time window. `undefined` = unbounded.\n */\n readonly maxEvictions?: number\n /**\n * Dry-run — evaluate policies and return the counts, but do NOT\n * delete slots or write audit entries. Lets a consumer preview\n * what would happen.\n */\n readonly dryRun?: boolean\n}\n\nexport interface CompactionContext {\n readonly adapter: NoydbStore\n readonly vault: string\n readonly actor: string\n readonly encrypted: boolean\n readonly getDEK: (collection: string) => Promise<CryptoKey>\n /**\n * Resolve a collection's declared `blobFields` config. Returns an\n * empty map for collections without the config — the walk skips\n * those.\n */\n readonly getBlobFields: <T>(collection: string) => BlobFieldsConfig<T> | null\n /** List collection names in the vault. */\n readonly listCollections: () => Promise<string[]>\n /** List record ids in a collection. */\n readonly listRecords: (collection: string) => Promise<string[]>\n /** Decrypt and return the record. Null when absent. */\n readonly getRecord: <T>(collection: string, id: string) => Promise<T | null>\n /** Return the BlobSet-like handle for a record's slots. */\n readonly listSlots: (collection: string, id: string) => Promise<SlotInfo[]>\n /** Delete a slot and decrement its blob's refCount. */\n readonly deleteSlot: (collection: string, id: string, slotName: string) => Promise<void>\n}\n\nexport async function runCompaction(\n ctx: CompactionContext,\n options: CompactRunOptions = {},\n): Promise<CompactionResult> {\n const now = options.now ?? new Date()\n const maxEvictions = options.maxEvictions ?? 
Infinity\n const dryRun = options.dryRun === true\n\n const allCollections = await ctx.listCollections()\n const byCollection: Record<string, { records: number; evicted: number }> = {}\n let evicted = 0\n let records = 0\n let auditEntries = 0\n let collectionsWithPolicy = 0\n\n outer: for (const collectionName of allCollections) {\n if (collectionName.startsWith('_')) continue\n const config = ctx.getBlobFields(collectionName)\n if (!config) continue\n const configuredSlots = Object.keys(config)\n if (configuredSlots.length === 0) continue\n collectionsWithPolicy += 1\n byCollection[collectionName] = { records: 0, evicted: 0 }\n\n const ids = await ctx.listRecords(collectionName)\n for (const recordId of ids) {\n if (evicted >= maxEvictions) break outer\n\n const record = await ctx.getRecord(collectionName, recordId).catch(() => null)\n if (record === null) continue\n records += 1\n byCollection[collectionName].records += 1\n\n const slots = await ctx.listSlots(collectionName, recordId).catch(() => [])\n for (const slot of slots) {\n if (evicted >= maxEvictions) break outer\n const policy = config[slot.name]\n if (!policy) continue\n\n const reason = evaluatePolicy(policy, record, slot, now)\n if (!reason) continue\n\n if (!dryRun) {\n await ctx.deleteSlot(collectionName, recordId, slot.name)\n await writeAuditEntry(ctx, {\n id: generateEvictionId(collectionName, recordId, slot.name),\n collection: collectionName,\n recordId,\n slotName: slot.name,\n blobHash: slot.eTag,\n reason,\n evictedAt: now.toISOString(),\n actor: ctx.actor,\n })\n auditEntries += 1\n }\n evicted += 1\n byCollection[collectionName].evicted += 1\n }\n }\n }\n\n return {\n evicted,\n records,\n collections: collectionsWithPolicy,\n auditEntries,\n byCollection,\n }\n}\n\nfunction evaluatePolicy<T>(\n policy: BlobFieldPolicy<T>,\n record: T,\n slot: SlotInfo,\n now: Date,\n): 'ttl' | 'predicate' | 'both' | null {\n let ttlTriggered = false\n let predicateTriggered = false\n\n if (policy.retainDays !== undefined && policy.retainDays > 0) {\n const uploadedAt = Date.parse(slot.uploadedAt)\n if (Number.isFinite(uploadedAt)) {\n const ageMs = now.getTime() - uploadedAt\n const limitMs = policy.retainDays * 86_400_000\n if (ageMs > limitMs) ttlTriggered = true\n }\n }\n\n if (policy.evictWhen) {\n try {\n if (policy.evictWhen(record)) predicateTriggered = true\n } catch {\n // Predicate error → do NOT evict. 
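The slot is kept and the error is swallowed; only an explicit `true` return can trigger predicate-based eviction. 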
Fail closed.\n }\n }\n\n if (ttlTriggered && predicateTriggered) return 'both'\n if (ttlTriggered) return 'ttl'\n if (predicateTriggered) return 'predicate'\n return null\n}\n\nfunction generateEvictionId(collection: string, recordId: string, slotName: string): string {\n const rand = globalThis.crypto.getRandomValues(new Uint8Array(8))\n let suffix = ''\n for (const b of rand) suffix += b.toString(16).padStart(2, '0')\n return `${collection}__${recordId}__${slotName}__${suffix}`\n}\n\nasync function writeAuditEntry(ctx: CompactionContext, entry: BlobEvictionEntry): Promise<void> {\n const json = JSON.stringify(entry)\n let envelope: EncryptedEnvelope\n if (ctx.encrypted) {\n const dek = await ctx.getDEK(BLOB_EVICTION_AUDIT_COLLECTION)\n const { iv, data } = await encrypt(json, dek)\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: entry.evictedAt,\n _iv: iv,\n _data: data,\n _by: entry.actor,\n }\n } else {\n envelope = {\n _noydb: NOYDB_FORMAT_VERSION,\n _v: 1,\n _ts: entry.evictedAt,\n _iv: '',\n _data: json,\n _by: entry.actor,\n }\n }\n await ctx.adapter.put(ctx.vault, BLOB_EVICTION_AUDIT_COLLECTION, entry.id, envelope)\n}\n","/**\n * `vault.exportBlobs()` — bulk blob extraction primitive.\n *\n * Async-iterable handle over every blob attached to records in a\n * vault, optionally filtered by collection allowlist and per-record\n * predicate. Emits tuples of `{ blobId, recordRef, bytes, meta }` so\n * the consumer can pipe into any sink (zip stream, S3 multipart, USB\n * copy, cold-storage tape) without pulling the whole export into\n * memory.\n *\n * ## Auth + audit\n *\n * - Capability check runs **once** at handle creation via\n * `Vault.assertCanExport('plaintext', 'blob')`. An operator whose\n * keyring lacks that bit fails before a single byte of ciphertext\n * is decrypted.\n * - Audit entry lands in `_export_audit` at handle creation: the\n * actor, start timestamp, target collections, predicate presence,\n * and batch mechanism. **No content hashes** — per the spec\n * non-correlation invariant.\n *\n * ## Abort + resume\n *\n * - `handle.abort()` flips the internal signal; the next iteration\n * boundary throws `AbortError`. Consumers already in `for await`\n * can catch and exit cleanly.\n * - Restart after a partial failure with `{ afterBlobId }` — the\n * iterator skips tuples up to (and including) that blob id before\n * yielding again. Combined with a blob-count ceiling it supports\n * idempotent batch re-runs.\n *\n * @module\n */\n\nimport type { Collection } from '../collection.js'\nimport type { SlotInfo } from '../types.js'\n\n// ─── Types ──────────────────────────────────────────────────────────────\n\nexport interface ExportBlobsOptions {\n /**\n * Collection allowlist. Omit to export blobs from every collection\n * the caller has read access to.\n */\n readonly collections?: readonly string[]\n /**\n * Per-record predicate. Called on the decrypted record BEFORE any\n * blob bytes are read for that record — returning false skips the\n * record and all its slots without touching their chunks.\n */\n readonly where?: (record: unknown, context: { collection: string; id: string }) => boolean\n /**\n * Resume after a specific blob id. The iterator skips tuples up to\n * and including this id, then yields. Format of the id is the same\n * as `ExportedBlob.blobId` (the HMAC-keyed eTag).\n */\n readonly afterBlobId?: string\n /**\n * External abort signal. When fired, the next iterator tick throws\n * `ExportBlobsAbortedError`. 
Honored alongside `handle.abort()`.\n */\n readonly signal?: AbortSignal\n}\n\nexport interface ExportedBlob {\n /** Opaque blob identifier — HMAC-keyed eTag, stable across vaults. */\n readonly blobId: string\n /** Where this blob came from in the vault. */\n readonly recordRef: {\n readonly collection: string\n readonly id: string\n readonly slot: string\n }\n /** Decrypted plaintext bytes. */\n readonly bytes: Uint8Array\n /** Best-effort metadata (from the blob slot record). */\n readonly meta: {\n readonly size: number\n /**\n * User-visible filename stored on the slot. Often equal to the\n * slot name; differs when the caller supplied an explicit\n * `filename` to `BlobSet.put()`.\n */\n readonly filename: string\n readonly mimeType?: string\n readonly createdAt?: string\n }\n}\n\nexport interface ExportBlobsHandle extends AsyncIterable<ExportedBlob> {\n /** Abort the export. Safe to call multiple times. */\n abort(): void\n /** True once `abort()` has fired or the external signal aborted. */\n readonly aborted: boolean\n}\n\nexport class ExportBlobsAbortedError extends Error {\n constructor(reason: string) {\n super(`exportBlobs aborted: ${reason}`)\n this.name = 'ExportBlobsAbortedError'\n }\n}\n\n// ─── Audit ──────────────────────────────────────────────────────────────\n\nexport const EXPORT_AUDIT_COLLECTION = '_export_audit'\n\nexport interface ExportBlobsAuditEntry {\n readonly id: string\n readonly mechanism: 'exportBlobs'\n readonly actor: string\n readonly startedAt: string\n readonly collections: readonly string[] | null\n readonly predicate: boolean\n readonly afterBlobId: string | null\n}\n\n// ─── Implementation ─────────────────────────────────────────────────────\n\n/**\n * Build the handle. Factored out of `Vault.exportBlobs` so the\n * implementation can be unit-tested without going through the\n * compartment lifecycle.\n */\nexport function createExportBlobsHandle(\n actor: string,\n listAccessibleCollections: () => Promise<string[]>,\n getCollection: <T>(name: string) => Collection<T>,\n writeAudit: (entry: ExportBlobsAuditEntry) => Promise<void>,\n options: ExportBlobsOptions,\n): ExportBlobsHandle {\n let aborted = false\n\n const abort = (): void => {\n aborted = true\n }\n\n if (options.signal) {\n if (options.signal.aborted) aborted = true\n options.signal.addEventListener('abort', () => { aborted = true })\n }\n\n function assertLive(): void {\n if (aborted) throw new ExportBlobsAbortedError('aborted by caller')\n }\n\n const allowlist = options.collections ? new Set(options.collections) : null\n\n // Write the audit entry BEFORE the first yield so a blocked\n // iteration still leaves an audit trail that the export started.\n let auditPromise: Promise<void> | null = null\n function writeAuditOnce(): Promise<void> {\n if (!auditPromise) {\n auditPromise = writeAudit({\n id: generateBatchId(),\n mechanism: 'exportBlobs',\n actor,\n startedAt: new Date().toISOString(),\n collections: options.collections ?? null,\n predicate: Boolean(options.where),\n afterBlobId: options.afterBlobId ?? 
null,\n })\n }\n return auditPromise\n }\n\n async function* generate(): AsyncGenerator<ExportedBlob> {\n await writeAuditOnce()\n assertLive()\n\n // Resolve target collections lazily — also keeps the call async.\n const allCollections = await listAccessibleCollections()\n const targets = allCollections.filter(name => {\n if (name.startsWith('_')) return false\n if (allowlist && !allowlist.has(name)) return false\n return true\n })\n\n let resumeCursorHit = options.afterBlobId === undefined\n\n for (const collectionName of targets) {\n if (aborted) return\n\n const coll = getCollection<Record<string, unknown>>(collectionName)\n const records = await coll.list().catch(() => [])\n for (const record of records) {\n if (aborted) return\n assertLive()\n\n const idField = (record as { id?: unknown }).id\n if (typeof idField !== 'string') continue\n\n if (options.where && !options.where(record, { collection: collectionName, id: idField })) continue\n\n const blobSet = coll.blob(idField)\n const slots = await blobSet.list().catch(() => [] as SlotInfo[])\n for (const slot of slots) {\n if (aborted) return\n\n if (!resumeCursorHit) {\n if (slot.eTag === options.afterBlobId) {\n resumeCursorHit = true\n }\n continue\n }\n\n const bytes = await blobSet.get(slot.name)\n if (!bytes) continue\n\n const item: ExportedBlob = {\n blobId: slot.eTag,\n recordRef: { collection: collectionName, id: idField, slot: slot.name },\n bytes,\n meta: {\n size: slot.size,\n filename: slot.filename,\n ...(slot.mimeType !== undefined && { mimeType: slot.mimeType }),\n ...(slot.uploadedAt !== undefined && { createdAt: slot.uploadedAt }),\n },\n }\n yield item\n }\n }\n }\n }\n\n const handle: ExportBlobsHandle = {\n abort,\n get aborted() { return aborted },\n [Symbol.asyncIterator]: () => generate(),\n }\n return handle\n}\n\n// ─── Helpers ────────────────────────────────────────────────────────────\n\nfunction generateBatchId(): string {\n // 16 bytes of crypto randomness, URL-safe base64, no padding.\n const raw = globalThis.crypto.getRandomValues(new Uint8Array(16))\n let s = ''\n for (const b of raw) s += b.toString(16).padStart(2, '0')\n return `batch-${Date.now().toString(36)}-${s.slice(0, 
12)}`\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AAAA,IA4Ea,YAqBA,iBAgBA,eAgBA,iBAsaA,eA+EA;AAtnBb;AAAA;AAAA;AA4EO,IAAM,aAAN,cAAyB,MAAM;AAAA;AAAA,MAE3B;AAAA,MAET,YAAY,MAAc,SAAiB;AACzC,cAAM,OAAO;AACb,aAAK,OAAO;AACZ,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAYO,IAAM,kBAAN,cAA8B,WAAW;AAAA,MAC9C,YAAY,UAAU,qBAAqB;AACzC,cAAM,qBAAqB,OAAO;AAClC,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAWO,IAAM,gBAAN,cAA4B,WAAW;AAAA,MAC5C,YAAY,UAAU,yEAAoE;AACxF,cAAM,YAAY,OAAO;AACzB,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAWO,IAAM,kBAAN,cAA8B,WAAW;AAAA,MAC9C,YAAY,UAAU,4DAAuD;AAC3E,cAAM,eAAe,OAAO;AAC5B,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAiaO,IAAM,gBAAN,cAA4B,WAAW;AAAA;AAAA,MAEnC;AAAA,MAET,YAAY,SAAiB,UAAU,oBAAoB;AACzD,cAAM,YAAY,OAAO;AACzB,aAAK,OAAO;AACZ,aAAK,UAAU;AAAA,MACjB;AAAA,IACF;AAsEO,IAAM,gBAAN,cAA4B,WAAW;AAAA,MAC5C,YAAY,UAAU,oBAAoB;AACxC,cAAM,aAAa,OAAO;AAC1B,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAAA;AAAA;;;AC3nBA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkDA,eAAsB,UACpB,YACA,MACoB;AACpB,QAAM,cAAc,MAAM,OAAO;AAAA,IAC/B;AAAA,IACA,IAAI,YAAY,EAAE,OAAO,UAAU;AAAA,IACnC;AAAA,IACA;AAAA,IACA,CAAC,WAAW;AAAA,EACd;AAEA,SAAO,OAAO;AAAA,IACZ;AAAA,MACE,MAAM;AAAA,MACN;AAAA,MACA,YAAY;AAAA,MACZ,MAAM;AAAA,IACR;AAAA,IACA;AAAA,IACA,EAAE,MAAM,UAAU,QAAQ,SAAS;AAAA,IACnC;AAAA,IACA,CAAC,WAAW,WAAW;AAAA,EACzB;AACF;AAKA,eAAsB,cAAkC;AACtD,SAAO,OAAO;AAAA,IACZ,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,IACpC;AAAA;AAAA,IACA,CAAC,WAAW,SAAS;AAAA,EACvB;AACF;AAKA,eAAsB,QAAQ,KAAgB,KAAiC;AAC7E,QAAM,UAAU,MAAM,OAAO,QAAQ,OAAO,KAAK,KAAK,QAAQ;AAC9D,SAAO,eAAe,OAAO;AAC/B;AAGA,eAAsB,UACpB,eACA,KACoB;AACpB,MAAI;AACF,WAAO,MAAM,OAAO;AAAA,MAClB;AAAA,MACA,eAAe,aAAa;AAAA,MAC5B;AAAA,MACA;AAAA,MACA,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,MACpC;AAAA,MACA,CAAC,WAAW,SAAS;AAAA,IACvB;AAAA,EACF,QAAQ;AACN,UAAM,IAAI,gBAAgB;AAAA,EAC5B;AACF;AAUA,eAAsB,QACpB,WACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,UAAU,IAAI,YAAY,EAAE,OAAO,SAAS;AAElD,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAGA,eAAsB,QACpB,UACA,YACA,KACiB;AACjB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAE5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B,EAAE,MAAM,WAAW,GAAuB;AAAA,MAC1C;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,YAAY,EAAE,OAAO,SAAS;AAAA,EAC3C,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAUA,eAAsB,aACpB,MACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAMA,eAAsB,aACpB,UACA,YACA,KACqB;AACrB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAC5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B,EAAE,MAAM,WAAW,GAAuB;AAAA,MAC1C;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,WAAW,SAAS;AAAA,EACjC,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAQA,eAAsB,UAAU,MAAmC;AACjE,QAAM,OAAO,MAAM,OAAO,OAAO,WAAW,IAA+B;AAC3E,SAAO,MAAM,KAAK,IAAI,WAAW,IAAI,CAAC,EACnC,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAC1C,KAAK,EAAE;AACZ;AAgBA,eAAsB,cAAc,KAAgB,MAAmC;AAErF,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAChD,QAAM,UAAU,MAAM,OAAO;AAAA,IAC3B;AAAA,IACA;AAAA,IACA,EAAE,MAAM,QAAQ,MAAM,UAAU;AAAA,IAChC;AAAA,IACA,CAAC,MAAM;AAAA,EACT;AACA,QAAM,MAAM,MAAM,OAAO,KAAK,QAAQ,SAAS,IAA+B;AAC9E,SAAO,MAAM,KAAK,IAAI,WAAW,GAAG,CAAC,EAClC,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAC1C,KAAK,EAAE;AACZ;A
AgBA,eAAsB,oBACpB,MACA,KACA,KACwB;AACxB,QAAM,KAAK,WAAW;AACtB,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B;AAAA,MACE,MAAM;AAAA,MACN;AAAA,MACA,gBAAgB;AAAA,IAClB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AASA,eAAsB,oBACpB,UACA,YACA,KACA,KACqB;AACrB,QAAM,KAAK,eAAe,QAAQ;AAClC,QAAM,aAAa,eAAe,UAAU;AAC5C,MAAI;AACF,UAAM,YAAY,MAAM,OAAO;AAAA,MAC7B;AAAA,QACE,MAAM;AAAA,QACN;AAAA,QACA,gBAAgB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,WAAO,IAAI,WAAW,SAAS;AAAA,EACjC,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,IAAI,SAAS,kBAAkB;AACzD,YAAM,IAAI,cAAc;AAAA,IAC1B;AACA,UAAM,IAAI;AAAA,MACR,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAiBA,eAAsB,kBAAkB,KAAgB,gBAA4C;AAElG,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAGhD,QAAM,UAAU,MAAM,OAAO;AAAA,IAC3B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,CAAC,YAAY;AAAA,EACf;AAGA,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,gBAAgB;AACtD,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,cAAc;AACpD,QAAM,OAAO,MAAM,OAAO;AAAA,IACxB,EAAE,MAAM,QAAQ,MAAM,WAAW,MAAM,KAAK;AAAA,IAC5C;AAAA,IACA;AAAA,EACF;AAGA,SAAO,OAAO;AAAA,IACZ;AAAA,IACA;AAAA,IACA,EAAE,MAAM,WAAW,QAAQ,SAAS;AAAA,IACpC;AAAA,IACA,CAAC,WAAW,SAAS;AAAA,EACvB;AACF;AA2BA,eAAe,sBACb,KACA,SACA,WACqB;AACrB,QAAM,SAAS,MAAM,OAAO,UAAU,OAAO,GAAG;AAChD,QAAM,UAAU,MAAM,OAAO,UAAU,OAAO,QAAQ,QAAQ,OAAO,CAAC,YAAY,CAAC;AACnF,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,wBAAwB;AAC9D,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,GAAG,OAAO,KAAO,SAAS,EAAE;AAClE,QAAM,OAAO,MAAM,OAAO;AAAA,IACxB,EAAE,MAAM,QAAQ,MAAM,WAAW,MAAM,KAAK;AAAA,IAC5C;AAAA,IACA,WAAW;AAAA,EACb;AACA,SAAO,IAAI,WAAW,IAAI;AAC5B;AAgBA,eAAsB,qBACpB,WACA,KACA,SACwB;AACxB,QAAM,KAAK,MAAM,sBAAsB,KAAK,SAAS,SAAS;AAC9D,QAAM,UAAU,IAAI,YAAY,EAAE,OAAO,SAAS;AAClD,QAAM,aAAa,MAAM,OAAO;AAAA,IAC9B,EAAE,MAAM,WAAW,GAAuB;AAAA,IAC1C;AAAA,IACA;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,eAAe,EAAE;AAAA,IACrB,MAAM,eAAe,UAAU;AAAA,EACjC;AACF;AAQA,eAAsB,qBACpB,UACA,YACA,KACiB;AACjB,SAAO,QAAQ,UAAU,YAAY,GAAG;AAC1C;AAKO,SAAS,aAAyB;AACvC,SAAO,WAAW,OAAO,gBAAgB,IAAI,WAAW,QAAQ,CAAC;AACnE;AAGO,SAAS,eAA2B;AACzC,SAAO,WAAW,OAAO,gBAAgB,IAAI,WAAW,UAAU,CAAC;AACrE;AAIO,SAAS,eAAe,QAA0C;AACvE,QAAM,QAAQ,kBAAkB,aAAa,SAAS,IAAI,WAAW,MAAM;AAC3E,MAAI,SAAS;AACb,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAU,OAAO,aAAa,MAAM,CAAC,CAAE;AAAA,EACzC;AACA,SAAO,KAAK,MAAM;AACpB;AAEO,SAAS,eAAe,QAAyC;AACtE,QAAM,SAAS,KAAK,MAAM;AAC1B,QAAM,QAAQ,IAAI,WAAW,OAAO,MAAM;AAC1C,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,CAAC,IAAI,OAAO,WAAW,CAAC;AAAA,EAChC;AACA,SAAO;AACT;AAnfA,IAwCM,mBACA,YACA,UACA,UAEA;AA7CN;AAAA;AAAA;AAsCA;AAEA,IAAM,oBAAoB;AAC1B,IAAM,aAAa;AACnB,IAAM,WAAW;AACjB,IAAM,WAAW;AAEjB,IAAM,SAAS,WAAW,OAAO;AAAA;AAAA;;;AC7CjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC4CO,IAAM,uBAAuB;;;ACjCpC;AASA;;;AC0BA,SAAS,IAAI,GAAuB;AAClC,SAAO,IAAI,WAAW,EAAE,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,SAAS,GAAG,EAAE,CAAC,CAAC;AAChE;AAgBA,IAAM,cAAoC;AAAA;AAAA;AAAA,EAIxC,EAAE,MAAM,aAAa,QAAQ,OAAO,OAAO,IAAI,yBAAyB,GAAG,eAAe,KAAK;AAAA;AAAA,EAG/F,EAAE,MAAM,cAAc,QAAQ,QAAQ,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA;AAAA,EAGlF;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,IAAI,aAAa;AAAA,IACxB,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,IACjB,eAAe;AAAA,EACjB;AAAA;AAAA,EAGA,EAAE,MAAM,cAAc,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGhE,EAAE,MAAM,cAAc,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGhE,EAAE,MAAM,aAAa,QAAQ,OAAO,OAAO,IAAI,aAAa,GAAG,eAAe,KAAK;AAAA;AAAA,EAGnF,EAAE,MAAM,aAAa,QAAQ,OAAO,OAAO,IAAI,OAAO,EAAE;AAAA;AAAA,EAGxD,EAAE,MAAM,6BAA6B,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAG9E,EAAE,MAAM,gBAAgB,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGjE;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,
IAAI,aAAa;AAAA,IACxB,QAAQ;AAAA,IACR,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,IACjB,eAAe;AAAA,EACjB;AAAA;AAAA;AAAA,EAKA,EAAE,MAAM,mBAAmB,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGpE,EAAE,MAAM,mBAAmB,QAAQ,OAAO,OAAO,IAAI,gBAAgB,EAAE;AAAA;AAAA;AAAA,EAKvE,EAAE,MAAM,uBAAuB,QAAQ,UAAU,OAAO,IAAI,yBAAyB,GAAG,eAAe,KAAK;AAAA;AAAA,EAG5G,EAAE,MAAM,uBAAuB,QAAQ,UAAU,OAAO,IAAI,sBAAsB,GAAG,eAAe,KAAK;AAAA;AAAA,EAGzG,EAAE,MAAM,+BAA+B,QAAQ,MAAM,OAAO,IAAI,mBAAmB,GAAG,eAAe,KAAK;AAAA;AAAA,EAG1G,EAAE,MAAM,oBAAoB,QAAQ,MAAM,OAAO,IAAI,mBAAmB,GAAG,eAAe,KAAK;AAAA;AAAA,EAG/F,EAAE,MAAM,mBAAmB,QAAQ,OAAO,OAAO,IAAI,aAAa,GAAG,eAAe,KAAK;AAAA;AAAA,EAGzF,EAAE,MAAM,oBAAoB,QAAQ,QAAQ,OAAO,IAAI,OAAO,GAAG,eAAe,KAAK;AAAA;AAAA,EAGrF,EAAE,MAAM,uBAAuB,QAAQ,SAAS,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA;AAAA,EAG5F,EAAE,MAAM,sBAAsB,QAAQ,QAAQ,OAAO,IAAI,aAAa,GAAG,eAAe,KAAK;AAAA;AAAA;AAAA,EAK7F;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,IAAI,aAAa;AAAA,IACxB,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,EACnB;AAAA;AAAA,EAGA;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,IAAI,aAAa;AAAA,IACxB,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,EACnB;AAAA;AAAA,EAGA,EAAE,MAAM,cAAc,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGhE,EAAE,MAAM,mBAAmB,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGpE,EAAE,MAAM,cAAc,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGhE,EAAE,MAAM,cAAc,QAAQ,OAAO,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA;AAAA;AAAA,EAKjF;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,IAAI,aAAa;AAAA,IACxB,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,IACjB,eAAe;AAAA,EACjB;AAAA;AAAA,EAGA,EAAE,MAAM,kBAAkB,QAAQ,OAAO,OAAO,IAAI,yBAAyB,GAAG,eAAe,KAAK;AAAA;AAAA,EAGpG,EAAE,MAAM,oBAAoB,QAAQ,OAAO,OAAO,IAAI,aAAa,GAAG,eAAe,KAAK;AAAA;AAAA,EAG1F,EAAE,MAAM,eAAe,QAAQ,OAAO,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA;AAAA,EAGlF;AAAA,IACE,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,IAAI,aAAa;AAAA,IACxB,QAAQ;AAAA,IACR,gBAAgB,IAAI,aAAa;AAAA,IACjC,iBAAiB;AAAA,IACjB,eAAe;AAAA,EACjB;AAAA;AAAA;AAAA,EAIA,EAAE,MAAM,aAAa,QAAQ,OAAO,OAAO,IAAI,aAAa,GAAG,QAAQ,GAAG,eAAe,KAAK;AAAA;AAAA;AAAA,EAK9F,EAAE,MAAM,2BAA2B,QAAQ,UAAU,OAAO,IAAI,yBAAyB,EAAE;AAAA;AAAA,EAG3F,EAAE,MAAM,oBAAoB,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGtE,EAAE,MAAM,qBAAqB,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGtE,EAAE,MAAM,iDAAiD,QAAQ,MAAM,OAAO,IAAI,OAAO,EAAE;AAAA;AAAA,EAG3F,EAAE,MAAM,6BAA6B,QAAQ,gBAAgB,OAAO,IAAI,aAAa,EAAE;AAAA,EACvF,EAAE,MAAM,6BAA6B,QAAQ,gBAAgB,OAAO,IAAI,aAAa,EAAE;AAAA,EACvF,EAAE,MAAM,6BAA6B,QAAQ,gBAAgB,OAAO,IAAI,aAAa,EAAE;AAAA,EACvF,EAAE,MAAM,6BAA6B,QAAQ,gBAAgB,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASvF,EAAE,MAAM,uBAAuB,QAAQ,cAAc,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAG/E,EAAE,MAAM,+BAA+B,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA;AAAA,EAKhF,EAAE,MAAM,yCAAyC,QAAQ,OAAO,OAAO,IAAI,sBAAsB,EAAE;AAAA;AAAA,EAGnG,EAAE,MAAM,qBAAqB,QAAQ,OAAO,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGtE,EAAE,MAAM,qCAAqC,QAAQ,OAAO,OAAO,IAAI,aAAa,GAAG,eAAe,KAAK;AAAA;AAAA;AAAA,EAK3G,EAAE,MAAM,gCAAgC,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGlF,EAAE,MAAM,gCAAgC,QAAQ,WAAW,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGrF,EAAE,MAAM,wBAAwB,QAAQ,UAAU,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAG5E,EAAE,MAAM,iCAAiC,QAAQ,OAAO,OAAO,IAAI,UAAU,EAAE;AAAA,EAC/E,EAAE,MAAM,iCAAiC,QAAQ,YAAY,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA,EACzG,EAAE,MAAM,iCAAiC,QAAQ,YAAY,OAAO,IAAI,UAAU,GAAG,eAAe,KAAK;AAAA;AAAA;AAAA,EAKzG,EAAE,MAAM,kCAAkC,QAAQ,WAAW,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGvF,EAAE,MAAM,oBAAoB,QAAQ,QAAQ,OAAO,IAAI,aAAa,EAAE;AAAA;AAAA,EAGtE,EAAE,MAAM,kCAAkC,QAAQ,WAAW,OAAO,IAAI,aAAa,EAAE;AACzF;AASA,SAAS,cAAc,OAAe,OAAwB;AAC5D,SAAO,UAAU,QAAS,QAAQ,SAAU;AAC9C;AAWO,SAAS,eAAe,QAA4B;AACzD,QAAM,SAAS,YAAY,MAAM;AACjC,SAAO,QAAQ,QAAQ;AACzB;AAUO,S
AAS,YACd,QACiE;AACjE,aAAW,QAAQ,aAAa;AAC9B,QAAI,UAAU,QAAQ,IAAI,GAAG;AAC3B,aAAO;AAAA,QACL,MAAM,KAAK;AAAA,QACX,QAAQ,KAAK;AAAA,QACb,eAAe,KAAK,iBAAiB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AAGA,MAAI,OAAO,UAAU,KAAK,cAAc,OAAO,CAAC,GAAI,OAAO,CAAC,CAAE,GAAG;AAC/D,WAAO,EAAE,MAAM,cAAc,QAAQ,OAAO,eAAe,KAAK;AAAA,EAClE;AAEA,SAAO;AACT;AAQO,SAAS,gBAAgB,UAA2B;AACzD,SAAO,qBAAqB,IAAI,QAAQ;AAC1C;AAIA,SAAS,UAAU,QAAoB,MAA0B;AAC/D,QAAM,SAAS,KAAK,UAAU;AAC9B,QAAM,MAAM,SAAS,KAAK,MAAM;AAGhC,MAAI,OAAO,SAAS,IAAK,QAAO;AAGhC,WAAS,IAAI,GAAG,IAAI,KAAK,MAAM,QAAQ,KAAK;AAC1C,QAAI,OAAO,SAAS,CAAC,MAAM,KAAK,MAAM,CAAC,EAAG,QAAO;AAAA,EACnD;AAGA,MAAI,KAAK,kBAAkB,KAAK,oBAAoB,QAAW;AAC7D,UAAM,OAAO,KAAK,kBAAkB,KAAK,eAAe;AACxD,QAAI,OAAO,SAAS,KAAM,QAAO;AACjC,aAAS,IAAI,GAAG,IAAI,KAAK,eAAe,QAAQ,KAAK;AACnD,UAAI,OAAO,KAAK,kBAAkB,CAAC,MAAM,KAAK,eAAe,CAAC,EAAG,QAAO;AAAA,IAC1E;AAAA,EACF;AAEA,SAAO;AACT;AAQA,IAAM,uBAAuB,IAAI;AAAA,EAC/B,YAAY,OAAO,CAAC,MAAM,EAAE,aAAa,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI;AAC9D;;;AD2gBA;AA52BO,IAAM,kBAAkB;AAGxB,IAAM,wBAAwB;AAO9B,IAAM,yBAAyB;AAG/B,IAAM,oBAAoB;AAG1B,IAAM,uBAAuB;AAO7B,IAAM,qBAAqB,MAAM;AAGxC,IAAM,kBAAkB;AAIxB,eAAe,cACb,MAC4D;AAC5D,MAAI,OAAO,sBAAsB,aAAa;AAC5C,WAAO,EAAE,OAAO,MAAM,WAAW,OAAO;AAAA,EAC1C;AACA,QAAM,KAAK,IAAI,kBAAkB,MAAM;AACvC,QAAM,SAAS,GAAG,SAAS,UAAU;AACrC,QAAM,OAAO,MAAM,IAA+B;AAClD,QAAM,OAAO,MAAM;AACnB,QAAM,MAAM,MAAM,IAAI,SAAS,GAAG,QAAQ,EAAE,YAAY;AACxD,SAAO,EAAE,OAAO,IAAI,WAAW,GAAG,GAAG,WAAW,OAAO;AACzD;AAEA,eAAe,gBAAgB,MAAuC;AACpE,MAAI,OAAO,wBAAwB,aAAa;AAC9C,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,QAAM,KAAK,IAAI,oBAAoB,MAAM;AACzC,QAAM,SAAS,GAAG,SAAS,UAAU;AACrC,QAAM,OAAO,MAAM,IAA+B;AAClD,QAAM,OAAO,MAAM;AACnB,QAAM,MAAM,MAAM,IAAI,SAAS,GAAG,QAAQ,EAAE,YAAY;AACxD,SAAO,IAAI,WAAW,GAAG;AAC3B;AAEA,SAAS,aAAa,QAAkC;AACtD,QAAM,QAAQ,OAAO,OAAO,CAAC,GAAG,MAAM,IAAI,EAAE,YAAY,CAAC;AACzD,QAAM,MAAM,IAAI,WAAW,KAAK;AAChC,MAAI,SAAS;AACb,aAAW,KAAK,QAAQ;AACtB,QAAI,IAAI,GAAG,MAAM;AACjB,cAAU,EAAE;AAAA,EACd;AACA,SAAO;AACT;AAGA,SAAS,SAAS,MAAc,YAAoB,YAAgC;AAClF,SAAO,IAAI,YAAY,EAAE,OAAO,GAAG,IAAI,IAAI,UAAU,IAAI,UAAU,EAAE;AACvE;AA+BO,IAAM,UAAN,MAAc;AAAA,EACF;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEjB,YAAY,MAST;AACD,SAAK,QAAQ,KAAK;AAClB,SAAK,QAAQ,KAAK;AAClB,SAAK,aAAa,KAAK;AACvB,SAAK,WAAW,KAAK;AACrB,SAAK,SAAS,KAAK;AACnB,SAAK,YAAY,KAAK;AACtB,SAAK,SAAS,KAAK;AACnB,SAAK,eAAe,KAAK;AAAA,EAC3B;AAAA;AAAA,EAGA,IAAY,kBAA0B;AACpC,WAAO,GAAG,iBAAiB,GAAG,KAAK,UAAU;AAAA,EAC/C;AAAA;AAAA,EAGA,IAAY,qBAA6B;AACvC,WAAO,GAAG,oBAAoB,GAAG,KAAK,UAAU;AAAA,EAClD;AAAA;AAAA,EAIA,MAAc,YAGX;AACD,UAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,KAAK,iBAAiB,KAAK,QAAQ;AACrF,QAAI,CAAC,SAAU,QAAO,EAAE,OAAO,CAAC,GAAG,SAAS,EAAE;AAE9C,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO;AAAA,QACL,OAAO,KAAK,MAAM,SAAS,KAAK;AAAA,QAChC,SAAS,SAAS;AAAA,MACpB;AAAA,IACF;AAEA,UAAM,MAAM,MAAM,KAAK,OAAO,KAAK,UAAU;AAC7C,UAAM,OAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAC5D,WAAO;AAAA,MACL,OAAO,KAAK,MAAM,IAAI;AAAA,MACtB,SAAS,SAAS;AAAA,IACpB;AAAA,EACF;AAAA,EAEA,MAAc,UACZ,OACA,gBACe;AACf,UAAM,OAAO,KAAK,UAAU,KAAK;AACjC,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAI;AAEJ,QAAI,KAAK,WAAW;AAClB,YAAM,MAAM,MAAM,KAAK,OAAO,KAAK,UAAU;AAC7C,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,iBAAW;AAAA,QACT,QAAQ;AAAA,QACR,IAAI,iBAAiB;AAAA,QACrB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,OAAO;AAAA,MACT;AAAA,IACF,OAAO;AACL,iBAAW;AAAA,QACT,QAAQ;AAAA,QACR,IAAI,iBAAiB;AAAA,QACrB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,OAAO;AAAA,MACT;AAAA,IACF;AAEA,UAAM,KAAK,MAAM;AAAA,MACf,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA,iBAAiB,IAAI,iBAAiB;AAAA,IACxC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,eACZ,QACe;AACf,aAAS,UAAU,GAAG,UAAU
,iBAAiB,WAAW;AAC1D,YAAM,EAAE,OAAO,QAAQ,IAAI,MAAM,KAAK,UAAU;AAChD,YAAM,UAAU,OAAO,KAAK;AAC5B,UAAI,YAAY,KAAM;AACtB,UAAI;AACF,cAAM,KAAK,UAAU,SAAS,OAAO;AACrC;AAAA,MACF,SAAS,KAAK;AACZ,YAAI,eAAe,iBAAiB,UAAU,kBAAkB,EAAG;AACnE,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIA,MAAc,eAAe,MAAqE;AAChG,UAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,uBAAuB,IAAI;AAC7E,QAAI,CAAC,SAAU,QAAO;AAEtB,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO,EAAE,MAAM,KAAK,MAAM,SAAS,KAAK,GAAiB,SAAS,SAAS,GAAG;AAAA,IAChF;AAEA,UAAM,MAAM,MAAM,KAAK,OAAO,eAAe;AAC7C,UAAM,OAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAC5D,WAAO,EAAE,MAAM,KAAK,MAAM,IAAI,GAAiB,SAAS,SAAS,GAAG;AAAA,EACtE;AAAA,EAEA,MAAc,gBAAgB,MAAkB,iBAAyC;AACvF,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,UAAM,cAAc,mBAAmB,KAAK;AAC5C,QAAI;AAEJ,QAAI,KAAK,WAAW;AAClB,YAAM,MAAM,MAAM,KAAK,OAAO,eAAe;AAC7C,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,iBAAW,EAAE,QAAQ,sBAAsB,IAAI,YAAY,KAAK,KAAK,KAAK,IAAI,OAAO,KAAK;AAAA,IAC5F,OAAO;AACL,iBAAW,EAAE,QAAQ,sBAAsB,IAAI,YAAY,KAAK,KAAK,KAAK,IAAI,OAAO,KAAK;AAAA,IAC5F;AAEA,UAAM,KAAK,MAAM;AAAA,MACf,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBAAkB,MAAc,OAA8B;AAC1E,aAAS,UAAU,GAAG,UAAU,iBAAiB,WAAW;AAC1D,YAAM,SAAS,MAAM,KAAK,eAAe,IAAI;AAC7C,UAAI,CAAC,OAAQ,OAAM,IAAI,cAAc,cAAc,IAAI,YAAY;AACnE,YAAM,EAAE,MAAM,QAAQ,IAAI;AAC1B,YAAM,UAAsB,EAAE,GAAG,MAAM,UAAU,KAAK,WAAW,MAAM;AACvE,UAAI;AACF,cAAM,KAAK,gBAAgB,SAAS,OAAO;AAC3C;AAAA,MACF,SAAS,KAAK;AACZ,YAAI,eAAe,iBAAiB,UAAU,kBAAkB,EAAG;AACnE,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIA,MAAc,WACZ,MACA,OACA,YACA,OACA,KACe;AACf,UAAM,KAAK,GAAG,IAAI,IAAI,KAAK;AAC3B,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAI;AAEJ,QAAI,KAAK;AACP,YAAM,MAAM,SAAS,MAAM,OAAO,UAAU;AAC5C,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,oBAAoB,OAAO,KAAK,GAAG;AAC9D,iBAAW,EAAE,QAAQ,sBAAsB,IAAI,GAAG,KAAK,KAAK,KAAK,IAAI,OAAO,KAAK;AAAA,IACnF,OAAO;AACL,iBAAW;AAAA,QACT,QAAQ;AAAA,QACR,IAAI;AAAA,QACJ,KAAK;AAAA,QACL,KAAK;AAAA,QACL,OAAO,eAAe,KAAK;AAAA,MAC7B;AAAA,IACF;AAEA,UAAM,KAAK,MAAM,IAAI,KAAK,OAAO,wBAAwB,IAAI,QAAQ;AAAA,EACvE;AAAA,EAEA,MAAc,UACZ,MACA,OACA,YACA,KAC4B;AAC5B,UAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,wBAAwB,GAAG,IAAI,IAAI,KAAK,EAAE;AAC5F,QAAI,CAAC,SAAU,QAAO;AAEtB,QAAI,KAAK;AACP,YAAM,MAAM,SAAS,MAAM,OAAO,UAAU;AAC5C,aAAO,MAAM,oBAAoB,SAAS,KAAK,SAAS,OAAO,KAAK,GAAG;AAAA,IACzE;AAEA,WAAO,eAAe,SAAS,KAAK;AAAA,EACtC;AAAA;AAAA,EAIQ,WAAW,UAAkB,OAAuB;AAC1D,WAAO,GAAG,KAAK,QAAQ,KAAK,QAAQ,KAAK,KAAK;AAAA,EAChD;AAAA,EAEA,MAAc,kBAAkB,UAAkB,OAA8C;AAC9F,UAAM,MAAM,KAAK,WAAW,UAAU,KAAK;AAC3C,UAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,KAAK,oBAAoB,GAAG;AAC9E,QAAI,CAAC,SAAU,QAAO;AAEtB,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO,KAAK,MAAM,SAAS,KAAK;AAAA,IAClC;AAEA,UAAM,MAAM,MAAM,KAAK,OAAO,KAAK,UAAU;AAC7C,UAAM,OAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAC5D,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB;AAAA,EAEA,MAAc,mBAAmB,UAAkB,QAAsC;AACvF,UAAM,MAAM,KAAK,WAAW,UAAU,OAAO,KAAK;AAClD,UAAM,OAAO,KAAK,UAAU,MAAM;AAClC,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAI;AAEJ,QAAI,KAAK,WAAW;AAClB,YAAM,MAAM,MAAM,KAAK,OAAO,KAAK,UAAU;AAC7C,YAAM,EAAE,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,iBAAW,EAAE,QAAQ,sBAAsB,IAAI,GAAG,KAAK,KAAK,KAAK,IAAI,OAAO,KAAK;AAAA,IACnF,OAAO;AACL,iBAAW,EAAE,QAAQ,sBAAsB,IAAI,GAAG,KAAK,KAAK,KAAK,IAAI,OAAO,KAAK;AAAA,IACnF;AAEA,UAAM,KAAK,MAAM,IAAI,KAAK,OAAO,KAAK,oBAAoB,KAAK,QAAQ;AAAA,EACzE;AAAA,EAEA,MAAc,oBAAoB,UAAkB,OAA8B;AAChF,UAAM,MAAM,KAAK,WAAW,UAAU,KAAK;AAC3C,UAAM,KAAK,MAAM,OAAO,KAAK,OAAO,KAAK,oBAAoB,GAAG;AAAA,EAClE;AAAA;AAAA,EAIQ,mBAAmB,MAA+B;AACxD,QAAI,MAAM,UAAW,QAAO,KAAK;AACjC,QAAI,KAAK,aAAc,QAAO,KAAK;AACnC,WAAO;AAAA,EACT;AAAA;AAAA,EAIA,MAAc,eAAe,MA
AuC;AAClE,UAAM,UAAU,KAAK,YAAY,MAAM,KAAK,OAAO,eAAe,IAAI;AACtE,UAAM,SAAuB,CAAC;AAE9B,aAAS,IAAI,GAAG,IAAI,KAAK,YAAY,KAAK;AACxC,YAAM,QAAQ,MAAM,KAAK,UAAU,KAAK,MAAM,GAAG,KAAK,YAAY,OAAO;AACzE,UAAI,CAAC,OAAO;AACV,cAAM,IAAI;AAAA,UACR,cAAc,CAAC,IAAI,KAAK,UAAU,sBAAsB,KAAK,IAAI,gBAAgB,KAAK,QAAQ;AAAA,QAChG;AAAA,MACF;AACA,aAAO,KAAK,KAAK;AAAA,IACnB;AAEA,UAAM,YAAY,aAAa,MAAM;AACrC,WAAO,KAAK,gBAAgB,SAAS,MAAM,gBAAgB,SAAS,IAAI;AAAA,EAC1E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,IAAI,UAAkB,MAAkB,MAAsC;AAElF,UAAM,UAAU,KAAK,YAAY,MAAM,KAAK,OAAO,eAAe,IAAI;AACtE,UAAM,OAAO,UACT,MAAM,cAAc,SAAS,IAAI,IACjC,MAAM,eAAe,IAAI;AAG7B,QAAI,WAAW,MAAM;AACrB,QAAI,CAAC,UAAU;AACb,YAAM,WAAW,YAAY,KAAK,SAAS,GAAG,EAAE,CAAC;AACjD,UAAI,SAAU,YAAW,SAAS;AAAA,IACpC;AAGA,QAAI;AACJ,QAAI,MAAM,aAAa,QAAW;AAChC,uBAAiB,KAAK;AAAA,IACxB,WAAW,YAAY,gBAAgB,QAAQ,GAAG;AAChD,uBAAiB;AAAA,IACnB,OAAO;AACL,uBAAiB;AAAA,IACnB;AAGA,UAAM,eAAe,MAAM,KAAK,eAAe,IAAI;AAEnD,QAAI,cAAc;AAEhB,YAAM,KAAK,kBAAkB,MAAM,CAAE;AAAA,IACvC,OAAO;AAEL,YAAM,EAAE,OAAO,YAAY,UAAU,IAAI,iBACrC,MAAM,cAAc,IAAI,IACxB,EAAE,OAAO,MAAM,WAAW,OAAgB;AAE9C,YAAM,YAAY,KAAK,mBAAmB,IAAI;AAC9C,YAAM,aAAa,KAAK,IAAI,GAAG,KAAK,KAAK,WAAW,aAAa,SAAS,CAAC;AAG3E,eAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,cAAM,QAAQ,IAAI;AAClB,cAAM,KAAK;AAAA,UACT;AAAA,UAAM;AAAA,UAAG;AAAA,UACT,WAAW,SAAS,OAAO,QAAQ,SAAS;AAAA,UAC5C;AAAA,QACF;AAAA,MACF;AAGA,YAAM,KAAK,gBAAgB;AAAA,QACzB;AAAA,QACA,MAAM,KAAK;AAAA,QACX,gBAAgB,WAAW;AAAA,QAC3B,aAAa;AAAA,QACb;AAAA,QACA;AAAA,QACA,GAAI,aAAa,SAAY,EAAE,SAAS,IAAI,CAAC;AAAA,QAC7C,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,UAAU;AAAA,MACZ,CAAC;AAAA,IACH;AAGA,UAAM,iBAAiB,MAAM,cAAc,KAAK;AAChD,UAAM,KAAK,eAAe,CAAC,UAAU;AACnC,YAAM,UAAU,MAAM,QAAQ,GAAG;AACjC,YAAM,QAAQ,IAAI;AAAA,QAChB;AAAA,QACA,UAAU;AAAA,QACV,MAAM,KAAK;AAAA,QACX,GAAI,aAAa,SAAY,EAAE,SAAS,IAAI,CAAC;AAAA,QAC7C,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,QACnC,GAAI,mBAAmB,SAAY,EAAE,YAAY,eAAe,IAAI,CAAC;AAAA,MACvE;AAEA,UAAI,WAAW,YAAY,MAAM;AAC/B,aAAK,wBAAwB;AAAA,MAC/B;AACA,aAAO;AAAA,IACT,CAAC;AAGD,QAAI,KAAK,uBAAuB;AAC9B,YAAM,UAAU,KAAK;AACrB,WAAK,wBAAwB;AAC7B,YAAM,KAAK,kBAAkB,SAAS,EAAE,EAAE,MAAM,MAAM;AAAA,MAEtD,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOR,MAAM,IAAI,UAA8C;AACtD,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,QAAO;AAElB,UAAM,SAAS,MAAM,KAAK,eAAe,KAAK,IAAI;AAClD,QAAI,CAAC,OAAQ,QAAO;AAEpB,WAAO,KAAK,eAAe,OAAO,IAAI;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAA4B;AAChC,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,WAAO,OAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,CAAC,MAAM,IAAI,OAAO,EAAE,MAAM,GAAG,KAAK,EAAE;AAAA,EACxE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAO,UAAiC;AAC5C,QAAI;AAEJ,UAAM,KAAK,eAAe,CAAC,UAAU;AACnC,UAAI,EAAE,YAAY,OAAQ,QAAO;AACjC,wBAAkB,MAAM,QAAQ,EAAG;AACnC,aAAO,MAAM,QAAQ;AACrB,aAAO;AAAA,IACT,CAAC;AAED,QAAI,iBAAiB;AACnB,YAAM,KAAK,kBAAkB,iBAAiB,EAAE,EAAE,MAAM,MAAM;AAAA,MAE9D,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,SAAS,UAAkB,MAAsD;AACrF,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,QAAO;AAElB,UAAM,SAAS,MAAM,KAAK,eAAe,KAAK,IAAI;AAClD,QAAI,CAAC,OAAQ,QAAO;AAEpB,WAAO,KAAK,cAAc,MAAM,OAAO,MAAM,IAAI;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,UACJ,UACA,MACqD;AACrD,QAAI,OAAO,QAAQ,eAAe,OAAO,IAAI,oBAAoB,YAAY;AAC3E,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AACA,UAAM,QAAQ,MAAM,KAAK,IAAI,QAAQ;AACrC,QAAI,CAAC,MAAO,QAAO;AAEnB,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,UAAM,OAAO,MAAM,YAAY,MAAM,YAAY;AAQjD,UAAM,SAAS,MAAM,OAAO,MAAM,MAAM,YAAY,MAAM
,aAAa,MAAM,UAAU;AACvF,UAAM,OAAO,IAAI,KAAK,CAAC,MAAM,GAAG,EAAE,KAAK,CAAC;AACxC,UAAM,MAAM,IAAI,gBAAgB,IAAI;AACpC,QAAI,UAAU;AACd,UAAM,SAAS,MAAY;AACzB,UAAI,QAAS;AACb,gBAAU;AACV,UAAI,gBAAgB,GAAG;AAAA,IACzB;AACA,WAAO,EAAE,KAAK,OAAO;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,QAAQ,UAAkB,OAA8B;AAC5D,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,OAAM,IAAI,cAAc,SAAS,QAAQ,0BAA0B,KAAK,QAAQ,GAAG;AAG9F,UAAM,WAAW,MAAM,KAAK,kBAAkB,UAAU,KAAK;AAC7D,QAAI,YAAY,SAAS,SAAS,KAAK,KAAM;AAG7C,UAAM,SAAwB;AAAA,MAC5B;AAAA,MACA,MAAM,KAAK;AAAA,MACX,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,MACpC,GAAI,KAAK,WAAW,SAAY,EAAE,aAAa,KAAK,OAAO,IAAI,CAAC;AAAA,IAClE;AACA,UAAM,KAAK,mBAAmB,UAAU,MAAM;AAG9C,UAAM,KAAK,kBAAkB,KAAK,MAAM,CAAE;AAG1C,QAAI,YAAY,SAAS,SAAS,KAAK,MAAM;AAC3C,YAAM,KAAK,kBAAkB,SAAS,MAAM,EAAE,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAAA,IAChE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAW,UAAkB,OAA2C;AAC5E,UAAM,SAAS,MAAM,KAAK,kBAAkB,UAAU,KAAK;AAC3D,QAAI,CAAC,OAAQ,QAAO;AAEpB,UAAM,SAAS,MAAM,KAAK,eAAe,OAAO,IAAI;AACpD,QAAI,CAAC,OAAQ,QAAO;AAEpB,WAAO,KAAK,eAAe,OAAO,IAAI;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,UAA4C;AAC7D,UAAM,SAAS,GAAG,KAAK,QAAQ,KAAK,QAAQ;AAC5C,UAAM,UAAU,MAAM,KAAK,MAAM,KAAK,KAAK,OAAO,KAAK,kBAAkB;AACzE,UAAM,eAAe,QAAQ,OAAO,CAAC,MAAM,EAAE,WAAW,MAAM,CAAC;AAE/D,UAAM,WAA4B,CAAC;AACnC,eAAW,OAAO,cAAc;AAC9B,YAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,KAAK,oBAAoB,GAAG;AAC9E,UAAI,CAAC,SAAU;AAEf,UAAI,CAAC,KAAK,WAAW;AACnB,iBAAS,KAAK,KAAK,MAAM,SAAS,KAAK,CAAkB;AAAA,MAC3D,OAAO;AACL,cAAM,MAAM,MAAM,KAAK,OAAO,KAAK,UAAU;AAC7C,cAAM,OAAO,MAAM,QAAQ,SAAS,KAAK,SAAS,OAAO,GAAG;AAC5D,iBAAS,KAAK,KAAK,MAAM,IAAI,CAAkB;AAAA,MACjD;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,UAAkB,OAA8B;AAClE,UAAM,SAAS,MAAM,KAAK,kBAAkB,UAAU,KAAK;AAC3D,QAAI,CAAC,OAAQ;AAEb,UAAM,KAAK,oBAAoB,UAAU,KAAK;AAC9C,UAAM,KAAK,kBAAkB,OAAO,MAAM,EAAE,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAAA,EAC9D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBACJ,UACA,OACA,MAC0B;AAC1B,UAAM,SAAS,MAAM,KAAK,kBAAkB,UAAU,KAAK;AAC3D,QAAI,CAAC,OAAQ,QAAO;AAEpB,UAAM,SAAS,MAAM,KAAK,eAAe,OAAO,IAAI;AACpD,QAAI,CAAC,OAAQ,QAAO;AAGpB,UAAM,WAAuB;AAAA,MAC3B,MAAM,OAAO;AAAA,MACb,UAAU,MAAM,YAAY,GAAG,QAAQ,IAAI,KAAK;AAAA,MAChD,MAAM,OAAO,KAAK;AAAA,MAClB,GAAI,OAAO,KAAK,aAAa,SAAY,EAAE,UAAU,OAAO,KAAK,SAAS,IAAI,CAAC;AAAA,MAC/E,YAAY,OAAO;AAAA,MACnB,GAAI,OAAO,gBAAgB,SAAY,EAAE,YAAY,OAAO,YAAY,IAAI,CAAC;AAAA,IAC/E;AAEA,WAAO,KAAK,cAAc,UAAU,OAAO,MAAM,IAAI;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,SAAS,UAA8C;AAC3D,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,QAAO;AAClB,UAAM,SAAS,MAAM,KAAK,eAAe,KAAK,IAAI;AAClD,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,aAAa,UAAkB,mBAAmB,MAA8B;AACpF,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,QAAO;AAElB,UAAM,SAAS,MAAM,KAAK,eAAe,KAAK,IAAI;AAClD,QAAI,CAAC,OAAQ,QAAO;AAGpB,QAAI,OAAO,KAAK,eAAe,EAAG,QAAO;AACzC,QAAI,CAAC,KAAK,MAAM,WAAY,QAAO;AAEnC,UAAM,UAAU,GAAG,KAAK,IAAI;AAC5B,WAAO,KAAK,MAAM,WAAW,KAAK,OAAO,gBAAgB,SAAS,gBAAgB;AAAA,EACpF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,gBAAgB,UAAkB,gBAAoD;AAC1F,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,UAAU;AACvC,UAAM,OAAO,MAAM,QAAQ;AAC3B,QAAI,CAAC,KAAM,QAAO;AAElB,UAAM,SAAS,MAAM,KAAK,eAAe,KAAK,IAAI;AAClD,QAAI,CAAC,OAAQ,QAAO;AAGpB,UAAM,OAAO,MAAM,eAAe,KAAK;AACvC,UAAM,WAAW,KAAK,MAAM,IAAI;AAEhC,UAAM,UAAU,KAAK,YAAY,MAAM,KAAK,OAAO,OAAO,IAAI;AAC9D,QAAI,CAAC,SAAS;AACZ,aAAO,KAAK,cAAc,MAAM,OAAO,MAAM,EAAE,QAAQ,KAAK,CAAC;AAAA,IAC/D;AAGA,UAAM,MAAM,SAAS,KAAK,MAAM,GAA
G,OAAO,KAAK,UAAU;AACzD,UAAM,EAAE,qBAAqB,WAAW,IAAI,MAAM;AAClD,UAAM,YAAY,MAAM,WAAW,SAAS,KAAK,SAAS,OAAO,SAAS,GAAG;AAC7E,UAAM,YAAY,OAAO,KAAK,gBAAgB,SAC1C,MAAM,gBAAgB,SAAS,IAC/B;AAEJ,UAAM,OAAO,IAAI,eAA2B;AAAA,MAC1C,MAAM,YAAY;AAChB,mBAAW,QAAQ,SAAS;AAC5B,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AAED,UAAM,WAAW,KAAK;AACtB,WAAO,IAAI,SAAS,MAAM;AAAA,MACxB,SAAS;AAAA,QACP,gBAAgB,KAAK,YAAY;AAAA,QACjC,kBAAkB,OAAO,KAAK,IAAI;AAAA,QAClC,QAAQ,IAAI,KAAK,IAAI;AAAA,QACrB,uBAAuB,qBAAqB,QAAQ;AAAA,QACpD,iBAAiB,IAAI,KAAK,KAAK,UAAU,EAAE,YAAY;AAAA,MACzD;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA,EAIA,MAAc,cACZ,MACA,MACA,MACmB;AACnB,UAAM,iBAAiB,KAAK,eAAe,KAAK,IAAI;AAGpD,UAAM,OAAO,IAAI,eAA2B;AAAA,MAC1C,MAAM,MAAM,YAAY;AACtB,YAAI;AACF,gBAAM,SAAS,MAAM,eAAe,IAAI;AACxC,qBAAW,QAAQ,MAAM;AACzB,qBAAW,MAAM;AAAA,QACnB,SAAS,KAAK;AACZ,qBAAW,MAAM,GAAG;AAAA,QACtB;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,WAAW,MAAM,YAAY,KAAK;AACxC,UAAM,cAAc,MAAM,SACtB,qBAAqB,QAAQ,MAC7B,yBAAyB,QAAQ;AAErC,WAAO,IAAI,SAAS,MAAM;AAAA,MACxB,SAAS;AAAA,QACP,gBAAgB,KAAK,YAAY;AAAA,QACjC,kBAAkB,OAAO,KAAK,IAAI;AAAA,QAClC,QAAQ,IAAI,KAAK,IAAI;AAAA,QACrB,uBAAuB;AAAA,QACvB,iBAAiB,IAAI,KAAK,KAAK,UAAU,EAAE,YAAY;AAAA,MACzD;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAMA,eAAe,eAAe,MAAmC;AAC/D,SAAO,UAAU,IAAI;AACvB;;;AE12BO,SAAS,YAA0B;AACxC,SAAO;AAAA,IACL,SAAS,MAAM;AACb,aAAO,IAAI,QAAQ,IAAI;AAAA,IACzB;AAAA,EACF;AACF;;;ACTA;AAuBO,IAAM,iCAAiC;AAsE9C,eAAsB,cACpB,KACA,UAA6B,CAAC,GACH;AAC3B,QAAM,MAAM,QAAQ,OAAO,oBAAI,KAAK;AACpC,QAAM,eAAe,QAAQ,gBAAgB;AAC7C,QAAM,SAAS,QAAQ,WAAW;AAElC,QAAM,iBAAiB,MAAM,IAAI,gBAAgB;AACjD,QAAM,eAAqE,CAAC;AAC5E,MAAI,UAAU;AACd,MAAI,UAAU;AACd,MAAI,eAAe;AACnB,MAAI,wBAAwB;AAE5B,QAAO,YAAW,kBAAkB,gBAAgB;AAClD,QAAI,eAAe,WAAW,GAAG,EAAG;AACpC,UAAM,SAAS,IAAI,cAAc,cAAc;AAC/C,QAAI,CAAC,OAAQ;AACb,UAAM,kBAAkB,OAAO,KAAK,MAAM;AAC1C,QAAI,gBAAgB,WAAW,EAAG;AAClC,6BAAyB;AACzB,iBAAa,cAAc,IAAI,EAAE,SAAS,GAAG,SAAS,EAAE;AAExD,UAAM,MAAM,MAAM,IAAI,YAAY,cAAc;AAChD,eAAW,YAAY,KAAK;AAC1B,UAAI,WAAW,aAAc,OAAM;AAEnC,YAAM,SAAS,MAAM,IAAI,UAAU,gBAAgB,QAAQ,EAAE,MAAM,MAAM,IAAI;AAC7E,UAAI,WAAW,KAAM;AACrB,iBAAW;AACX,mBAAa,cAAc,EAAE,WAAW;AAExC,YAAM,QAAQ,MAAM,IAAI,UAAU,gBAAgB,QAAQ,EAAE,MAAM,MAAM,CAAC,CAAC;AAC1E,iBAAW,QAAQ,OAAO;AACxB,YAAI,WAAW,aAAc,OAAM;AACnC,cAAM,SAAS,OAAO,KAAK,IAAI;AAC/B,YAAI,CAAC,OAAQ;AAEb,cAAM,SAAS,eAAe,QAAQ,QAAQ,MAAM,GAAG;AACvD,YAAI,CAAC,OAAQ;AAEb,YAAI,CAAC,QAAQ;AACX,gBAAM,IAAI,WAAW,gBAAgB,UAAU,KAAK,IAAI;AACxD,gBAAM,gBAAgB,KAAK;AAAA,YACzB,IAAI,mBAAmB,gBAAgB,UAAU,KAAK,IAAI;AAAA,YAC1D,YAAY;AAAA,YACZ;AAAA,YACA,UAAU,KAAK;AAAA,YACf,UAAU,KAAK;AAAA,YACf;AAAA,YACA,WAAW,IAAI,YAAY;AAAA,YAC3B,OAAO,IAAI;AAAA,UACb,CAAC;AACD,0BAAgB;AAAA,QAClB;AACA,mBAAW;AACX,qBAAa,cAAc,EAAE,WAAW;AAAA,MAC1C;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,aAAa;AAAA,IACb;AAAA,IACA;AAAA,EACF;AACF;AAEA,SAAS,eACP,QACA,QACA,MACA,KACqC;AACrC,MAAI,eAAe;AACnB,MAAI,qBAAqB;AAEzB,MAAI,OAAO,eAAe,UAAa,OAAO,aAAa,GAAG;AAC5D,UAAM,aAAa,KAAK,MAAM,KAAK,UAAU;AAC7C,QAAI,OAAO,SAAS,UAAU,GAAG;AAC/B,YAAM,QAAQ,IAAI,QAAQ,IAAI;AAC9B,YAAM,UAAU,OAAO,aAAa;AACpC,UAAI,QAAQ,QAAS,gBAAe;AAAA,IACtC;AAAA,EACF;AAEA,MAAI,OAAO,WAAW;AACpB,QAAI;AACF,UAAI,OAAO,UAAU,MAAM,EAAG,sBAAqB;AAAA,IACrD,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,MAAI,gBAAgB,mBAAoB,QAAO;AAC/C,MAAI,aAAc,QAAO;AACzB,MAAI,mBAAoB,QAAO;AAC/B,SAAO;AACT;AAEA,SAAS,mBAAmB,YAAoB,UAAkB,UAA0B;AAC1F,QAAM,OAAO,WAAW,OAAO,gBAAgB,IAAI,WAAW,CAAC,CAAC;AAChE,MAAI,SAAS;AACb,aAAW,KAAK,KAAM,WAAU,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAC9D,SAAO,GAAG,UAAU,KAAK,QAAQ,KAAK,QAAQ,KAAK,MAAM;AAC3D;AAEA,eAAe,gBAAgB,KAAwB,OAAyC;AAC9F,QAAM,OAAO,KAAK,UAAU,KAAK;AACjC,MAAI;AACJ,MAAI,IAAI,WAAW;AACjB,UAAM,MAAM,MAAM,IAAI,OAAO,8BAA8B;AAC3D,UAAM,EAAE,IAAI,KAAK
,IAAI,MAAM,QAAQ,MAAM,GAAG;AAC5C,eAAW;AAAA,MACT,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,KAAK,MAAM;AAAA,MACX,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK,MAAM;AAAA,IACb;AAAA,EACF,OAAO;AACL,eAAW;AAAA,MACT,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,KAAK,MAAM;AAAA,MACX,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK,MAAM;AAAA,IACb;AAAA,EACF;AACA,QAAM,IAAI,QAAQ,IAAI,IAAI,OAAO,gCAAgC,MAAM,IAAI,QAAQ;AACrF;;;ACrKO,IAAM,0BAAN,cAAsC,MAAM;AAAA,EACjD,YAAY,QAAgB;AAC1B,UAAM,wBAAwB,MAAM,EAAE;AACtC,SAAK,OAAO;AAAA,EACd;AACF;AAIO,IAAM,0BAA0B;AAmBhC,SAAS,wBACd,OACA,2BACA,eACA,YACA,SACmB;AACnB,MAAI,UAAU;AAEd,QAAM,QAAQ,MAAY;AACxB,cAAU;AAAA,EACZ;AAEA,MAAI,QAAQ,QAAQ;AAClB,QAAI,QAAQ,OAAO,QAAS,WAAU;AACtC,YAAQ,OAAO,iBAAiB,SAAS,MAAM;AAAE,gBAAU;AAAA,IAAK,CAAC;AAAA,EACnE;AAEA,WAAS,aAAmB;AAC1B,QAAI,QAAS,OAAM,IAAI,wBAAwB,mBAAmB;AAAA,EACpE;AAEA,QAAM,YAAY,QAAQ,cAAc,IAAI,IAAI,QAAQ,WAAW,IAAI;AAIvE,MAAI,eAAqC;AACzC,WAAS,iBAAgC;AACvC,QAAI,CAAC,cAAc;AACjB,qBAAe,WAAW;AAAA,QACxB,IAAI,gBAAgB;AAAA,QACpB,WAAW;AAAA,QACX;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,aAAa,QAAQ,eAAe;AAAA,QACpC,WAAW,QAAQ,QAAQ,KAAK;AAAA,QAChC,aAAa,QAAQ,eAAe;AAAA,MACtC,CAAC;AAAA,IACH;AACA,WAAO;AAAA,EACT;AAEA,kBAAgB,WAAyC;AACvD,UAAM,eAAe;AACrB,eAAW;AAGX,UAAM,iBAAiB,MAAM,0BAA0B;AACvD,UAAM,UAAU,eAAe,OAAO,UAAQ;AAC5C,UAAI,KAAK,WAAW,GAAG,EAAG,QAAO;AACjC,UAAI,aAAa,CAAC,UAAU,IAAI,IAAI,EAAG,QAAO;AAC9C,aAAO;AAAA,IACT,CAAC;AAED,QAAI,kBAAkB,QAAQ,gBAAgB;AAE9C,eAAW,kBAAkB,SAAS;AACpC,UAAI,QAAS;AAEb,YAAM,OAAO,cAAuC,cAAc;AAClE,YAAM,UAAU,MAAM,KAAK,KAAK,EAAE,MAAM,MAAM,CAAC,CAAC;AAChD,iBAAW,UAAU,SAAS;AAC5B,YAAI,QAAS;AACb,mBAAW;AAEX,cAAM,UAAW,OAA4B;AAC7C,YAAI,OAAO,YAAY,SAAU;AAEjC,YAAI,QAAQ,SAAS,CAAC,QAAQ,MAAM,QAAQ,EAAE,YAAY,gBAAgB,IAAI,QAAQ,CAAC,EAAG;AAE1F,cAAM,UAAU,KAAK,KAAK,OAAO;AACjC,cAAM,QAAQ,MAAM,QAAQ,KAAK,EAAE,MAAM,MAAM,CAAC,CAAe;AAC/D,mBAAW,QAAQ,OAAO;AACxB,cAAI,QAAS;AAEb,cAAI,CAAC,iBAAiB;AACpB,gBAAI,KAAK,SAAS,QAAQ,aAAa;AACrC,gCAAkB;AAAA,YACpB;AACA;AAAA,UACF;AAEA,gBAAM,QAAQ,MAAM,QAAQ,IAAI,KAAK,IAAI;AACzC,cAAI,CAAC,MAAO;AAEZ,gBAAM,OAAqB;AAAA,YACzB,QAAQ,KAAK;AAAA,YACb,WAAW,EAAE,YAAY,gBAAgB,IAAI,SAAS,MAAM,KAAK,KAAK;AAAA,YACtE;AAAA,YACA,MAAM;AAAA,cACJ,MAAM,KAAK;AAAA,cACX,UAAU,KAAK;AAAA,cACf,GAAI,KAAK,aAAa,UAAa,EAAE,UAAU,KAAK,SAAS;AAAA,cAC7D,GAAI,KAAK,eAAe,UAAa,EAAE,WAAW,KAAK,WAAW;AAAA,YACpE;AAAA,UACF;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAA4B;AAAA,IAChC;AAAA,IACA,IAAI,UAAU;AAAE,aAAO;AAAA,IAAQ;AAAA,IAC/B,CAAC,OAAO,aAAa,GAAG,MAAM,SAAS;AAAA,EACzC;AACA,SAAO;AACT;AAIA,SAAS,kBAA0B;AAEjC,QAAM,MAAM,WAAW,OAAO,gBAAgB,IAAI,WAAW,EAAE,CAAC;AAChE,MAAI,IAAI;AACR,aAAW,KAAK,IAAK,MAAK,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AACxD,SAAO,SAAS,KAAK,IAAI,EAAE,SAAS,EAAE,CAAC,IAAI,EAAE,MAAM,GAAG,EAAE,CAAC;AAC3D;","names":[]}
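A minimal consumer sketch of the exportBlobs handle documented in the embedded sourcesContent above (capability check and audit entry at handle creation, abort via handle.abort() or an external signal, resume via afterBlobId). This is an illustration only, assuming a vault object that exposes exportBlobs() with the documented options and tuple shape; the drainBlobs helper, the sink callback, and the 'documents' collection name are hypothetical and not taken from the package.

// Sketch, not package code: shapes mirror ExportBlobsOptions / ExportedBlob
// / ExportBlobsHandle as documented in the embedded source above.
interface BlobTuple {
  readonly blobId: string
  readonly recordRef: { readonly collection: string; readonly id: string; readonly slot: string }
  readonly bytes: Uint8Array
}

type ExportHandle = AsyncIterable<BlobTuple> & { abort(): void; readonly aborted: boolean }

async function drainBlobs(
  vault: {
    exportBlobs(opts: { collections?: readonly string[]; afterBlobId?: string }): ExportHandle
  },
  sink: (blob: BlobTuple) => Promise<void>,
  resumeAfter?: string,
): Promise<string | undefined> {
  // Per the module doc: the capability check and the _export_audit entry
  // happen when the handle is created, before any ciphertext is decrypted.
  const handle = vault.exportBlobs({
    collections: ['documents'], // illustrative allowlist, not a real collection name
    ...(resumeAfter !== undefined && { afterBlobId: resumeAfter }),
  })
  let lastSeen = resumeAfter
  try {
    for await (const blob of handle) {
      await sink(blob)        // e.g. pipe into a zip stream or S3 multipart upload
      lastSeen = blob.blobId  // persist to resume idempotently after a partial failure
    }
  } catch (err) {
    handle.abort()            // documented as safe to call multiple times
    throw err
  }
  return lastSeen
}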