@noy-db/hub 0.1.0-pre.4 → 0.1.0-pre.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/blobs/index.cjs.map +1 -1
- package/dist/blobs/index.d.cts +3 -3
- package/dist/blobs/index.d.ts +3 -3
- package/dist/blobs/index.js +2 -2
- package/dist/bundle/index.cjs +26 -3
- package/dist/bundle/index.cjs.map +1 -1
- package/dist/bundle/index.d.cts +3 -3
- package/dist/bundle/index.d.ts +3 -3
- package/dist/bundle/index.js +3 -1
- package/dist/{chunk-LSZHBNDG.js → chunk-3WCRU7TI.js} +2 -2
- package/dist/{chunk-PSHTHSIX.js → chunk-6IJQ27XN.js} +213 -10
- package/dist/chunk-6IJQ27XN.js.map +1 -0
- package/dist/{chunk-O5GK62FJ.js → chunk-B6HF6NTZ.js} +1 -1
- package/dist/chunk-B6HF6NTZ.js.map +1 -0
- package/dist/{chunk-AVWFLPNR.js → chunk-CL37QSND.js} +2 -2
- package/dist/chunk-EMIGCR7X.js +39 -0
- package/dist/chunk-EMIGCR7X.js.map +1 -0
- package/dist/{chunk-GJILMRPO.js → chunk-FAAWLVTF.js} +42 -4
- package/dist/chunk-FAAWLVTF.js.map +1 -0
- package/dist/chunk-GILMPJXB.js +155 -0
- package/dist/chunk-GILMPJXB.js.map +1 -0
- package/dist/{chunk-L77MEFCH.js → chunk-INSJBB5W.js} +3 -3
- package/dist/{chunk-QZIACZZU.js → chunk-KPF2HHPI.js} +2 -2
- package/dist/{chunk-NK2NSXXK.js → chunk-N2LMZKLR.js} +2 -2
- package/dist/{chunk-EARQCIL7.js → chunk-NZ4XCIKS.js} +3 -3
- package/dist/{chunk-E445ICYI.js → chunk-UFL4DUEV.js} +5 -3
- package/dist/chunk-UFL4DUEV.js.map +1 -0
- package/dist/consent/index.d.cts +3 -3
- package/dist/consent/index.d.ts +3 -3
- package/dist/{dev-unlock-XOUecfQ9.d.ts → dev-unlock-CcJ1qIi7.d.ts} +1 -1
- package/dist/{dev-unlock-5SmCVGyx.d.cts → dev-unlock-Dk14V6lX.d.cts} +1 -1
- package/dist/{hash-Bxud16vM.d.ts → hash-1Xsqx1jl.d.ts} +1 -1
- package/dist/{hash-CvuKN2gH.d.cts → hash-h_2U3TFb.d.cts} +1 -1
- package/dist/history/index.cjs.map +1 -1
- package/dist/history/index.d.cts +4 -4
- package/dist/history/index.d.ts +4 -4
- package/dist/history/index.js +2 -2
- package/dist/i18n/index.cjs +3 -1
- package/dist/i18n/index.cjs.map +1 -1
- package/dist/i18n/index.d.cts +3 -3
- package/dist/i18n/index.d.ts +3 -3
- package/dist/i18n/index.js +3 -3
- package/dist/{index-DN-J-5wT.d.cts → index-6xNpPsxR.d.cts} +1 -1
- package/dist/{index-Cy-MKrdK.d.ts → index-Cvb0efA_.d.cts} +39 -5
- package/dist/{index-BRHBCmLt.d.ts → index-DJTf9yxn.d.ts} +1 -1
- package/dist/{index-BvUiM47h.d.cts → index-DZn6Yick.d.ts} +39 -5
- package/dist/index.cjs +2001 -58
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +315 -19
- package/dist/index.d.ts +315 -19
- package/dist/index.js +1503 -41
- package/dist/index.js.map +1 -1
- package/dist/{ledger-HWXYGUIQ.js → ledger-5V67MAIL.js} +3 -3
- package/dist/periods/index.cjs.map +1 -1
- package/dist/periods/index.d.cts +3 -3
- package/dist/periods/index.d.ts +3 -3
- package/dist/periods/index.js +3 -3
- package/dist/public-envelope-DFJZHXVH.js +31 -0
- package/dist/public-envelope-DFJZHXVH.js.map +1 -0
- package/dist/query/index.d.cts +1 -1
- package/dist/query/index.d.ts +1 -1
- package/dist/session/index.cjs +4 -2
- package/dist/session/index.cjs.map +1 -1
- package/dist/session/index.d.cts +4 -4
- package/dist/session/index.d.ts +4 -4
- package/dist/session/index.js +1 -1
- package/dist/shadow/index.d.cts +3 -3
- package/dist/shadow/index.d.ts +3 -3
- package/dist/store/index.d.cts +3 -3
- package/dist/store/index.d.ts +3 -3
- package/dist/sync/index.cjs.map +1 -1
- package/dist/sync/index.d.cts +2 -2
- package/dist/sync/index.d.ts +2 -2
- package/dist/sync/index.js +2 -2
- package/dist/team/index.cjs +3 -1
- package/dist/team/index.cjs.map +1 -1
- package/dist/team/index.d.cts +3 -3
- package/dist/team/index.d.ts +3 -3
- package/dist/team/index.js +4 -4
- package/dist/tx/index.d.cts +3 -3
- package/dist/tx/index.d.ts +3 -3
- package/dist/{types-Dmi7nrC9.d.ts → types-D-6bmD2c.d.ts} +1271 -3
- package/dist/{types-BVSfkYg6.d.cts → types-D3QLmhlk.d.cts} +1271 -3
- package/package.json +1 -1
- package/dist/chunk-E445ICYI.js.map +0 -1
- package/dist/chunk-GJILMRPO.js.map +0 -1
- package/dist/chunk-O5GK62FJ.js.map +0 -1
- package/dist/chunk-PSHTHSIX.js.map +0 -1
- /package/dist/{chunk-LSZHBNDG.js.map → chunk-3WCRU7TI.js.map} +0 -0
- /package/dist/{chunk-AVWFLPNR.js.map → chunk-CL37QSND.js.map} +0 -0
- /package/dist/{chunk-L77MEFCH.js.map → chunk-INSJBB5W.js.map} +0 -0
- /package/dist/{chunk-QZIACZZU.js.map → chunk-KPF2HHPI.js.map} +0 -0
- /package/dist/{chunk-NK2NSXXK.js.map → chunk-N2LMZKLR.js.map} +0 -0
- /package/dist/{chunk-EARQCIL7.js.map → chunk-NZ4XCIKS.js.map} +0 -0
- /package/dist/{ledger-HWXYGUIQ.js.map → ledger-5V67MAIL.js.map} +0 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/types.ts"],"sourcesContent":["/**\n * Core types — the {@link NoydbStore} interface, envelope format, roles, and\n * all configuration shapes consumed by {@link createNoydb}.\n *\n * ## What lives here\n *\n * - **{@link NoydbStore}** — the 6-method contract every backend must implement\n * (`get`, `put`, `delete`, `list`, `loadAll`, `saveAll`).\n * - **{@link EncryptedEnvelope}** — the wire format stored by backends:\n * `{ _noydb, _v, _ts, _iv, _data }`. Backends only ever see this shape.\n * - **{@link Role} / {@link Permission}** — the access-control vocabulary\n * (`owner`, `admin`, `operator`, `viewer`, `client`).\n * - **{@link NoydbOptions}** — the full configuration object passed to\n * {@link createNoydb}.\n *\n * ## Extending the store interface\n *\n * All optional store capabilities (`ping`, `listPage`, `listSince`,\n * `presencePublish`, `presenceSubscribe`, `listVaults`) are additive extensions\n * discovered via `'method' in store`. Implementing them unlocks features but\n * is never required — core always falls back to the 6-method baseline.\n *\n * @module\n */\n\nimport type { StandardSchemaV1 } from './schema.js'\nimport type { SyncPolicy } from './store/sync-policy.js'\nimport type { BlobStrategy } from './blobs/strategy.js'\nimport type { IndexStrategy } from './indexing/strategy.js'\nimport type { AggregateStrategy } from './aggregate/strategy.js'\nimport type { CrdtStrategy } from './crdt/strategy.js'\nimport type { ConsentStrategy } from './consent/strategy.js'\nimport type { PeriodsStrategy } from './periods/strategy.js'\nimport type { ShadowStrategy } from './shadow/strategy.js'\nimport type { TxStrategy } from './tx/strategy.js'\nimport type { HistoryStrategy } from './history/strategy.js'\nimport type { I18nStrategy } from './i18n/strategy.js'\nimport type { SessionStrategy } from './session/strategy.js'\nimport type { SyncStrategy } from './team/sync-strategy.js'\nimport type { UnlockedKeyring } from 
'./team/keyring.js'\nimport type { VaultPolicy } from './policy/types.js'\nimport type { PublicEnvelopeSchema } from './meta/public-envelope/types.js'\n\n/** Format version for encrypted record envelopes. */\nexport const NOYDB_FORMAT_VERSION = 1 as const\n\n/** Format version for keyring files. */\nexport const NOYDB_KEYRING_VERSION = 1 as const\n\n/** Format version for backup files. */\nexport const NOYDB_BACKUP_VERSION = 1 as const\n\n/** Format version for sync metadata. */\nexport const NOYDB_SYNC_VERSION = 1 as const\n\n// ─── Roles & Permissions ───────────────────────────────────────────────\n\n/**\n * Access role assigned to a user within a vault.\n *\n * Roles control both the operations a user can perform and which DEKs\n * they receive in their keyring:\n *\n * | Role | Collections | Can grant/revoke | Can export |\n * |------------|-----------------|:----------------:|:----------:|\n * | `owner` | all (rw) | Yes (all roles) | Yes |\n * | `admin` | all (rw) | Yes (≤ admin) | Yes |\n * | `operator` | explicit (rw) | No | ACL-scoped |\n * | `viewer` | all (ro) | No | Yes |\n * | `client` | explicit (ro) | No | ACL-scoped |\n */\nexport type Role = 'owner' | 'admin' | 'operator' | 'viewer' | 'client'\n\n/**\n * Read-write or read-only access on a collection.\n * Stored per-collection in the user's keyring.\n */\nexport type Permission = 'rw' | 'ro'\n\n/**\n * Map of collection name → permission level for a user's keyring entry.\n * `'*'` is the wildcard collection matching all collections in the vault.\n */\nexport type Permissions = Record<string, Permission>\n\n// ─── Encrypted Envelope ────────────────────────────────────────────────\n\n/** The encrypted wrapper stored by adapters. Adapters only ever see this. */\nexport interface EncryptedEnvelope {\n readonly _noydb: typeof NOYDB_FORMAT_VERSION\n readonly _v: number\n readonly _ts: string\n readonly _iv: string\n readonly _data: string\n /** User who created this version (unencrypted metadata). 
*/\n readonly _by?: string\n /**\n * Hierarchical access tier. Omitted → tier 0.\n *\n * Unencrypted on purpose — the store reads it to route the envelope\n * to the right DEK slot without having to try-decrypt against every\n * tier. Only leaks the tier of each record, not any value\n * equivalence.\n */\n readonly _tier?: number\n /**\n * User id who last elevated this record. Used by\n * `demote()` to gate the reverse operation: only the original\n * elevator or an owner can demote a record back down. Cleared on\n * every successful demote so a later re-elevate requires the new\n * actor to own the demotion right.\n */\n readonly _elevatedBy?: string\n /**\n * Deterministic-encryption index. Map of field name →\n * base64 deterministic ciphertext. Present only when the collection\n * declares `deterministicFields` and the feature is acknowledged. The\n * field names are unencrypted (they're the index keys); the values\n * are AES-GCM ciphertext with an HKDF-derived deterministic IV.\n *\n * Enables blind equality search (`collection.findByDet(field,\n * value)`) without decrypting every record. Leaks equality as a known\n * side channel.\n */\n readonly _det?: Record<string, string>\n}\n\n/**\n * Placeholder returned by `getAtTier()` in `'ghost'` mode when a\n * record is at a tier the caller cannot decrypt. Record existence is\n * advertised — the id and tier are visible — but contents are\n * withheld. `canElevateFrom` lists user ids authorized to elevate\n * access for this caller when known; absent when the workflow is\n * not configured.\n */\nexport interface GhostRecord {\n readonly _ghost: true\n readonly _tier: number\n readonly canElevateFrom?: readonly string[]\n}\n\n/** Control what lower-tier reads see above their clearance. */\nexport type TierMode = 'invisibility' | 'ghost'\n\n/**\n * Event emitted when a record at a tier above the caller's inherent\n * clearance is read or written successfully (via elevation or\n * delegation). 
Always written to the ledger; subscribers get a\n * real-time feed.\n */\nexport interface CrossTierAccessEvent {\n readonly actor: string\n readonly collection: string\n readonly id: string\n readonly tier: number\n /** How the caller gained tier access: they elevated it, or a delegation is active. */\n readonly authorization: 'elevation' | 'delegation' | 'inherent'\n readonly op: 'get' | 'put' | 'elevate' | 'demote'\n readonly ts: string\n /**\n * When `authorization === 'elevation'`, the audit reason string the\n * caller passed to `vault.elevate(...)`. Empty for inherent /\n * delegation paths.\n */\n readonly reason?: string\n /**\n * When `authorization === 'elevation'`, the tier the caller's\n * keyring effectively held BEFORE elevation. Useful for audit\n * dashboards distinguishing \"operator elevating to 2\" from\n * \"inherent tier-2 write.\"\n */\n readonly elevatedFrom?: number\n}\n\n/**\n * A single deterministic-ciphertext index slot on an envelope. Stored\n * as `iv:data` (both base64, colon-separated) so a single string per\n * field keeps the envelope compact.\n */\nexport type DeterministicCipher = string\n\n// ─── Vault Snapshot ──────────────────────────────────────────────\n\n/** All records across all collections for a compartment. */\nexport type VaultSnapshot = Record<string, Record<string, EncryptedEnvelope>>\n\n/**\n * Result of a single page fetch via the optional `listPage` adapter extension.\n *\n * `items` carries the actual encrypted envelopes (not just ids) so the\n * caller can decrypt and emit a single record without an extra `get()`\n * round-trip per id. `nextCursor` is `null` on the final page.\n */\nexport interface ListPageResult {\n /** Encrypted envelopes for this page, in adapter-defined order. */\n items: Array<{ id: string; envelope: EncryptedEnvelope }>\n /** Opaque cursor for the next page, or `null` if this was the last page. 
*/\n nextCursor: string | null\n}\n\n// ─── Store Interface ───────────────────────────────────────────────────\n\nexport interface NoydbStore {\n /**\n * Optional human-readable adapter name (e.g. 'memory', 'file', 'dynamo').\n * Used in diagnostic messages and the listPage fallback warning. Adapters\n * are encouraged to set this so logs are clearer about which backend is\n * involved when something goes wrong.\n */\n name?: string\n\n /** Get a single record. Returns null if not found. */\n get(vault: string, collection: string, id: string): Promise<EncryptedEnvelope | null>\n\n /** Put a record. Throws ConflictError if expectedVersion doesn't match. */\n put(\n vault: string,\n collection: string,\n id: string,\n envelope: EncryptedEnvelope,\n expectedVersion?: number,\n ): Promise<void>\n\n /** Delete a record. */\n delete(vault: string, collection: string, id: string): Promise<void>\n\n /** List all record IDs in a collection. */\n list(vault: string, collection: string): Promise<string[]>\n\n /** Load all records for a vault (initial hydration). */\n loadAll(vault: string): Promise<VaultSnapshot>\n\n /** Save all records for a vault (bulk write / restore). */\n saveAll(vault: string, data: VaultSnapshot): Promise<void>\n\n /** Optional connectivity check for sync engine. */\n ping?(): Promise<boolean>\n\n /**\n * Optional: list record IDs in a collection that have `_ts` after `since`.\n * Used by partial sync (`pull({ modifiedSince })`). Adapters that omit this\n * fall back to a full `loadAll` + client-side timestamp filter.\n */\n listSince?(vault: string, collection: string, since: string): Promise<string[]>\n\n /**\n * Optional pagination extension. 
Adapters that implement `listPage` get\n * the streaming `Collection.scan()` fast path; adapters that don't are\n * silently fallen back to a full `loadAll()` + slice (with a one-time\n * console.warn).\n *\n * `cursor` is opaque to the core — each adapter encodes its own paging\n * state (DynamoDB: base64 LastEvaluatedKey JSON; S3: ContinuationToken;\n * memory/file/browser: numeric offset of a sorted id list). Pass\n * `undefined` to start from the beginning.\n *\n * `limit` is a soft upper bound on `items.length`. Adapters MAY return\n * fewer items even when more exist (e.g. if the underlying store has\n * its own page size cap), and MUST signal \"no more pages\" by returning\n * `nextCursor: null`.\n *\n * The 6-method core contract is unchanged — this is an additive\n * extension discovered via `'listPage' in adapter`.\n */\n listPage?(\n vault: string,\n collection: string,\n cursor?: string,\n limit?: number,\n ): Promise<ListPageResult>\n\n /**\n * Optional pub/sub for real-time presence.\n * Publish an encrypted payload to a presence channel.\n * Falls back to storage-based polling when absent.\n */\n presencePublish?(channel: string, payload: string): Promise<void>\n\n /**\n * Optional pub/sub for real-time presence.\n * Subscribe to a presence channel. Returns an unsubscribe function.\n * Falls back to storage-based polling when absent.\n */\n presenceSubscribe?(channel: string, callback: (payload: string) => void): () => void\n\n /**\n * Optional cross-vault enumeration extension.\n *\n * Returns the names of every top-level vault the store\n * currently stores. Used by `Noydb.listAccessibleVaults()` to\n * enumerate the universe of vaults before filtering down to\n * the ones the calling principal can actually unwrap.\n *\n * **Why this is optional:** the storage shape of compartments\n * differs across backends. Memory and file stores store\n * vaults as top-level keys / directories and can enumerate\n * them in O(1) calls. 
DynamoDB stores everything in a single table\n * keyed by `(compartment#collection, id)` — enumerating compartments\n * requires either a Scan (expensive, eventually consistent, leaks\n * ciphertext metadata) or a dedicated GSI that the consumer\n * provisioned. S3 needs a prefix list (cheap if enabled, ACL-sensitive\n * otherwise). Browser localStorage can scan keys by prefix.\n *\n * Stores that cannot implement `listVaults` cheaply or\n * cleanly should omit it. Core surfaces a `StoreCapabilityError`\n * with a clear message when a caller invokes\n * `listAccessibleVaults()` against a store that doesn't\n * provide this method, so consumers know to either upgrade their\n * store, provide a candidate list explicitly to `queryAcross()`,\n * or fall back to maintaining the compartment index out of band.\n *\n * **Privacy note:** `listVaults` returns *every* compartment\n * the store has, not just the ones the caller can access. The\n * existence-leak filtering (returning only compartments whose\n * keyring the caller can unwrap) happens in core, not in the\n * store. The store is trusted to know its own contents — that\n * is not a leak in the threat model. 
The leak the API guards\n * against is the *return value* of `listAccessibleVaults()`\n * exposing existence to a downstream observer who only sees that\n * function's output.\n *\n * The 6-method core contract is unchanged — this is an additive\n * extension discovered via `'listVaults' in store`.\n */\n listVaults?(): Promise<string[]>\n\n /**\n * Optional: generate a presigned URL for direct client download.\n * Only meaningful for object stores (S3, GCS) that support URL signing.\n * Returns a time-limited URL that fetches the encrypted envelope directly.\n * The caller must decrypt client-side (the URL returns ciphertext).\n */\n presignUrl?(vault: string, collection: string, id: string, expiresInSeconds?: number): Promise<string>\n\n /**\n * Optional: estimate current storage usage.\n * Returns `{ usedBytes, quotaBytes }` or null if the store cannot estimate.\n * Used by quota-aware routing to detect overflow conditions.\n */\n estimateUsage?(): Promise<{ usedBytes: number; quotaBytes: number } | null>\n\n /**\n * Optional multi-record atomic write.\n *\n * When present, `db.transaction(async (tx) => { ... })` uses this to\n * commit every staged op in one storage-layer transaction — either\n * all ops land or none do, regardless of which records they touch.\n * Every `TxOp.expectedVersion` (when set) must be honored atomically\n * alongside the write; any violation throws `ConflictError` and the\n * whole batch fails.\n *\n * Stores that omit this fall through to the hub's per-record OCC\n * fallback: pre-flight CAS check, then sequential `put`/`delete`\n * with best-effort unwind on mid-batch failure (see\n * `runTransaction` for the exact semantics and crash window).\n *\n * Native implementations: `to-memory` (single Map mutation),\n * `to-dynamo` (`TransactWriteItems`), `to-browser-idb` (one\n * `readwrite` transaction). 
File / S3 cannot implement this\n * atomically and should omit the method.\n */\n tx?(ops: readonly TxOp[]): Promise<void>\n}\n\n/**\n * A single staged operation inside a `db.transaction(fn)` commit. The\n * hub assembles `TxOp[]` from the user's `tx.collection().put/delete`\n * calls, encrypts any `record` values into `envelope`, and hands the\n * array to `NoydbStore.tx()` when the store supports atomic batch\n * writes. Stores that implement `tx()` MUST honor every\n * `expectedVersion` atomically against the stored envelope version.\n */\nexport interface TxOp {\n readonly type: 'put' | 'delete'\n readonly vault: string\n readonly collection: string\n readonly id: string\n /** Populated for `type: 'put'` — the encrypted envelope to write. */\n readonly envelope?: EncryptedEnvelope\n /** Optional per-record CAS. Mismatch must throw `ConflictError`. */\n readonly expectedVersion?: number\n}\n\n// ─── Store Factory Helper ──────────────────────────────────────────────\n\n/** Type-safe helper for creating store factories. */\nexport function createStore<TOptions>(\n factory: (options: TOptions) => NoydbStore,\n): (options: TOptions) => NoydbStore {\n return factory\n}\n\n// ─── Keyring ───────────────────────────────────────────────────────────\n\n/**\n * Interchange formats `@noy-db/as-*` packages can produce. `'*'` is a\n * wildcard granting every current + future plaintext format.\n */\nexport type ExportFormat =\n | 'xlsx'\n | 'csv'\n | 'json'\n | 'ndjson'\n | 'xml'\n | 'sql'\n | 'pdf'\n | 'blob'\n | 'zip'\n | '*'\n\n/**\n * Owner-granted export capability on a keyring.\n *\n * Two independent dimensions:\n *\n * - `plaintext` — per-format allowlist for record formatters + blob\n * extractors that emit plaintext bytes (`as-xlsx`, `as-csv`,\n * `as-blob`, `as-zip`, …). **Defaults to empty** for every role;\n * the owner/admin must positively grant per-format (or `'*'`).\n * - `bundle` — boolean for `.noydb` encrypted container export\n * (`as-noydb`). 
**Default policy: on for owner/admin, off for\n * operator/viewer/client** — applied when the field is absent or\n * undefined (see `hasExportCapability`).\n */\nexport interface ExportCapability {\n readonly plaintext?: readonly ExportFormat[]\n readonly bundle?: boolean\n}\n\n/**\n * Owner-granted import capability on a keyring (sibling of\n * `ExportCapability`, issue ).\n *\n * Two independent dimensions:\n *\n * - `plaintext` — per-format allowlist for `as-*` readers that ingest\n * plaintext bytes (`as-csv`, `as-json`, `as-ndjson`, `as-zip`, …).\n * Defaults to empty for every role; the owner/admin must positively\n * grant per-format (or `'*'`).\n * - `bundle` — boolean gate for `.noydb` bundle import. **Defaults to\n * `false` for every role**, including owner/admin. Import is more\n * dangerous than export (corrupts vs leaks), so the policy is\n * default-closed across the board — the owner explicitly opts a\n * keyring in via `db.grant({ importCapability: { bundle: true } })`.\n */\nexport interface ImportCapability {\n readonly plaintext?: readonly ExportFormat[]\n readonly bundle?: boolean\n}\n\n/**\n * Forward-declared on-disk shape for `VaultPolicy` — the actual policy\n * model lives in `policy/types.ts` (#9). Declared here as `unknown`-typed\n * map so types.ts has no dependency on the policy module while the\n * `KeyringFile.policy` field can still round-trip foreign documents.\n *\n * @internal\n */\nexport type VaultPolicyOnDisk = Record<string, unknown>\n\n/**\n * Recovery profile enrolled at vault creation (issue #10).\n *\n * - `paper` — `on-recovery` codes (the only end-to-end profile in v0.1.0-pre.5).\n * - `shamir` / `multi-channel` / `admin-mediated` — API surface ships;\n * per-profile dispatch lands in follow-up issues. 
Calling\n * `db.recoverPassphrase` against these throws\n * {@link RecoveryProfileNotImplementedError}.\n */\nexport type RecoveryEnrollment =\n | {\n readonly profile: 'paper'\n /** Number of single-use codes to print at enrollment. */\n readonly codes: number\n }\n | {\n readonly profile: 'shamir'\n readonly k: number\n readonly n: number\n readonly trustees: ReadonlyArray<string>\n }\n | {\n readonly profile: 'multi-channel'\n readonly email?: string\n readonly pin?: boolean\n readonly paperCodes?: number\n }\n | {\n readonly profile: 'admin-mediated'\n readonly grantorUserId: string\n }\n\n/**\n * One tier-2 authenticator slot inside a keyring file. Each slot\n * independently wraps the SAME KEK under a method-specific derived key\n * (LUKS pattern). Adding or removing a slot is a constant-time keyring\n * write — no DEK re-keying required.\n *\n * @see docs/subsystems/session-tiers.md → Tier 2 — Authenticate (multi-slot)\n */\nexport interface KeyringAuthenticator {\n /** Caller-chosen identifier — e.g. `'webauthn-yubikey-blue'`, `'oidc-google'`, `'password-daily'`. */\n readonly id: string\n /** Method family — selects which `@noy-db/on-*` package handles unlock. */\n readonly method: 'webauthn' | 'oidc' | 'password'\n /** ISO-8601 timestamp at which the slot was added. */\n readonly enrolled_at: string\n /**\n * Which session tier ENROLLED this slot. Tier 1 enrolls a fresh slot;\n * tier 2 may add a sibling slot when the active policy permits.\n */\n readonly enrolled_via_tier: 1 | 2\n /** Base64 wrapped-KEK ciphertext under the method-derived key. */\n readonly wrapped_kek: string\n /**\n * Method-specific metadata: WebAuthn cred id, OIDC issuer/sub, PBKDF2\n * salt for `on-password`, etc. 
The schema is open by design — the\n * `@noy-db/on-*` package owns the contents.\n */\n readonly meta: Record<string, unknown>\n}\n\nexport interface KeyringFile {\n readonly _noydb_keyring: typeof NOYDB_KEYRING_VERSION\n readonly user_id: string\n readonly display_name: string\n readonly role: Role\n readonly permissions: Permissions\n readonly deks: Record<string, string>\n readonly salt: string\n readonly created_at: string\n readonly granted_by: string\n /**\n * Tier-2 authenticator slots (multi-slot keyring extension).\n * Optional / append-only: keyring files written before the\n * extension load with an empty list. Each slot independently wraps\n * the same KEK; any one of them unlocks.\n *\n * @see KeyringAuthenticator\n */\n readonly authenticators?: readonly KeyringAuthenticator[]\n /**\n * Per-keyring policy override (reserved). The on-disk format\n * accepts the field for forward compatibility with the Option C\n * merge engine deferred to a later release; v1.0 reads only the\n * vault-level `_meta/policy` document, so this field is parsed and\n * round-tripped but never enforced.\n */\n readonly policy?: VaultPolicyOnDisk\n /**\n * Optional — authorization spec capability bits. Absent on keyrings written\n * before the RFC implementation. Loading falls back to role-based\n * defaults (owner/admin get bundle-on, everyone else off).\n */\n readonly export_capability?: ExportCapability\n /**\n * Optional bundle-slot expiry. ISO-8601 timestamp; past\n * the cutoff `loadKeyring` throws `KeyringExpiredError` before any\n * DEK unwrap is attempted. Useful for time-boxed audit access:\n * \"this slot works for 30 days then becomes opaque to its holder.\"\n *\n * Absent on live keyrings written via `db.grant()` — the field is\n * meaningful for `BundleRecipient` slots produced by\n * `writeNoydbBundle({ recipients: [...] })`. 
Setting it on a live\n * keyring is allowed but unusual.\n */\n readonly expires_at?: string\n /**\n * Optional — issue import-capability bits. Absent on keyrings\n * written before landed. Loading falls back to default-closed\n * for every role and every format.\n */\n readonly import_capability?: ImportCapability\n /**\n * hierarchical access clearance. Absent → 0 (advisory;\n * the real check is whether the DEK map carries a `collection#tier`\n * entry for the requested tier). Owners and admins default to the\n * highest tier they have DEKs for at grant time.\n */\n readonly clearance?: number\n}\n\n// ─── Backup ────────────────────────────────────────────────────────────\n\nexport interface VaultBackup {\n readonly _noydb_backup: typeof NOYDB_BACKUP_VERSION\n readonly _compartment: string\n readonly _exported_at: string\n readonly _exported_by: string\n readonly keyrings: Record<string, KeyringFile>\n readonly collections: VaultSnapshot\n /**\n * Internal collections (`_ledger`, `_ledger_deltas`, `_history`, `_sync`, …)\n * captured alongside the data collections. Optional for backwards\n * compat with backups, which only stored data collections —\n * loading a backup leaves the ledger empty (and `verifyBackupIntegrity`\n * skips the chain check, surfacing only a console warning).\n */\n readonly _internal?: VaultSnapshot\n /**\n * Verifiable-backup metadata. Embeds the ledger head at\n * dump time so `load()` can cross-check that the loaded chain matches\n * exactly what was exported. A backup whose chain has been tampered\n * with — either by modifying ledger entries or by modifying data\n * envelopes that the chain references — fails this check.\n *\n * Optional for backwards compat with backups; missing means\n * \"legacy backup, load with a warning, no integrity check\".\n */\n readonly ledgerHead?: {\n /** Hex sha256 of the canonical JSON of the last ledger entry. */\n readonly hash: string\n /** Sequential index of the last ledger entry. 
*/\n readonly index: number\n /** ISO timestamp captured at dump time. */\n readonly ts: string\n }\n}\n\n// ─── Export ────────────────────────────────────────────────────────────\n\n/**\n * Options for `Vault.exportStream()` and `Vault.exportJSON()`.\n *\n * The defaults match the most common consumer pattern: one chunk per\n * collection, no ledger metadata. Per-record streaming and ledger-head\n * inclusion are opt-in because both add structure most consumers don't\n * need.\n */\nexport interface ExportStreamOptions {\n /**\n * `'collection'` (default) yields one chunk per collection with all\n * records bundled in `chunk.records`. `'record'` yields one chunk per\n * record, useful for arbitrarily large collections that should never\n * be materialized as a single array.\n */\n readonly granularity?: 'collection' | 'record'\n\n /**\n * When `true`, every chunk includes the current compartment ledger\n * head under `chunk.ledgerHead`. The value is identical across every\n * chunk in a single export (one ledger per compartment). Forward-\n * compatible with future partition work where the head would become\n * per-partition. Default: `false`.\n */\n readonly withLedgerHead?: boolean\n /**\n * When set to a BCP 47 locale string (e.g. `'th'`), `exportJSON()`\n * resolves all `dictKey` labels to that locale and omits the raw\n * `dictionaries` snapshot from the output. Has no effect\n * on `exportStream()` — format packages use the `chunk.dictionaries`\n * snapshot directly and apply their own locale strategy.\n *\n * Default: `undefined` — embed the raw snapshot under `_dictionaries`.\n */\n readonly resolveLabels?: string\n}\n\n/**\n * One chunk yielded by `Vault.exportStream()`.\n *\n * `granularity: 'collection'` yields one chunk per collection with the\n * full record array in `records`. 
`granularity: 'record'` yields one\n * chunk per record with `records` containing exactly one element — the\n * `schema` and `refs` metadata is repeated on every chunk so consumers\n * doing per-record streaming don't have to thread state across yields.\n */\nexport interface ExportChunk<T = unknown> {\n /** Collection name (no leading underscore — internal collections are filtered out). */\n readonly collection: string\n\n /**\n * Standard Schema validator attached to the collection at `collection()`\n * construction time, or `null` if no schema was provided. Surfaced so\n * downstream serializers (`@noy-db/as-*` packages, custom\n * exporters) can produce schema-aware output (typed CSV headers, XSD\n * generation, etc.) without poking at collection internals.\n */\n readonly schema: StandardSchemaV1<unknown, T> | null\n\n /**\n * Foreign-key references declared on the collection via the `refs`\n * option, as the `{ field → { target, mode } }` map produced by\n * `RefRegistry.getOutbound`. Empty object when no refs were declared.\n */\n readonly refs: Record<string, { readonly target: string; readonly mode: 'strict' | 'warn' | 'cascade' }>\n\n /**\n * Decrypted, ACL-scoped, schema-validated records. Length 1 in\n * `granularity: 'record'` mode, full collection in `granularity: 'collection'`\n * mode. Records are returned by reference from the collection's eager\n * cache where applicable — consumers must treat them as immutable.\n */\n readonly records: T[]\n\n /**\n * Dictionary snapshots for every `dictKey` field declared on this\n * collection. Captured once at stream-start and held\n * constant across all chunks within the same export — a rename\n * mid-export does not change the snapshot. 
`undefined` when the\n * collection has no `dictKeyFields`.\n *\n * Shape: `{ [fieldName]: { [stableKey]: { [locale]: label } } }`\n *\n * @example\n * ```ts\n * chunk.dictionaries?.status?.paid?.th // → 'ชำระแล้ว'\n * ```\n */\n readonly dictionaries?: Record<\n string, // field name\n Record<string, Record<string, string>> // stable key → locale → label\n >\n\n /**\n * Vault ledger head at export time. Present only when\n * `exportStream({ withLedgerHead: true })` was called. Identical\n * across every chunk in the same export — included on every chunk\n * for forward-compatibility with future per-partition ledgers, where\n * the value will differ per chunk.\n */\n readonly ledgerHead?: {\n readonly hash: string\n readonly index: number\n readonly ts: string\n }\n}\n\n// ─── Sync ──────────────────────────────────────────────────────────────\n\nexport interface DirtyEntry {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly action: 'put' | 'delete'\n readonly version: number\n readonly timestamp: string\n}\n\nexport interface SyncMetadata {\n readonly _noydb_sync: typeof NOYDB_SYNC_VERSION\n readonly last_push: string | null\n readonly last_pull: string | null\n readonly dirty: DirtyEntry[]\n}\n\nexport interface Conflict {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly local: EncryptedEnvelope\n readonly remote: EncryptedEnvelope\n readonly localVersion: number\n readonly remoteVersion: number\n /**\n * Present only when the collection uses `conflictPolicy: 'manual'`.\n * Call `resolve(winner)` to commit the winning envelope, or\n * `resolve(null)` to defer (conflict stays queued for the next sync).\n * Called synchronously inside the `sync:conflict` event handler.\n */\n readonly resolve?: (winner: EncryptedEnvelope | null) => void\n}\n\nexport type ConflictStrategy =\n | 'local-wins'\n | 'remote-wins'\n | 'version'\n | ((conflict: Conflict) => 'local' | 'remote')\n\n/**\n * 
Collection-level conflict policy.\n * Overrides the db-level `conflict` option for the specific collection.\n *\n * - `'last-writer-wins'` — higher `_ts` wins (timestamp LWW).\n * - `'first-writer-wins'` — lower `_v` wins (earlier version is preserved).\n * - `'manual'` — emits `sync:conflict` with a `resolve` callback. Call\n * `resolve(winner)` synchronously to commit or `resolve(null)` to defer.\n * - Custom fn — synchronous `(local: T, remote: T) => T`. Must be pure.\n */\nexport type ConflictPolicy<T> =\n | 'last-writer-wins'\n | 'first-writer-wins'\n | 'manual'\n | ((local: T, remote: T) => T)\n\n/**\n * Envelope-level resolver registered per collection with the SyncEngine.\n * Receives the `id` of the conflicting record and both envelopes.\n * Returns the winning envelope, or `null` to defer resolution.\n * @internal\n */\nexport type CollectionConflictResolver = (\n id: string,\n local: EncryptedEnvelope,\n remote: EncryptedEnvelope,\n) => Promise<EncryptedEnvelope | null>\n\n/** Options for targeted push operations. */\nexport interface PushOptions {\n /** Only push records belonging to these collections. Omit to push all dirty. */\n collections?: string[]\n}\n\n/** Options for targeted pull operations. */\nexport interface PullOptions {\n /** Only pull these collections. Omit to pull all. */\n collections?: string[]\n /**\n * Only pull records with `_ts` strictly after this ISO timestamp.\n * Adapters that implement `listSince` use it directly; others fall back\n * to a full scan with client-side filtering.\n */\n modifiedSince?: string\n}\n\nexport interface PushResult {\n readonly pushed: number\n readonly conflicts: Conflict[]\n readonly errors: Error[]\n}\n\nexport interface PullResult {\n readonly pulled: number\n readonly conflicts: Conflict[]\n readonly errors: Error[]\n}\n\n/** Result of a sync transaction commit. 
*/\nexport interface SyncTransactionResult {\n readonly status: 'committed' | 'conflict'\n readonly pushed: number\n readonly conflicts: Conflict[]\n}\n\nexport interface SyncStatus {\n readonly dirty: number\n readonly lastPush: string | null\n readonly lastPull: string | null\n readonly online: boolean\n}\n\n// ─── Sync Target ─────────────────────────────────────────\n\nexport type SyncTargetRole = 'sync-peer' | 'backup' | 'archive'\n\n/**\n * A sync target with role and optional per-target policy.\n *\n * | Role | Direction | Conflict resolution | Typical use |\n * |-------------|---------------|---------------------|--------------------------|\n * | `sync-peer` | Bidirectional | ConflictStrategy | DynamoDB live sync |\n * | `backup` | Push-only | N/A (receives merged)| S3 dump, Google Drive |\n * | `archive` | Push-only | N/A | IPFS, Git tags, S3 Lock |\n */\nexport interface SyncTarget {\n /** The store to sync with. */\n readonly store: NoydbStore\n /** Role determines sync direction and conflict handling. */\n readonly role: SyncTargetRole\n /** Per-target sync policy. Inherits store-category default when absent. */\n readonly policy?: SyncPolicy\n /** Human-readable label for DevTools and audit logs. 
*/\n readonly label?: string\n}\n\n// ─── Events ────────────────────────────────────────────────────────────\n\nexport interface ChangeEvent {\n readonly vault: string\n readonly collection: string\n readonly id: string\n readonly action: 'put' | 'delete'\n}\n\nexport interface NoydbEventMap {\n 'change': ChangeEvent\n 'error': Error\n 'sync:push': PushResult\n 'sync:pull': PullResult\n 'sync:conflict': Conflict\n 'sync:online': void\n 'sync:offline': void\n 'sync:backup-error': { vault: string; target: string; error: Error }\n 'history:save': { vault: string; collection: string; id: string; version: number }\n 'history:prune': { vault: string; collection: string; id: string; pruned: number }\n /**\n * Emitted when a persisted-index side-car put/delete fails after the\n * main record write already succeeded. The main record is durable; the\n * index mirror may have drifted. Operators reconcile via\n * `collection.reconcileIndex(field)`.\n */\n 'index:write-partial': {\n vault: string\n collection: string\n id: string\n action: 'put' | 'delete'\n error: Error\n }\n /**\n * emitted by `Collection.ensurePersistedIndexesLoaded()`\n * once per field on first lazy-mode query when\n * `reconcileOnOpen: 'auto' | 'dry-run'` is configured. `applied` is\n * `0` in `'dry-run'` mode. 
`skipped` is reserved for a future\n * drift-stamp optimization that short-circuits the reconcile when\n * the mirror version matches what's on disk — currently always\n * `false` (the full reconcile runs every session).\n */\n 'index:reconciled': {\n vault: string\n collection: string\n field: string\n missing: readonly string[]\n stale: readonly string[]\n applied: number\n skipped: boolean\n }\n}\n\n// ─── Grant / Revoke ────────────────────────────────────────────────────\n\nexport interface GrantOptions {\n readonly userId: string\n readonly displayName: string\n readonly role: Role\n readonly passphrase: string\n readonly permissions?: Permissions\n /**\n * Optional `@noy-db/as-*` export capability. Omit or\n * leave undefined to apply role-based defaults (see\n * `hasExportCapability` and `ExportCapability`).\n */\n readonly exportCapability?: ExportCapability\n /**\n * Optional `@noy-db/as-*` import capability (issue ). Omit or\n * leave undefined for default-closed semantics — no plaintext format\n * is grantable until positively listed; bundle import is denied.\n */\n readonly importCapability?: ImportCapability\n /**\n * Skip phrase-format strength validation (issue #7). Defaults to\n * false — `grant()` rejects phrases that don't meet the configured\n * `PassphrasePolicy`. Test fixtures and CLI scripts pass `true`.\n */\n readonly allowWeakPassphrase?: boolean\n /**\n * Initial user-envelope payload for the new principal. Sealed under\n * the same vault DEK (the reserved `_users` collection's DEK) and\n * persisted alongside the keyring during grant.\n *\n * **Bootstrap-only.** Once the new user activates and writes their\n * own envelope, the own-only write rule kicks in — admins cannot\n * edit a teammate's envelope after activation. Use this field for\n * pre-fill at invite time (e.g. 
\"displayName: Bob, locale: en-US\")\n * and let the user take over from there.\n *\n * Hub does not introspect the payload; it is JSON-serialized and\n * encrypted opaquely. Apps own the schema.\n *\n * @see docs/superpowers/specs/2026-05-05-user-envelope-design.md → Lifecycle\n */\n readonly initialProfile?: unknown\n}\n\nexport interface RevokeOptions {\n readonly userId: string\n readonly rotateKeys?: boolean\n\n /**\n * Cascade behavior when the revoked user is an admin who has granted\n * other admins.\n *\n * - `'strict'` (default) — recursively revoke every admin that the\n * target (transitively) granted. The cascade walks the\n * `granted_by` field on each keyring file and stops at non-admin\n * leaves. All affected collections are accumulated and rotated in\n * a single pass at the end, so cascade cost is O(records in\n * affected collections), not O(records × cascade depth).\n *\n * - `'warn'` — leave the descendant admins in place but emit a\n * `console.warn` listing them. Useful for diagnostic dry runs and\n * for environments where the operator wants to clean up the\n * delegation tree manually.\n *\n * No effect when the target is not an admin (operators, viewers, and\n * clients cannot grant other users, so they have no delegation\n * subtree to cascade through). Defaults to `'strict'`.\n */\n readonly cascade?: 'strict' | 'warn'\n}\n\n// ─── Cross-vault queries ──────────────────────────────\n\n/**\n * One entry returned by `Noydb.listAccessibleVaults()`. Carries\n * the compartment id and the role the calling principal holds in it,\n * so the consumer can decide how to fan out without re-checking\n * permissions per vault.\n */\nexport interface AccessibleVault {\n readonly id: string\n readonly role: Role\n}\n\n/**\n * Options for `Noydb.listAccessibleVaults()`.\n */\nexport interface ListAccessibleVaultsOptions {\n /**\n * Minimum role the caller must hold to include a compartment in the\n * result. 
Compartments where the caller's role is strictly *below*\n * this threshold are silently excluded. Defaults to `'client'`,\n * which means \"every vault I can unwrap is returned.\" Set to\n * `'admin'` for \"vaults where I can grant/revoke,\" or\n * `'owner'` for \"vaults I own.\"\n *\n * The privilege ordering used:\n * `client (1) < viewer (2) < operator (3) < admin (4) < owner (5)`\n *\n * Note: `viewer` and `client` are conceptually peers in the ACL\n * (neither can grant), but `viewer` has read-all access while\n * `client` has only explicit-collection read. The numeric order\n * reflects \"how much can this principal see,\" not \"how much can\n * this principal modify.\"\n */\n readonly minRole?: Role\n}\n\n/**\n * Options for `Noydb.queryAcross()`.\n */\nexport interface QueryAcrossOptions {\n /**\n * Maximum number of compartments to process in parallel. Defaults\n * to `1` (sequential) — conservative because the per-compartment\n * callback typically does its own I/O and an unbounded fan-out can\n * exhaust adapter connections (DynamoDB throughput, S3 socket\n * limits, browser fetch concurrency).\n *\n * Set to `4` or `8` for cloud-backed compartments where parallelism\n * is the whole point of fanning out. Set to `1` (default) for local\n * adapters where the disk I/O serializes anyway.\n */\n readonly concurrency?: number\n}\n\n/**\n * One entry in the array returned by `Noydb.queryAcross()`. Either\n * `result` is set (callback succeeded for this compartment) or\n * `error` is set (callback threw, or compartment failed to open).\n *\n * Per-compartment errors do **not** abort the overall fan-out — every\n * compartment is given a chance to run its callback, and the\n * partition between success and failure is exposed in the return\n * value. 
Consumers that want fail-fast semantics can check\n * `r.error !== undefined` and short-circuit themselves.\n */\nexport type QueryAcrossResult<T> =\n | { readonly vault: string; readonly result: T; readonly error?: undefined }\n | { readonly vault: string; readonly result?: undefined; readonly error: Error }\n\n// ─── User Info ─────────────────────────────────────────────────────────\n\nexport interface UserInfo {\n readonly userId: string\n readonly displayName: string\n readonly role: Role\n readonly permissions: Permissions\n readonly createdAt: string\n readonly grantedBy: string\n}\n\n// ─── Session ───────────────────────────────────────────────\n\n/**\n * Operations that a session policy can require re-authentication for.\n * Passed as the `requireReAuthFor` array in `SessionPolicy`.\n */\nexport type ReAuthOperation = 'export' | 'grant' | 'revoke' | 'rotate' | 'changeSecret'\n\n/**\n * Session policy controlling lifetime, re-auth requirements, and\n * background-lock behavior.\n *\n * All timeout values are in milliseconds. `undefined` means \"no limit.\"\n * The policy is evaluated lazily — it does not start timers itself;\n * enforcement happens at the Noydb call site.\n */\nexport interface SessionPolicy {\n /**\n * Idle timeout in ms. If no NOYDB operation is performed for this\n * duration, the session is revoked on the next operation attempt\n * (which will throw `SessionExpiredError`). The idle clock resets\n * on every successful operation.\n *\n * Default: `undefined` (no idle timeout).\n */\n readonly idleTimeoutMs?: number\n\n /**\n * Absolute timeout in ms from session creation. 
After this duration\n * the session is unconditionally revoked regardless of activity.\n *\n * Default: `undefined` (no absolute timeout).\n */\n readonly absoluteTimeoutMs?: number\n\n /**\n * Operations that require the user to re-authenticate (re-enter their\n * passphrase or perform a fresh WebAuthn assertion) before proceeding,\n * even if the session is still alive.\n *\n * Common pattern: `requireReAuthFor: ['export', 'grant']` — allow\n * read/write operations in the background but demand a fresh credential\n * for high-risk mutations.\n *\n * Default: `[]` (no extra re-auth requirements).\n */\n readonly requireReAuthFor?: readonly ReAuthOperation[]\n\n /**\n * If `true`, the session is revoked when the page goes to the background\n * (visibilitychange event, `document.hidden === true`). Useful for\n * high-sensitivity deployments where leaving the tab is treated as\n * a session boundary.\n *\n * No-op in non-browser environments (Node.js, workers without document).\n * Default: `false`.\n */\n readonly lockOnBackground?: boolean\n}\n\n// ─── i18n / Locale ─────────────────────────────────────\n\n/**\n * Locale-aware read options. Pass to `Collection.get()`, `list()`,\n * `query()`, and `scan()` to trigger per-record locale resolution for\n * `dictKey` and `i18nText` fields.\n *\n * - **`locale: 'raw'`** — skip resolution for `i18nText` fields and\n * return the full `{ [locale]: string }` map. Dict key fields still\n * return the stable key (no `<field>Label` added).\n * - **`fallback`** — single locale code or ordered list. Use `'any'` as\n * the last element to fall back to any present translation.\n *\n * When neither the call-level locale nor the compartment's default locale\n * is set, reading a record with `i18nText` fields throws\n * `LocaleNotSpecifiedError`.\n */\nexport interface LocaleReadOptions {\n /**\n * The target locale code (e.g. 
`'th'`), or `'raw'` to return the full\n * language map without resolution.\n */\n readonly locale?: string\n /**\n * Fallback locale or ordered fallback chain. Use `'any'` as the last\n * element to fall back to any present translation.\n */\n readonly fallback?: string | readonly string[]\n}\n\n// ─── plaintextTranslator hook ──────────────────────────────\n\n/**\n * Context passed to the consumer-supplied `plaintextTranslator` function.\n * The hook receives the source text plus enough metadata to route it to the\n * right translation service and record what it did.\n */\nexport interface PlaintextTranslatorContext {\n /** The plaintext string to translate. */\n readonly text: string\n /** BCP 47 source locale (the locale the text is written in). */\n readonly from: string\n /** BCP 47 target locale to translate into. */\n readonly to: string\n /** The schema field name that triggered the translation. */\n readonly field: string\n /** The collection the record is being put into. */\n readonly collection: string\n}\n\n/**\n * A consumer-supplied async function that translates a single string\n * from one locale to another. noy-db ships no built-in translator.\n *\n * **Security:** this function receives plaintext. The consumer is\n * responsible for the data policy of whatever service it calls. See\n * `NOYDB_SPEC.md § Zero-Knowledge Storage` and the `plaintextTranslator`\n * JSDoc on `NoydbOptions` for the full invariant statement.\n */\nexport type PlaintextTranslatorFn = (\n ctx: PlaintextTranslatorContext,\n) => Promise<string>\n\n/**\n * One entry in the in-process translator audit log. Cleared when\n * `db.close()` is called — same lifetime as the KEK and DEKs.\n *\n * Deliberately omits any content hash or translated-text fingerprint\n * to prevent correlation attacks on the audit trail.\n */\nexport interface TranslatorAuditEntry {\n readonly type: 'translator-invocation'\n /** Schema field name that was translated. 
*/\n readonly field: string\n /** Collection the record belongs to. */\n readonly collection: string\n /** Source locale. */\n readonly fromLocale: string\n /** Target locale. */\n readonly toLocale: string\n /**\n * Consumer-provided translator name from\n * `NoydbOptions.plaintextTranslatorName`. Defaults to `'anonymous'`\n * when not supplied.\n */\n readonly translatorName: string\n /** ISO 8601 timestamp of the invocation. */\n readonly timestamp: string\n /**\n * `true` when the result was served from the in-process cache rather\n * than by calling the translator function. Present only on cache hits\n * so the absence of the field also communicates a cache miss.\n */\n readonly cached?: true\n}\n\n// ─── Presence ─────────────────────────────────────────────\n\n/**\n * A presence peer entry. `lastSeen` is an ISO timestamp set by core on each\n * `update()` call. Stale entries (lastSeen older than `staleMs`) are filtered\n * before delivering to the subscriber callback.\n */\nexport interface PresencePeer<P> {\n readonly userId: string\n readonly payload: P\n readonly lastSeen: string\n}\n\n// ─── CRDT ─────────────────────────────────────────────────\n\n// Re-exported from crdt.ts so consumers only need one import path.\nexport type { CrdtMode, CrdtState, LwwMapState, RgaState, YjsState } from './crdt/crdt.js'\n\n// ─── Blob / Attachment Store ────────────────────────\n\n/**\n * Second store shape for blob-store backends (Drive, WebDAV, Git, iCloud)\n * that operate on whole-vault bundles rather than per-record KV.\n *\n * Implement `readBundle` / `writeBundle` instead of the six-method KV\n * contract. Use `wrapBundleStore()` from `@noy-db/hub` to convert to a\n * `NoydbStore` that the rest of the API consumes transparently.\n *\n * Named `NoydbBundleStore` (not `NoydbBundleAdapter`) for consistency\n * with the hub / to-* / in-* rename. 
Concrete implementations ship\n * in `@noy-db/to-*` packages.
*/\n readonly compressedSize: number\n /** Compression algorithm applied before encryption. */\n readonly compression: 'gzip' | 'none'\n /** Raw chunk size in bytes used at write time. Readers MUST use this value. */\n readonly chunkSize: number\n /** Total number of chunks written. Reader expects exactly this many. */\n readonly chunkCount: number\n /** MIME type if provided or auto-detected at upload time. */\n readonly mimeType?: string\n /** ISO timestamp of first upload. */\n readonly createdAt: string\n /** Live reference count — slots + published versions pointing to this blob. */\n readonly refCount: number\n /**\n * Hint indicating which store holds the chunk data.\n * Used by `routeStore` size-tiered routing: `'default'` for small blobs\n * stored inline (e.g. DynamoDB), `'blobs'` for large blobs in the overflow\n * store (e.g. S3). Absent when no routing is configured.\n */\n readonly storeHint?: 'default' | 'blobs'\n}\n\n// ─── Attachment types ─────────────────────────────────────────\n\n/** Single attachment metadata entry stored inside a record's attachment envelope. */\nexport interface AttachmentEntry {\n /** Content-addressed identifier (HMAC-SHA-256 of plaintext). */\n readonly eTag: string\n /** User-visible filename for the slot. */\n readonly filename: string\n /** Original uncompressed size in bytes. */\n readonly size: number\n /** MIME type, if provided or auto-detected at upload time. */\n readonly mimeType?: string\n /** ISO timestamp of the upload. */\n readonly uploadedAt: string\n /** User ID of the uploader, if available. */\n readonly uploadedBy?: string\n}\n\n/** Attachment entry annotated with its slot name, as returned by `AttachmentHandle.list()`. */\nexport type AttachmentInfo = AttachmentEntry & { readonly name: string }\n\n/** Options for `AttachmentHandle.put()`. */\nexport interface AttachmentPutOptions {\n /** Compress the attachment with gzip before encryption. Default: `true`. 
*/\n compress?: boolean\n /** Chunk size in bytes. Default: `DEFAULT_CHUNK_SIZE` (256 KB). */\n chunkSize?: number\n /** MIME type to store with the attachment. Auto-detected from magic bytes if omitted. */\n mimeType?: string\n /** User ID to record as the uploader. Falls back to the active user's ID. */\n uploadedBy?: string\n}\n\n/** Options for `AttachmentHandle.response()`. */\nexport interface AttachmentResponseOptions {\n /**\n * Set `Content-Disposition: inline` so the browser renders the file\n * instead of downloading it. Default: `false` (attachment disposition).\n */\n inline?: boolean\n}\n\n/**\n * Slot record — mutable metadata linking a named slot on a record\n * to a `BlobObject` via its eTag.\n *\n * Multiple slots (even across different records) may reference the same\n * `eTag` — the underlying chunks are shared. Updating metadata creates\n * a new envelope version (`_v++`) while the blob data is unchanged.\n */\nexport interface SlotRecord {\n /** Reference to the `BlobObject` in `_blob_index`. */\n readonly eTag: string\n /** User-visible filename for the slot. */\n readonly filename: string\n /** Original uncompressed size in bytes (denormalized from `BlobObject`). */\n readonly size: number\n /** MIME type. Takes precedence over the MIME type stored in `BlobObject`. */\n readonly mimeType?: string\n /** ISO timestamp of the upload that set this slot. */\n readonly uploadedAt: string\n /** User ID of the uploader, if available. */\n readonly uploadedBy?: string\n}\n\n/** Result of `BlobSet.list()` — slot record plus its named slot key. */\nexport interface SlotInfo extends SlotRecord {\n /** The slot name (key in the record's slot map). */\n readonly name: string\n}\n\n/**\n * Explicitly published version snapshot — an independent reference to a\n * blob at a specific point in time.\n */\nexport interface VersionRecord {\n /** User-defined label (e.g. `'issued-2025-01'`, `'amendment-2025-02'`). 
*/\n readonly label: string\n /** eTag of the blob snapshot at publish time — independent of the current slot. */\n readonly eTag: string\n /** ISO timestamp when the version was published. */\n readonly publishedAt: string\n /** User ID of the publisher, if available. */\n readonly publishedBy?: string\n}\n\n/** Options for `BlobSet.put()`. */\nexport interface BlobPutOptions {\n /** MIME type hint. If omitted, auto-detected from magic bytes. */\n mimeType?: string\n /**\n * Raw chunk size in bytes. Priority: this value > store.maxBlobBytes > 256 KB.\n */\n chunkSize?: number\n /**\n * Whether to gzip-compress bytes before encrypting. Default: `true`.\n * Auto-set to `false` for pre-compressed MIME types (JPEG, PNG, ZIP, etc.).\n */\n compress?: boolean\n /** User ID to record as `uploadedBy`. Defaults to the Noydb session user. */\n uploadedBy?: string\n}\n\n/** Options for `BlobSet.response()` and `BlobSet.responseVersion()`. */\nexport interface BlobResponseOptions {\n /**\n * When `true`, sets `Content-Disposition: inline; filename=\"...\"` so\n * the browser renders the file in the tab. Default (`false`) sets\n * `attachment; filename=\"...\"` which triggers a download.\n */\n inline?: boolean\n /** Override the filename in the Content-Disposition header. */\n filename?: string\n}\n\n// ─── Store Capabilities ─────────────────────────────\n\nexport type StoreAuthKind =\n | 'none'\n | 'filesystem'\n | 'api-key'\n | 'iam'\n | 'oauth'\n | 'kerberos'\n | 'browser-origin'\n\nexport interface StoreAuth {\n kind: StoreAuthKind | StoreAuthKind[]\n required: boolean\n flow: 'static' | 'oauth' | 'kerberos' | 'implicit'\n}\n\nexport interface StoreCapabilities {\n /**\n * true — the store's expectedVersion check and write are atomic at the\n * storage layer. 
Two concurrent puts with the same expectedVersion will\n * produce exactly one success and one ConflictError.\n * false — check and write are separate operations with a race window.\n */\n casAtomic: boolean\n auth: StoreAuth\n /**\n * true — the store implements {@link NoydbStore.tx} and commits\n * every op atomically at the storage layer. The hub's\n * `db.transaction(fn)` will delegate to `tx(ops)` and surface a\n * single pass/fail outcome. false (or absent) — no native\n * multi-record atomicity; the hub falls back to per-record OCC\n * with best-effort unwind on partial failure.\n */\n txAtomic?: boolean\n /**\n * Maximum raw bytes per blob chunk record.\n * `undefined` — no limit (S3, file, IDB); blob stored as single chunk.\n * `256 * 1024` — DynamoDB (400 KB item limit minus envelope overhead).\n * `5 * 1024 * 1024` — localStorage quota safety.\n */\n maxBlobBytes?: number\n}\n\n// ─── Factory Options ───────────────────────────────────────────────────\n\nexport interface NoydbOptions {\n /** Primary store (local storage). */\n readonly store: NoydbStore\n /**\n * tree-shake seam — optional blob strategy. Pass `withBlobs()`\n * from `@noy-db/hub/blobs` to enable `collection.blob(id)` storage.\n * When omitted, hub's blob machinery stays out of the bundle (ESM\n * tree-shaking) and `collection.blob(id)` throws with a pointer at\n * the subpath. `BlobStrategy` is `@internal` — users only construct\n * it via the subpath factory.\n *\n * @internal\n */\n readonly blobStrategy?: BlobStrategy\n /**\n * tree-shake seam — optional indexing strategy. Pass\n * `withIndexing()` from `@noy-db/hub/indexing` to enable eager-mode\n * `==/in` fast-paths, lazy-mode `.lazyQuery()`, rebuild/reconcile,\n * and auto-reconcile. When omitted, indexing code never reaches the\n * bundle; `.lazyQuery()` throws with a pointer at the subpath, and\n * eager-mode collections fall back to linear scans regardless of\n * `indexes: [...]` declarations. 
`IndexStrategy` is `@internal` —\n * users only construct it via the subpath factory.\n *\n * @internal\n */\n readonly indexStrategy?: IndexStrategy\n /**\n * tree-shake seam — optional aggregate strategy. Pass\n * `withAggregate()` from `@noy-db/hub/aggregate` to enable\n * `.aggregate()` and `.groupBy()` on Query. When omitted, those\n * methods throw with a pointer at the subpath; the ~886 LOC of\n * Aggregation + GroupedQuery machinery never reaches the bundle.\n * Streaming `scan().aggregate()` works independently of this\n * strategy — it doesn't use the `Aggregation` class.\n *\n * @internal\n */\n readonly aggregateStrategy?: AggregateStrategy\n /**\n * tree-shake seam — optional CRDT strategy. Required when\n * any collection is declared with `crdt: 'lww-map' | 'rga' | 'yjs'`;\n * otherwise the first put/sync-merge hitting the CRDT path throws.\n * When omitted, ~221 LOC of LWW-Map / RGA / merge helpers never\n * reach the bundle.\n *\n * @internal\n */\n readonly crdtStrategy?: CrdtStrategy\n /**\n * tree-shake seam — optional consent-audit strategy. Pass\n * `withConsent()` from `@noy-db/hub/consent` to enable per-op audit\n * writes into `_consent_audit` when a consent scope is active.\n * When omitted, `vault.consentAudit()` returns `[]` and writes are\n * no-ops; the consent module's ~194 LOC never reaches the bundle.\n *\n * @internal\n */\n readonly consentStrategy?: ConsentStrategy\n /**\n * tree-shake seam — optional periods strategy. Pass\n * `withPeriods()` from `@noy-db/hub/periods` to enable\n * `vault.closePeriod()` / `.openPeriod()` / write-guard on closed\n * periods. When omitted, `vault.listPeriods()` returns `[]` and\n * the write-guard is a no-op; the ~363 LOC of period validation +\n * ledger appending stay out of the bundle.\n *\n * @internal\n */\n readonly periodsStrategy?: PeriodsStrategy\n /**\n * tree-shake seam — optional VaultFrame strategy. Pass\n * `withShadow()` from `@noy-db/hub/shadow` to enable\n * `vault.frame()`. 
Without it, calling `vault.frame()` throws.\n *\n * @internal\n */\n readonly shadowStrategy?: ShadowStrategy\n /**\n * tree-shake seam — optional multi-record transactions. Pass\n * `withTransactions()` from `@noy-db/hub/tx` to enable\n * `db.transaction(fn)`. Without it, calling the method throws.\n *\n * @internal\n */\n readonly txStrategy?: TxStrategy\n /**\n * tree-shake seam — optional history + ledger + time-machine.\n * Pass `withHistory()` from `@noy-db/hub/history` to enable\n * per-record version snapshots, the hash-chained audit ledger, JSON\n * Patch deltas, `vault.ledger()`, `vault.at()`, and the\n * `collection.history()` / `getVersion()` / `revert()` / `diff()` /\n * `clearHistory()` / `pruneRecordHistory()` read APIs. When omitted,\n * snapshots/prune/clear are silent no-ops, the read APIs throw with\n * a pointer at the subpath, and ~1,880 LOC stay out of the bundle.\n *\n * @internal\n */\n readonly historyStrategy?: HistoryStrategy\n /**\n * tree-shake seam — optional i18n strategy. Pass `withI18n()`\n * from `@noy-db/hub/i18n` to enable `i18nText`/`dictKey` field\n * resolution on reads, `i18nText` validation on writes, and\n * `vault.dictionary(name)`. When omitted, locale resolution is the\n * identity (raw values returned), the validators throw with a\n * pointer to the subpath, and ~854 LOC of dictionary + locale\n * machinery stay out of the bundle.\n *\n * @internal\n */\n readonly i18nStrategy?: I18nStrategy\n /**\n * tree-shake seam — optional session-policy strategy. Pass\n * `withSession()` from `@noy-db/hub/session` to enable\n * `sessionPolicy` validation, `PolicyEnforcer` lifecycle (idle /\n * absolute timeouts, lockOnBackground), and global session-token\n * revocation. 
When omitted, setting `sessionPolicy` throws at\n * `createNoydb()` time, and ~495 LOC of policy + token machinery\n * stay out of the bundle.\n *\n * @internal\n */\n readonly sessionStrategy?: SessionStrategy\n /**\n * tree-shake seam — optional sync engine + presence strategy.\n * Pass `withSync()` from `@noy-db/hub/sync` to enable\n * `db.push()` / `pull()` / replication, `db.transaction(vault)`\n * for sync-aware transactions, and `collection.presence()`. When\n * omitted, configuring `sync` / calling these surfaces throws with\n * a pointer at the subpath, and ~856 LOC of replication + presence\n * machinery stay out of the bundle. Keyring stays core; grant/\n * revoke/magic-link/delegation tree-shake via direct imports.\n *\n * @internal\n */\n readonly syncStrategy?: SyncStrategy\n /** Optional remote store(s) for sync. Accepts a single store, a SyncTarget, or an array. */\n readonly sync?: NoydbStore | SyncTarget | SyncTarget[]\n /** User identifier. */\n readonly user: string\n /** Passphrase for key derivation. Required unless encrypt is false or `getKeyring` is provided. */\n readonly secret?: string\n /**\n * Optional callback that returns an unlocked keyring for a given vault.\n * Use this to plug in WebAuthn / OIDC / Shamir / any unlock path that\n * produces an `UnlockedKeyring` outside the passphrase model.\n *\n * When set, `secret` MUST NOT also be set — `createNoydb` throws if both\n * are supplied. When neither is set (and `encrypt !== false`), `createNoydb`\n * also throws.\n *\n * The callback is called lazily, on the first operation that needs the\n * keyring for a given vault. Noydb caches the returned keyring per-vault\n * for the lifetime of the instance, so the callback is invoked at most\n * once per `(instance, vault)` pair (assuming the callback resolves\n * successfully). 
If the callback rejects, the rejection surfaces from the\n * first vault operation that triggered the unlock; subsequent operations\n * will retry the callback.\n *\n * @example\n * ```ts\n * import { createNoydb } from '@noy-db/hub'\n * import { unlockWebAuthn } from '@noy-db/on-webauthn'\n *\n * const enrollment = await loadEnrollment()\n * const db = await createNoydb({\n * store,\n * user: 'alice',\n * getKeyring: (vault) => unlockWebAuthn(enrollment),\n * })\n * ```\n *\n * Note: this callback is responsible for both the \"open existing vault\"\n * and the \"create new vault\" cases. Unlike the passphrase path, there is\n * no automatic `NoAccessError` → `createOwnerKeyring` fallback, because\n * the callback owner has the UI context to decide which path to run.\n * For first-time bootstrap, use a passphrase or recovery code, enroll\n * WebAuthn from the unlocked keyring, then swap to `getKeyring` on\n * subsequent sessions.\n */\n readonly getKeyring?: (vault: string) => Promise<UnlockedKeyring>\n /** Auth method. Default: 'passphrase'. */\n readonly auth?: 'passphrase' | 'biometric'\n /** Enable encryption. Default: true. */\n readonly encrypt?: boolean\n /** Conflict resolution strategy. Default: 'version'. */\n readonly conflict?: ConflictStrategy\n /**\n * Sync scheduling policy. Controls when push/pull fire.\n * Default inferred from store category: per-record → `on-change`,\n * bundle → `debounce 30s`.\n */\n readonly syncPolicy?: SyncPolicy\n /**\n * @deprecated Use `syncPolicy` instead. Kept for backward compatibility.\n * When both are supplied, `syncPolicy` takes precedence.\n */\n readonly autoSync?: boolean\n /**\n * @deprecated Use `syncPolicy` instead. Kept for backward compatibility.\n */\n readonly syncInterval?: number\n /**\n * Session timeout in ms. Clears keys after inactivity. Default: none.\n * @deprecated Use `sessionPolicy.idleTimeoutMs` instead. 
This field is\n * still honored for backwards compatibility but `sessionPolicy` takes\n * precedence when both are supplied.\n */\n readonly sessionTimeout?: number\n /**\n * Session policy controlling lifetime, re-auth requirements, and\n * background-lock behavior. When supplied, replaces the\n * legacy `sessionTimeout` field.\n */\n readonly sessionPolicy?: SessionPolicy\n /**\n * Validate passphrase strength against the phrase format\n * (`@noy-db/hub` issue #7) on first-time keyring creation. When\n * `true`, weak phrases throw {@link WeakPassphraseError} from\n * `createNoydb()` / `db.rotatePassphrase()`. Default: `false` for\n * back-compat in v0.1.x; planned to flip to `true` at v1.0.\n */\n readonly validatePassphrase?: boolean\n /**\n * Vault-level policy gate document (issue #9). When present, the hub\n * persists the merged policy at `_meta/policy` on first-time vault\n * creation and gates sensitive operations (`db.rotatePassphrase`,\n * `db.export*`, …) against it. Omitted ⇒ the engine uses\n * {@link PERSONAL_POLICY}. Use {@link STRICT_POLICY} for regulated\n * deployments.\n *\n * The on-disk document is the source of truth — the policy field\n * is only honored at vault creation; subsequent runs read from\n * `_meta/policy`. Use `db.updatePolicy()` to change it deliberately.\n *\n * Imported from `@noy-db/hub` as a type-only reference; the runtime\n * import lives in `policy/index.ts`.\n */\n readonly policy?: VaultPolicy\n /**\n * Mandatory recovery profile enrollment (issue #10). Vaults with\n * `recover-passphrase` enabled MUST register at least one profile\n * before being production-ready, otherwise `createNoydb()` throws\n * {@link RecoveryNotEnrolledError}. Set\n * `policy.gates['recover-passphrase'].enabled = false` to\n * deliberately opt out of recovery (passphrase loss = data loss).\n *\n * v0.1.0-pre.5 supports the `'paper'` profile end-to-end. 
Other\n * profiles ship the API shape and throw\n * {@link RecoveryProfileNotImplementedError} during use.\n */\n readonly recovery?: ReadonlyArray<RecoveryEnrollment>\n /**\n * When `true`, `createNoydb` rejects vaults with no recovery\n * entries persisted (per the spec's mandatory-enrollment\n * requirement). Default `false` for v0.1.x back-compat; planned to\n * flip to `true` at v1.0. Apps in regulated environments should\n * turn this on now.\n */\n readonly requireRecovery?: boolean\n /**\n * What to do when `openVault` finds an existing keyring in the store that\n * cannot be decrypted with the supplied credentials (`InvalidKeyError`).\n *\n * - `'error'` (default) — propagate the error. The app must prompt the user\n * to supply the correct credentials or clear both the data and auth stores.\n * - `'reset'` — delete the stale keyring and re-initialise the vault from\n * scratch using the current credentials. Use this when the data store can\n * become detached from the auth store (e.g. the user cleared the IndexedDB\n * data records but not the keyring row, or a WebAuthn credential was rotated).\n * **All previously encrypted data is unrecoverable after a reset.**\n *\n * Only applies to the passphrase (`secret`) path. When `getKeyring` is used,\n * the callback is responsible for handling stale-keyring detection itself.\n */\n readonly onInvalidKey?: 'error' | 'reset'\n /**\n * Enable the public envelope subsystem (`docs/subsystems/public-envelope.md`).\n * Pass `true` for the default schema (every standard field, 256 KB\n * icon cap, 200-char text cap), or a `PublicEnvelopeSchema` to\n * narrow what the owner can set. Off by default — vaults written\n * by hubs without this option carry no envelope, full stop.\n */\n readonly publicEnvelope?: true | PublicEnvelopeSchema\n /** Audit history configuration. 
*/\n readonly history?: HistoryConfig\n /**\n * Consumer-supplied translation function for `i18nText` fields with\n * `autoTranslate: true`.\n *\n * ⚠ **`plaintextTranslator` receives unencrypted text.** Configuring\n * this hook causes plaintext to leave noy-db's zero-knowledge boundary\n * over whatever channel the consumer's implementation uses. noy-db ships\n * no built-in translator and adds no translator SDKs as dependencies.\n * The consumer chooses and owns the data policy of the external service.\n *\n * Per-field opt-in via `autoTranslate: true` on `i18nText()`. Calling\n * `put()` on a collection with `autoTranslate: true` fields while this\n * option is absent throws `TranslatorNotConfiguredError`.\n *\n * See `NOYDB_SPEC.md § Zero-Knowledge Storage` for the invariant text.\n */\n readonly plaintextTranslator?: PlaintextTranslatorFn\n /**\n * Human-readable name for the translator, recorded in the in-process\n * audit log (e.g. `'deepl-pro-with-dpa'`, `'self-hosted-llama-7b'`).\n * Defaults to `'anonymous'` when not supplied.\n */\n readonly plaintextTranslatorName?: string\n}\n\n// ─── History / Audit Trail ─────────────────────────────────────────────\n\n/** History configuration. */\nexport interface HistoryConfig {\n /** Enable history tracking. Default: true. */\n readonly enabled?: boolean\n /** Maximum history entries per record. Oldest pruned on overflow. Default: unlimited. */\n readonly maxVersions?: number\n}\n\n/** Options for querying history. */\nexport interface HistoryOptions {\n /** Start date (inclusive), ISO 8601. */\n readonly from?: string\n /** End date (inclusive), ISO 8601. */\n readonly to?: string\n /** Maximum entries to return. */\n readonly limit?: number\n}\n\n/** Options for pruning history. */\nexport interface PruneOptions {\n /** Keep only the N most recent versions. */\n readonly keepVersions?: number\n /** Delete versions older than this date, ISO 8601. 
*/\n readonly beforeDate?: string\n}\n\n/** A decrypted history entry. */\nexport interface HistoryEntry<T> {\n readonly version: number\n readonly timestamp: string\n readonly userId: string\n readonly record: T\n}\n\n// ─── Bulk operations ──────────────────────────────────────\n\n/** Per-item options for `Collection.putMany()`. */\nexport interface PutManyItemOptions {\n /**\n * Optimistic-concurrency check: fail this item if the stored version\n * is not `expectedVersion`. Honored only in `atomic: true` mode;\n * ignored in the default best-effort loop.\n */\n readonly expectedVersion?: number\n}\n\n/**\n * Batch-level options for `Collection.putMany()` and `deleteMany()`.\n *\n * `atomic: true` switches the call from best-effort loop\n * to all-or-nothing: a pre-flight CAS check runs first, then every op\n * is executed; any mid-batch failure triggers a best-effort revert.\n * On failure in atomic mode the whole call throws — you won't get a\n * partial `PutManyResult`. On success the result mirrors the default\n * loop's shape.\n */\nexport interface PutManyOptions {\n readonly atomic?: boolean\n}\n\n/** Result of `Collection.putMany()`. */\nexport interface PutManyResult {\n /** `true` iff every entry succeeded. */\n readonly ok: boolean\n /** IDs that were successfully written. */\n readonly success: readonly string[]\n /** Entries that failed, with the error that prevented each write. */\n readonly failures: ReadonlyArray<{ readonly id: string; readonly error: Error }>\n}\n\n/** Result of `Collection.deleteMany()`. Same shape as `PutManyResult`. */\nexport interface DeleteManyResult {\n readonly ok: boolean\n readonly success: readonly string[]\n readonly failures: ReadonlyArray<{ readonly id: string; readonly error: Error }>\n}\n"],"mappings":";AA4CO,IAAM,uBAAuB;AAG7B,IAAM,wBAAwB;AAG9B,IAAM,uBAAuB;AAG7B,IAAM,qBAAqB;AA6U3B,SAAS,YACd,SACmC;AACnC,SAAO;AACT;","names":[]}
|
|
@@ -3,7 +3,7 @@ import {
|
|
|
3
3
|
} from "./chunk-2QR2PQTT.js";
|
|
4
4
|
import {
|
|
5
5
|
NOYDB_SYNC_VERSION
|
|
6
|
-
} from "./chunk-
|
|
6
|
+
} from "./chunk-B6HF6NTZ.js";
|
|
7
7
|
import {
|
|
8
8
|
bufferToBase64,
|
|
9
9
|
decrypt,
|
|
@@ -719,4 +719,4 @@ export {
|
|
|
719
719
|
SyncEngine,
|
|
720
720
|
SyncTransaction
|
|
721
721
|
};
|
|
722
|
-
//# sourceMappingURL=chunk-
|
|
722
|
+
//# sourceMappingURL=chunk-CL37QSND.js.map
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
// src/meta/public-envelope/types.ts
|
|
2
|
+
var PUBLIC_ENVELOPE_FIELDS = [
|
|
3
|
+
"name",
|
|
4
|
+
"description",
|
|
5
|
+
"icon",
|
|
6
|
+
"createdAt",
|
|
7
|
+
"updatedAt",
|
|
8
|
+
"defaultLocale"
|
|
9
|
+
];
|
|
10
|
+
var DEFAULT_PUBLIC_ENVELOPE_SCHEMA = {
|
|
11
|
+
fields: PUBLIC_ENVELOPE_FIELDS,
|
|
12
|
+
maxIconBytes: 256 * 1024,
|
|
13
|
+
iconMimeTypes: ["image/png", "image/svg+xml"],
|
|
14
|
+
maxStringChars: 200
|
|
15
|
+
};
|
|
16
|
+
function resolveSchema(schema) {
|
|
17
|
+
if (!schema) return void 0;
|
|
18
|
+
if (schema === true) {
|
|
19
|
+
return {
|
|
20
|
+
fields: DEFAULT_PUBLIC_ENVELOPE_SCHEMA.fields,
|
|
21
|
+
maxIconBytes: DEFAULT_PUBLIC_ENVELOPE_SCHEMA.maxIconBytes,
|
|
22
|
+
iconMimeTypes: DEFAULT_PUBLIC_ENVELOPE_SCHEMA.iconMimeTypes,
|
|
23
|
+
maxStringChars: DEFAULT_PUBLIC_ENVELOPE_SCHEMA.maxStringChars
|
|
24
|
+
};
|
|
25
|
+
}
|
|
26
|
+
return {
|
|
27
|
+
fields: schema.fields ?? DEFAULT_PUBLIC_ENVELOPE_SCHEMA.fields,
|
|
28
|
+
maxIconBytes: schema.maxIconBytes ?? DEFAULT_PUBLIC_ENVELOPE_SCHEMA.maxIconBytes,
|
|
29
|
+
iconMimeTypes: schema.iconMimeTypes ?? DEFAULT_PUBLIC_ENVELOPE_SCHEMA.iconMimeTypes,
|
|
30
|
+
maxStringChars: schema.maxStringChars ?? DEFAULT_PUBLIC_ENVELOPE_SCHEMA.maxStringChars
|
|
31
|
+
};
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export {
|
|
35
|
+
PUBLIC_ENVELOPE_FIELDS,
|
|
36
|
+
DEFAULT_PUBLIC_ENVELOPE_SCHEMA,
|
|
37
|
+
resolveSchema
|
|
38
|
+
};
|
|
39
|
+
//# sourceMappingURL=chunk-EMIGCR7X.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/meta/public-envelope/types.ts"],"sourcesContent":["/**\n * Public envelope — owner-curated plaintext metadata, readable\n * before vault unlock or bundle decryption.\n *\n * @see docs/subsystems/public-envelope.md\n *\n * @module\n */\n\n/**\n * Either a single string (used when the developer's app is\n * single-locale) or a locale → string map for i18n. Mirrors the\n * shape `@noy-db/hub/i18n` already uses for record fields, so the\n * existing `resolveI18nText` resolver applies.\n */\nexport type PublicEnvelopeText = string | Record<string, string>\n\n/**\n * Persisted shape — both `_meta/public-envelope` and the bundle\n * header carry this. The version number is monotonic per vault and\n * helps cache invalidators detect change without hashing the JSON.\n */\nexport interface PublicEnvelope {\n readonly _noydb_public: 1\n readonly version: number\n readonly name?: PublicEnvelopeText\n readonly description?: PublicEnvelopeText\n /** Inline `data:` URL (`data:image/png;base64,…` or `data:image/svg+xml;base64,…`). */\n readonly icon?: string\n /** ISO-8601 timestamp; auto-set on first envelope write, immutable thereafter. */\n readonly createdAt?: string\n /** ISO-8601 timestamp; auto-updated on every `setPublicEnvelope` call. */\n readonly updatedAt?: string\n /** BCP-47 fallback locale for renderers when the user's locale isn't covered. */\n readonly defaultLocale?: string\n}\n\n/** Field names the developer can allow in `PublicEnvelopeSchema.fields`. */\nexport const PUBLIC_ENVELOPE_FIELDS = [\n 'name',\n 'description',\n 'icon',\n 'createdAt',\n 'updatedAt',\n 'defaultLocale',\n] as const\n\nexport type PublicEnvelopeField = (typeof PUBLIC_ENVELOPE_FIELDS)[number]\n\n/**\n * Build-time schema. The developer enables the feature and bounds\n * what the owner can set. 
`true` is shorthand for \"all defaults\" —\n * gives the owner the full field set with the standard caps.\n */\nexport interface PublicEnvelopeSchema {\n /**\n * Allowed field names. Setting `name`/`description`/`icon`/`defaultLocale` is\n * gated on the field being listed here. `createdAt` / `updatedAt` are managed\n * by the hub; including them is a no-op (the owner cannot set them\n * directly). Default: every field above.\n */\n readonly fields?: ReadonlyArray<PublicEnvelopeField>\n /**\n * Maximum icon size — measured as the length of the data-URL\n * string. Default 256 KB.\n */\n readonly maxIconBytes?: number\n /** Allowed icon MIME types. Default ['image/png', 'image/svg+xml']. */\n readonly iconMimeTypes?: ReadonlyArray<string>\n /** Maximum length of `name` / `description` per locale. Default 200. */\n readonly maxStringChars?: number\n}\n\n/** Default schema values; merged onto every developer-supplied schema. */\nexport const DEFAULT_PUBLIC_ENVELOPE_SCHEMA = {\n fields: PUBLIC_ENVELOPE_FIELDS,\n maxIconBytes: 256 * 1024,\n iconMimeTypes: ['image/png', 'image/svg+xml'] as const,\n maxStringChars: 200,\n} as const satisfies Required<PublicEnvelopeSchema>\n\n/** Resolved schema after merging developer override onto defaults. */\nexport interface ResolvedPublicEnvelopeSchema {\n readonly fields: ReadonlyArray<PublicEnvelopeField>\n readonly maxIconBytes: number\n readonly iconMimeTypes: ReadonlyArray<string>\n readonly maxStringChars: number\n}\n\n/**\n * Merge developer schema onto the defaults. 
The shorthand `true`\n * resolves to the full default schema; an explicit object only\n * overrides the keys it provides.\n */\nexport function resolveSchema(\n schema: true | PublicEnvelopeSchema | undefined,\n): ResolvedPublicEnvelopeSchema | undefined {\n if (!schema) return undefined\n if (schema === true) {\n return {\n fields: DEFAULT_PUBLIC_ENVELOPE_SCHEMA.fields,\n maxIconBytes: DEFAULT_PUBLIC_ENVELOPE_SCHEMA.maxIconBytes,\n iconMimeTypes: DEFAULT_PUBLIC_ENVELOPE_SCHEMA.iconMimeTypes,\n maxStringChars: DEFAULT_PUBLIC_ENVELOPE_SCHEMA.maxStringChars,\n }\n }\n return {\n fields: schema.fields ?? DEFAULT_PUBLIC_ENVELOPE_SCHEMA.fields,\n maxIconBytes: schema.maxIconBytes ?? DEFAULT_PUBLIC_ENVELOPE_SCHEMA.maxIconBytes,\n iconMimeTypes: schema.iconMimeTypes ?? DEFAULT_PUBLIC_ENVELOPE_SCHEMA.iconMimeTypes,\n maxStringChars: schema.maxStringChars ?? DEFAULT_PUBLIC_ENVELOPE_SCHEMA.maxStringChars,\n }\n}\n"],"mappings":";AAsCO,IAAM,yBAAyB;AAAA,EACpC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AA6BO,IAAM,iCAAiC;AAAA,EAC5C,QAAQ;AAAA,EACR,cAAc,MAAM;AAAA,EACpB,eAAe,CAAC,aAAa,eAAe;AAAA,EAC5C,gBAAgB;AAClB;AAeO,SAAS,cACd,QAC0C;AAC1C,MAAI,CAAC,OAAQ,QAAO;AACpB,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,MACL,QAAQ,+BAA+B;AAAA,MACvC,cAAc,+BAA+B;AAAA,MAC7C,eAAe,+BAA+B;AAAA,MAC9C,gBAAgB,+BAA+B;AAAA,IACjD;AAAA,EACF;AACA,SAAO;AAAA,IACL,QAAQ,OAAO,UAAU,+BAA+B;AAAA,IACxD,cAAc,OAAO,gBAAgB,+BAA+B;AAAA,IACpE,eAAe,OAAO,iBAAiB,+BAA+B;AAAA,IACtE,gBAAgB,OAAO,kBAAkB,+BAA+B;AAAA,EAC1E;AACF;","names":[]}
|
|
@@ -1,3 +1,6 @@
|
|
|
1
|
+
import {
|
|
2
|
+
pickLocale
|
|
3
|
+
} from "./chunk-GILMPJXB.js";
|
|
1
4
|
import {
|
|
2
5
|
BundleIntegrityError
|
|
3
6
|
} from "./chunk-ACLDOTNQ.js";
|
|
@@ -15,7 +18,8 @@ var ALLOWED_HEADER_KEYS = /* @__PURE__ */ new Set([
|
|
|
15
18
|
"formatVersion",
|
|
16
19
|
"handle",
|
|
17
20
|
"bodyBytes",
|
|
18
|
-
"bodySha256"
|
|
21
|
+
"bodySha256",
|
|
22
|
+
"publicEnvelope"
|
|
19
23
|
]);
|
|
20
24
|
function validateBundleHeader(parsed) {
|
|
21
25
|
if (parsed === null || typeof parsed !== "object") {
|
|
@@ -51,6 +55,25 @@ function validateBundleHeader(parsed) {
|
|
|
51
55
|
`.noydb bundle header.bodySha256 must be a 64-character lowercase hex string, got ${typeof h["bodySha256"] === "string" ? `"${h["bodySha256"]}"` : String(h["bodySha256"])}.`
|
|
52
56
|
);
|
|
53
57
|
}
|
|
58
|
+
if (h["publicEnvelope"] !== void 0) {
|
|
59
|
+
const env = h["publicEnvelope"];
|
|
60
|
+
if (env === null || typeof env !== "object" || Array.isArray(env)) {
|
|
61
|
+
throw new Error(
|
|
62
|
+
`.noydb bundle header.publicEnvelope must be a JSON object when present, got ${typeof env}.`
|
|
63
|
+
);
|
|
64
|
+
}
|
|
65
|
+
const e = env;
|
|
66
|
+
if (e["_noydb_public"] !== 1) {
|
|
67
|
+
throw new Error(
|
|
68
|
+
`.noydb bundle header.publicEnvelope._noydb_public must be 1, got ${String(e["_noydb_public"])}.`
|
|
69
|
+
);
|
|
70
|
+
}
|
|
71
|
+
if (typeof e["version"] !== "number" || !Number.isInteger(e["version"]) || e["version"] < 1) {
|
|
72
|
+
throw new Error(
|
|
73
|
+
`.noydb bundle header.publicEnvelope.version must be a positive integer, got ${String(e["version"])}.`
|
|
74
|
+
);
|
|
75
|
+
}
|
|
76
|
+
}
|
|
54
77
|
}
|
|
55
78
|
function encodeBundleHeader(header) {
|
|
56
79
|
validateBundleHeader(header);
|
|
@@ -58,7 +81,8 @@ function encodeBundleHeader(header) {
|
|
|
58
81
|
formatVersion: header.formatVersion,
|
|
59
82
|
handle: header.handle,
|
|
60
83
|
bodyBytes: header.bodyBytes,
|
|
61
|
-
bodySha256: header.bodySha256
|
|
84
|
+
bodySha256: header.bodySha256,
|
|
85
|
+
...header.publicEnvelope !== void 0 ? { publicEnvelope: header.publicEnvelope } : {}
|
|
62
86
|
});
|
|
63
87
|
return new TextEncoder().encode(json);
|
|
64
88
|
}
|
|
@@ -257,11 +281,13 @@ async function writeNoydbBundle(vault, opts = {}) {
|
|
|
257
281
|
const { format, streamFormat } = selectCompression(opts.compression);
|
|
258
282
|
const body = streamFormat === null ? dumpBytes : await pumpThroughStream(dumpBytes, new CompressionStream(streamFormat));
|
|
259
283
|
const bodySha256 = await sha256Hex(body);
|
|
284
|
+
const publicEnvelope = await vault.getPublicEnvelope();
|
|
260
285
|
const header = {
|
|
261
286
|
formatVersion: NOYDB_BUNDLE_FORMAT_VERSION,
|
|
262
287
|
handle,
|
|
263
288
|
bodyBytes: body.length,
|
|
264
|
-
bodySha256
|
|
289
|
+
bodySha256,
|
|
290
|
+
...publicEnvelope !== void 0 ? { publicEnvelope } : {}
|
|
265
291
|
};
|
|
266
292
|
const headerBytes = encodeBundleHeader(header);
|
|
267
293
|
const prefix = new Uint8Array(NOYDB_BUNDLE_PREFIX_BYTES);
|
|
@@ -303,6 +329,17 @@ function parsePrefixAndHeader(bytes) {
|
|
|
303
329
|
function readNoydbBundleHeader(bytes) {
|
|
304
330
|
return parsePrefixAndHeader(bytes).header;
|
|
305
331
|
}
|
|
332
|
+
function readNoydbBundlePublicEnvelope(bytes, opts = {}) {
|
|
333
|
+
const header = parsePrefixAndHeader(bytes).header;
|
|
334
|
+
const env = header.publicEnvelope;
|
|
335
|
+
if (!env) return void 0;
|
|
336
|
+
if (opts.locale === void 0) return env;
|
|
337
|
+
return {
|
|
338
|
+
...env,
|
|
339
|
+
...env.name !== void 0 ? { name: pickLocale(env.name, opts.locale, env.defaultLocale) } : {},
|
|
340
|
+
...env.description !== void 0 ? { description: pickLocale(env.description, opts.locale, env.defaultLocale) } : {}
|
|
341
|
+
};
|
|
342
|
+
}
|
|
306
343
|
async function readNoydbBundle(bytes) {
|
|
307
344
|
const { header, bodyOffset, algo } = parsePrefixAndHeader(bytes);
|
|
308
345
|
const body = bytes.slice(bodyOffset);
|
|
@@ -349,6 +386,7 @@ export {
|
|
|
349
386
|
resetBrotliSupportCache,
|
|
350
387
|
writeNoydbBundle,
|
|
351
388
|
readNoydbBundleHeader,
|
|
389
|
+
readNoydbBundlePublicEnvelope,
|
|
352
390
|
readNoydbBundle
|
|
353
391
|
};
|
|
354
|
-
//# sourceMappingURL=chunk-
|
|
392
|
+
//# sourceMappingURL=chunk-FAAWLVTF.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/bundle/format.ts","../src/bundle/bundle.ts"],"sourcesContent":["/**\n * `.noydb` container format — byte layout, header schema, validators.\n *\n *. Wraps a `vault.dump()` JSON string in a thin\n * binary container with a magic-byte prefix, a minimum-disclosure\n * unencrypted header, and a compressed body.\n *\n * **Byte layout** (read in order from offset 0):\n *\n * ```\n * +--------+--------+--------+--------+\n * | N=78 | D=68 | B=66 | 1=49 | Magic 'NDB1' (4 bytes)\n * +--------+--------+--------+--------+\n * | flags | compr | header_length (uint32 BE) |\n * +--------+--------+--------+--------+--------+--------+--------+\n * | header_length bytes of UTF-8 JSON header ...\n * +--------+--------+\n * | compressed body bytes ...\n * ```\n *\n * Total fixed prefix before the header JSON is **10 bytes**:\n * - 4 bytes magic\n * - 1 byte flags\n * - 1 byte compression algorithm\n * - 4 bytes header length (uint32 big-endian)\n *\n * **Why a binary container** at all? `vault.dump()` already\n * produces a JSON string with encrypted records inside. Wrapping it\n * again seems redundant — but the wrap is what makes the file safe\n * to drop into cloud storage (Drive, Dropbox, iCloud) without\n * leaking the vault name and exporter identity through the\n * cloud's metadata API. The minimum-disclosure header is the only\n * thing visible without downloading and decompressing the body.\n * The dump JSON inside the body still contains the original\n * metadata, but that's only readable by someone who already has the\n * file bytes — the same person who could read the encrypted records\n * with the right passphrase.\n *\n * **Why minimum disclosure** in the header? Because consumers will\n * inevitably store these in services where the filename, file size,\n * and any unencrypted metadata are indexed for search. 
A field like\n * `vault: \"Acme Corp\"` would let an attacker (or a curious\n * cloud admin) enumerate which compartments exist and who exported\n * them, even with zero access to the encrypted body. The header\n * carries only what's needed to identify the file as a NOYDB\n * bundle and verify its integrity — nothing about the contents.\n */\n\nimport type { PublicEnvelope } from '../meta/public-envelope/types.js'\n\n/** Magic bytes 'NDB1' (ASCII), identifying a NOYDB bundle. */\nexport const NOYDB_BUNDLE_MAGIC = new Uint8Array([0x4e, 0x44, 0x42, 0x31])\n\n/** Total fixed prefix before the header JSON: 4+1+1+4 bytes. */\nexport const NOYDB_BUNDLE_PREFIX_BYTES = 10\n\n/** Current bundle format version. Bumped on layout changes. */\nexport const NOYDB_BUNDLE_FORMAT_VERSION = 1\n\n/**\n * Bitfield interpretation of the flags byte.\n *\n * Bit 0 — body is compressed (0 = raw, 1 = compressed)\n * Bit 1 — header carries an integrity hash over the body bytes\n * Bits 2-7 — reserved, must be 0 in\n */\nexport const FLAG_COMPRESSED = 0b0000_0001\nexport const FLAG_HAS_INTEGRITY_HASH = 0b0000_0010\n\n/**\n * Compression algorithm encoding for the byte at offset 5.\n *\n * `none` is admitted for round-trip testing and for callers that\n * want to bundle without compression (e.g. when piping into a\n * separately compressed transport). `gzip` is the universally\n * available baseline (Node 18+, all modern browsers). `brotli` is\n * preferred when the runtime supports it — typically 30-50% smaller\n * for JSON payloads — but Node 22+ / Chrome 124+ / Firefox 122+\n * are required, so the writer feature-detects at runtime and falls\n * back to gzip. 
The reader must handle all three.\n */\nexport const COMPRESSION_NONE = 0\nexport const COMPRESSION_GZIP = 1\nexport const COMPRESSION_BROTLI = 2\n\nexport type CompressionAlgo = 0 | 1 | 2\n\n/**\n * The unencrypted header carried in every `.noydb` bundle.\n *\n * **Minimum-disclosure rules:** these are the ONLY allowed keys.\n * Any other key in a parsed header causes\n * `validateBundleHeader` to throw. The set is kept short to\n * minimize attack surface from cloud-storage metadata indexing —\n * see the file-level doc comment for the rationale.\n *\n * Forbidden in particular:\n * - `vault` / `_compartment` — would leak the tenant name\n * - `exporter` / `_exported_by` — would leak user identity\n * - `timestamp` / `_exported_at` — would leak activity timing\n * - `kdfParams` / salt fields — would leak crypto config that\n * could narrow brute-force search space\n * - any field starting with `_` (reserved by the dump format)\n */\nexport interface NoydbBundleHeader {\n /** Bundle format version — bumped on layout changes. */\n readonly formatVersion: number\n /**\n * Opaque ULID identifier — generated once per vault and\n * stable across re-exports of the same vault. Does not\n * leak any information about contents (the timestamp prefix is\n * just monotonicity for sortability, not exporter activity —\n * see `bundle/ulid.ts` for the design notes).\n */\n readonly handle: string\n /** Compressed body length in bytes. Lets readers verify completeness without decompressing. */\n readonly bodyBytes: number\n /** SHA-256 of the compressed body bytes (lowercase hex). Lets readers verify integrity without decompressing. */\n readonly bodySha256: string\n /**\n * Owner-curated public envelope (`docs/subsystems/public-envelope.md`).\n * Optional — present only when the source vault has a\n * `_meta/public-envelope` document AND the writer's hub is opted\n * into the feature. 
Treat as **untrusted hint**; the body's\n * encrypted contents remain the source of truth.\n *\n * The envelope deliberately widens the minimum-disclosure rule\n * for explicit, owner-curated label fields (name, icon, …). Every\n * other unknown header key still rejects at parse time.\n */\n readonly publicEnvelope?: PublicEnvelope\n}\n\n/**\n * Allowlist of header keys. Any key not in this set is forbidden\n * and causes `validateBundleHeader` to throw. Kept as a Set for\n * O(1) lookup; the validator iterates over the parsed header and\n * checks each key against this set.\n */\nconst ALLOWED_HEADER_KEYS: ReadonlySet<string> = new Set([\n 'formatVersion',\n 'handle',\n 'bodyBytes',\n 'bodySha256',\n 'publicEnvelope',\n])\n\n/**\n * Validate a parsed bundle header. Throws on any deviation from\n * the minimum-disclosure schema:\n *\n * - Missing required field\n * - Wrong type for any field\n * - Any extra key not in `ALLOWED_HEADER_KEYS`\n * - Unsupported `formatVersion`\n * - Negative or non-integer `bodyBytes`\n * - Malformed `handle` (must be 26-char Crockford base32)\n * - Malformed `bodySha256` (must be 64-char lowercase hex)\n *\n * The error messages name the offending field so consumers can\n * fix the producer rather than the reader.\n */\nexport function validateBundleHeader(\n parsed: unknown,\n): asserts parsed is NoydbBundleHeader {\n if (parsed === null || typeof parsed !== 'object') {\n throw new Error(\n `.noydb bundle header must be a JSON object, got ${parsed === null ? 'null' : typeof parsed}`,\n )\n }\n // Disallow any unknown key — minimum disclosure means we reject\n // forward-compat extension keys at the format layer; new fields\n // require a format version bump and a new validator.\n for (const key of Object.keys(parsed)) {\n if (!ALLOWED_HEADER_KEYS.has(key)) {\n throw new Error(\n `.noydb bundle header contains forbidden key \"${key}\". 
` +\n `Only minimum-disclosure fields are allowed: ` +\n `${[...ALLOWED_HEADER_KEYS].join(', ')}.`,\n )\n }\n }\n const h = parsed as Record<string, unknown>\n if (typeof h['formatVersion'] !== 'number' || h['formatVersion'] !== NOYDB_BUNDLE_FORMAT_VERSION) {\n throw new Error(\n `.noydb bundle header.formatVersion must be ${NOYDB_BUNDLE_FORMAT_VERSION}, ` +\n `got ${String(h['formatVersion'])}. The reader does not support ` +\n `forward-compat versions; upgrade the reader to handle newer bundles.`,\n )\n }\n if (typeof h['handle'] !== 'string' || !/^[0-9A-HJKMNP-TV-Z]{26}$/.test(h['handle'])) {\n throw new Error(\n `.noydb bundle header.handle must be a 26-character Crockford base32 ULID, ` +\n `got ${typeof h['handle'] === 'string' ? `\"${h['handle']}\"` : String(h['handle'])}.`,\n )\n }\n if (typeof h['bodyBytes'] !== 'number' || !Number.isInteger(h['bodyBytes']) || h['bodyBytes'] < 0) {\n throw new Error(\n `.noydb bundle header.bodyBytes must be a non-negative integer, ` +\n `got ${String(h['bodyBytes'])}.`,\n )\n }\n if (typeof h['bodySha256'] !== 'string' || !/^[0-9a-f]{64}$/.test(h['bodySha256'])) {\n throw new Error(\n `.noydb bundle header.bodySha256 must be a 64-character lowercase hex string, ` +\n `got ${typeof h['bodySha256'] === 'string' ? 
`\"${h['bodySha256']}\"` : String(h['bodySha256'])}.`,\n )\n }\n if (h['publicEnvelope'] !== undefined) {\n const env = h['publicEnvelope']\n if (env === null || typeof env !== 'object' || Array.isArray(env)) {\n throw new Error(\n `.noydb bundle header.publicEnvelope must be a JSON object when present, got ${typeof env}.`,\n )\n }\n const e = env as Record<string, unknown>\n if (e['_noydb_public'] !== 1) {\n throw new Error(\n `.noydb bundle header.publicEnvelope._noydb_public must be 1, got ${String(e['_noydb_public'])}.`,\n )\n }\n if (typeof e['version'] !== 'number' || !Number.isInteger(e['version']) || e['version'] < 1) {\n throw new Error(\n `.noydb bundle header.publicEnvelope.version must be a positive integer, got ${String(e['version'])}.`,\n )\n }\n }\n}\n\n/**\n * Encode a header object to UTF-8 JSON bytes after validating\n * minimum disclosure. Used by the writer to serialize the header\n * region of the container.\n */\nexport function encodeBundleHeader(header: NoydbBundleHeader): Uint8Array {\n validateBundleHeader(header)\n // Stable key ordering — JSON.stringify with no replacer uses\n // insertion order, which is fine here because we control the\n // object construction. Stable ordering means two bundles with\n // identical contents produce byte-identical headers.\n const json = JSON.stringify({\n formatVersion: header.formatVersion,\n handle: header.handle,\n bodyBytes: header.bodyBytes,\n bodySha256: header.bodySha256,\n ...(header.publicEnvelope !== undefined ? { publicEnvelope: header.publicEnvelope } : {}),\n })\n return new TextEncoder().encode(json)\n}\n\n/**\n * Parse a bundle header from its UTF-8 JSON bytes. 
Throws on\n * invalid JSON or any minimum-disclosure violation.\n */\nexport function decodeBundleHeader(bytes: Uint8Array): NoydbBundleHeader {\n const json = new TextDecoder('utf-8', { fatal: true }).decode(bytes)\n let parsed: unknown\n try {\n parsed = JSON.parse(json)\n } catch (err) {\n throw new Error(\n `.noydb bundle header is not valid JSON: ${(err as Error).message}`,\n )\n }\n validateBundleHeader(parsed)\n return parsed\n}\n\n/**\n * Read a uint32 from `bytes` at `offset` in big-endian byte order.\n * No bounds check — callers must guarantee `offset + 4 <= bytes.length`.\n * Used to decode the header length field; kept inline so the parser\n * doesn't depend on DataView allocation per call.\n */\nexport function readUint32BE(bytes: Uint8Array, offset: number): number {\n return (\n (bytes[offset]! << 24 >>> 0) +\n (bytes[offset + 1]! << 16) +\n (bytes[offset + 2]! << 8) +\n bytes[offset + 3]!\n )\n}\n\n/**\n * Write a uint32 to `bytes` at `offset` in big-endian byte order.\n * No bounds check — callers must guarantee `offset + 4 <= bytes.length`.\n */\nexport function writeUint32BE(bytes: Uint8Array, offset: number, value: number): void {\n bytes[offset] = (value >>> 24) & 0xff\n bytes[offset + 1] = (value >>> 16) & 0xff\n bytes[offset + 2] = (value >>> 8) & 0xff\n bytes[offset + 3] = value & 0xff\n}\n\n/**\n * Verify the magic prefix of a bundle. Returns true if the first\n * 4 bytes match `NDB1`. Used by readers as a fast file-type check\n * before any further parsing.\n */\nexport function hasNoydbBundleMagic(bytes: Uint8Array): boolean {\n if (bytes.length < NOYDB_BUNDLE_MAGIC.length) return false\n for (let i = 0; i < NOYDB_BUNDLE_MAGIC.length; i++) {\n if (bytes[i] !== NOYDB_BUNDLE_MAGIC[i]) return false\n }\n return true\n}\n","/**\n * `.noydb` container primitives — write, read, header-only read.\n *\n *. 
Wraps a `vault.dump()` JSON string in the\n * binary container described in `format.ts`.\n *\n * **Three primitives:**\n *\n * - `writeNoydbBundle(vault, opts?)` — produces the\n * full container bytes ready to write to disk or upload\n * - `readNoydbBundleHeader(bytes)` — parses just the header\n * without decompressing the body, fast file-type and\n * metadata read for cloud listing UIs\n * - `readNoydbBundle(bytes)` — full read: validates magic,\n * header, integrity hash, and decompresses the body to\n * return the original `dump()` JSON string for use with\n * `vault.load()`\n *\n * **Compression strategy:** brotli when available (Node 22+,\n * Chrome 124+, Firefox 122+), gzip fallback elsewhere. The\n * algorithm choice is encoded in the format byte at offset 5,\n * so readers handle either transparently. Brotli wins ~30-50%\n * on JSON payloads with repeated keys (which vault dumps\n * are).\n *\n * **Why split read/load?** `readNoydbBundle` returns the\n * *unwrapped JSON string*, not a Vault object. The caller\n * is responsible for piping that JSON into\n * `vault.load(json, passphrase)`. Splitting the layers\n * keeps the bundle module free of any crypto/passphrase\n * concerns — it's purely a format layer. 
The same `readNoydbBundle`\n * call can also feed verification tools, format inspectors, or\n * archive utilities that don't care about decryption.\n */\n\nimport {\n COMPRESSION_BROTLI,\n COMPRESSION_GZIP,\n COMPRESSION_NONE,\n FLAG_COMPRESSED,\n FLAG_HAS_INTEGRITY_HASH,\n NOYDB_BUNDLE_FORMAT_VERSION,\n NOYDB_BUNDLE_MAGIC,\n NOYDB_BUNDLE_PREFIX_BYTES,\n decodeBundleHeader,\n encodeBundleHeader,\n hasNoydbBundleMagic,\n readUint32BE,\n writeUint32BE,\n type CompressionAlgo,\n type NoydbBundleHeader,\n} from './format.js'\nimport { BundleIntegrityError } from '../errors.js'\nimport type { Vault } from '../vault.js'\nimport type { BundleRecipient } from '../team/keyring.js'\nimport { pickLocale } from '../meta/public-envelope/storage.js'\nimport type { PublicEnvelope } from '../meta/public-envelope/types.js'\n\n/**\n * Options accepted by `writeNoydbBundle`.\n *\n * - `compression: 'auto'` (default) — try brotli, fall back to gzip\n * - `compression: 'brotli'` — force brotli, throw if unsupported\n * - `compression: 'gzip'` — force gzip\n * - `compression: 'none'` — no compression (round-trip testing only)\n *\n * **Slice filtering** (added in ):\n * - `collections` — allowlist of collection names to include. Internal\n * collections (keyrings, ledger) and excluded user collections are\n * dropped from the bundle. Records inside included collections are\n * carried through verbatim.\n * - `since` — only records whose envelope `_ts` is on/after the given\n * instant survive. Operates on the unencrypted envelope timestamp,\n * so plaintext access to records is not required.\n *\n * Both filters intersect (AND). When neither is provided the bundle is\n * a whole-vault snapshot, identical to today's behaviour.\n */\nexport interface WriteNoydbBundleOptions {\n readonly compression?: 'auto' | 'brotli' | 'gzip' | 'none'\n /** Allowlist of user-collection names to include. 
*/\n readonly collections?: readonly string[]\n /**\n * Drop records whose envelope `_ts` is strictly older than this\n * instant. Accepts a `Date` or any ISO-8601 string parseable by\n * `new Date()`.\n */\n readonly since?: Date | string\n /**\n * Plaintext-pipeline record predicate. Decrypts each record\n * with the vault's per-collection DEK, runs the predicate, and\n * keeps the original ciphertext for survivors (no re-encrypt —\n * preserves zero-knowledge cleanly). Records the predicate returns\n * `false` for are dropped from the bundle.\n *\n * Async predicates are supported. Mutating the record from inside\n * the predicate is undefined behaviour.\n */\n readonly where?: (\n record: unknown,\n ctx: { collection: string; id: string },\n ) => boolean | Promise<boolean>\n /**\n * Hierarchical-tier ceiling. Records whose envelope `_tier`\n * is strictly greater than this number are dropped. Operates on the\n * envelope `_tier` (no decryption needed) — vault.exportStream is\n * referenced in the issue body for symmetry, but the tier value\n * lives on the unencrypted envelope. Vault without tiers is a no-op.\n */\n readonly tierAtMost?: number\n /**\n * Single-recipient re-keying shorthand. When set, the\n * bundle's keyring is replaced with one freshly-derived entry sealed\n * with this passphrase. The recipient inherits the source keyring's\n * userId, role, and permissions. Mutually exclusive with `recipients`.\n */\n readonly exportPassphrase?: string\n /**\n * Multi-recipient re-keying. Replaces the bundle's keyring\n * map with one slot per recipient, each sealed with its own\n * passphrase. DEKs are unwrapped from the source keyring once and\n * re-wrapped per recipient — record ciphertext is unchanged.\n *\n * Mutually exclusive with `exportPassphrase`. 
When neither is set,\n * the bundle inherits the source keyring as-is (today's behaviour,\n * suited to personal backup-and-restore).\n */\n readonly recipients?: readonly BundleRecipient[]\n}\n\n/**\n * Result returned by `readNoydbBundle`. The caller is expected to\n * pass `dumpJson` into `vault.load(json, passphrase)` to\n * actually restore a vault. Splitting the layers keeps the\n * bundle module free of crypto concerns — see file-level docs.\n */\nexport interface NoydbBundleReadResult {\n readonly header: NoydbBundleHeader\n readonly dumpJson: string\n}\n\n/**\n * Detect whether the runtime's `CompressionStream` supports brotli.\n *\n * Brotli requires Node 22+ / Chrome 124+ / Firefox 122+. The\n * detection runs the `CompressionStream` constructor in a\n * try/catch — unsupported formats throw `TypeError` synchronously,\n * making this a safe one-shot check that we cache for the\n * lifetime of the process.\n */\nlet cachedBrotliSupport: boolean | null = null\nfunction supportsBrotliCompression(): boolean {\n if (cachedBrotliSupport !== null) return cachedBrotliSupport\n try {\n new CompressionStream('br' as CompressionFormat)\n cachedBrotliSupport = true\n } catch {\n cachedBrotliSupport = false\n }\n return cachedBrotliSupport\n}\n\n/** Test-only: reset the brotli detection cache between tests. */\nexport function resetBrotliSupportCache(): void {\n cachedBrotliSupport = null\n}\n\n/**\n * Pick the compression algorithm and the corresponding format byte\n * from a user option. Throws if the user explicitly requests brotli\n * on a runtime that doesn't support it — a silent fallback would\n * make the produced bundle smaller-than-expected and confuse\n * size-bound tests.\n */\nfunction selectCompression(option: WriteNoydbBundleOptions['compression']): {\n format: CompressionAlgo\n streamFormat: CompressionFormat | null\n} {\n const choice = option ?? 
'auto'\n if (choice === 'none') return { format: COMPRESSION_NONE, streamFormat: null }\n if (choice === 'gzip') return { format: COMPRESSION_GZIP, streamFormat: 'gzip' }\n if (choice === 'brotli') {\n if (!supportsBrotliCompression()) {\n throw new Error(\n `writeNoydbBundle({ compression: 'brotli' }) is not supported on this ` +\n `runtime. Brotli requires Node 22+, Chrome 124+, or Firefox 122+. ` +\n `Use { compression: 'auto' } to fall back to gzip silently, or ` +\n `{ compression: 'gzip' } to be explicit.`,\n )\n }\n return { format: COMPRESSION_BROTLI, streamFormat: 'br' as CompressionFormat }\n }\n // 'auto' — prefer brotli, fall back to gzip\n if (supportsBrotliCompression()) {\n return { format: COMPRESSION_BROTLI, streamFormat: 'br' as CompressionFormat }\n }\n return { format: COMPRESSION_GZIP, streamFormat: 'gzip' }\n}\n\n/**\n * Pump a Uint8Array through a CompressionStream / DecompressionStream\n * and collect the output. Both APIs are universally available in\n * Node 18+ and modern browsers; the only variance is which\n * formats they support, handled by `selectCompression` above.\n *\n * Implementation: build a single-chunk ReadableStream from the\n * input, pipe through the transform, then drain the resulting\n * ReadableStream into a single concatenated Uint8Array. 
This is\n * O(N) memory in the input + output sizes, which is fine for the\n * dump-sized payloads (typically <50MB) targets.\n */\nasync function pumpThroughStream(\n input: Uint8Array,\n stream: CompressionStream | DecompressionStream,\n): Promise<Uint8Array> {\n const readable = new Blob([input as BlobPart]).stream().pipeThrough(stream)\n const reader = readable.getReader()\n const chunks: Uint8Array[] = []\n let total = 0\n for (;;) {\n const { value, done } = await reader.read()\n if (done) break\n if (value) {\n chunks.push(value as Uint8Array)\n total += value.length\n }\n }\n const out = new Uint8Array(total)\n let offset = 0\n for (const chunk of chunks) {\n out.set(chunk, offset)\n offset += chunk.length\n }\n return out\n}\n\n/**\n * SHA-256 hex digest of `bytes`. Used for the bundle integrity\n * hash carried in the header. Web Crypto API only — no Node\n * crypto module, no third-party hash library.\n *\n * The output format is lowercase hex (64 chars for SHA-256). The\n * format validator pins this — uppercase or mixed-case digests\n * are rejected, so the writer and reader agree on canonicalization.\n */\nasync function sha256Hex(bytes: Uint8Array): Promise<string> {\n // Copy into a fresh ArrayBuffer-backed Uint8Array. The\n // underlying buffer of `bytes` may be SharedArrayBuffer (e.g.\n // from a worker), which `subtle.digest` rejects via TypeScript's\n // BufferSource type. 
Allocating a fresh ArrayBuffer-backed view\n // sidesteps the type narrowing and is portable across all\n // runtimes — the copy cost is O(N) but bundle bodies are\n // typically <50MB, well below the threshold where the copy\n // matters.\n const copy = new Uint8Array(bytes.length)\n copy.set(bytes)\n const digest = await crypto.subtle.digest('SHA-256', copy)\n const view = new Uint8Array(digest)\n let hex = ''\n for (let i = 0; i < view.length; i++) {\n hex += view[i]!.toString(16).padStart(2, '0')\n }\n return hex\n}\n\n/**\n * Concatenate any number of Uint8Arrays into a single new buffer.\n * Used to assemble the final bundle from its prefix + header +\n * body parts.\n */\nfunction concatBytes(parts: readonly Uint8Array[]): Uint8Array {\n let total = 0\n for (const p of parts) total += p.length\n const out = new Uint8Array(total)\n let offset = 0\n for (const p of parts) {\n out.set(p, offset)\n offset += p.length\n }\n return out\n}\n\n/**\n * Replace the bundle's keyrings with freshly built recipient slots,\n * one per supplied recipient. No-op when neither `exportPassphrase`\n * nor `recipients` is set — the source keyring is inherited as-is.\n *\n * The single-passphrase shorthand creates a one-recipient list whose\n * id, role, and permissions inherit from the source vault — useful\n * for \"back up to a different passphrase\" without changing role\n * semantics. The multi-recipient form wraps each slot independently\n * with its declared role + permissions.\n *\n * @internal\n */\nasync function applyRecipientRewrap(\n vault: Vault,\n dumpJson: string,\n opts: WriteNoydbBundleOptions,\n): Promise<string> {\n if (opts.exportPassphrase === undefined && opts.recipients === undefined) {\n return dumpJson\n }\n\n const recipients: readonly BundleRecipient[] =\n opts.recipients ?? 
[\n {\n id: vault.userId,\n passphrase: opts.exportPassphrase as string,\n role: vault.role,\n },\n ]\n\n const recipientKeyrings = await vault.buildBundleRecipientKeyrings(recipients)\n\n const backup = JSON.parse(dumpJson) as { keyrings: unknown; [k: string]: unknown }\n backup.keyrings = recipientKeyrings\n return JSON.stringify(backup)\n}\n\n/**\n * Apply opt-in slice filters to a vault dump JSON string. Filters that\n * narrow the bundle without crossing the encryption boundary — both\n * operate on metadata (collection name, envelope `_ts`) and never need\n * to decrypt records. When neither filter is set, the dump is returned\n * unchanged so the no-arg path stays a pure passthrough.\n *\n * Internal-collection filtering: when a `collections` allowlist is\n * provided, the bundle still carries `_internal` (ledger entries) and\n * the keyrings — they're necessary for the receiver to verify and\n * unlock the bundle. The allowlist applies to the user-collection\n * map only.\n *\n * @internal\n */\nfunction applySliceFilters(\n dumpJson: string,\n opts: WriteNoydbBundleOptions,\n): string {\n const collectionsFilter = opts.collections\n ? new Set(opts.collections)\n : null\n const sinceMs =\n opts.since !== undefined ? new Date(opts.since).getTime() : null\n if (collectionsFilter === null && sinceMs === null) return dumpJson\n\n // Parse, prune, re-serialize. 
The dump shape is stable\n // (VaultBackup) so this is a one-off allocation; for vaults beyond\n // the documented 1K–50K target a streaming variant would be a\n // follow-up, but the simple parse path keeps the slice path\n // type-safe and trivially auditable.\n const backup = JSON.parse(dumpJson) as {\n collections?: Record<string, Record<string, { _ts?: string }>>\n [k: string]: unknown\n }\n\n if (backup.collections && typeof backup.collections === 'object') {\n const next: Record<string, Record<string, unknown>> = {}\n for (const [name, records] of Object.entries(backup.collections)) {\n if (collectionsFilter && !collectionsFilter.has(name)) continue\n if (sinceMs === null) {\n next[name] = records\n continue\n }\n const kept: Record<string, unknown> = {}\n for (const [id, env] of Object.entries(records)) {\n const envTs = env._ts ? new Date(env._ts).getTime() : NaN\n if (Number.isFinite(envTs) && envTs >= sinceMs) {\n kept[id] = env\n }\n }\n next[name] = kept\n }\n backup.collections = next as typeof backup.collections\n }\n\n return JSON.stringify(backup)\n}\n\n/**\n * Apply opt-in plaintext-tier filters\n * to a vault dump. Operates BEFORE `applySliceFilters` so the metadata\n * pass sees the trimmed record set.\n *\n * The filter never re-encrypts: surviving records carry their original\n * envelope unchanged. Failing records are dropped from the\n * `collections` map. 
Internal collections (ledger, deltas) and the\n * keyrings map are untouched.\n *\n * @internal\n */\nasync function applyPlaintextFilters(\n vault: Vault,\n dumpJson: string,\n opts: WriteNoydbBundleOptions,\n): Promise<string> {\n if (opts.where === undefined && opts.tierAtMost === undefined) {\n return dumpJson\n }\n\n type Env = { _ts?: string; _tier?: number; _iv: string; _data: string }\n const backup = JSON.parse(dumpJson) as {\n collections?: Record<string, Record<string, Env>>\n [k: string]: unknown\n }\n if (!backup.collections || typeof backup.collections !== 'object') {\n return dumpJson\n }\n\n const tierCeiling = opts.tierAtMost\n const where = opts.where\n\n const next: Record<string, Record<string, Env>> = {}\n for (const [collName, records] of Object.entries(backup.collections)) {\n const kept: Record<string, Env> = {}\n for (const [id, env] of Object.entries(records)) {\n // Tier ceiling — runs FIRST so we don't waste a decrypt on\n // records about to be dropped anyway. Envelope tier defaults to\n // 0 when absent (matches Vault's tier-0 conventions).\n if (tierCeiling !== undefined) {\n const tier = env._tier ?? 0\n if (tier > tierCeiling) continue\n }\n // Plaintext predicate — decrypt, run, keep on truthy. Errors\n // from inside the predicate propagate (callers want to see why\n // their filter blew up rather than getting a silent passthrough).\n if (where !== undefined) {\n const record = await vault._decryptEnvelopeForBundleFilter(\n env as never,\n collName,\n )\n const ok = await where(record, { collection: collName, id })\n if (!ok) continue\n }\n kept[id] = env\n }\n next[collName] = kept\n }\n backup.collections = next\n return JSON.stringify(backup)\n}\n\n/**\n * Write a `.noydb` bundle for the given vault.\n *\n * Pipeline:\n * 1. 
Resolve or create the compartment's stable bundle handle\n * via `vault.getBundleHandle()` — same handle on\n * every export from the same vault instance, so cloud\n * adapters can use it as a primary key.\n * 2. `vault.dump()` → JSON string with encrypted records\n * inside.\n * 3. UTF-8 encode the dump string.\n * 4. Compress (brotli if available, gzip fallback by default).\n * 5. Compute SHA-256 of the compressed body for integrity.\n * 6. Build the minimum-disclosure header from format version,\n * handle, body length, body sha.\n * 7. Serialize: magic (4) + flags (1) + algo (1) + headerLen (4)\n * + header JSON (N) + compressed body (M).\n *\n * The output is a single `Uint8Array`. Consumers writing to disk\n * pass it to `fs.writeFile`; consumers uploading to cloud storage\n * pass it as the request body. The `@noy-db/file` adapter wraps\n * this with a `saveBundle(path, vault)` helper.\n */\nexport async function writeNoydbBundle(\n vault: Vault,\n opts: WriteNoydbBundleOptions = {},\n): Promise<Uint8Array> {\n if (opts.exportPassphrase !== undefined && opts.recipients !== undefined) {\n throw new Error(\n 'writeNoydbBundle: pass either exportPassphrase or recipients, not both',\n )\n }\n\n const handle = await vault.getBundleHandle()\n const dumpJson = await vault.dump()\n\n // Re-keying: when caller supplied recipients (or the single-recipient\n // shorthand), substitute the bundle's `keyrings` map with freshly\n // built recipient slots before slice filters run.\n const rekeyed = await applyRecipientRewrap(vault, dumpJson, opts)\n // Plaintext-tier filters run BEFORE\n // the metadata-only slice — that way the metadata pass sees the\n // already-trimmed record set and the two filter chains compose\n // cleanly.\n const plainFiltered = await applyPlaintextFilters(vault, rekeyed, opts)\n const filtered = applySliceFilters(plainFiltered, opts)\n const dumpBytes = new TextEncoder().encode(filtered)\n\n const { format, streamFormat } = 
selectCompression(opts.compression)\n const body = streamFormat === null\n ? dumpBytes\n : await pumpThroughStream(dumpBytes, new CompressionStream(streamFormat))\n\n const bodySha256 = await sha256Hex(body)\n\n // Snapshot the source vault's public envelope into the header\n // when one is persisted. `Vault.getPublicEnvelope` tolerates a\n // missing document and returns undefined, which we propagate as\n // \"no envelope in the header.\" Vaults without a\n // `_meta/public-envelope` document produce minimum-disclosure\n // headers exactly like before, preserving back-compat.\n const publicEnvelope = await vault.getPublicEnvelope()\n\n const header: NoydbBundleHeader = {\n formatVersion: NOYDB_BUNDLE_FORMAT_VERSION,\n handle,\n bodyBytes: body.length,\n bodySha256,\n ...(publicEnvelope !== undefined ? { publicEnvelope } : {}),\n }\n const headerBytes = encodeBundleHeader(header)\n\n // Assemble the fixed prefix in a 10-byte buffer.\n const prefix = new Uint8Array(NOYDB_BUNDLE_PREFIX_BYTES)\n prefix.set(NOYDB_BUNDLE_MAGIC, 0)\n prefix[4] =\n (streamFormat === null ? 0 : FLAG_COMPRESSED) | FLAG_HAS_INTEGRITY_HASH\n prefix[5] = format\n writeUint32BE(prefix, 6, headerBytes.length)\n\n return concatBytes([prefix, headerBytes, body])\n}\n\n/**\n * Internal helper shared by both readers — parses just the prefix\n * + header region of a bundle without touching the body. Returns\n * the parsed header plus the offset where the body starts and the\n * compression algorithm needed to decompress it.\n *\n * Throws on any format violation: missing/invalid magic, truncated\n * prefix, header length larger than the file, or unknown\n * compression algorithm.\n */\nfunction parsePrefixAndHeader(bytes: Uint8Array): {\n header: NoydbBundleHeader\n bodyOffset: number\n algo: CompressionAlgo\n flags: number\n} {\n if (!hasNoydbBundleMagic(bytes)) {\n throw new Error(\n `Not a .noydb bundle: missing 'NDB1' magic prefix. 
The first 4 bytes ` +\n `are ${[...bytes.slice(0, 4)].map((b) => b.toString(16).padStart(2, '0')).join(' ')}.`,\n )\n }\n if (bytes.length < NOYDB_BUNDLE_PREFIX_BYTES) {\n throw new Error(\n `Truncated .noydb bundle: file is only ${bytes.length} bytes, ` +\n `which is less than the ${NOYDB_BUNDLE_PREFIX_BYTES}-byte fixed prefix.`,\n )\n }\n const flags = bytes[4]!\n const algo = bytes[5]!\n if (algo !== COMPRESSION_NONE && algo !== COMPRESSION_GZIP && algo !== COMPRESSION_BROTLI) {\n throw new Error(\n `.noydb bundle declares unknown compression algorithm ${algo}. ` +\n `Known values: 0 (none), 1 (gzip), 2 (brotli).`,\n )\n }\n const headerLength = readUint32BE(bytes, 6)\n const bodyOffset = NOYDB_BUNDLE_PREFIX_BYTES + headerLength\n if (bodyOffset > bytes.length) {\n throw new Error(\n `Truncated .noydb bundle: declared header length ${headerLength} ` +\n `would extend past end of file (${bytes.length} bytes).`,\n )\n }\n const headerBytes = bytes.slice(NOYDB_BUNDLE_PREFIX_BYTES, bodyOffset)\n const header = decodeBundleHeader(headerBytes)\n return { header, bodyOffset, algo: algo as CompressionAlgo, flags }\n}\n\n/**\n * Read just the bundle header — no body decompression, no\n * integrity verification. Intended for cloud-listing UIs that want\n * to show the handle and size before downloading the full body.\n *\n * Returns the same `NoydbBundleHeader` shape as the writer, with\n * minimum-disclosure validation already applied.\n *\n * **Cost** — O(prefix + header bytes). The header is normally well\n * under 1 KB, but may grow to roughly 256 KB when a `publicEnvelope`\n * with an inline icon is present. 
Cloud-listing UIs that previously\n * assumed sub-KB header reads should account for this when sizing\n * range requests against bundles that may carry icons.\n */\nexport function readNoydbBundleHeader(bytes: Uint8Array): NoydbBundleHeader {\n return parsePrefixAndHeader(bytes).header\n}\n\n/**\n * Read just the bundle's public envelope (`docs/subsystems/public-envelope.md`)\n * — without verifying the body or even parsing the dump JSON. Pass\n * the raw bundle bytes; receive the owner-curated metadata or\n * `undefined` if the bundle was written without one.\n *\n * Locale-resolves any `name` / `description` map fields when `locale`\n * is supplied. Omitting `locale` returns the raw envelope.\n *\n * Same security caveat as the on-vault read path — the public\n * envelope is **untrusted hint** in v1; the encrypted body remains\n * the source of truth for vault contents.\n */\nexport function readNoydbBundlePublicEnvelope(\n bytes: Uint8Array,\n opts: { readonly locale?: string } = {},\n): PublicEnvelope | undefined {\n const header = parsePrefixAndHeader(bytes).header\n const env = header.publicEnvelope\n if (!env) return undefined\n if (opts.locale === undefined) return env\n return {\n ...env,\n ...(env.name !== undefined ? { name: pickLocale(env.name, opts.locale, env.defaultLocale) } : {}),\n ...(env.description !== undefined ? { description: pickLocale(env.description, opts.locale, env.defaultLocale) } : {}),\n }\n}\n\n/**\n * Read a full `.noydb` bundle: validate magic + header, verify\n * integrity hash over the body bytes, decompress, and return the\n * original `vault.dump()` JSON string ready to pass to\n * `vault.load()`.\n *\n * Throws `BundleIntegrityError` if the body's actual SHA-256 does\n * not match the value declared in the header. Distinct from a\n * format error so consumers can pattern-match in catch blocks\n * (corrupted-in-transit vs malformed-by-producer).\n *\n * Note: this function does NOT take a passphrase. 
The dump JSON\n * inside the body still contains encrypted records — restoring\n * the vault requires `vault.load(dumpJson, passphrase)`\n * after this call. Splitting the layers keeps the bundle module\n * free of crypto concerns and lets the same code feed format\n * inspectors that never decrypt anything.\n */\nexport async function readNoydbBundle(\n bytes: Uint8Array,\n): Promise<NoydbBundleReadResult> {\n const { header, bodyOffset, algo } = parsePrefixAndHeader(bytes)\n const body = bytes.slice(bodyOffset)\n\n // Length check before hash check — a length mismatch is the\n // cheapest tamper signal and produces a more actionable error.\n if (body.length !== header.bodyBytes) {\n throw new BundleIntegrityError(\n `body length ${body.length} does not match header.bodyBytes ` +\n `${header.bodyBytes}. The bundle was truncated or padded ` +\n `between write and read.`,\n )\n }\n\n const actualSha = await sha256Hex(body)\n if (actualSha !== header.bodySha256) {\n throw new BundleIntegrityError(\n `body sha256 ${actualSha} does not match header.bodySha256 ` +\n `${header.bodySha256}. The bundle bytes were modified between ` +\n `write and read — refuse to decompress.`,\n )\n }\n\n let dumpBytes: Uint8Array\n if (algo === COMPRESSION_NONE) {\n dumpBytes = body\n } else {\n const streamFormat: CompressionFormat =\n algo === COMPRESSION_BROTLI ? ('br' as CompressionFormat) : 'gzip'\n try {\n dumpBytes = await pumpThroughStream(body, new DecompressionStream(streamFormat))\n } catch (err) {\n throw new BundleIntegrityError(\n `decompression failed: ${(err as Error).message}. 
The bundle ` +\n `passed the integrity hash but the body is not valid ` +\n `${streamFormat} data — likely a producer bug.`,\n )\n }\n }\n\n const dumpJson = new TextDecoder('utf-8', { fatal: true }).decode(dumpBytes)\n return { header, dumpJson }\n}\n"],"mappings":";;;;;;;;AAmDO,IAAM,qBAAqB,IAAI,WAAW,CAAC,IAAM,IAAM,IAAM,EAAI,CAAC;AAGlE,IAAM,4BAA4B;AAGlC,IAAM,8BAA8B;AASpC,IAAM,kBAAkB;AACxB,IAAM,0BAA0B;AAchC,IAAM,mBAAmB;AACzB,IAAM,mBAAmB;AACzB,IAAM,qBAAqB;AAwDlC,IAAM,sBAA2C,oBAAI,IAAI;AAAA,EACvD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAiBM,SAAS,qBACd,QACqC;AACrC,MAAI,WAAW,QAAQ,OAAO,WAAW,UAAU;AACjD,UAAM,IAAI;AAAA,MACR,mDAAmD,WAAW,OAAO,SAAS,OAAO,MAAM;AAAA,IAC7F;AAAA,EACF;AAIA,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AACrC,QAAI,CAAC,oBAAoB,IAAI,GAAG,GAAG;AACjC,YAAM,IAAI;AAAA,QACR,gDAAgD,GAAG,kDAE9C,CAAC,GAAG,mBAAmB,EAAE,KAAK,IAAI,CAAC;AAAA,MAC1C;AAAA,IACF;AAAA,EACF;AACA,QAAM,IAAI;AACV,MAAI,OAAO,EAAE,eAAe,MAAM,YAAY,EAAE,eAAe,MAAM,6BAA6B;AAChG,UAAM,IAAI;AAAA,MACR,8CAA8C,2BAA2B,SAChE,OAAO,EAAE,eAAe,CAAC,CAAC;AAAA,IAErC;AAAA,EACF;AACA,MAAI,OAAO,EAAE,QAAQ,MAAM,YAAY,CAAC,2BAA2B,KAAK,EAAE,QAAQ,CAAC,GAAG;AACpF,UAAM,IAAI;AAAA,MACR,iFACS,OAAO,EAAE,QAAQ,MAAM,WAAW,IAAI,EAAE,QAAQ,CAAC,MAAM,OAAO,EAAE,QAAQ,CAAC,CAAC;AAAA,IACrF;AAAA,EACF;AACA,MAAI,OAAO,EAAE,WAAW,MAAM,YAAY,CAAC,OAAO,UAAU,EAAE,WAAW,CAAC,KAAK,EAAE,WAAW,IAAI,GAAG;AACjG,UAAM,IAAI;AAAA,MACR,sEACS,OAAO,EAAE,WAAW,CAAC,CAAC;AAAA,IACjC;AAAA,EACF;AACA,MAAI,OAAO,EAAE,YAAY,MAAM,YAAY,CAAC,iBAAiB,KAAK,EAAE,YAAY,CAAC,GAAG;AAClF,UAAM,IAAI;AAAA,MACR,oFACS,OAAO,EAAE,YAAY,MAAM,WAAW,IAAI,EAAE,YAAY,CAAC,MAAM,OAAO,EAAE,YAAY,CAAC,CAAC;AAAA,IACjG;AAAA,EACF;AACA,MAAI,EAAE,gBAAgB,MAAM,QAAW;AACrC,UAAM,MAAM,EAAE,gBAAgB;AAC9B,QAAI,QAAQ,QAAQ,OAAO,QAAQ,YAAY,MAAM,QAAQ,GAAG,GAAG;AACjE,YAAM,IAAI;AAAA,QACR,+EAA+E,OAAO,GAAG;AAAA,MAC3F;AAAA,IACF;AACA,UAAM,IAAI;AACV,QAAI,EAAE,eAAe,MAAM,GAAG;AAC5B,YAAM,IAAI;AAAA,QACR,oEAAoE,OAAO,EAAE,eAAe,CAAC,CAAC;AAAA,MAChG;AAAA,IACF;AACA,QAAI,OAAO,EAAE,SAAS,MAAM,YAAY,CAAC,OAAO,UAAU,EAAE,SAAS,CAAC,KAAK,EAAE,SAAS,IAAI,GAAG;
AAC3F,YAAM,IAAI;AAAA,QACR,+EAA+E,OAAO,EAAE,SAAS,CAAC,CAAC;AAAA,MACrG;AAAA,IACF;AAAA,EACF;AACF;AAOO,SAAS,mBAAmB,QAAuC;AACxE,uBAAqB,MAAM;AAK3B,QAAM,OAAO,KAAK,UAAU;AAAA,IAC1B,eAAe,OAAO;AAAA,IACtB,QAAQ,OAAO;AAAA,IACf,WAAW,OAAO;AAAA,IAClB,YAAY,OAAO;AAAA,IACnB,GAAI,OAAO,mBAAmB,SAAY,EAAE,gBAAgB,OAAO,eAAe,IAAI,CAAC;AAAA,EACzF,CAAC;AACD,SAAO,IAAI,YAAY,EAAE,OAAO,IAAI;AACtC;AAMO,SAAS,mBAAmB,OAAsC;AACvE,QAAM,OAAO,IAAI,YAAY,SAAS,EAAE,OAAO,KAAK,CAAC,EAAE,OAAO,KAAK;AACnE,MAAI;AACJ,MAAI;AACF,aAAS,KAAK,MAAM,IAAI;AAAA,EAC1B,SAAS,KAAK;AACZ,UAAM,IAAI;AAAA,MACR,2CAA4C,IAAc,OAAO;AAAA,IACnE;AAAA,EACF;AACA,uBAAqB,MAAM;AAC3B,SAAO;AACT;AAQO,SAAS,aAAa,OAAmB,QAAwB;AACtE,UACG,MAAM,MAAM,KAAM,OAAO,MACzB,MAAM,SAAS,CAAC,KAAM,OACtB,MAAM,SAAS,CAAC,KAAM,KACvB,MAAM,SAAS,CAAC;AAEpB;AAMO,SAAS,cAAc,OAAmB,QAAgB,OAAqB;AACpF,QAAM,MAAM,IAAK,UAAU,KAAM;AACjC,QAAM,SAAS,CAAC,IAAK,UAAU,KAAM;AACrC,QAAM,SAAS,CAAC,IAAK,UAAU,IAAK;AACpC,QAAM,SAAS,CAAC,IAAI,QAAQ;AAC9B;AAOO,SAAS,oBAAoB,OAA4B;AAC9D,MAAI,MAAM,SAAS,mBAAmB,OAAQ,QAAO;AACrD,WAAS,IAAI,GAAG,IAAI,mBAAmB,QAAQ,KAAK;AAClD,QAAI,MAAM,CAAC,MAAM,mBAAmB,CAAC,EAAG,QAAO;AAAA,EACjD;AACA,SAAO;AACT;;;AC3JA,IAAI,sBAAsC;AAC1C,SAAS,4BAAqC;AAC5C,MAAI,wBAAwB,KAAM,QAAO;AACzC,MAAI;AACF,QAAI,kBAAkB,IAAyB;AAC/C,0BAAsB;AAAA,EACxB,QAAQ;AACN,0BAAsB;AAAA,EACxB;AACA,SAAO;AACT;AAGO,SAAS,0BAAgC;AAC9C,wBAAsB;AACxB;AASA,SAAS,kBAAkB,QAGzB;AACA,QAAM,SAAS,UAAU;AACzB,MAAI,WAAW,OAAQ,QAAO,EAAE,QAAQ,kBAAkB,cAAc,KAAK;AAC7E,MAAI,WAAW,OAAQ,QAAO,EAAE,QAAQ,kBAAkB,cAAc,OAAO;AAC/E,MAAI,WAAW,UAAU;AACvB,QAAI,CAAC,0BAA0B,GAAG;AAChC,YAAM,IAAI;AAAA,QACR;AAAA,MAIF;AAAA,IACF;AACA,WAAO,EAAE,QAAQ,oBAAoB,cAAc,KAA0B;AAAA,EAC/E;AAEA,MAAI,0BAA0B,GAAG;AAC/B,WAAO,EAAE,QAAQ,oBAAoB,cAAc,KAA0B;AAAA,EAC/E;AACA,SAAO,EAAE,QAAQ,kBAAkB,cAAc,OAAO;AAC1D;AAcA,eAAe,kBACb,OACA,QACqB;AACrB,QAAM,WAAW,IAAI,KAAK,CAAC,KAAiB,CAAC,EAAE,OAAO,EAAE,YAAY,MAAM;AAC1E,QAAM,SAAS,SAAS,UAAU;AAClC,QAAM,SAAuB,CAAC;AAC9B,MAAI,QAAQ;AACZ,aAAS;AACP,UAAM,EAAE,OAAO,KAAK,IAAI,MAAM,OAAO,KAAK;AAC1C,QAAI,KAAM;AACV,QAAI,OAAO;AACT,aAAO,KAAK,KAAmB;AAC/B,eAAS,MAAM;AA
AA,IACjB;AAAA,EACF;AACA,QAAM,MAAM,IAAI,WAAW,KAAK;AAChC,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC1B,QAAI,IAAI,OAAO,MAAM;AACrB,cAAU,MAAM;AAAA,EAClB;AACA,SAAO;AACT;AAWA,eAAe,UAAU,OAAoC;AAS3D,QAAM,OAAO,IAAI,WAAW,MAAM,MAAM;AACxC,OAAK,IAAI,KAAK;AACd,QAAM,SAAS,MAAM,OAAO,OAAO,OAAO,WAAW,IAAI;AACzD,QAAM,OAAO,IAAI,WAAW,MAAM;AAClC,MAAI,MAAM;AACV,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,WAAO,KAAK,CAAC,EAAG,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAAA,EAC9C;AACA,SAAO;AACT;AAOA,SAAS,YAAY,OAA0C;AAC7D,MAAI,QAAQ;AACZ,aAAW,KAAK,MAAO,UAAS,EAAE;AAClC,QAAM,MAAM,IAAI,WAAW,KAAK;AAChC,MAAI,SAAS;AACb,aAAW,KAAK,OAAO;AACrB,QAAI,IAAI,GAAG,MAAM;AACjB,cAAU,EAAE;AAAA,EACd;AACA,SAAO;AACT;AAeA,eAAe,qBACb,OACA,UACA,MACiB;AACjB,MAAI,KAAK,qBAAqB,UAAa,KAAK,eAAe,QAAW;AACxE,WAAO;AAAA,EACT;AAEA,QAAM,aACJ,KAAK,cAAc;AAAA,IACjB;AAAA,MACE,IAAI,MAAM;AAAA,MACV,YAAY,KAAK;AAAA,MACjB,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AAEF,QAAM,oBAAoB,MAAM,MAAM,6BAA6B,UAAU;AAE7E,QAAM,SAAS,KAAK,MAAM,QAAQ;AAClC,SAAO,WAAW;AAClB,SAAO,KAAK,UAAU,MAAM;AAC9B;AAiBA,SAAS,kBACP,UACA,MACQ;AACR,QAAM,oBAAoB,KAAK,cAC3B,IAAI,IAAI,KAAK,WAAW,IACxB;AACJ,QAAM,UACJ,KAAK,UAAU,SAAY,IAAI,KAAK,KAAK,KAAK,EAAE,QAAQ,IAAI;AAC9D,MAAI,sBAAsB,QAAQ,YAAY,KAAM,QAAO;AAO3D,QAAM,SAAS,KAAK,MAAM,QAAQ;AAKlC,MAAI,OAAO,eAAe,OAAO,OAAO,gBAAgB,UAAU;AAChE,UAAM,OAAgD,CAAC;AACvD,eAAW,CAAC,MAAM,OAAO,KAAK,OAAO,QAAQ,OAAO,WAAW,GAAG;AAChE,UAAI,qBAAqB,CAAC,kBAAkB,IAAI,IAAI,EAAG;AACvD,UAAI,YAAY,MAAM;AACpB,aAAK,IAAI,IAAI;AACb;AAAA,MACF;AACA,YAAM,OAAgC,CAAC;AACvC,iBAAW,CAAC,IAAI,GAAG,KAAK,OAAO,QAAQ,OAAO,GAAG;AAC/C,cAAM,QAAQ,IAAI,MAAM,IAAI,KAAK,IAAI,GAAG,EAAE,QAAQ,IAAI;AACtD,YAAI,OAAO,SAAS,KAAK,KAAK,SAAS,SAAS;AAC9C,eAAK,EAAE,IAAI;AAAA,QACb;AAAA,MACF;AACA,WAAK,IAAI,IAAI;AAAA,IACf;AACA,WAAO,cAAc;AAAA,EACvB;AAEA,SAAO,KAAK,UAAU,MAAM;AAC9B;AAcA,eAAe,sBACb,OACA,UACA,MACiB;AACjB,MAAI,KAAK,UAAU,UAAa,KAAK,eAAe,QAAW;AAC7D,WAAO;AAAA,EACT;AAGA,QAAM,SAAS,KAAK,MAAM,QAAQ;AAIlC,MAAI,CAAC,OAAO,eAAe,OAAO,OAAO,gBAAgB,UAAU;AACjE,WAAO;AAAA,EACT;AAEA,QAAM,cAAc,KAAK;AACzB,QAAM,QAAQ,KAAK;AAEnB,QAAM,OAA4C,CAAC;AACnD,aAAW,CAAC,UAAU,OAAO,
KAAK,OAAO,QAAQ,OAAO,WAAW,GAAG;AACpE,UAAM,OAA4B,CAAC;AACnC,eAAW,CAAC,IAAI,GAAG,KAAK,OAAO,QAAQ,OAAO,GAAG;AAI/C,UAAI,gBAAgB,QAAW;AAC7B,cAAM,OAAO,IAAI,SAAS;AAC1B,YAAI,OAAO,YAAa;AAAA,MAC1B;AAIA,UAAI,UAAU,QAAW;AACvB,cAAM,SAAS,MAAM,MAAM;AAAA,UACzB;AAAA,UACA;AAAA,QACF;AACA,cAAM,KAAK,MAAM,MAAM,QAAQ,EAAE,YAAY,UAAU,GAAG,CAAC;AAC3D,YAAI,CAAC,GAAI;AAAA,MACX;AACA,WAAK,EAAE,IAAI;AAAA,IACb;AACA,SAAK,QAAQ,IAAI;AAAA,EACnB;AACA,SAAO,cAAc;AACrB,SAAO,KAAK,UAAU,MAAM;AAC9B;AAyBA,eAAsB,iBACpB,OACA,OAAgC,CAAC,GACZ;AACrB,MAAI,KAAK,qBAAqB,UAAa,KAAK,eAAe,QAAW;AACxE,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAS,MAAM,MAAM,gBAAgB;AAC3C,QAAM,WAAW,MAAM,MAAM,KAAK;AAKlC,QAAM,UAAU,MAAM,qBAAqB,OAAO,UAAU,IAAI;AAKhE,QAAM,gBAAgB,MAAM,sBAAsB,OAAO,SAAS,IAAI;AACtE,QAAM,WAAW,kBAAkB,eAAe,IAAI;AACtD,QAAM,YAAY,IAAI,YAAY,EAAE,OAAO,QAAQ;AAEnD,QAAM,EAAE,QAAQ,aAAa,IAAI,kBAAkB,KAAK,WAAW;AACnE,QAAM,OAAO,iBAAiB,OAC1B,YACA,MAAM,kBAAkB,WAAW,IAAI,kBAAkB,YAAY,CAAC;AAE1E,QAAM,aAAa,MAAM,UAAU,IAAI;AAQvC,QAAM,iBAAiB,MAAM,MAAM,kBAAkB;AAErD,QAAM,SAA4B;AAAA,IAChC,eAAe;AAAA,IACf;AAAA,IACA,WAAW,KAAK;AAAA,IAChB;AAAA,IACA,GAAI,mBAAmB,SAAY,EAAE,eAAe,IAAI,CAAC;AAAA,EAC3D;AACA,QAAM,cAAc,mBAAmB,MAAM;AAG7C,QAAM,SAAS,IAAI,WAAW,yBAAyB;AACvD,SAAO,IAAI,oBAAoB,CAAC;AAChC,SAAO,CAAC,KACL,iBAAiB,OAAO,IAAI,mBAAmB;AAClD,SAAO,CAAC,IAAI;AACZ,gBAAc,QAAQ,GAAG,YAAY,MAAM;AAE3C,SAAO,YAAY,CAAC,QAAQ,aAAa,IAAI,CAAC;AAChD;AAYA,SAAS,qBAAqB,OAK5B;AACA,MAAI,CAAC,oBAAoB,KAAK,GAAG;AAC/B,UAAM,IAAI;AAAA,MACR,2EACS,CAAC,GAAG,MAAM,MAAM,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAAE,KAAK,GAAG,CAAC;AAAA,IACvF;AAAA,EACF;AACA,MAAI,MAAM,SAAS,2BAA2B;AAC5C,UAAM,IAAI;AAAA,MACR,yCAAyC,MAAM,MAAM,kCACzB,yBAAyB;AAAA,IACvD;AAAA,EACF;AACA,QAAM,QAAQ,MAAM,CAAC;AACrB,QAAM,OAAO,MAAM,CAAC;AACpB,MAAI,SAAS,oBAAoB,SAAS,oBAAoB,SAAS,oBAAoB;AACzF,UAAM,IAAI;AAAA,MACR,wDAAwD,IAAI;AAAA,IAE9D;AAAA,EACF;AACA,QAAM,eAAe,aAAa,OAAO,CAAC;AAC1C,QAAM,aAAa,4BAA4B;AAC/C,MAAI,aAAa,MAAM,QAAQ;AAC7B,UAAM,IAAI;AAAA,MACR,mDAAmD,YAAY,mCAC3B,MAAM,MAAM;AAAA,IAClD;AAAA,EACF;
AACA,QAAM,cAAc,MAAM,MAAM,2BAA2B,UAAU;AACrE,QAAM,SAAS,mBAAmB,WAAW;AAC7C,SAAO,EAAE,QAAQ,YAAY,MAA+B,MAAM;AACpE;AAgBO,SAAS,sBAAsB,OAAsC;AAC1E,SAAO,qBAAqB,KAAK,EAAE;AACrC;AAeO,SAAS,8BACd,OACA,OAAqC,CAAC,GACV;AAC5B,QAAM,SAAS,qBAAqB,KAAK,EAAE;AAC3C,QAAM,MAAM,OAAO;AACnB,MAAI,CAAC,IAAK,QAAO;AACjB,MAAI,KAAK,WAAW,OAAW,QAAO;AACtC,SAAO;AAAA,IACL,GAAG;AAAA,IACH,GAAI,IAAI,SAAS,SAAY,EAAE,MAAM,WAAW,IAAI,MAAM,KAAK,QAAQ,IAAI,aAAa,EAAE,IAAI,CAAC;AAAA,IAC/F,GAAI,IAAI,gBAAgB,SAAY,EAAE,aAAa,WAAW,IAAI,aAAa,KAAK,QAAQ,IAAI,aAAa,EAAE,IAAI,CAAC;AAAA,EACtH;AACF;AAoBA,eAAsB,gBACpB,OACgC;AAChC,QAAM,EAAE,QAAQ,YAAY,KAAK,IAAI,qBAAqB,KAAK;AAC/D,QAAM,OAAO,MAAM,MAAM,UAAU;AAInC,MAAI,KAAK,WAAW,OAAO,WAAW;AACpC,UAAM,IAAI;AAAA,MACR,eAAe,KAAK,MAAM,oCACrB,OAAO,SAAS;AAAA,IAEvB;AAAA,EACF;AAEA,QAAM,YAAY,MAAM,UAAU,IAAI;AACtC,MAAI,cAAc,OAAO,YAAY;AACnC,UAAM,IAAI;AAAA,MACR,eAAe,SAAS,qCACnB,OAAO,UAAU;AAAA,IAExB;AAAA,EACF;AAEA,MAAI;AACJ,MAAI,SAAS,kBAAkB;AAC7B,gBAAY;AAAA,EACd,OAAO;AACL,UAAM,eACJ,SAAS,qBAAsB,OAA6B;AAC9D,QAAI;AACF,kBAAY,MAAM,kBAAkB,MAAM,IAAI,oBAAoB,YAAY,CAAC;AAAA,IACjF,SAAS,KAAK;AACZ,YAAM,IAAI;AAAA,QACR,yBAA0B,IAAc,OAAO,oEAE1C,YAAY;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,IAAI,YAAY,SAAS,EAAE,OAAO,KAAK,CAAC,EAAE,OAAO,SAAS;AAC3E,SAAO,EAAE,QAAQ,SAAS;AAC5B;","names":[]}
|
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
import {
|
|
2
|
+
NOYDB_FORMAT_VERSION
|
|
3
|
+
} from "./chunk-B6HF6NTZ.js";
|
|
4
|
+
import {
|
|
5
|
+
ValidationError
|
|
6
|
+
} from "./chunk-ACLDOTNQ.js";
|
|
7
|
+
|
|
8
|
+
// src/meta/public-envelope/schema.ts
|
|
9
|
+
var DATA_URL_PREFIX = /^data:([a-zA-Z0-9.+-]+\/[a-zA-Z0-9.+-]+);base64,/;
|
|
10
|
+
function validatePublicEnvelopeInput(input, schema) {
|
|
11
|
+
const allowed = new Set(schema.fields);
|
|
12
|
+
for (const key of Object.keys(input)) {
|
|
13
|
+
const known = key === "name" || key === "description" || key === "icon" || key === "defaultLocale" ? key : void 0;
|
|
14
|
+
if (!known) {
|
|
15
|
+
throw new ValidationError(
|
|
16
|
+
`setPublicEnvelope: unknown field "${key}". Allowed fields: ${[...allowed].join(", ")}.`
|
|
17
|
+
);
|
|
18
|
+
}
|
|
19
|
+
if (!allowed.has(known)) {
|
|
20
|
+
throw new ValidationError(
|
|
21
|
+
`setPublicEnvelope: field "${known}" is not enabled in this vault's schema. Allowed fields: ${[...allowed].join(", ")}.`
|
|
22
|
+
);
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
if (input.name !== void 0) {
|
|
26
|
+
validateText(input.name, "name", schema.maxStringChars);
|
|
27
|
+
}
|
|
28
|
+
if (input.description !== void 0) {
|
|
29
|
+
validateText(input.description, "description", schema.maxStringChars);
|
|
30
|
+
}
|
|
31
|
+
if (input.icon !== void 0) {
|
|
32
|
+
validateIcon(input.icon, schema);
|
|
33
|
+
}
|
|
34
|
+
if (input.defaultLocale !== void 0 && typeof input.defaultLocale !== "string") {
|
|
35
|
+
throw new ValidationError(
|
|
36
|
+
`setPublicEnvelope: defaultLocale must be a string (BCP-47), got ${typeof input.defaultLocale}.`
|
|
37
|
+
);
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
function validateText(value, field, maxChars) {
|
|
41
|
+
if (typeof value === "string") {
|
|
42
|
+
if (value.length > maxChars) {
|
|
43
|
+
throw new ValidationError(
|
|
44
|
+
`setPublicEnvelope: ${field} exceeds the ${maxChars}-character cap (got ${value.length}).`
|
|
45
|
+
);
|
|
46
|
+
}
|
|
47
|
+
return;
|
|
48
|
+
}
|
|
49
|
+
if (typeof value !== "object" || value === null || Array.isArray(value)) {
|
|
50
|
+
throw new ValidationError(
|
|
51
|
+
`setPublicEnvelope: ${field} must be a string or { [locale]: string } map, got ${typeof value}.`
|
|
52
|
+
);
|
|
53
|
+
}
|
|
54
|
+
for (const [locale, str] of Object.entries(value)) {
|
|
55
|
+
if (typeof str !== "string") {
|
|
56
|
+
throw new ValidationError(
|
|
57
|
+
`setPublicEnvelope: ${field}[${locale}] must be a string, got ${typeof str}.`
|
|
58
|
+
);
|
|
59
|
+
}
|
|
60
|
+
if (str.length > maxChars) {
|
|
61
|
+
throw new ValidationError(
|
|
62
|
+
`setPublicEnvelope: ${field}[${locale}] exceeds the ${maxChars}-character cap (got ${str.length}).`
|
|
63
|
+
);
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
function validateIcon(icon, schema) {
|
|
68
|
+
if (typeof icon !== "string") {
|
|
69
|
+
throw new ValidationError(
|
|
70
|
+
`setPublicEnvelope: icon must be a data: URL string, got ${typeof icon}.`
|
|
71
|
+
);
|
|
72
|
+
}
|
|
73
|
+
if (icon.length > schema.maxIconBytes) {
|
|
74
|
+
throw new ValidationError(
|
|
75
|
+
`setPublicEnvelope: icon exceeds the ${schema.maxIconBytes}-byte cap (got ${icon.length}).`
|
|
76
|
+
);
|
|
77
|
+
}
|
|
78
|
+
const m = DATA_URL_PREFIX.exec(icon);
|
|
79
|
+
if (!m) {
|
|
80
|
+
throw new ValidationError(
|
|
81
|
+
"setPublicEnvelope: icon must be a base64 data URL (`data:image/png;base64,\u2026` or `data:image/svg+xml;base64,\u2026`). External URLs are not supported in v1."
|
|
82
|
+
);
|
|
83
|
+
}
|
|
84
|
+
const mime = m[1];
|
|
85
|
+
if (!schema.iconMimeTypes.includes(mime)) {
|
|
86
|
+
throw new ValidationError(
|
|
87
|
+
`setPublicEnvelope: icon MIME type "${mime}" is not allowed. Permitted types: ${schema.iconMimeTypes.join(", ")}.`
|
|
88
|
+
);
|
|
89
|
+
}
|
|
90
|
+
}
|
|
91
|
+
function isPublicEnvelope(x) {
|
|
92
|
+
if (x === null || typeof x !== "object" || Array.isArray(x)) return false;
|
|
93
|
+
const obj = x;
|
|
94
|
+
return obj["_noydb_public"] === 1 && typeof obj["version"] === "number";
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
// src/meta/public-envelope/storage.ts
|
|
98
|
+
var PUBLIC_ENVELOPE_RECORD_ID = "public-envelope";
|
|
99
|
+
async function loadPublicEnvelope(store, vault) {
|
|
100
|
+
const envelope = await store.get(vault, "_meta", PUBLIC_ENVELOPE_RECORD_ID);
|
|
101
|
+
if (!envelope) return void 0;
|
|
102
|
+
try {
|
|
103
|
+
const parsed = JSON.parse(envelope._data);
|
|
104
|
+
if (!isPublicEnvelope(parsed)) return void 0;
|
|
105
|
+
return parsed;
|
|
106
|
+
} catch {
|
|
107
|
+
return void 0;
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
async function savePublicEnvelope(store, vault, envelope) {
|
|
111
|
+
const wireEnvelope = {
|
|
112
|
+
_noydb: NOYDB_FORMAT_VERSION,
|
|
113
|
+
_v: 1,
|
|
114
|
+
_ts: (/* @__PURE__ */ new Date()).toISOString(),
|
|
115
|
+
_iv: "",
|
|
116
|
+
_data: JSON.stringify(envelope)
|
|
117
|
+
};
|
|
118
|
+
await store.put(vault, "_meta", PUBLIC_ENVELOPE_RECORD_ID, wireEnvelope);
|
|
119
|
+
}
|
|
120
|
+
async function readPublicEnvelope(store, vault, opts = {}) {
|
|
121
|
+
const raw = await loadPublicEnvelope(store, vault);
|
|
122
|
+
if (!raw) return void 0;
|
|
123
|
+
if (opts.locale === void 0) return raw;
|
|
124
|
+
return resolveLocale(raw, opts.locale);
|
|
125
|
+
}
|
|
126
|
+
function resolveLocale(envelope, locale) {
|
|
127
|
+
return {
|
|
128
|
+
...envelope,
|
|
129
|
+
...envelope.name !== void 0 ? { name: pickLocale(envelope.name, locale, envelope.defaultLocale) } : {},
|
|
130
|
+
...envelope.description !== void 0 ? { description: pickLocale(envelope.description, locale, envelope.defaultLocale) } : {}
|
|
131
|
+
};
|
|
132
|
+
}
|
|
133
|
+
function pickLocale(value, locale, defaultLocale) {
|
|
134
|
+
if (typeof value === "string") return value;
|
|
135
|
+
if (value[locale] !== void 0 && value[locale] !== "") return value[locale];
|
|
136
|
+
if (defaultLocale && value[defaultLocale] !== void 0 && value[defaultLocale] !== "") {
|
|
137
|
+
return value[defaultLocale];
|
|
138
|
+
}
|
|
139
|
+
for (const v of Object.values(value)) {
|
|
140
|
+
if (v !== "") return v;
|
|
141
|
+
}
|
|
142
|
+
return "";
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
export {
|
|
146
|
+
validatePublicEnvelopeInput,
|
|
147
|
+
isPublicEnvelope,
|
|
148
|
+
PUBLIC_ENVELOPE_RECORD_ID,
|
|
149
|
+
loadPublicEnvelope,
|
|
150
|
+
savePublicEnvelope,
|
|
151
|
+
readPublicEnvelope,
|
|
152
|
+
resolveLocale,
|
|
153
|
+
pickLocale
|
|
154
|
+
};
|
|
155
|
+
//# sourceMappingURL=chunk-GILMPJXB.js.map
|