@noy-db/hub 0.1.0-pre.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (195) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +197 -0
  3. package/dist/aggregate/index.cjs +476 -0
  4. package/dist/aggregate/index.cjs.map +1 -0
  5. package/dist/aggregate/index.d.cts +38 -0
  6. package/dist/aggregate/index.d.ts +38 -0
  7. package/dist/aggregate/index.js +53 -0
  8. package/dist/aggregate/index.js.map +1 -0
  9. package/dist/blobs/index.cjs +1480 -0
  10. package/dist/blobs/index.cjs.map +1 -0
  11. package/dist/blobs/index.d.cts +45 -0
  12. package/dist/blobs/index.d.ts +45 -0
  13. package/dist/blobs/index.js +48 -0
  14. package/dist/blobs/index.js.map +1 -0
  15. package/dist/bundle/index.cjs +436 -0
  16. package/dist/bundle/index.cjs.map +1 -0
  17. package/dist/bundle/index.d.cts +7 -0
  18. package/dist/bundle/index.d.ts +7 -0
  19. package/dist/bundle/index.js +40 -0
  20. package/dist/bundle/index.js.map +1 -0
  21. package/dist/chunk-2QR2PQTT.js +217 -0
  22. package/dist/chunk-2QR2PQTT.js.map +1 -0
  23. package/dist/chunk-4OWFYIDQ.js +79 -0
  24. package/dist/chunk-4OWFYIDQ.js.map +1 -0
  25. package/dist/chunk-5AATM2M2.js +90 -0
  26. package/dist/chunk-5AATM2M2.js.map +1 -0
  27. package/dist/chunk-ACLDOTNQ.js +543 -0
  28. package/dist/chunk-ACLDOTNQ.js.map +1 -0
  29. package/dist/chunk-BTDCBVJW.js +160 -0
  30. package/dist/chunk-BTDCBVJW.js.map +1 -0
  31. package/dist/chunk-CIMZBAZB.js +72 -0
  32. package/dist/chunk-CIMZBAZB.js.map +1 -0
  33. package/dist/chunk-E445ICYI.js +365 -0
  34. package/dist/chunk-E445ICYI.js.map +1 -0
  35. package/dist/chunk-EXQRC2L4.js +722 -0
  36. package/dist/chunk-EXQRC2L4.js.map +1 -0
  37. package/dist/chunk-FZU343FL.js +32 -0
  38. package/dist/chunk-FZU343FL.js.map +1 -0
  39. package/dist/chunk-GJILMRPO.js +354 -0
  40. package/dist/chunk-GJILMRPO.js.map +1 -0
  41. package/dist/chunk-GOUT6DND.js +1285 -0
  42. package/dist/chunk-GOUT6DND.js.map +1 -0
  43. package/dist/chunk-J66GRPNH.js +111 -0
  44. package/dist/chunk-J66GRPNH.js.map +1 -0
  45. package/dist/chunk-M2F2JAWB.js +464 -0
  46. package/dist/chunk-M2F2JAWB.js.map +1 -0
  47. package/dist/chunk-M5INGEFC.js +84 -0
  48. package/dist/chunk-M5INGEFC.js.map +1 -0
  49. package/dist/chunk-M62XNWRA.js +72 -0
  50. package/dist/chunk-M62XNWRA.js.map +1 -0
  51. package/dist/chunk-MR4424N3.js +275 -0
  52. package/dist/chunk-MR4424N3.js.map +1 -0
  53. package/dist/chunk-NPC4LFV5.js +132 -0
  54. package/dist/chunk-NPC4LFV5.js.map +1 -0
  55. package/dist/chunk-NXFEYLVG.js +311 -0
  56. package/dist/chunk-NXFEYLVG.js.map +1 -0
  57. package/dist/chunk-R36SIKES.js +79 -0
  58. package/dist/chunk-R36SIKES.js.map +1 -0
  59. package/dist/chunk-TDR6T5CJ.js +381 -0
  60. package/dist/chunk-TDR6T5CJ.js.map +1 -0
  61. package/dist/chunk-UF3BUNQZ.js +1 -0
  62. package/dist/chunk-UF3BUNQZ.js.map +1 -0
  63. package/dist/chunk-UQFSPSWG.js +1109 -0
  64. package/dist/chunk-UQFSPSWG.js.map +1 -0
  65. package/dist/chunk-USKYUS74.js +793 -0
  66. package/dist/chunk-USKYUS74.js.map +1 -0
  67. package/dist/chunk-XCL3WP6J.js +121 -0
  68. package/dist/chunk-XCL3WP6J.js.map +1 -0
  69. package/dist/chunk-XHFOENR2.js +680 -0
  70. package/dist/chunk-XHFOENR2.js.map +1 -0
  71. package/dist/chunk-ZFKD4QMV.js +430 -0
  72. package/dist/chunk-ZFKD4QMV.js.map +1 -0
  73. package/dist/chunk-ZLMV3TUA.js +490 -0
  74. package/dist/chunk-ZLMV3TUA.js.map +1 -0
  75. package/dist/chunk-ZRG4V3F5.js +17 -0
  76. package/dist/chunk-ZRG4V3F5.js.map +1 -0
  77. package/dist/consent/index.cjs +204 -0
  78. package/dist/consent/index.cjs.map +1 -0
  79. package/dist/consent/index.d.cts +24 -0
  80. package/dist/consent/index.d.ts +24 -0
  81. package/dist/consent/index.js +23 -0
  82. package/dist/consent/index.js.map +1 -0
  83. package/dist/crdt/index.cjs +152 -0
  84. package/dist/crdt/index.cjs.map +1 -0
  85. package/dist/crdt/index.d.cts +30 -0
  86. package/dist/crdt/index.d.ts +30 -0
  87. package/dist/crdt/index.js +24 -0
  88. package/dist/crdt/index.js.map +1 -0
  89. package/dist/crypto-IVKU7YTT.js +44 -0
  90. package/dist/crypto-IVKU7YTT.js.map +1 -0
  91. package/dist/delegation-XDJCBTI2.js +16 -0
  92. package/dist/delegation-XDJCBTI2.js.map +1 -0
  93. package/dist/dev-unlock-CeXic1xC.d.cts +263 -0
  94. package/dist/dev-unlock-KrKkcqD3.d.ts +263 -0
  95. package/dist/hash-9KO1BGxh.d.cts +63 -0
  96. package/dist/hash-ChfJjRjQ.d.ts +63 -0
  97. package/dist/history/index.cjs +1215 -0
  98. package/dist/history/index.cjs.map +1 -0
  99. package/dist/history/index.d.cts +62 -0
  100. package/dist/history/index.d.ts +62 -0
  101. package/dist/history/index.js +79 -0
  102. package/dist/history/index.js.map +1 -0
  103. package/dist/i18n/index.cjs +746 -0
  104. package/dist/i18n/index.cjs.map +1 -0
  105. package/dist/i18n/index.d.cts +38 -0
  106. package/dist/i18n/index.d.ts +38 -0
  107. package/dist/i18n/index.js +55 -0
  108. package/dist/i18n/index.js.map +1 -0
  109. package/dist/index-BRHBCmLt.d.ts +1940 -0
  110. package/dist/index-C8kQtmOk.d.ts +380 -0
  111. package/dist/index-DN-J-5wT.d.cts +1940 -0
  112. package/dist/index-DhjMjz7L.d.cts +380 -0
  113. package/dist/index.cjs +14756 -0
  114. package/dist/index.cjs.map +1 -0
  115. package/dist/index.d.cts +269 -0
  116. package/dist/index.d.ts +269 -0
  117. package/dist/index.js +6085 -0
  118. package/dist/index.js.map +1 -0
  119. package/dist/indexing/index.cjs +736 -0
  120. package/dist/indexing/index.cjs.map +1 -0
  121. package/dist/indexing/index.d.cts +36 -0
  122. package/dist/indexing/index.d.ts +36 -0
  123. package/dist/indexing/index.js +77 -0
  124. package/dist/indexing/index.js.map +1 -0
  125. package/dist/lazy-builder-BwEoBQZ9.d.ts +304 -0
  126. package/dist/lazy-builder-CZVLKh0Z.d.cts +304 -0
  127. package/dist/ledger-2NX4L7PN.js +33 -0
  128. package/dist/ledger-2NX4L7PN.js.map +1 -0
  129. package/dist/mime-magic-CBBSOkjm.d.cts +50 -0
  130. package/dist/mime-magic-CBBSOkjm.d.ts +50 -0
  131. package/dist/periods/index.cjs +1035 -0
  132. package/dist/periods/index.cjs.map +1 -0
  133. package/dist/periods/index.d.cts +21 -0
  134. package/dist/periods/index.d.ts +21 -0
  135. package/dist/periods/index.js +25 -0
  136. package/dist/periods/index.js.map +1 -0
  137. package/dist/predicate-SBHmi6D0.d.cts +161 -0
  138. package/dist/predicate-SBHmi6D0.d.ts +161 -0
  139. package/dist/query/index.cjs +1957 -0
  140. package/dist/query/index.cjs.map +1 -0
  141. package/dist/query/index.d.cts +3 -0
  142. package/dist/query/index.d.ts +3 -0
  143. package/dist/query/index.js +62 -0
  144. package/dist/query/index.js.map +1 -0
  145. package/dist/session/index.cjs +487 -0
  146. package/dist/session/index.cjs.map +1 -0
  147. package/dist/session/index.d.cts +45 -0
  148. package/dist/session/index.d.ts +45 -0
  149. package/dist/session/index.js +44 -0
  150. package/dist/session/index.js.map +1 -0
  151. package/dist/shadow/index.cjs +133 -0
  152. package/dist/shadow/index.cjs.map +1 -0
  153. package/dist/shadow/index.d.cts +16 -0
  154. package/dist/shadow/index.d.ts +16 -0
  155. package/dist/shadow/index.js +20 -0
  156. package/dist/shadow/index.js.map +1 -0
  157. package/dist/store/index.cjs +1069 -0
  158. package/dist/store/index.cjs.map +1 -0
  159. package/dist/store/index.d.cts +491 -0
  160. package/dist/store/index.d.ts +491 -0
  161. package/dist/store/index.js +34 -0
  162. package/dist/store/index.js.map +1 -0
  163. package/dist/strategy-BSxFXGzb.d.cts +110 -0
  164. package/dist/strategy-BSxFXGzb.d.ts +110 -0
  165. package/dist/strategy-D-SrOLCl.d.cts +548 -0
  166. package/dist/strategy-D-SrOLCl.d.ts +548 -0
  167. package/dist/sync/index.cjs +1062 -0
  168. package/dist/sync/index.cjs.map +1 -0
  169. package/dist/sync/index.d.cts +42 -0
  170. package/dist/sync/index.d.ts +42 -0
  171. package/dist/sync/index.js +28 -0
  172. package/dist/sync/index.js.map +1 -0
  173. package/dist/team/index.cjs +1233 -0
  174. package/dist/team/index.cjs.map +1 -0
  175. package/dist/team/index.d.cts +117 -0
  176. package/dist/team/index.d.ts +117 -0
  177. package/dist/team/index.js +39 -0
  178. package/dist/team/index.js.map +1 -0
  179. package/dist/tx/index.cjs +212 -0
  180. package/dist/tx/index.cjs.map +1 -0
  181. package/dist/tx/index.d.cts +20 -0
  182. package/dist/tx/index.d.ts +20 -0
  183. package/dist/tx/index.js +20 -0
  184. package/dist/tx/index.js.map +1 -0
  185. package/dist/types-BZpCZB8N.d.ts +7526 -0
  186. package/dist/types-Bfs0qr5F.d.cts +7526 -0
  187. package/dist/ulid-COREQ2RQ.js +9 -0
  188. package/dist/ulid-COREQ2RQ.js.map +1 -0
  189. package/dist/util/index.cjs +230 -0
  190. package/dist/util/index.cjs.map +1 -0
  191. package/dist/util/index.d.cts +77 -0
  192. package/dist/util/index.d.ts +77 -0
  193. package/dist/util/index.js +190 -0
  194. package/dist/util/index.js.map +1 -0
  195. package/package.json +244 -0
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/team/presence.ts","../src/team/sync.ts","../src/team/sync-transaction.ts"],"sourcesContent":["/**\n * Presence handle — real-time awareness of who is viewing/editing a collection.\n * encrypted ephemeral channel keyed by collection DEK via HKDF.\n *\n * The presence key is derived from the collection DEK so:\n * - The adapter never learns user identities from presence payloads.\n * - Presence rotates automatically when the DEK rotates (revoked users\n * can no longer participate after a DEK rotation).\n *\n * Two transport strategies:\n * 1. **Pub/sub** (real-time): used when the adapter implements\n * `presencePublish` and `presenceSubscribe`.\n * 2. **Storage-poll** (fallback): presence records are written to a\n * reserved `_presence_<collection>` collection on the sync adapter\n * (if available) or local adapter, and polled periodically.\n */\n\nimport type { NoydbStore, PresencePeer } from '../types.js'\nimport { encrypt, decrypt, generateIV, bufferToBase64, derivePresenceKey } from '../crypto.js'\n\n/** Options for constructing a PresenceHandle. @internal */\nexport interface PresenceHandleOpts {\n /** Local adapter for storage-poll fallback. */\n adapter: NoydbStore\n /** Remote (sync) adapter — preferred for broadcasting presence if available. */\n syncAdapter?: NoydbStore\n /** Vault name — used as part of the channel and storage key. */\n vault: string\n /** Collection name — used as HKDF `info` and channel suffix. */\n collectionName: string\n /** Calling user's ID, embedded unencrypted in storage records. */\n userId: string\n /** Whether encryption is active. When false, presence payloads are stored as JSON. */\n encrypted: boolean\n /** Callback that resolves the collection DEK (used to derive the presence key). */\n getDEK: (collectionName: string) => Promise<CryptoKey>\n /** How long (ms) before a peer's presence is considered stale. Default: 30_000. 
*/\n staleMs?: number\n /** Poll interval (ms) for the storage-poll fallback. Default: 5_000. */\n pollIntervalMs?: number\n}\n\n/**\n * Internal storage envelope for the storage-poll fallback.\n * Written to `_presence_<collection>` as `{ userId, lastSeen, iv, data }`.\n */\ninterface StoragePresenceRecord {\n userId: string\n lastSeen: string\n iv: string // base64 AES-GCM IV (empty when not encrypted)\n data: string // base64 ciphertext or JSON string when not encrypted\n}\n\n/** Presence handle for a single collection. */\nexport class PresenceHandle<P> {\n private readonly adapter: NoydbStore\n private readonly syncAdapter: NoydbStore | undefined\n private readonly vault: string\n private readonly collectionName: string\n private readonly userId: string\n private readonly encrypted: boolean\n private readonly getDEK: (collectionName: string) => Promise<CryptoKey>\n private readonly staleMs: number\n private readonly pollIntervalMs: number\n private readonly channel: string\n private readonly storageCollection: string\n\n private presenceKey: CryptoKey | null = null\n private subscribers: Array<(peers: PresencePeer<P>[]) => void> = []\n private unsubscribePubSub: (() => void) | null = null\n private pollTimer: ReturnType<typeof setInterval> | null = null\n private stopped = false\n\n constructor(opts: PresenceHandleOpts) {\n this.adapter = opts.adapter\n this.syncAdapter = opts.syncAdapter\n this.vault = opts.vault\n this.collectionName = opts.collectionName\n this.userId = opts.userId\n this.encrypted = opts.encrypted\n this.getDEK = opts.getDEK\n this.staleMs = opts.staleMs ?? 30_000\n this.pollIntervalMs = opts.pollIntervalMs ?? 
5_000\n // Channel used by pub/sub adapters — vault-scoped so two collections\n // in the same vault don't bleed into each other's presence channels.\n this.channel = `${opts.vault}:${opts.collectionName}:presence`\n // Reserved collection name for the storage-poll fallback.\n this.storageCollection = `_presence_${opts.collectionName}`\n }\n\n /**\n * Announce yourself (or update your cursor/status).\n * Encrypts `payload` with the presence key and publishes it.\n */\n async update(payload: P): Promise<void> {\n if (this.stopped) return\n\n const key = await this.getPresenceKey()\n const now = new Date().toISOString()\n const plaintext = JSON.stringify({ userId: this.userId, lastSeen: now, payload })\n let encryptedPayload: string\n\n if (this.encrypted && key) {\n const iv = generateIV()\n const ivB64 = bufferToBase64(iv)\n const { data } = await encrypt(plaintext, key)\n encryptedPayload = JSON.stringify({ iv: ivB64, data })\n } else {\n encryptedPayload = plaintext\n }\n\n // Pub/sub path — publish to any adapter that supports it\n const pubAdapter = this.getPubSubAdapter()\n if (pubAdapter?.presencePublish) {\n await pubAdapter.presencePublish(this.channel, encryptedPayload)\n }\n\n // Storage-poll path — write a record to the storage adapter\n await this.writeStorageRecord(payload, now)\n }\n\n /**\n * Subscribe to presence updates. The callback receives a filtered, decrypted\n * list of all currently-active peers (excluding yourself, excluding stale).\n *\n * Returns an unsubscribe function. 
Also call `stop()` to release all resources.\n */\n subscribe(cb: (peers: PresencePeer<P>[]) => void): () => void {\n if (this.stopped) return () => {}\n\n this.subscribers.push(cb)\n\n // Start pub/sub listener on first subscriber\n if (this.subscribers.length === 1) {\n this.startListening()\n }\n\n return () => {\n this.subscribers = this.subscribers.filter(s => s !== cb)\n if (this.subscribers.length === 0) this.stopListening()\n }\n }\n\n /** Stop all listening and clear resources. */\n stop(): void {\n this.stopped = true\n this.stopListening()\n this.subscribers = []\n }\n\n // ─── Private ────────────────────────────────────────────────────────\n\n private async getPresenceKey(): Promise<CryptoKey | null> {\n if (!this.encrypted) return null\n if (!this.presenceKey) {\n try {\n const dek = await this.getDEK(this.collectionName)\n this.presenceKey = await derivePresenceKey(dek, this.collectionName)\n } catch {\n // no-op — presence degrades gracefully if crypto fails\n }\n }\n return this.presenceKey\n }\n\n private getPubSubAdapter(): NoydbStore | undefined {\n // Prefer the sync adapter (it broadcasts to other devices)\n if (this.syncAdapter?.presencePublish) return this.syncAdapter\n if (this.adapter.presencePublish) return this.adapter\n return undefined\n }\n\n private startListening(): void {\n const pubAdapter = this.getPubSubAdapter()\n\n if (pubAdapter?.presenceSubscribe) {\n // Real-time pub/sub path\n this.unsubscribePubSub = pubAdapter.presenceSubscribe(\n this.channel,\n (encryptedPayload) => { void this.handlePubSubMessage(encryptedPayload) },\n )\n } else {\n // Storage-poll fallback\n this.pollTimer = setInterval(\n () => { void this.pollStoragePresence() },\n this.pollIntervalMs,\n )\n // Kick off an immediate poll\n void this.pollStoragePresence()\n }\n }\n\n private stopListening(): void {\n if (this.unsubscribePubSub) {\n this.unsubscribePubSub()\n this.unsubscribePubSub = null\n }\n if (this.pollTimer) {\n clearInterval(this.pollTimer)\n 
this.pollTimer = null\n }\n }\n\n private async handlePubSubMessage(encryptedPayload: string): Promise<void> {\n try {\n const peer = await this.decryptPresencePayload(encryptedPayload)\n if (!peer || peer.userId === this.userId) return\n\n const cutoff = new Date(Date.now() - this.staleMs).toISOString()\n if (peer.lastSeen < cutoff) return\n\n // Deliver only this new peer to subscribers; a full snapshot poll follows\n // on next interval. For pub/sub, we could maintain a map of active peers,\n // but for simplicity: emit a snapshot read from storage.\n await this.pollStoragePresence()\n } catch {\n // Decrypt failure — stale key or tampered payload, ignore\n }\n }\n\n private async decryptPresencePayload(\n encryptedPayload: string,\n ): Promise<{ userId: string; lastSeen: string; payload: P } | null> {\n const key = await this.getPresenceKey()\n\n if (!this.encrypted || !key) {\n return JSON.parse(encryptedPayload) as { userId: string; lastSeen: string; payload: P }\n }\n\n const { iv: ivB64, data } = JSON.parse(encryptedPayload) as { iv: string; data: string }\n const plaintext = await decrypt(ivB64, data, key)\n return JSON.parse(plaintext) as { userId: string; lastSeen: string; payload: P }\n }\n\n private async writeStorageRecord(payload: P, now: string): Promise<void> {\n const key = await this.getPresenceKey()\n const plaintext = JSON.stringify(payload)\n let iv = ''\n let data: string\n\n if (this.encrypted && key) {\n const ivBytes = generateIV()\n iv = bufferToBase64(ivBytes)\n const result = await encrypt(plaintext, key)\n data = result.data\n } else {\n data = plaintext\n }\n\n const record: StoragePresenceRecord = { userId: this.userId, lastSeen: now, iv, data }\n const json = JSON.stringify(record)\n\n // Use the sync adapter if available (so other devices can read it);\n // fall back to local adapter.\n const storeAdapter = this.syncAdapter ?? 
this.adapter\n const envelope = {\n _noydb: 1 as const,\n _v: 1,\n _ts: now,\n _iv: '',\n _data: json,\n }\n try {\n await storeAdapter.put(\n this.vault,\n this.storageCollection,\n this.userId,\n envelope,\n )\n } catch {\n // Presence write failure is non-fatal — the user is still present locally\n }\n }\n\n private async pollStoragePresence(): Promise<void> {\n if (this.stopped || this.subscribers.length === 0) return\n\n try {\n const storeAdapter = this.syncAdapter ?? this.adapter\n const ids = await storeAdapter.list(this.vault, this.storageCollection)\n const cutoff = new Date(Date.now() - this.staleMs).toISOString()\n const peers: PresencePeer<P>[] = []\n\n for (const id of ids) {\n if (id === this.userId) continue // skip ourselves\n const envelope = await storeAdapter.get(this.vault, this.storageCollection, id)\n if (!envelope) continue\n\n const record = JSON.parse(envelope._data) as StoragePresenceRecord\n if (record.lastSeen < cutoff) continue\n\n let peerPayload: P\n if (this.encrypted && this.presenceKey && record.iv) {\n const plaintext = await decrypt(record.iv, record.data, this.presenceKey)\n peerPayload = JSON.parse(plaintext) as P\n } else {\n peerPayload = JSON.parse(record.data) as P\n }\n\n peers.push({ userId: record.userId, payload: peerPayload, lastSeen: record.lastSeen })\n }\n\n for (const cb of this.subscribers) {\n cb(peers)\n }\n } catch {\n // Poll failure is non-fatal\n }\n }\n}\n","import type {\n NoydbStore,\n DirtyEntry,\n Conflict,\n ConflictStrategy,\n CollectionConflictResolver,\n PushOptions,\n PullOptions,\n PushResult,\n PullResult,\n SyncStatus,\n EncryptedEnvelope,\n SyncMetadata,\n SyncTargetRole,\n} from '../types.js'\nimport { NOYDB_SYNC_VERSION } from '../types.js'\nimport { ConflictError } from '../errors.js'\nimport type { NoydbEventEmitter } from '../events.js'\nimport type { SyncPolicy } from '../store/sync-policy.js'\nimport { SyncScheduler } from '../store/sync-policy.js'\n\n/** Sync engine: dirty tracking, 
push, pull, conflict resolution, scheduling. */\nexport class SyncEngine {\n private readonly local: NoydbStore\n private readonly remote: NoydbStore\n private readonly strategy: ConflictStrategy\n private readonly emitter: NoydbEventEmitter\n private readonly vault: string\n readonly role: SyncTargetRole\n readonly label: string | undefined\n\n private dirty: DirtyEntry[] = []\n private lastPush: string | null = null\n private lastPull: string | null = null\n private loaded = false\n private autoSyncInterval: ReturnType<typeof setInterval> | null = null\n private isOnline = true\n\n /** Sync scheduler. Manages push/pull timing. */\n readonly scheduler: SyncScheduler | null\n\n /** Per-collection conflict resolvers registered by Collection instances. */\n private readonly conflictResolvers = new Map<string, CollectionConflictResolver>()\n\n constructor(opts: {\n local: NoydbStore\n remote: NoydbStore\n vault: string\n strategy: ConflictStrategy\n emitter: NoydbEventEmitter\n syncPolicy?: SyncPolicy\n role?: SyncTargetRole\n label?: string\n }) {\n this.local = opts.local\n this.remote = opts.remote\n this.vault = opts.vault\n this.strategy = opts.strategy\n this.emitter = opts.emitter\n this.role = opts.role ?? 'sync-peer'\n this.label = opts.label\n\n // Create scheduler if a policy is provided\n const policy = opts.syncPolicy\n if (policy && policy.push.mode !== 'manual') {\n this.scheduler = new SyncScheduler(policy, {\n push: () => this.push().then(() => {}),\n pull: () => this.pull().then(() => {}),\n getDirtyCount: () => this.dirty.length,\n })\n } else {\n this.scheduler = null\n }\n }\n\n /** Start the sync scheduler. Called after vault is fully opened. */\n startScheduler(): void {\n this.scheduler?.start()\n }\n\n /** Stop the sync scheduler. Called on close. 
*/\n stopScheduler(): void {\n this.scheduler?.stop()\n }\n\n /**\n * Register a per-collection conflict resolver.\n * Called by Collection when `conflictPolicy` is set.\n */\n registerConflictResolver(collection: string, resolver: CollectionConflictResolver): void {\n this.conflictResolvers.set(collection, resolver)\n }\n\n /** Record a local change for later push. */\n async trackChange(collection: string, id: string, action: 'put' | 'delete', version: number): Promise<void> {\n await this.ensureLoaded()\n\n // Deduplicate: if same collection+id already in dirty, update it\n const idx = this.dirty.findIndex(d => d.collection === collection && d.id === id)\n const entry: DirtyEntry = {\n vault: this.vault,\n collection,\n id,\n action,\n version,\n timestamp: new Date().toISOString(),\n }\n\n if (idx >= 0) {\n this.dirty[idx] = entry\n } else {\n this.dirty.push(entry)\n }\n\n await this.persistMeta()\n\n // Notify scheduler of the write (triggers on-change or debounce)\n this.scheduler?.notifyChange()\n }\n\n /** Push dirty records to remote adapter. Accepts optional `PushOptions` for partial sync. 
*/\n async push(options?: PushOptions): Promise<PushResult> {\n await this.ensureLoaded()\n\n let pushed = 0\n const conflicts: Conflict[] = []\n const errors: Error[] = []\n const completed: number[] = []\n\n for (let i = 0; i < this.dirty.length; i++) {\n const entry = this.dirty[i]!\n\n // Partial sync: skip collections not in the filter\n if (options?.collections && !options.collections.includes(entry.collection)) {\n continue\n }\n\n try {\n if (entry.action === 'delete') {\n await this.remote.delete(this.vault, entry.collection, entry.id)\n completed.push(i)\n pushed++\n } else {\n const envelope = await this.local.get(this.vault, entry.collection, entry.id)\n if (!envelope) {\n // Record was deleted locally after being marked dirty\n completed.push(i)\n continue\n }\n\n try {\n await this.remote.put(\n this.vault,\n entry.collection,\n entry.id,\n envelope,\n entry.version - 1,\n )\n completed.push(i)\n pushed++\n } catch (err) {\n if (err instanceof ConflictError) {\n const remoteEnvelope = await this.remote.get(this.vault, entry.collection, entry.id)\n if (remoteEnvelope) {\n const { handled, conflict } = await this.handleConflict(\n entry.collection,\n entry.id,\n envelope,\n remoteEnvelope,\n 'push',\n )\n conflicts.push(conflict)\n if (handled === 'local') {\n await this.remote.put(this.vault, entry.collection, entry.id, conflict.local)\n completed.push(i)\n pushed++\n } else if (handled === 'remote') {\n await this.local.put(this.vault, entry.collection, entry.id, conflict.remote)\n completed.push(i)\n } else if (handled === 'merged' && conflict.local !== envelope) {\n // Merged envelope is stored in conflict.local (the winner)\n const merged = conflict.local\n await this.remote.put(this.vault, entry.collection, entry.id, merged)\n await this.local.put(this.vault, entry.collection, entry.id, merged)\n completed.push(i)\n pushed++\n }\n // handled === 'deferred': leave in dirty log\n }\n } else {\n throw err\n }\n }\n }\n } catch (err) {\n 
errors.push(err instanceof Error ? err : new Error(String(err)))\n }\n }\n\n // Remove completed entries from dirty log (reverse order to preserve indices)\n for (const i of completed.sort((a, b) => b - a)) {\n this.dirty.splice(i, 1)\n }\n\n this.lastPush = new Date().toISOString()\n await this.persistMeta()\n\n const result: PushResult = { pushed, conflicts, errors }\n this.emitter.emit('sync:push', result)\n return result\n }\n\n /** Pull remote records to local adapter. Accepts optional `PullOptions` for partial sync. */\n async pull(options?: PullOptions): Promise<PullResult> {\n await this.ensureLoaded()\n\n let pulled = 0\n const conflicts: Conflict[] = []\n const errors: Error[] = []\n\n try {\n const remoteSnapshot = await this.remote.loadAll(this.vault)\n\n for (const [collName, records] of Object.entries(remoteSnapshot)) {\n // Partial sync: skip collections not in the filter\n if (options?.collections && !options.collections.includes(collName)) {\n continue\n }\n\n for (const [id, remoteEnvelope] of Object.entries(records)) {\n // Partial sync: modifiedSince filter\n if (options?.modifiedSince && remoteEnvelope._ts <= options.modifiedSince) {\n continue\n }\n\n try {\n const localEnvelope = await this.local.get(this.vault, collName, id)\n\n if (!localEnvelope) {\n // New record from remote\n await this.local.put(this.vault, collName, id, remoteEnvelope)\n pulled++\n } else if (remoteEnvelope._v > localEnvelope._v) {\n // Remote is newer — check if we have a dirty entry for this\n const isDirty = this.dirty.some(d => d.collection === collName && d.id === id)\n if (isDirty) {\n // Both changed — conflict\n const { handled, conflict } = await this.handleConflict(\n collName,\n id,\n localEnvelope,\n remoteEnvelope,\n 'pull',\n )\n conflicts.push(conflict)\n if (handled === 'remote') {\n await this.local.put(this.vault, collName, id, conflict.remote)\n this.dirty = this.dirty.filter(d => !(d.collection === collName && d.id === id))\n pulled++\n } else if 
(handled === 'merged' && conflict.local !== localEnvelope) {\n const merged = conflict.local\n await this.local.put(this.vault, collName, id, merged)\n this.dirty = this.dirty.filter(d => !(d.collection === collName && d.id === id))\n pulled++\n }\n // 'local' or 'deferred': push handles it\n } else {\n // Remote is newer, no local changes — update\n await this.local.put(this.vault, collName, id, remoteEnvelope)\n pulled++\n }\n }\n // Same version or local is newer — skip (push will handle)\n } catch (err) {\n errors.push(err instanceof Error ? err : new Error(String(err)))\n }\n }\n }\n } catch (err) {\n errors.push(err instanceof Error ? err : new Error(String(err)))\n }\n\n this.lastPull = new Date().toISOString()\n await this.persistMeta()\n\n const result: PullResult = { pulled, conflicts, errors }\n this.emitter.emit('sync:pull', result)\n return result\n }\n\n /** Bidirectional sync: pull then push. */\n async sync(options?: { push?: PushOptions; pull?: PullOptions }): Promise<{ pull: PullResult; push: PushResult }> {\n const pullResult = await this.pull(options?.pull)\n const pushResult = await this.push(options?.push)\n return { pull: pullResult, push: pushResult }\n }\n\n /**\n * Push a specific subset of dirty entries (for sync transactions, ).\n * Entries are matched by collection+id from the dirty log; matched entries\n * are removed from the dirty log on success.\n */\n async pushFiltered(predicate: (entry: DirtyEntry) => boolean): Promise<PushResult> {\n await this.ensureLoaded()\n\n let pushed = 0\n const conflicts: Conflict[] = []\n const errors: Error[] = []\n const completed: number[] = []\n\n for (let i = 0; i < this.dirty.length; i++) {\n const entry = this.dirty[i]!\n if (!predicate(entry)) continue\n\n try {\n if (entry.action === 'delete') {\n await this.remote.delete(this.vault, entry.collection, entry.id)\n completed.push(i)\n pushed++\n } else {\n const envelope = await this.local.get(this.vault, entry.collection, entry.id)\n if 
(!envelope) {\n completed.push(i)\n continue\n }\n\n try {\n await this.remote.put(\n this.vault,\n entry.collection,\n entry.id,\n envelope,\n entry.version - 1,\n )\n completed.push(i)\n pushed++\n } catch (err) {\n if (err instanceof ConflictError) {\n const remoteEnvelope = await this.remote.get(this.vault, entry.collection, entry.id)\n if (remoteEnvelope) {\n const { handled, conflict } = await this.handleConflict(\n entry.collection,\n entry.id,\n envelope,\n remoteEnvelope,\n 'push',\n )\n conflicts.push(conflict)\n if (handled === 'local') {\n await this.remote.put(this.vault, entry.collection, entry.id, conflict.local)\n completed.push(i)\n pushed++\n } else if (handled === 'remote') {\n await this.local.put(this.vault, entry.collection, entry.id, conflict.remote)\n completed.push(i)\n } else if (handled === 'merged' && conflict.local !== envelope) {\n const merged = conflict.local\n await this.remote.put(this.vault, entry.collection, entry.id, merged)\n await this.local.put(this.vault, entry.collection, entry.id, merged)\n completed.push(i)\n pushed++\n }\n }\n } else {\n throw err\n }\n }\n }\n } catch (err) {\n errors.push(err instanceof Error ? err : new Error(String(err)))\n }\n }\n\n for (const i of completed.sort((a, b) => b - a)) {\n this.dirty.splice(i, 1)\n }\n\n this.lastPush = new Date().toISOString()\n await this.persistMeta()\n\n const result: PushResult = { pushed, conflicts, errors }\n this.emitter.emit('sync:push', result)\n return result\n }\n\n /** Get current sync status. */\n status(): SyncStatus {\n return {\n dirty: this.dirty.length,\n lastPush: this.lastPush,\n lastPull: this.lastPull,\n online: this.isOnline,\n }\n }\n\n // ─── Auto-Sync ───────────────────────────────────────────────────\n\n /** Start auto-sync: listen for online/offline events, optional periodic sync. 
*/\n startAutoSync(intervalMs?: number): void {\n // Online/offline detection\n if (typeof globalThis.addEventListener === 'function') {\n globalThis.addEventListener('online', this.handleOnline)\n globalThis.addEventListener('offline', this.handleOffline)\n }\n\n // Periodic sync\n if (intervalMs && intervalMs > 0) {\n this.autoSyncInterval = setInterval(() => {\n if (this.isOnline) {\n void this.sync()\n }\n }, intervalMs)\n }\n }\n\n /** Stop auto-sync and scheduler. */\n stopAutoSync(): void {\n this.stopScheduler()\n if (typeof globalThis.removeEventListener === 'function') {\n globalThis.removeEventListener('online', this.handleOnline)\n globalThis.removeEventListener('offline', this.handleOffline)\n }\n if (this.autoSyncInterval) {\n clearInterval(this.autoSyncInterval)\n this.autoSyncInterval = null\n }\n }\n\n private handleOnline = (): void => {\n this.isOnline = true\n this.emitter.emit('sync:online', undefined as never)\n void this.sync()\n }\n\n private handleOffline = (): void => {\n this.isOnline = false\n this.emitter.emit('sync:offline', undefined as never)\n }\n\n /**\n * Resolve a conflict, checking per-collection resolvers first,\n * then falling back to the db-level `ConflictStrategy`.\n *\n * Returns the resolved `Conflict` object (possibly with `resolve` set for\n * manual mode) and a `handled` discriminant:\n * - `'local'` — keep the local envelope; push it to remote.\n * - `'remote'` — keep the remote envelope; update local.\n * - `'merged'` — a custom merge fn produced a new envelope stored as `conflict.local`.\n * - `'deferred'` — manual mode, resolve was not called synchronously.\n */\n private async handleConflict(\n collection: string,\n id: string,\n local: EncryptedEnvelope,\n remote: EncryptedEnvelope,\n _phase: 'push' | 'pull',\n ): Promise<{ handled: 'local' | 'remote' | 'merged' | 'deferred'; conflict: Conflict }> {\n const resolver = this.conflictResolvers.get(collection)\n\n if (resolver) {\n // Per-collection resolver is 
responsible for emitting sync:conflict\n // (manual policy emits with a resolve callback; LWW/FWW/custom are silent).\n const winner = await resolver(id, local, remote)\n const base: Conflict = {\n vault: this.vault,\n collection,\n id,\n local,\n remote,\n localVersion: local._v,\n remoteVersion: remote._v,\n }\n if (winner === null) return { handled: 'deferred', conflict: base }\n if (winner === local) return { handled: 'local', conflict: base }\n if (winner === remote) return { handled: 'remote', conflict: base }\n // Custom merge fn produced a new envelope — store as conflict.local for the caller\n return {\n handled: 'merged',\n conflict: { ...base, local: winner, localVersion: winner._v },\n }\n }\n\n // Fall back to db-level strategy — emit once\n const baseConflict: Conflict = {\n vault: this.vault,\n collection,\n id,\n local,\n remote,\n localVersion: local._v,\n remoteVersion: remote._v,\n }\n this.emitter.emit('sync:conflict', baseConflict)\n const side = this.legacyResolve(baseConflict)\n return { handled: side, conflict: baseConflict }\n }\n\n /** DB-level ConflictStrategy resolution (legacy, kept for backward compat). */\n private legacyResolve(conflict: Conflict): 'local' | 'remote' {\n if (typeof this.strategy === 'function') {\n return this.strategy(conflict)\n }\n switch (this.strategy) {\n case 'local-wins': return 'local'\n case 'remote-wins': return 'remote'\n case 'version':\n default:\n return conflict.localVersion >= conflict.remoteVersion ? 
'local' : 'remote'\n }\n }\n\n // ─── Persistence ─────────────────────────────────────────────────\n\n private async ensureLoaded(): Promise<void> {\n if (this.loaded) return\n\n const envelope = await this.local.get(this.vault, '_sync', 'meta')\n if (envelope) {\n const meta = JSON.parse(envelope._data) as SyncMetadata\n this.dirty = [...meta.dirty]\n this.lastPush = meta.last_push\n this.lastPull = meta.last_pull\n }\n\n this.loaded = true\n }\n\n private async persistMeta(): Promise<void> {\n const meta: SyncMetadata = {\n _noydb_sync: NOYDB_SYNC_VERSION,\n last_push: this.lastPush,\n last_pull: this.lastPull,\n dirty: this.dirty,\n }\n\n const envelope: EncryptedEnvelope = {\n _noydb: 1,\n _v: 1,\n _ts: new Date().toISOString(),\n _iv: '',\n _data: JSON.stringify(meta),\n }\n\n await this.local.put(this.vault, '_sync', 'meta', envelope)\n }\n}\n","import type { SyncTransactionResult } from '../types.js'\nimport type { SyncEngine } from './sync.js'\nimport type { Vault } from '../vault.js'\n\ninterface TxOp {\n readonly type: 'put' | 'delete'\n readonly collection: string\n readonly id: string\n readonly record?: unknown\n}\n\n/**\n * Sync transaction.\n *\n * Stages local writes and then pushes only those records to remote in a\n * single batch. If any record conflicts during the push, the result\n * carries `status: 'conflict'` — no automatic rollback is performed;\n * the caller handles conflict resolution.\n *\n * Obtain via `db.transaction(compartmentName)`.\n */\nexport class SyncTransaction {\n private readonly comp: Vault\n private readonly engine: SyncEngine\n private readonly ops: TxOp[] = []\n\n /** @internal — constructed by `Noydb.transaction()` */\n constructor(comp: Vault, engine: SyncEngine) {\n this.comp = comp\n this.engine = engine\n }\n\n /** Stage a record write. Does not write to any adapter until `commit()`. 
*/\n put(collection: string, id: string, record: unknown): this {\n this.ops.push({ type: 'put', collection, id, record })\n return this\n }\n\n /** Stage a record delete. Does not write to any adapter until `commit()`. */\n delete(collection: string, id: string): this {\n this.ops.push({ type: 'delete', collection, id })\n return this\n }\n\n /**\n * Commit the transaction.\n *\n * Phase 1 — writes all staged operations to the local adapter via the\n * collection layer (encryption + dirty-log tracking).\n *\n * Phase 2 — pushes only the records that were written in this\n * transaction to the remote adapter. Existing dirty entries from\n * outside this transaction are not affected.\n *\n * If any record conflicts during the push, `status` is `'conflict'`\n * and `conflicts` lists the affected records. No automatic rollback is\n * performed.\n */\n async commit(): Promise<SyncTransactionResult> {\n // Phase 1: write all staged ops to local via collection layer\n for (const op of this.ops) {\n if (op.type === 'put') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n await (this.comp.collection<any>(op.collection)).put(op.id, op.record as any)\n } else {\n await this.comp.collection(op.collection).delete(op.id)\n }\n }\n\n // Phase 2: push only the records from this transaction\n const opSet = new Set<string>()\n for (const op of this.ops) {\n opSet.add(`${op.collection}::${op.id}`)\n }\n\n const pushResult = await this.engine.pushFiltered(\n (entry) => opSet.has(`${entry.collection}::${entry.id}`),\n )\n\n return {\n status: pushResult.conflicts.length > 0 ? 
'conflict' : 'committed',\n pushed: pushResult.pushed,\n conflicts: pushResult.conflicts,\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAsDO,IAAM,iBAAN,MAAwB;AAAA,EACZ;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,cAAgC;AAAA,EAChC,cAAyD,CAAC;AAAA,EAC1D,oBAAyC;AAAA,EACzC,YAAmD;AAAA,EACnD,UAAU;AAAA,EAElB,YAAY,MAA0B;AACpC,SAAK,UAAU,KAAK;AACpB,SAAK,cAAc,KAAK;AACxB,SAAK,QAAQ,KAAK;AAClB,SAAK,iBAAiB,KAAK;AAC3B,SAAK,SAAS,KAAK;AACnB,SAAK,YAAY,KAAK;AACtB,SAAK,SAAS,KAAK;AACnB,SAAK,UAAU,KAAK,WAAW;AAC/B,SAAK,iBAAiB,KAAK,kBAAkB;AAG7C,SAAK,UAAU,GAAG,KAAK,KAAK,IAAI,KAAK,cAAc;AAEnD,SAAK,oBAAoB,aAAa,KAAK,cAAc;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAO,SAA2B;AACtC,QAAI,KAAK,QAAS;AAElB,UAAM,MAAM,MAAM,KAAK,eAAe;AACtC,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,UAAM,YAAY,KAAK,UAAU,EAAE,QAAQ,KAAK,QAAQ,UAAU,KAAK,QAAQ,CAAC;AAChF,QAAI;AAEJ,QAAI,KAAK,aAAa,KAAK;AACzB,YAAM,KAAK,WAAW;AACtB,YAAM,QAAQ,eAAe,EAAE;AAC/B,YAAM,EAAE,KAAK,IAAI,MAAM,QAAQ,WAAW,GAAG;AAC7C,yBAAmB,KAAK,UAAU,EAAE,IAAI,OAAO,KAAK,CAAC;AAAA,IACvD,OAAO;AACL,yBAAmB;AAAA,IACrB;AAGA,UAAM,aAAa,KAAK,iBAAiB;AACzC,QAAI,YAAY,iBAAiB;AAC/B,YAAM,WAAW,gBAAgB,KAAK,SAAS,gBAAgB;AAAA,IACjE;AAGA,UAAM,KAAK,mBAAmB,SAAS,GAAG;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,IAAoD;AAC5D,QAAI,KAAK,QAAS,QAAO,MAAM;AAAA,IAAC;AAEhC,SAAK,YAAY,KAAK,EAAE;AAGxB,QAAI,KAAK,YAAY,WAAW,GAAG;AACjC,WAAK,eAAe;AAAA,IACtB;AAEA,WAAO,MAAM;AACX,WAAK,cAAc,KAAK,YAAY,OAAO,OAAK,MAAM,EAAE;AACxD,UAAI,KAAK,YAAY,WAAW,EAAG,MAAK,cAAc;AAAA,IACxD;AAAA,EACF;AAAA;AAAA,EAGA,OAAa;AACX,SAAK,UAAU;AACf,SAAK,cAAc;AACnB,SAAK,cAAc,CAAC;AAAA,EACtB;AAAA;AAAA,EAIA,MAAc,iBAA4C;AACxD,QAAI,CAAC,KAAK,UAAW,QAAO;AAC5B,QAAI,CAAC,KAAK,aAAa;AACrB,UAAI;AACF,cAAM,MAAM,MAAM,KAAK,OAAO,KAAK,cAAc;AACjD,aAAK,cAAc,MAAM,kBAAkB,KAAK,KAAK,cAAc;AAAA,MACrE,QAAQ;AAAA,MAER;AAAA,IACF;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEQ,mBAA2C;AAEjD,QAAI,KAAK,aAAa,gBAAiB,QAAO,KAAK;AACnD,QAAI,KAAK,QAAQ,gBAAiB,QAAO,KAAK;AAC9C,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAuB;AAC7B,UAAM,aAAa
,KAAK,iBAAiB;AAEzC,QAAI,YAAY,mBAAmB;AAEjC,WAAK,oBAAoB,WAAW;AAAA,QAClC,KAAK;AAAA,QACL,CAAC,qBAAqB;AAAE,eAAK,KAAK,oBAAoB,gBAAgB;AAAA,QAAE;AAAA,MAC1E;AAAA,IACF,OAAO;AAEL,WAAK,YAAY;AAAA,QACf,MAAM;AAAE,eAAK,KAAK,oBAAoB;AAAA,QAAE;AAAA,QACxC,KAAK;AAAA,MACP;AAEA,WAAK,KAAK,oBAAoB;AAAA,IAChC;AAAA,EACF;AAAA,EAEQ,gBAAsB;AAC5B,QAAI,KAAK,mBAAmB;AAC1B,WAAK,kBAAkB;AACvB,WAAK,oBAAoB;AAAA,IAC3B;AACA,QAAI,KAAK,WAAW;AAClB,oBAAc,KAAK,SAAS;AAC5B,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA,EAEA,MAAc,oBAAoB,kBAAyC;AACzE,QAAI;AACF,YAAM,OAAO,MAAM,KAAK,uBAAuB,gBAAgB;AAC/D,UAAI,CAAC,QAAQ,KAAK,WAAW,KAAK,OAAQ;AAE1C,YAAM,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,OAAO,EAAE,YAAY;AAC/D,UAAI,KAAK,WAAW,OAAQ;AAK5B,YAAM,KAAK,oBAAoB;AAAA,IACjC,QAAQ;AAAA,IAER;AAAA,EACF;AAAA,EAEA,MAAc,uBACZ,kBACkE;AAClE,UAAM,MAAM,MAAM,KAAK,eAAe;AAEtC,QAAI,CAAC,KAAK,aAAa,CAAC,KAAK;AAC3B,aAAO,KAAK,MAAM,gBAAgB;AAAA,IACpC;AAEA,UAAM,EAAE,IAAI,OAAO,KAAK,IAAI,KAAK,MAAM,gBAAgB;AACvD,UAAM,YAAY,MAAM,QAAQ,OAAO,MAAM,GAAG;AAChD,WAAO,KAAK,MAAM,SAAS;AAAA,EAC7B;AAAA,EAEA,MAAc,mBAAmB,SAAY,KAA4B;AACvE,UAAM,MAAM,MAAM,KAAK,eAAe;AACtC,UAAM,YAAY,KAAK,UAAU,OAAO;AACxC,QAAI,KAAK;AACT,QAAI;AAEJ,QAAI,KAAK,aAAa,KAAK;AACzB,YAAM,UAAU,WAAW;AAC3B,WAAK,eAAe,OAAO;AAC3B,YAAM,SAAS,MAAM,QAAQ,WAAW,GAAG;AAC3C,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO;AAAA,IACT;AAEA,UAAM,SAAgC,EAAE,QAAQ,KAAK,QAAQ,UAAU,KAAK,IAAI,KAAK;AACrF,UAAM,OAAO,KAAK,UAAU,MAAM;AAIlC,UAAM,eAAe,KAAK,eAAe,KAAK;AAC9C,UAAM,WAAW;AAAA,MACf,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AAAA,MACL,OAAO;AAAA,IACT;AACA,QAAI;AACF,YAAM,aAAa;AAAA,QACjB,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAAA,EAEA,MAAc,sBAAqC;AACjD,QAAI,KAAK,WAAW,KAAK,YAAY,WAAW,EAAG;AAEnD,QAAI;AACF,YAAM,eAAe,KAAK,eAAe,KAAK;AAC9C,YAAM,MAAM,MAAM,aAAa,KAAK,KAAK,OAAO,KAAK,iBAAiB;AACtE,YAAM,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,OAAO,EAAE,YAAY;AAC/D,YAAM,QAA2B,CAAC;AAElC,iBAAW,MAAM,KAAK;AACpB,YAAI,OAAO,KAAK,OAAQ;AACxB,cAAM,WAAW,MAAM,aAAa,IAAI,KAAK,OAAO,KAAK,mBAAmB,EAAE;AAC9E,YAAI,CAAC,SAAU;AAEf,cAAM,SAAS,KAAK,MAA
M,SAAS,KAAK;AACxC,YAAI,OAAO,WAAW,OAAQ;AAE9B,YAAI;AACJ,YAAI,KAAK,aAAa,KAAK,eAAe,OAAO,IAAI;AACnD,gBAAM,YAAY,MAAM,QAAQ,OAAO,IAAI,OAAO,MAAM,KAAK,WAAW;AACxE,wBAAc,KAAK,MAAM,SAAS;AAAA,QACpC,OAAO;AACL,wBAAc,KAAK,MAAM,OAAO,IAAI;AAAA,QACtC;AAEA,cAAM,KAAK,EAAE,QAAQ,OAAO,QAAQ,SAAS,aAAa,UAAU,OAAO,SAAS,CAAC;AAAA,MACvF;AAEA,iBAAW,MAAM,KAAK,aAAa;AACjC,WAAG,KAAK;AAAA,MACV;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AACF;;;AC/RO,IAAM,aAAN,MAAiB;AAAA,EACL;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACR;AAAA,EACA;AAAA,EAED,QAAsB,CAAC;AAAA,EACvB,WAA0B;AAAA,EAC1B,WAA0B;AAAA,EAC1B,SAAS;AAAA,EACT,mBAA0D;AAAA,EAC1D,WAAW;AAAA;AAAA,EAGV;AAAA;AAAA,EAGQ,oBAAoB,oBAAI,IAAwC;AAAA,EAEjF,YAAY,MAST;AACD,SAAK,QAAQ,KAAK;AAClB,SAAK,SAAS,KAAK;AACnB,SAAK,QAAQ,KAAK;AAClB,SAAK,WAAW,KAAK;AACrB,SAAK,UAAU,KAAK;AACpB,SAAK,OAAO,KAAK,QAAQ;AACzB,SAAK,QAAQ,KAAK;AAGlB,UAAM,SAAS,KAAK;AACpB,QAAI,UAAU,OAAO,KAAK,SAAS,UAAU;AAC3C,WAAK,YAAY,IAAI,cAAc,QAAQ;AAAA,QACzC,MAAM,MAAM,KAAK,KAAK,EAAE,KAAK,MAAM;AAAA,QAAC,CAAC;AAAA,QACrC,MAAM,MAAM,KAAK,KAAK,EAAE,KAAK,MAAM;AAAA,QAAC,CAAC;AAAA,QACrC,eAAe,MAAM,KAAK,MAAM;AAAA,MAClC,CAAC;AAAA,IACH,OAAO;AACL,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA,EAGA,iBAAuB;AACrB,SAAK,WAAW,MAAM;AAAA,EACxB;AAAA;AAAA,EAGA,gBAAsB;AACpB,SAAK,WAAW,KAAK;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,yBAAyB,YAAoB,UAA4C;AACvF,SAAK,kBAAkB,IAAI,YAAY,QAAQ;AAAA,EACjD;AAAA;AAAA,EAGA,MAAM,YAAY,YAAoB,IAAY,QAA0B,SAAgC;AAC1G,UAAM,KAAK,aAAa;AAGxB,UAAM,MAAM,KAAK,MAAM,UAAU,OAAK,EAAE,eAAe,cAAc,EAAE,OAAO,EAAE;AAChF,UAAM,QAAoB;AAAA,MACxB,OAAO,KAAK;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAEA,QAAI,OAAO,GAAG;AACZ,WAAK,MAAM,GAAG,IAAI;AAAA,IACpB,OAAO;AACL,WAAK,MAAM,KAAK,KAAK;AAAA,IACvB;AAEA,UAAM,KAAK,YAAY;AAGvB,SAAK,WAAW,aAAa;AAAA,EAC/B;AAAA;AAAA,EAGA,MAAM,KAAK,SAA4C;AACrD,UAAM,KAAK,aAAa;AAExB,QAAI,SAAS;AACb,UAAM,YAAwB,CAAC;AAC/B,UAAM,SAAkB,CAAC;AACzB,UAAM,YAAsB,CAAC;AAE7B,aAAS,IAAI,GAAG,IAAI,KAAK,MAAM,QAAQ,KAAK;AAC1C,YAAM,QAAQ,KAAK,MAAM,CAAC;AAG1B,UAAI,SAAS,eAAe,CAAC,QAAQ,YAAY,SAAS,MAAM,UAAU,GAA
G;AAC3E;AAAA,MACF;AAEA,UAAI;AACF,YAAI,MAAM,WAAW,UAAU;AAC7B,gBAAM,KAAK,OAAO,OAAO,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AAC/D,oBAAU,KAAK,CAAC;AAChB;AAAA,QACF,OAAO;AACL,gBAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AAC5E,cAAI,CAAC,UAAU;AAEb,sBAAU,KAAK,CAAC;AAChB;AAAA,UACF;AAEA,cAAI;AACF,kBAAM,KAAK,OAAO;AAAA,cAChB,KAAK;AAAA,cACL,MAAM;AAAA,cACN,MAAM;AAAA,cACN;AAAA,cACA,MAAM,UAAU;AAAA,YAClB;AACA,sBAAU,KAAK,CAAC;AAChB;AAAA,UACF,SAAS,KAAK;AACZ,gBAAI,eAAe,eAAe;AAChC,oBAAM,iBAAiB,MAAM,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AACnF,kBAAI,gBAAgB;AAClB,sBAAM,EAAE,SAAS,SAAS,IAAI,MAAM,KAAK;AAAA,kBACvC,MAAM;AAAA,kBACN,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,0BAAU,KAAK,QAAQ;AACvB,oBAAI,YAAY,SAAS;AACvB,wBAAM,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,SAAS,KAAK;AAC5E,4BAAU,KAAK,CAAC;AAChB;AAAA,gBACF,WAAW,YAAY,UAAU;AAC/B,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,SAAS,MAAM;AAC5E,4BAAU,KAAK,CAAC;AAAA,gBAClB,WAAW,YAAY,YAAY,SAAS,UAAU,UAAU;AAE9D,wBAAM,SAAS,SAAS;AACxB,wBAAM,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,MAAM;AACpE,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,MAAM;AACnE,4BAAU,KAAK,CAAC;AAChB;AAAA,gBACF;AAAA,cAEF;AAAA,YACF,OAAO;AACL,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,MACjE;AAAA,IACF;AAGA,eAAW,KAAK,UAAU,KAAK,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG;AAC/C,WAAK,MAAM,OAAO,GAAG,CAAC;AAAA,IACxB;AAEA,SAAK,YAAW,oBAAI,KAAK,GAAE,YAAY;AACvC,UAAM,KAAK,YAAY;AAEvB,UAAM,SAAqB,EAAE,QAAQ,WAAW,OAAO;AACvD,SAAK,QAAQ,KAAK,aAAa,MAAM;AACrC,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,KAAK,SAA4C;AACrD,UAAM,KAAK,aAAa;AAExB,QAAI,SAAS;AACb,UAAM,YAAwB,CAAC;AAC/B,UAAM,SAAkB,CAAC;AAEzB,QAAI;AACF,YAAM,iBAAiB,MAAM,KAAK,OAAO,QAAQ,KAAK,KAAK;AAE3D,iBAAW,CAAC,UAAU,OAAO,KAAK,OAAO,QAAQ,cAAc,GAAG;AAEhE,YAAI,SAAS,eAAe,CAAC,QAAQ,YAAY,SAAS,QAAQ,GAAG;AACnE;AAAA,QACF;AAEA,mBAAW,CAAC,IAAI,cAAc,KAAK,OAAO,QAAQ,OAAO,GAAG;AAE1D,cAAI,SAAS,iBAAiB,eAAe,OAAO,QAAQ,eAAe;AACzE;AAAA,UACF;AAEA,cAAI;AACF,kBAAM,gBAAgB,MAAM,KAAK,MAAM
,IAAI,KAAK,OAAO,UAAU,EAAE;AAEnE,gBAAI,CAAC,eAAe;AAElB,oBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,UAAU,IAAI,cAAc;AAC7D;AAAA,YACF,WAAW,eAAe,KAAK,cAAc,IAAI;AAE/C,oBAAM,UAAU,KAAK,MAAM,KAAK,OAAK,EAAE,eAAe,YAAY,EAAE,OAAO,EAAE;AAC7E,kBAAI,SAAS;AAEX,sBAAM,EAAE,SAAS,SAAS,IAAI,MAAM,KAAK;AAAA,kBACvC;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,0BAAU,KAAK,QAAQ;AACvB,oBAAI,YAAY,UAAU;AACxB,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,UAAU,IAAI,SAAS,MAAM;AAC9D,uBAAK,QAAQ,KAAK,MAAM,OAAO,OAAK,EAAE,EAAE,eAAe,YAAY,EAAE,OAAO,GAAG;AAC/E;AAAA,gBACF,WAAW,YAAY,YAAY,SAAS,UAAU,eAAe;AACnE,wBAAM,SAAS,SAAS;AACxB,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,UAAU,IAAI,MAAM;AACrD,uBAAK,QAAQ,KAAK,MAAM,OAAO,OAAK,EAAE,EAAE,eAAe,YAAY,EAAE,OAAO,GAAG;AAC/E;AAAA,gBACF;AAAA,cAEF,OAAO;AAEL,sBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,UAAU,IAAI,cAAc;AAC7D;AAAA,cACF;AAAA,YACF;AAAA,UAEF,SAAS,KAAK;AACZ,mBAAO,KAAK,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,UACjE;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,KAAK;AACZ,aAAO,KAAK,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,IACjE;AAEA,SAAK,YAAW,oBAAI,KAAK,GAAE,YAAY;AACvC,UAAM,KAAK,YAAY;AAEvB,UAAM,SAAqB,EAAE,QAAQ,WAAW,OAAO;AACvD,SAAK,QAAQ,KAAK,aAAa,MAAM;AACrC,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,KAAK,SAAuG;AAChH,UAAM,aAAa,MAAM,KAAK,KAAK,SAAS,IAAI;AAChD,UAAM,aAAa,MAAM,KAAK,KAAK,SAAS,IAAI;AAChD,WAAO,EAAE,MAAM,YAAY,MAAM,WAAW;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,aAAa,WAAgE;AACjF,UAAM,KAAK,aAAa;AAExB,QAAI,SAAS;AACb,UAAM,YAAwB,CAAC;AAC/B,UAAM,SAAkB,CAAC;AACzB,UAAM,YAAsB,CAAC;AAE7B,aAAS,IAAI,GAAG,IAAI,KAAK,MAAM,QAAQ,KAAK;AAC1C,YAAM,QAAQ,KAAK,MAAM,CAAC;AAC1B,UAAI,CAAC,UAAU,KAAK,EAAG;AAEvB,UAAI;AACF,YAAI,MAAM,WAAW,UAAU;AAC7B,gBAAM,KAAK,OAAO,OAAO,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AAC/D,oBAAU,KAAK,CAAC;AAChB;AAAA,QACF,OAAO;AACL,gBAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AAC5E,cAAI,CAAC,UAAU;AACb,sBAAU,KAAK,CAAC;AAChB;AAAA,UACF;AAEA,cAAI;AACF,kBAAM,KAAK,OAAO;AAAA,cAChB,KAAK;AAAA,cACL,MAAM;AAAA,cACN,MAAM;AAAA,cACN;AAAA,cACA,MAAM,UAAU;AAAA,YAClB;AACA,sBAAU,KAAK,CAAC;AAChB;AAAA,UACF,SAAS,KAA
K;AACZ,gBAAI,eAAe,eAAe;AAChC,oBAAM,iBAAiB,MAAM,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,EAAE;AACnF,kBAAI,gBAAgB;AAClB,sBAAM,EAAE,SAAS,SAAS,IAAI,MAAM,KAAK;AAAA,kBACvC,MAAM;AAAA,kBACN,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AACA,0BAAU,KAAK,QAAQ;AACvB,oBAAI,YAAY,SAAS;AACvB,wBAAM,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,SAAS,KAAK;AAC5E,4BAAU,KAAK,CAAC;AAChB;AAAA,gBACF,WAAW,YAAY,UAAU;AAC/B,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,SAAS,MAAM;AAC5E,4BAAU,KAAK,CAAC;AAAA,gBAClB,WAAW,YAAY,YAAY,SAAS,UAAU,UAAU;AAC9D,wBAAM,SAAS,SAAS;AACxB,wBAAM,KAAK,OAAO,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,MAAM;AACpE,wBAAM,KAAK,MAAM,IAAI,KAAK,OAAO,MAAM,YAAY,MAAM,IAAI,MAAM;AACnE,4BAAU,KAAK,CAAC;AAChB;AAAA,gBACF;AAAA,cACF;AAAA,YACF,OAAO;AACL,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,MACjE;AAAA,IACF;AAEA,eAAW,KAAK,UAAU,KAAK,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG;AAC/C,WAAK,MAAM,OAAO,GAAG,CAAC;AAAA,IACxB;AAEA,SAAK,YAAW,oBAAI,KAAK,GAAE,YAAY;AACvC,UAAM,KAAK,YAAY;AAEvB,UAAM,SAAqB,EAAE,QAAQ,WAAW,OAAO;AACvD,SAAK,QAAQ,KAAK,aAAa,MAAM;AACrC,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,SAAqB;AACnB,WAAO;AAAA,MACL,OAAO,KAAK,MAAM;AAAA,MAClB,UAAU,KAAK;AAAA,MACf,UAAU,KAAK;AAAA,MACf,QAAQ,KAAK;AAAA,IACf;AAAA,EACF;AAAA;AAAA;AAAA,EAKA,cAAc,YAA2B;AAEvC,QAAI,OAAO,WAAW,qBAAqB,YAAY;AACrD,iBAAW,iBAAiB,UAAU,KAAK,YAAY;AACvD,iBAAW,iBAAiB,WAAW,KAAK,aAAa;AAAA,IAC3D;AAGA,QAAI,cAAc,aAAa,GAAG;AAChC,WAAK,mBAAmB,YAAY,MAAM;AACxC,YAAI,KAAK,UAAU;AACjB,eAAK,KAAK,KAAK;AAAA,QACjB;AAAA,MACF,GAAG,UAAU;AAAA,IACf;AAAA,EACF;AAAA;AAAA,EAGA,eAAqB;AACnB,SAAK,cAAc;AACnB,QAAI,OAAO,WAAW,wBAAwB,YAAY;AACxD,iBAAW,oBAAoB,UAAU,KAAK,YAAY;AAC1D,iBAAW,oBAAoB,WAAW,KAAK,aAAa;AAAA,IAC9D;AACA,QAAI,KAAK,kBAAkB;AACzB,oBAAc,KAAK,gBAAgB;AACnC,WAAK,mBAAmB;AAAA,IAC1B;AAAA,EACF;AAAA,EAEQ,eAAe,MAAY;AACjC,SAAK,WAAW;AAChB,SAAK,QAAQ,KAAK,eAAe,MAAkB;AACnD,SAAK,KAAK,KAAK;AAAA,EACjB;AAAA,EAEQ,gBAAgB,MAAY;AAClC,SAAK,WAAW;AAChB,SAAK,QAAQ,KAAK,gBAAgB,MAAkB;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAc,eACZ,YACA,IACA,OACA,QACA,QACsF;AACtF,UAAM,WAAW,KAAK,kBAAkB,IAAI,UAAU;AAEtD,QAAI,UAAU;AAGZ,YAAM,SAAS,MAAM,SAAS,IAAI,OAAO,MAAM;AAC/C,YAAM,OAAiB;AAAA,QACrB,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,cAAc,MAAM;AAAA,QACpB,eAAe,OAAO;AAAA,MACxB;AACA,UAAI,WAAW,KAAM,QAAO,EAAE,SAAS,YAAY,UAAU,KAAK;AAClE,UAAI,WAAW,MAAO,QAAO,EAAE,SAAS,SAAS,UAAU,KAAK;AAChE,UAAI,WAAW,OAAQ,QAAO,EAAE,SAAS,UAAU,UAAU,KAAK;AAElE,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,EAAE,GAAG,MAAM,OAAO,QAAQ,cAAc,OAAO,GAAG;AAAA,MAC9D;AAAA,IACF;AAGA,UAAM,eAAyB;AAAA,MAC7B,OAAO,KAAK;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,cAAc,MAAM;AAAA,MACpB,eAAe,OAAO;AAAA,IACxB;AACA,SAAK,QAAQ,KAAK,iBAAiB,YAAY;AAC/C,UAAM,OAAO,KAAK,cAAc,YAAY;AAC5C,WAAO,EAAE,SAAS,MAAM,UAAU,aAAa;AAAA,EACjD;AAAA;AAAA,EAGQ,cAAc,UAAwC;AAC5D,QAAI,OAAO,KAAK,aAAa,YAAY;AACvC,aAAO,KAAK,SAAS,QAAQ;AAAA,IAC/B;AACA,YAAQ,KAAK,UAAU;AAAA,MACrB,KAAK;AAAc,eAAO;AAAA,MAC1B,KAAK;AAAe,eAAO;AAAA,MAC3B,KAAK;AAAA,MACL;AACE,eAAO,SAAS,gBAAgB,SAAS,gBAAgB,UAAU;AAAA,IACvE;AAAA,EACF;AAAA;AAAA,EAIA,MAAc,eAA8B;AAC1C,QAAI,KAAK,OAAQ;AAEjB,UAAM,WAAW,MAAM,KAAK,MAAM,IAAI,KAAK,OAAO,SAAS,MAAM;AACjE,QAAI,UAAU;AACZ,YAAM,OAAO,KAAK,MAAM,SAAS,KAAK;AACtC,WAAK,QAAQ,CAAC,GAAG,KAAK,KAAK;AAC3B,WAAK,WAAW,KAAK;AACrB,WAAK,WAAW,KAAK;AAAA,IACvB;AAEA,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,MAAc,cAA6B;AACzC,UAAM,OAAqB;AAAA,MACzB,aAAa;AAAA,MACb,WAAW,KAAK;AAAA,MAChB,WAAW,KAAK;AAAA,MAChB,OAAO,KAAK;AAAA,IACd;AAEA,UAAM,WAA8B;AAAA,MAClC,QAAQ;AAAA,MACR,IAAI;AAAA,MACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,MAC5B,KAAK;AAAA,MACL,OAAO,KAAK,UAAU,IAAI;AAAA,IAC5B;AAEA,UAAM,KAAK,MAAM,IAAI,KAAK,OAAO,SAAS,QAAQ,QAAQ;AAAA,EAC5D;AACF;;;AC5gBO,IAAM,kBAAN,MAAsB;AAAA,EACV;AAAA,EACA;AAAA,EACA,MAAc,CAAC;AAAA;AAAA,EAGhC,YAAY,MAAa,QAAoB;AAC3C,SAAK,OAAO;AACZ,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA,EAGA,IAAI,YAAoB,IAAY,QAAuB;AACzD,SAAK,IAAI,KAAK,EAAE,MAAM,OAAO,YAAY,IAAI,OAAO,CAAC;AACrD,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,OAAO,YAAoB,IAAkB;AAC3C,SAAK,IAAI,KAAK,EAAE,MAAM,UAAU,YAAY,GAAG,CAAC;AAChD,WAAO;AAAA,EACT;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,SAAyC;AAE7C,eAAW,MAAM,KAAK,KAAK;AACzB,UAAI,GAAG,SAAS,OAAO;AAErB,cAAO,KAAK,KAAK,WAAgB,GAAG,UAAU,EAAG,IAAI,GAAG,IAAI,GAAG,MAAa;AAAA,MAC9E,OAAO;AACL,cAAM,KAAK,KAAK,WAAW,GAAG,UAAU,EAAE,OAAO,GAAG,EAAE;AAAA,MACxD;AAAA,IACF;AAGA,UAAM,QAAQ,oBAAI,IAAY;AAC9B,eAAW,MAAM,KAAK,KAAK;AACzB,YAAM,IAAI,GAAG,GAAG,UAAU,KAAK,GAAG,EAAE,EAAE;AAAA,IACxC;AAEA,UAAM,aAAa,MAAM,KAAK,OAAO;AAAA,MACnC,CAAC,UAAU,MAAM,IAAI,GAAG,MAAM,UAAU,KAAK,MAAM,EAAE,EAAE;AAAA,IACzD;AAEA,WAAO;AAAA,MACL,QAAQ,WAAW,UAAU,SAAS,IAAI,aAAa;AAAA,MACvD,QAAQ,WAAW;AAAA,MACnB,WAAW,WAAW;AAAA,IACxB;AAAA,EACF;AACF;","names":[]}
@@ -0,0 +1,32 @@
1
+ // src/bundle/ulid.ts
2
// Crockford base32 alphabet — 32 symbols; I, L, O, U are omitted to
// avoid visual/handwriting ambiguity.
const CROCKFORD_ALPHABET = "0123456789ABCDEFGHJKMNPQRSTVWXYZ";

/**
 * Encode a non-negative integer as a fixed-width Crockford base32
 * string. The width is fixed (rather than the minimal length) so
 * leading zeros are preserved, which keeps encoded timestamps
 * lexicographically sortable. Digits beyond `length` are silently
 * truncated.
 */
function encodeBase32(value, length) {
  const digits = new Array(length);
  let remaining = value;
  for (let pos = length - 1; pos >= 0; pos -= 1) {
    digits[pos] = CROCKFORD_ALPHABET[remaining % 32];
    remaining = Math.floor(remaining / 32);
  }
  return digits.join("");
}
12
+ function generateULID() {
13
+ const now = Date.now();
14
+ const timestampHigh = Math.floor(now / 16777216);
15
+ const timestampLow = now & 16777215;
16
+ const tsPart = encodeBase32(timestampHigh, 5) + encodeBase32(timestampLow, 5);
17
+ const randBytes = new Uint8Array(10);
18
+ crypto.getRandomValues(randBytes);
19
+ const rand1 = randBytes[0] * 2 ** 32 + (randBytes[1] << 24 >>> 0) + (randBytes[2] << 16) + (randBytes[3] << 8) + randBytes[4];
20
+ const rand2 = randBytes[5] * 2 ** 32 + (randBytes[6] << 24 >>> 0) + (randBytes[7] << 16) + (randBytes[8] << 8) + randBytes[9];
21
+ const randPart = encodeBase32(rand1, 8) + encodeBase32(rand2, 8);
22
+ return tsPart + randPart;
23
+ }
24
/**
 * Shape-only ULID validation: exactly 26 Crockford base32 characters
 * (digits plus uppercase letters, excluding I, L, O, U). The encoded
 * timestamp is not decoded or range-checked.
 */
function isULID(value) {
  const ulidShape = /^[0-9A-HJKMNP-TV-Z]{26}$/;
  return ulidShape.test(value);
}
27
+
28
+ export {
29
+ generateULID,
30
+ isULID
31
+ };
32
+ //# sourceMappingURL=chunk-FZU343FL.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/bundle/ulid.ts"],"sourcesContent":["/**\n * Minimal ULID generator — zero dependencies, Web Crypto API only.\n *\n *. Used by the bundle writer to generate stable opaque\n * handles for `.noydb` containers.\n *\n * **What's a ULID?** A 128-bit identifier encoded as 26 Crockford\n * base32 characters. Layout:\n *\n * ```\n * 01HYABCDEFGHJKMNPQRSTVWXYZ\n * |--------||---------------|\n * 48-bit 80-bit\n * timestamp randomness\n * ```\n *\n * The first 10 chars encode a millisecond Unix timestamp (so ULIDs\n * sort lexicographically by creation time), and the remaining 16\n * chars are random. Crockford base32 omits I/L/O/U to avoid\n * ambiguity in handwriting and URLs.\n *\n * **Why hand-roll instead of pulling in `ulid`?** The package adds\n * a dep, the implementation is ~30 lines, and the bundle module\n * is the only consumer. Adding `ulid` would also drag in its own\n * crypto polyfill that we don't need on Node 18+ or modern\n * browsers.\n *\n * **Privacy consideration:** the timestamp prefix is observable in\n * the bundle header. This is a deliberate trade-off:\n * - Pro: lexicographic sortability lets bundle adapters list\n * newest-first without an extra index.\n * - Con: a casual observer can read the bundle's creation time\n * from the handle. They cannot read it from any OTHER field\n * (the header explicitly forbids `_exported_at`), and a\n * creation timestamp is the same kind of metadata that\n * filesystem mtime would already expose for a downloaded\n * bundle. The leak is therefore equivalent to what's already\n * visible from the file's mtime — not a new exposure.\n *\n * If a future use case needs timestamp-free handles, a v2 of the\n * format could specify \"use the random portion only\" without a\n * format break — `validateBundleHeader` only checks the regex\n * shape, not the encoded timestamp.\n */\n\n/**\n * Crockford base32 alphabet — omits I, L, O, U to avoid handwriting\n * and URL-encoding ambiguity. 
32 characters covering 5 bits each.\n */\nconst CROCKFORD_ALPHABET = '0123456789ABCDEFGHJKMNPQRSTVWXYZ'\n\n/**\n * Encode a non-negative integer as a fixed-width Crockford base32\n * string. The width is fixed (not the natural log32 length) so\n * leading zeros are preserved — that's required for the timestamp\n * prefix to remain lexicographically sortable.\n *\n * Used twice: once for the 48-bit timestamp portion (10 chars) and\n * once for each 40-bit half of the randomness (8 chars × 2).\n */\nfunction encodeBase32(value: number, length: number): string {\n let out = ''\n let v = value\n for (let i = 0; i < length; i++) {\n out = CROCKFORD_ALPHABET[v % 32]! + out\n v = Math.floor(v / 32)\n }\n return out\n}\n\n/**\n * Generate a fresh ULID. Uses `crypto.getRandomValues` for the\n * randomness portion — same Web Crypto API the rest of the\n * codebase uses for IVs and salt.\n *\n * Returns a 26-character string. Calling twice in the same\n * millisecond produces two distinct ULIDs (the random portion\n * differs); ULIDs from the same millisecond are NOT guaranteed\n * to be monotonically ordered relative to each other, only\n * relative to ULIDs from a different millisecond. The bundle\n * format never relies on intra-millisecond ordering.\n */\nexport function generateULID(): string {\n const now = Date.now()\n\n // 48-bit timestamp → 10 Crockford base32 characters.\n // JavaScript's max safe integer is 2^53 - 1; Date.now() is well\n // within that range until the year ~285,000 AD. Splitting into\n // high and low 24-bit halves keeps every intermediate value\n // inside the safe-integer range and avoids any ambiguity in the\n // base32 encoder above.\n const timestampHigh = Math.floor(now / 0x1000000) // top 24 bits\n const timestampLow = now & 0xffffff // bottom 24 bits\n const tsPart =\n encodeBase32(timestampHigh, 5) + encodeBase32(timestampLow, 5)\n\n // 80-bit randomness → 16 Crockford base32 characters. 
Split into\n // two 40-bit halves so each fits in JavaScript's safe-integer\n // range (53 bits) and the base32 encoder doesn't have to deal\n // with bigints.\n const randBytes = new Uint8Array(10)\n crypto.getRandomValues(randBytes)\n\n // First 5 bytes (40 bits) → 8 Crockford base32 characters.\n // Reconstruct the 40-bit integer from bytes in big-endian order.\n // Multiplication by 2^32 (instead of bit-shift) avoids JavaScript's\n // 32-bit integer cast on the high byte.\n const rand1 =\n randBytes[0]! * 2 ** 32 +\n (randBytes[1]! << 24 >>> 0) +\n (randBytes[2]! << 16) +\n (randBytes[3]! << 8) +\n randBytes[4]!\n // Same for the second 5 bytes.\n const rand2 =\n randBytes[5]! * 2 ** 32 +\n (randBytes[6]! << 24 >>> 0) +\n (randBytes[7]! << 16) +\n (randBytes[8]! << 8) +\n randBytes[9]!\n const randPart = encodeBase32(rand1, 8) + encodeBase32(rand2, 8)\n\n return tsPart + randPart\n}\n\n/**\n * Validate that a string is a syntactically well-formed ULID. Used\n * by the bundle header validator. 
Does NOT verify that the\n * timestamp portion decodes to a sensible date — the format only\n * cares about the encoding shape.\n */\nexport function isULID(value: string): boolean {\n return /^[0-9A-HJKMNP-TV-Z]{26}$/.test(value)\n}\n"],"mappings":";AAiDA,IAAM,qBAAqB;AAW3B,SAAS,aAAa,OAAe,QAAwB;AAC3D,MAAI,MAAM;AACV,MAAI,IAAI;AACR,WAAS,IAAI,GAAG,IAAI,QAAQ,KAAK;AAC/B,UAAM,mBAAmB,IAAI,EAAE,IAAK;AACpC,QAAI,KAAK,MAAM,IAAI,EAAE;AAAA,EACvB;AACA,SAAO;AACT;AAcO,SAAS,eAAuB;AACrC,QAAM,MAAM,KAAK,IAAI;AAQrB,QAAM,gBAAgB,KAAK,MAAM,MAAM,QAAS;AAChD,QAAM,eAAe,MAAM;AAC3B,QAAM,SACJ,aAAa,eAAe,CAAC,IAAI,aAAa,cAAc,CAAC;AAM/D,QAAM,YAAY,IAAI,WAAW,EAAE;AACnC,SAAO,gBAAgB,SAAS;AAMhC,QAAM,QACJ,UAAU,CAAC,IAAK,KAAK,MACpB,UAAU,CAAC,KAAM,OAAO,MACxB,UAAU,CAAC,KAAM,OACjB,UAAU,CAAC,KAAM,KAClB,UAAU,CAAC;AAEb,QAAM,QACJ,UAAU,CAAC,IAAK,KAAK,MACpB,UAAU,CAAC,KAAM,OAAO,MACxB,UAAU,CAAC,KAAM,OACjB,UAAU,CAAC,KAAM,KAClB,UAAU,CAAC;AACb,QAAM,WAAW,aAAa,OAAO,CAAC,IAAI,aAAa,OAAO,CAAC;AAE/D,SAAO,SAAS;AAClB;AAQO,SAAS,OAAO,OAAwB;AAC7C,SAAO,2BAA2B,KAAK,KAAK;AAC9C;","names":[]}
@@ -0,0 +1,354 @@
1
+ import {
2
+ BundleIntegrityError
3
+ } from "./chunk-ACLDOTNQ.js";
4
+
5
+ // src/bundle/format.ts
6
// ─── .noydb container format constants ───────────────────────────
// Magic prefix: the ASCII bytes "NDB1".
const NOYDB_BUNDLE_MAGIC = new Uint8Array([78, 68, 66, 49]);
const NOYDB_BUNDLE_PREFIX_BYTES = 10;
const NOYDB_BUNDLE_FORMAT_VERSION = 1;
const FLAG_COMPRESSED = 1;
const FLAG_HAS_INTEGRITY_HASH = 2;
const COMPRESSION_NONE = 0;
const COMPRESSION_GZIP = 1;
const COMPRESSION_BROTLI = 2;
// Minimum-disclosure whitelist — any header key outside this set is
// rejected outright.
const ALLOWED_HEADER_KEYS = /* @__PURE__ */ new Set([
  "formatVersion",
  "handle",
  "bodyBytes",
  "bodySha256"
]);

/**
 * Validate a parsed .noydb bundle header object.
 *
 * Throws an Error with a descriptive message when the input is not a
 * JSON object, contains a key outside the whitelist, declares an
 * unsupported format version, has a handle that is not a 26-char
 * Crockford base32 ULID, a bodyBytes that is not a non-negative
 * integer, or a bodySha256 that is not 64 lowercase hex characters.
 * Returns undefined on success.
 */
function validateBundleHeader(parsed) {
  if (typeof parsed !== "object" || parsed === null) {
    throw new Error(
      `.noydb bundle header must be a JSON object, got ${parsed === null ? "null" : typeof parsed}`
    );
  }
  // Reject the first key (in enumeration order) outside the whitelist.
  const forbidden = Object.keys(parsed).find((key) => !ALLOWED_HEADER_KEYS.has(key));
  if (forbidden !== undefined) {
    throw new Error(
      `.noydb bundle header contains forbidden key "${forbidden}". Only minimum-disclosure fields are allowed: ${[...ALLOWED_HEADER_KEYS].join(", ")}.`
    );
  }
  const h = parsed;
  const version = h["formatVersion"];
  if (typeof version !== "number" || version !== NOYDB_BUNDLE_FORMAT_VERSION) {
    throw new Error(
      `.noydb bundle header.formatVersion must be ${NOYDB_BUNDLE_FORMAT_VERSION}, got ${String(h["formatVersion"])}. The reader does not support forward-compat versions; upgrade the reader to handle newer bundles.`
    );
  }
  const handle = h["handle"];
  if (typeof handle !== "string" || !/^[0-9A-HJKMNP-TV-Z]{26}$/.test(handle)) {
    throw new Error(
      `.noydb bundle header.handle must be a 26-character Crockford base32 ULID, got ${typeof h["handle"] === "string" ? `"${h["handle"]}"` : String(h["handle"])}.`
    );
  }
  const bodyBytes = h["bodyBytes"];
  if (typeof bodyBytes !== "number" || !Number.isInteger(bodyBytes) || bodyBytes < 0) {
    throw new Error(
      `.noydb bundle header.bodyBytes must be a non-negative integer, got ${String(h["bodyBytes"])}.`
    );
  }
  const bodySha256 = h["bodySha256"];
  if (typeof bodySha256 !== "string" || !/^[0-9a-f]{64}$/.test(bodySha256)) {
    throw new Error(
      `.noydb bundle header.bodySha256 must be a 64-character lowercase hex string, got ${typeof h["bodySha256"] === "string" ? `"${h["bodySha256"]}"` : String(h["bodySha256"])}.`
    );
  }
}
55
/**
 * Serialize a bundle header to UTF-8 JSON bytes. The header is
 * validated first, and the output object is rebuilt field-by-field
 * (in the canonical key order) so no extra properties can leak into
 * the serialized form.
 */
function encodeBundleHeader(header) {
  validateBundleHeader(header);
  const canonical = {
    formatVersion: header.formatVersion,
    handle: header.handle,
    bodyBytes: header.bodyBytes,
    bodySha256: header.bodySha256
  };
  return new TextEncoder().encode(JSON.stringify(canonical));
}
65
/**
 * Parse and validate a bundle header from its UTF-8 JSON bytes.
 * Decoding uses `fatal: true` so malformed UTF-8 throws rather than
 * producing replacement characters; JSON parse failures are wrapped
 * with a bundle-specific message. Returns the validated header.
 */
function decodeBundleHeader(bytes) {
  const text = new TextDecoder("utf-8", { fatal: true }).decode(bytes);
  let header;
  try {
    header = JSON.parse(text);
  } catch (err) {
    throw new Error(
      `.noydb bundle header is not valid JSON: ${err.message}`
    );
  }
  validateBundleHeader(header);
  return header;
}
78
/**
 * Read a big-endian unsigned 32-bit integer from `bytes` starting at
 * `offset`. The `>>> 0` on the high byte keeps the result unsigned
 * even when the top bit is set; the lower three bytes occupy disjoint
 * bit ranges, so OR-ing them is equivalent to addition.
 */
function readUint32BE(bytes, offset) {
  const high = bytes[offset] << 24 >>> 0;
  const low =
    (bytes[offset + 1] << 16) |
    (bytes[offset + 2] << 8) |
    bytes[offset + 3];
  return high + low;
}
81
/**
 * Write `value` into `bytes` at `offset` as a big-endian unsigned
 * 32-bit integer (most significant byte first). Mutates `bytes` in
 * place; returns nothing.
 */
function writeUint32BE(bytes, offset, value) {
  for (let i = 0; i < 4; i++) {
    bytes[offset + i] = (value >>> ((3 - i) * 8)) & 255;
  }
}
87
/**
 * Check whether `bytes` begins with the .noydb magic prefix ("NDB1").
 * Returns false for inputs shorter than the magic itself.
 */
function hasNoydbBundleMagic(bytes) {
  if (bytes.length < NOYDB_BUNDLE_MAGIC.length) return false;
  return NOYDB_BUNDLE_MAGIC.every((expected, i) => bytes[i] === expected);
}
94
+
95
+ // src/bundle/bundle.ts
96
// Memoized probe result: null means "not probed yet".
let cachedBrotliSupport = null;

/**
 * Probe (at most once per process, unless reset) whether this
 * runtime's CompressionStream accepts the "br" (Brotli) format.
 * The probe constructs a throwaway stream and treats any exception
 * as "unsupported".
 */
function supportsBrotliCompression() {
  if (cachedBrotliSupport === null) {
    try {
      new CompressionStream("br");
      cachedBrotliSupport = true;
    } catch {
      cachedBrotliSupport = false;
    }
  }
  return cachedBrotliSupport;
}

/** Drop the memoized probe result so the next call re-probes (test hook). */
function resetBrotliSupportCache() {
  cachedBrotliSupport = null;
}
110
/**
 * Map a compression option ('none' | 'gzip' | 'brotli' | 'auto',
 * default 'auto') to the on-disk format byte and the
 * CompressionStream format string (null when uncompressed).
 *
 * 'brotli' throws when the runtime lacks Brotli support; 'auto'
 * silently prefers Brotli and falls back to gzip.
 */
function selectCompression(option) {
  switch (option ?? "auto") {
    case "none":
      return { format: COMPRESSION_NONE, streamFormat: null };
    case "gzip":
      return { format: COMPRESSION_GZIP, streamFormat: "gzip" };
    case "brotli":
      if (!supportsBrotliCompression()) {
        throw new Error(
          `writeNoydbBundle({ compression: 'brotli' }) is not supported on this runtime. Brotli requires Node 22+, Chrome 124+, or Firefox 122+. Use { compression: 'auto' } to fall back to gzip silently, or { compression: 'gzip' } to be explicit.`
        );
      }
      return { format: COMPRESSION_BROTLI, streamFormat: "br" };
    default:
      // 'auto': best available — Brotli when supported, else gzip.
      return supportsBrotliCompression()
        ? { format: COMPRESSION_BROTLI, streamFormat: "br" }
        : { format: COMPRESSION_GZIP, streamFormat: "gzip" };
  }
}
127
/**
 * Push `input` (a Uint8Array) through a TransformStream (e.g. a
 * CompressionStream) and collect the transformed output into a
 * single contiguous Uint8Array.
 */
async function pumpThroughStream(input, stream) {
  const reader = new Blob([input]).stream().pipeThrough(stream).getReader();

  // Drain the readable side, tracking total length as we go.
  const collected = [];
  let totalLength = 0;
  let step = await reader.read();
  while (!step.done) {
    if (step.value) {
      collected.push(step.value);
      totalLength += step.value.length;
    }
    step = await reader.read();
  }

  // Stitch the chunks into one buffer.
  const result = new Uint8Array(totalLength);
  let cursor = 0;
  for (const chunk of collected) {
    result.set(chunk, cursor);
    cursor += chunk.length;
  }
  return result;
}
148
+ async function sha256Hex(bytes) {
149
+ const copy = new Uint8Array(bytes.length);
150
+ copy.set(bytes);
151
+ const digest = await crypto.subtle.digest("SHA-256", copy);
152
+ const view = new Uint8Array(digest);
153
+ let hex = "";
154
+ for (let i = 0; i < view.length; i++) {
155
+ hex += view[i].toString(16).padStart(2, "0");
156
+ }
157
+ return hex;
158
+ }
159
/** Concatenate an array of Uint8Arrays into one contiguous Uint8Array. */
function concatBytes(parts) {
  const total = parts.reduce((sum, part) => sum + part.length, 0);
  const joined = new Uint8Array(total);
  let cursor = 0;
  for (const part of parts) {
    joined.set(part, cursor);
    cursor += part.length;
  }
  return joined;
}
170
/**
 * Re-wrap the backup keyrings for export recipients.
 *
 * No-op (dump passes through untouched) when neither `exportPassphrase`
 * nor `recipients` is set. `exportPassphrase` alone targets the exporting
 * user (vault.userId / vault.role); explicit `recipients` wins otherwise.
 */
async function applyRecipientRewrap(vault, dumpJson, opts) {
  const wantsRewrap =
    opts.exportPassphrase !== void 0 || opts.recipients !== void 0;
  if (!wantsRewrap) return dumpJson;
  const recipients = opts.recipients ?? [
    { id: vault.userId, passphrase: opts.exportPassphrase, role: vault.role }
  ];
  const backup = JSON.parse(dumpJson);
  backup.keyrings = await vault.buildBundleRecipientKeyrings(recipients);
  return JSON.stringify(backup);
}
186
/**
 * Apply collection-name and modified-since slice filters to a dump JSON
 * string. Returns the input string unchanged when neither filter is set.
 * Records without a parseable `_ts` are dropped when `since` is active.
 */
function applySliceFilters(dumpJson, opts) {
  const wanted = opts.collections ? new Set(opts.collections) : null;
  const cutoffMs = opts.since !== void 0 ? new Date(opts.since).getTime() : null;
  if (wanted === null && cutoffMs === null) return dumpJson;
  const backup = JSON.parse(dumpJson);
  if (backup.collections && typeof backup.collections === "object") {
    const filtered = {};
    for (const [name, records] of Object.entries(backup.collections)) {
      if (wanted !== null && !wanted.has(name)) continue;
      if (cutoffMs === null) {
        // Only the collection filter is active; keep records untouched.
        filtered[name] = records;
        continue;
      }
      const survivors = {};
      for (const [id, env] of Object.entries(records)) {
        const ts = env._ts ? new Date(env._ts).getTime() : NaN;
        if (Number.isFinite(ts) && ts >= cutoffMs) {
          survivors[id] = env;
        }
      }
      filtered[name] = survivors;
    }
    backup.collections = filtered;
  }
  return JSON.stringify(backup);
}
212
/**
 * Apply filters that need record plaintext: a `tierAtMost` ceiling on the
 * envelope's `_tier` (missing tier counts as 0) and/or a caller-supplied
 * async `where` predicate that sees the decrypted record plus
 * `{ collection, id }`. No-op when neither filter is set or the dump has
 * no collections object.
 */
async function applyPlaintextFilters(vault, dumpJson, opts) {
  if (opts.where === void 0 && opts.tierAtMost === void 0) return dumpJson;
  const backup = JSON.parse(dumpJson);
  if (!backup.collections || typeof backup.collections !== "object") {
    return dumpJson;
  }
  const { tierAtMost, where } = opts;
  const filtered = {};
  for (const [collName, records] of Object.entries(backup.collections)) {
    const survivors = {};
    for (const [id, env] of Object.entries(records)) {
      // Cheap tier check first; only decrypt when a predicate is present.
      if (tierAtMost !== void 0 && (env._tier ?? 0) > tierAtMost) continue;
      if (where !== void 0) {
        const record = await vault._decryptEnvelopeForBundleFilter(env, collName);
        const keep = await where(record, { collection: collName, id });
        if (!keep) continue;
      }
      survivors[id] = env;
    }
    filtered[collName] = survivors;
  }
  backup.collections = filtered;
  return JSON.stringify(backup);
}
245
/**
 * Serialize a vault into the `.noydb` container format:
 * 'NDB1' magic, flags byte, compression byte, big-endian header length,
 * minimum-disclosure JSON header, then the (optionally compressed) body.
 *
 * Throws when both `exportPassphrase` and `recipients` are supplied —
 * they are mutually exclusive ways to choose bundle recipients.
 */
async function writeNoydbBundle(vault, opts = {}) {
  if (opts.exportPassphrase !== void 0 && opts.recipients !== void 0) {
    throw new Error(
      "writeNoydbBundle: pass either exportPassphrase or recipients, not both"
    );
  }
  const handle = await vault.getBundleHandle();
  // Dump, then run the three filter stages in order: recipient rewrap,
  // plaintext-level filters (tier/where), then collection/since slicing.
  let dumpJson = await vault.dump();
  dumpJson = await applyRecipientRewrap(vault, dumpJson, opts);
  dumpJson = await applyPlaintextFilters(vault, dumpJson, opts);
  dumpJson = applySliceFilters(dumpJson, opts);
  const dumpBytes = new TextEncoder().encode(dumpJson);
  const { format, streamFormat } = selectCompression(opts.compression);
  const body =
    streamFormat === null
      ? dumpBytes
      : await pumpThroughStream(dumpBytes, new CompressionStream(streamFormat));
  const header = {
    formatVersion: NOYDB_BUNDLE_FORMAT_VERSION,
    handle,
    bodyBytes: body.length,
    bodySha256: await sha256Hex(body)
  };
  const headerBytes = encodeBundleHeader(header);
  // 10-byte fixed prefix: 4 magic + 1 flags + 1 compr + 4 header length.
  const prefix = new Uint8Array(NOYDB_BUNDLE_PREFIX_BYTES);
  prefix.set(NOYDB_BUNDLE_MAGIC, 0);
  prefix[4] = (streamFormat === null ? 0 : FLAG_COMPRESSED) | FLAG_HAS_INTEGRITY_HASH;
  prefix[5] = format;
  writeUint32BE(prefix, 6, headerBytes.length);
  return concatBytes([prefix, headerBytes, body]);
}
274
/**
 * Validate the fixed 10-byte `.noydb` prefix and decode the JSON header.
 *
 * Returns `{ header, bodyOffset, algo, flags }` where `bodyOffset` is the
 * index of the first body byte. Throws descriptive errors for missing
 * magic, truncated input, or an unknown compression byte.
 */
function parsePrefixAndHeader(bytes) {
  if (!hasNoydbBundleMagic(bytes)) {
    const firstFour = [...bytes.slice(0, 4)]
      .map((b) => b.toString(16).padStart(2, "0"))
      .join(" ");
    throw new Error(
      `Not a .noydb bundle: missing 'NDB1' magic prefix. The first 4 bytes are ${firstFour}.`
    );
  }
  if (bytes.length < NOYDB_BUNDLE_PREFIX_BYTES) {
    throw new Error(
      `Truncated .noydb bundle: file is only ${bytes.length} bytes, which is less than the ${NOYDB_BUNDLE_PREFIX_BYTES}-byte fixed prefix.`
    );
  }
  const flags = bytes[4];
  const algo = bytes[5];
  const knownAlgo =
    algo === COMPRESSION_NONE || algo === COMPRESSION_GZIP || algo === COMPRESSION_BROTLI;
  if (!knownAlgo) {
    throw new Error(
      `.noydb bundle declares unknown compression algorithm ${algo}. Known values: 0 (none), 1 (gzip), 2 (brotli).`
    );
  }
  const headerLength = readUint32BE(bytes, 6);
  const bodyOffset = NOYDB_BUNDLE_PREFIX_BYTES + headerLength;
  if (bodyOffset > bytes.length) {
    throw new Error(
      `Truncated .noydb bundle: declared header length ${headerLength} would extend past end of file (${bytes.length} bytes).`
    );
  }
  const header = decodeBundleHeader(bytes.slice(NOYDB_BUNDLE_PREFIX_BYTES, bodyOffset));
  return { header, bodyOffset, algo, flags };
}
303
/**
 * Decode only the unencrypted header of a `.noydb` bundle, without
 * verifying or decompressing the body.
 */
function readNoydbBundleHeader(bytes) {
  const { header } = parsePrefixAndHeader(bytes);
  return header;
}
306
+ async function readNoydbBundle(bytes) {
307
+ const { header, bodyOffset, algo } = parsePrefixAndHeader(bytes);
308
+ const body = bytes.slice(bodyOffset);
309
+ if (body.length !== header.bodyBytes) {
310
+ throw new BundleIntegrityError(
311
+ `body length ${body.length} does not match header.bodyBytes ${header.bodyBytes}. The bundle was truncated or padded between write and read.`
312
+ );
313
+ }
314
+ const actualSha = await sha256Hex(body);
315
+ if (actualSha !== header.bodySha256) {
316
+ throw new BundleIntegrityError(
317
+ `body sha256 ${actualSha} does not match header.bodySha256 ${header.bodySha256}. The bundle bytes were modified between write and read \u2014 refuse to decompress.`
318
+ );
319
+ }
320
+ let dumpBytes;
321
+ if (algo === COMPRESSION_NONE) {
322
+ dumpBytes = body;
323
+ } else {
324
+ const streamFormat = algo === COMPRESSION_BROTLI ? "br" : "gzip";
325
+ try {
326
+ dumpBytes = await pumpThroughStream(body, new DecompressionStream(streamFormat));
327
+ } catch (err) {
328
+ throw new BundleIntegrityError(
329
+ `decompression failed: ${err.message}. The bundle passed the integrity hash but the body is not valid ${streamFormat} data \u2014 likely a producer bug.`
330
+ );
331
+ }
332
+ }
333
+ const dumpJson = new TextDecoder("utf-8", { fatal: true }).decode(dumpBytes);
334
+ return { header, dumpJson };
335
+ }
336
+
337
+ export {
338
+ NOYDB_BUNDLE_MAGIC,
339
+ NOYDB_BUNDLE_PREFIX_BYTES,
340
+ NOYDB_BUNDLE_FORMAT_VERSION,
341
+ FLAG_COMPRESSED,
342
+ FLAG_HAS_INTEGRITY_HASH,
343
+ COMPRESSION_NONE,
344
+ COMPRESSION_GZIP,
345
+ COMPRESSION_BROTLI,
346
+ validateBundleHeader,
347
+ encodeBundleHeader,
348
+ hasNoydbBundleMagic,
349
+ resetBrotliSupportCache,
350
+ writeNoydbBundle,
351
+ readNoydbBundleHeader,
352
+ readNoydbBundle
353
+ };
354
+ //# sourceMappingURL=chunk-GJILMRPO.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/bundle/format.ts","../src/bundle/bundle.ts"],"sourcesContent":["/**\n * `.noydb` container format — byte layout, header schema, validators.\n *\n *. Wraps a `vault.dump()` JSON string in a thin\n * binary container with a magic-byte prefix, a minimum-disclosure\n * unencrypted header, and a compressed body.\n *\n * **Byte layout** (read in order from offset 0):\n *\n * ```\n * +--------+--------+--------+--------+\n * | N=78 | D=68 | B=66 | 1=49 | Magic 'NDB1' (4 bytes)\n * +--------+--------+--------+--------+\n * | flags | compr | header_length (uint32 BE) |\n * +--------+--------+--------+--------+--------+--------+--------+\n * | header_length bytes of UTF-8 JSON header ...\n * +--------+--------+\n * | compressed body bytes ...\n * ```\n *\n * Total fixed prefix before the header JSON is **10 bytes**:\n * - 4 bytes magic\n * - 1 byte flags\n * - 1 byte compression algorithm\n * - 4 bytes header length (uint32 big-endian)\n *\n * **Why a binary container** at all? `vault.dump()` already\n * produces a JSON string with encrypted records inside. Wrapping it\n * again seems redundant — but the wrap is what makes the file safe\n * to drop into cloud storage (Drive, Dropbox, iCloud) without\n * leaking the vault name and exporter identity through the\n * cloud's metadata API. The minimum-disclosure header is the only\n * thing visible without downloading and decompressing the body.\n * The dump JSON inside the body still contains the original\n * metadata, but that's only readable by someone who already has the\n * file bytes — the same person who could read the encrypted records\n * with the right passphrase.\n *\n * **Why minimum disclosure** in the header? Because consumers will\n * inevitably store these in services where the filename, file size,\n * and any unencrypted metadata are indexed for search. 
A field like\n * `vault: \"Acme Corp\"` would let an attacker (or a curious\n * cloud admin) enumerate which compartments exist and who exported\n * them, even with zero access to the encrypted body. The header\n * carries only what's needed to identify the file as a NOYDB\n * bundle and verify its integrity — nothing about the contents.\n */\n\n/** Magic bytes 'NDB1' (ASCII), identifying a NOYDB bundle. */\nexport const NOYDB_BUNDLE_MAGIC = new Uint8Array([0x4e, 0x44, 0x42, 0x31])\n\n/** Total fixed prefix before the header JSON: 4+1+1+4 bytes. */\nexport const NOYDB_BUNDLE_PREFIX_BYTES = 10\n\n/** Current bundle format version. Bumped on layout changes. */\nexport const NOYDB_BUNDLE_FORMAT_VERSION = 1\n\n/**\n * Bitfield interpretation of the flags byte.\n *\n * Bit 0 — body is compressed (0 = raw, 1 = compressed)\n * Bit 1 — header carries an integrity hash over the body bytes\n * Bits 2-7 — reserved, must be 0 in\n */\nexport const FLAG_COMPRESSED = 0b0000_0001\nexport const FLAG_HAS_INTEGRITY_HASH = 0b0000_0010\n\n/**\n * Compression algorithm encoding for the byte at offset 5.\n *\n * `none` is admitted for round-trip testing and for callers that\n * want to bundle without compression (e.g. when piping into a\n * separately compressed transport). `gzip` is the universally\n * available baseline (Node 18+, all modern browsers). `brotli` is\n * preferred when the runtime supports it — typically 30-50% smaller\n * for JSON payloads — but Node 22+ / Chrome 124+ / Firefox 122+\n * are required, so the writer feature-detects at runtime and falls\n * back to gzip. The reader must handle all three.\n */\nexport const COMPRESSION_NONE = 0\nexport const COMPRESSION_GZIP = 1\nexport const COMPRESSION_BROTLI = 2\n\nexport type CompressionAlgo = 0 | 1 | 2\n\n/**\n * The unencrypted header carried in every `.noydb` bundle.\n *\n * **Minimum-disclosure rules:** these are the ONLY allowed keys.\n * Any other key in a parsed header causes\n * `validateBundleHeader` to throw. 
The set is kept short to\n * minimize attack surface from cloud-storage metadata indexing —\n * see the file-level doc comment for the rationale.\n *\n * Forbidden in particular:\n * - `vault` / `_compartment` — would leak the tenant name\n * - `exporter` / `_exported_by` — would leak user identity\n * - `timestamp` / `_exported_at` — would leak activity timing\n * - `kdfParams` / salt fields — would leak crypto config that\n * could narrow brute-force search space\n * - any field starting with `_` (reserved by the dump format)\n */\nexport interface NoydbBundleHeader {\n /** Bundle format version — bumped on layout changes. */\n readonly formatVersion: number\n /**\n * Opaque ULID identifier — generated once per vault and\n * stable across re-exports of the same vault. Does not\n * leak any information about contents (the timestamp prefix is\n * just monotonicity for sortability, not exporter activity —\n * see `bundle/ulid.ts` for the design notes).\n */\n readonly handle: string\n /** Compressed body length in bytes. Lets readers verify completeness without decompressing. */\n readonly bodyBytes: number\n /** SHA-256 of the compressed body bytes (lowercase hex). Lets readers verify integrity without decompressing. */\n readonly bodySha256: string\n}\n\n/**\n * Allowlist of header keys. Any key not in this set is forbidden\n * and causes `validateBundleHeader` to throw. Kept as a Set for\n * O(1) lookup; the validator iterates over the parsed header and\n * checks each key against this set.\n */\nconst ALLOWED_HEADER_KEYS: ReadonlySet<string> = new Set([\n 'formatVersion',\n 'handle',\n 'bodyBytes',\n 'bodySha256',\n])\n\n/**\n * Validate a parsed bundle header. 
Throws on any deviation from\n * the minimum-disclosure schema:\n *\n * - Missing required field\n * - Wrong type for any field\n * - Any extra key not in `ALLOWED_HEADER_KEYS`\n * - Unsupported `formatVersion`\n * - Negative or non-integer `bodyBytes`\n * - Malformed `handle` (must be 26-char Crockford base32)\n * - Malformed `bodySha256` (must be 64-char lowercase hex)\n *\n * The error messages name the offending field so consumers can\n * fix the producer rather than the reader.\n */\nexport function validateBundleHeader(\n parsed: unknown,\n): asserts parsed is NoydbBundleHeader {\n if (parsed === null || typeof parsed !== 'object') {\n throw new Error(\n `.noydb bundle header must be a JSON object, got ${parsed === null ? 'null' : typeof parsed}`,\n )\n }\n // Disallow any unknown key — minimum disclosure means we reject\n // forward-compat extension keys at the format layer; new fields\n // require a format version bump and a new validator.\n for (const key of Object.keys(parsed)) {\n if (!ALLOWED_HEADER_KEYS.has(key)) {\n throw new Error(\n `.noydb bundle header contains forbidden key \"${key}\". ` +\n `Only minimum-disclosure fields are allowed: ` +\n `${[...ALLOWED_HEADER_KEYS].join(', ')}.`,\n )\n }\n }\n const h = parsed as Record<string, unknown>\n if (typeof h['formatVersion'] !== 'number' || h['formatVersion'] !== NOYDB_BUNDLE_FORMAT_VERSION) {\n throw new Error(\n `.noydb bundle header.formatVersion must be ${NOYDB_BUNDLE_FORMAT_VERSION}, ` +\n `got ${String(h['formatVersion'])}. The reader does not support ` +\n `forward-compat versions; upgrade the reader to handle newer bundles.`,\n )\n }\n if (typeof h['handle'] !== 'string' || !/^[0-9A-HJKMNP-TV-Z]{26}$/.test(h['handle'])) {\n throw new Error(\n `.noydb bundle header.handle must be a 26-character Crockford base32 ULID, ` +\n `got ${typeof h['handle'] === 'string' ? 
`\"${h['handle']}\"` : String(h['handle'])}.`,\n )\n }\n if (typeof h['bodyBytes'] !== 'number' || !Number.isInteger(h['bodyBytes']) || h['bodyBytes'] < 0) {\n throw new Error(\n `.noydb bundle header.bodyBytes must be a non-negative integer, ` +\n `got ${String(h['bodyBytes'])}.`,\n )\n }\n if (typeof h['bodySha256'] !== 'string' || !/^[0-9a-f]{64}$/.test(h['bodySha256'])) {\n throw new Error(\n `.noydb bundle header.bodySha256 must be a 64-character lowercase hex string, ` +\n `got ${typeof h['bodySha256'] === 'string' ? `\"${h['bodySha256']}\"` : String(h['bodySha256'])}.`,\n )\n }\n}\n\n/**\n * Encode a header object to UTF-8 JSON bytes after validating\n * minimum disclosure. Used by the writer to serialize the header\n * region of the container.\n */\nexport function encodeBundleHeader(header: NoydbBundleHeader): Uint8Array {\n validateBundleHeader(header)\n // Stable key ordering — JSON.stringify with no replacer uses\n // insertion order, which is fine here because we control the\n // object construction. Stable ordering means two bundles with\n // identical contents produce byte-identical headers.\n const json = JSON.stringify({\n formatVersion: header.formatVersion,\n handle: header.handle,\n bodyBytes: header.bodyBytes,\n bodySha256: header.bodySha256,\n })\n return new TextEncoder().encode(json)\n}\n\n/**\n * Parse a bundle header from its UTF-8 JSON bytes. 
Throws on\n * invalid JSON or any minimum-disclosure violation.\n */\nexport function decodeBundleHeader(bytes: Uint8Array): NoydbBundleHeader {\n const json = new TextDecoder('utf-8', { fatal: true }).decode(bytes)\n let parsed: unknown\n try {\n parsed = JSON.parse(json)\n } catch (err) {\n throw new Error(\n `.noydb bundle header is not valid JSON: ${(err as Error).message}`,\n )\n }\n validateBundleHeader(parsed)\n return parsed\n}\n\n/**\n * Read a uint32 from `bytes` at `offset` in big-endian byte order.\n * No bounds check — callers must guarantee `offset + 4 <= bytes.length`.\n * Used to decode the header length field; kept inline so the parser\n * doesn't depend on DataView allocation per call.\n */\nexport function readUint32BE(bytes: Uint8Array, offset: number): number {\n return (\n (bytes[offset]! << 24 >>> 0) +\n (bytes[offset + 1]! << 16) +\n (bytes[offset + 2]! << 8) +\n bytes[offset + 3]!\n )\n}\n\n/**\n * Write a uint32 to `bytes` at `offset` in big-endian byte order.\n * No bounds check — callers must guarantee `offset + 4 <= bytes.length`.\n */\nexport function writeUint32BE(bytes: Uint8Array, offset: number, value: number): void {\n bytes[offset] = (value >>> 24) & 0xff\n bytes[offset + 1] = (value >>> 16) & 0xff\n bytes[offset + 2] = (value >>> 8) & 0xff\n bytes[offset + 3] = value & 0xff\n}\n\n/**\n * Verify the magic prefix of a bundle. Returns true if the first\n * 4 bytes match `NDB1`. Used by readers as a fast file-type check\n * before any further parsing.\n */\nexport function hasNoydbBundleMagic(bytes: Uint8Array): boolean {\n if (bytes.length < NOYDB_BUNDLE_MAGIC.length) return false\n for (let i = 0; i < NOYDB_BUNDLE_MAGIC.length; i++) {\n if (bytes[i] !== NOYDB_BUNDLE_MAGIC[i]) return false\n }\n return true\n}\n","/**\n * `.noydb` container primitives — write, read, header-only read.\n *\n *. 
Wraps a `vault.dump()` JSON string in the\n * binary container described in `format.ts`.\n *\n * **Three primitives:**\n *\n * - `writeNoydbBundle(vault, opts?)` — produces the\n * full container bytes ready to write to disk or upload\n * - `readNoydbBundleHeader(bytes)` — parses just the header\n * without decompressing the body, fast file-type and\n * metadata read for cloud listing UIs\n * - `readNoydbBundle(bytes)` — full read: validates magic,\n * header, integrity hash, and decompresses the body to\n * return the original `dump()` JSON string for use with\n * `vault.load()`\n *\n * **Compression strategy:** brotli when available (Node 22+,\n * Chrome 124+, Firefox 122+), gzip fallback elsewhere. The\n * algorithm choice is encoded in the format byte at offset 5,\n * so readers handle either transparently. Brotli wins ~30-50%\n * on JSON payloads with repeated keys (which vault dumps\n * are).\n *\n * **Why split read/load?** `readNoydbBundle` returns the\n * *unwrapped JSON string*, not a Vault object. The caller\n * is responsible for piping that JSON into\n * `vault.load(json, passphrase)`. Splitting the layers\n * keeps the bundle module free of any crypto/passphrase\n * concerns — it's purely a format layer. 
The same `readNoydbBundle`\n * call can also feed verification tools, format inspectors, or\n * archive utilities that don't care about decryption.\n */\n\nimport {\n COMPRESSION_BROTLI,\n COMPRESSION_GZIP,\n COMPRESSION_NONE,\n FLAG_COMPRESSED,\n FLAG_HAS_INTEGRITY_HASH,\n NOYDB_BUNDLE_FORMAT_VERSION,\n NOYDB_BUNDLE_MAGIC,\n NOYDB_BUNDLE_PREFIX_BYTES,\n decodeBundleHeader,\n encodeBundleHeader,\n hasNoydbBundleMagic,\n readUint32BE,\n writeUint32BE,\n type CompressionAlgo,\n type NoydbBundleHeader,\n} from './format.js'\nimport { BundleIntegrityError } from '../errors.js'\nimport type { Vault } from '../vault.js'\nimport type { BundleRecipient } from '../team/keyring.js'\n\n/**\n * Options accepted by `writeNoydbBundle`.\n *\n * - `compression: 'auto'` (default) — try brotli, fall back to gzip\n * - `compression: 'brotli'` — force brotli, throw if unsupported\n * - `compression: 'gzip'` — force gzip\n * - `compression: 'none'` — no compression (round-trip testing only)\n *\n * **Slice filtering** (added in ):\n * - `collections` — allowlist of collection names to include. Internal\n * collections (keyrings, ledger) and excluded user collections are\n * dropped from the bundle. Records inside included collections are\n * carried through verbatim.\n * - `since` — only records whose envelope `_ts` is on/after the given\n * instant survive. Operates on the unencrypted envelope timestamp,\n * so plaintext access to records is not required.\n *\n * Both filters intersect (AND). When neither is provided the bundle is\n * a whole-vault snapshot, identical to today's behaviour.\n */\nexport interface WriteNoydbBundleOptions {\n readonly compression?: 'auto' | 'brotli' | 'gzip' | 'none'\n /** Allowlist of user-collection names to include. */\n readonly collections?: readonly string[]\n /**\n * Drop records whose envelope `_ts` is strictly older than this\n * instant. 
Accepts a `Date` or any ISO-8601 string parseable by\n * `new Date()`.\n */\n readonly since?: Date | string\n /**\n * Plaintext-pipeline record predicate. Decrypts each record\n * with the vault's per-collection DEK, runs the predicate, and\n * keeps the original ciphertext for survivors (no re-encrypt —\n * preserves zero-knowledge cleanly). Records the predicate returns\n * `false` for are dropped from the bundle.\n *\n * Async predicates are supported. Mutating the record from inside\n * the predicate is undefined behaviour.\n */\n readonly where?: (\n record: unknown,\n ctx: { collection: string; id: string },\n ) => boolean | Promise<boolean>\n /**\n * Hierarchical-tier ceiling. Records whose envelope `_tier`\n * is strictly greater than this number are dropped. Operates on the\n * envelope `_tier` (no decryption needed) — vault.exportStream is\n * referenced in the issue body for symmetry, but the tier value\n * lives on the unencrypted envelope. Vault without tiers is a no-op.\n */\n readonly tierAtMost?: number\n /**\n * Single-recipient re-keying shorthand. When set, the\n * bundle's keyring is replaced with one freshly-derived entry sealed\n * with this passphrase. The recipient inherits the source keyring's\n * userId, role, and permissions. Mutually exclusive with `recipients`.\n */\n readonly exportPassphrase?: string\n /**\n * Multi-recipient re-keying. Replaces the bundle's keyring\n * map with one slot per recipient, each sealed with its own\n * passphrase. DEKs are unwrapped from the source keyring once and\n * re-wrapped per recipient — record ciphertext is unchanged.\n *\n * Mutually exclusive with `exportPassphrase`. When neither is set,\n * the bundle inherits the source keyring as-is (today's behaviour,\n * suited to personal backup-and-restore).\n */\n readonly recipients?: readonly BundleRecipient[]\n}\n\n/**\n * Result returned by `readNoydbBundle`. 
The caller is expected to\n * pass `dumpJson` into `vault.load(json, passphrase)` to\n * actually restore a vault. Splitting the layers keeps the\n * bundle module free of crypto concerns — see file-level docs.\n */\nexport interface NoydbBundleReadResult {\n readonly header: NoydbBundleHeader\n readonly dumpJson: string\n}\n\n/**\n * Detect whether the runtime's `CompressionStream` supports brotli.\n *\n * Brotli requires Node 22+ / Chrome 124+ / Firefox 122+. The\n * detection runs the `CompressionStream` constructor in a\n * try/catch — unsupported formats throw `TypeError` synchronously,\n * making this a safe one-shot check that we cache for the\n * lifetime of the process.\n */\nlet cachedBrotliSupport: boolean | null = null\nfunction supportsBrotliCompression(): boolean {\n if (cachedBrotliSupport !== null) return cachedBrotliSupport\n try {\n new CompressionStream('br' as CompressionFormat)\n cachedBrotliSupport = true\n } catch {\n cachedBrotliSupport = false\n }\n return cachedBrotliSupport\n}\n\n/** Test-only: reset the brotli detection cache between tests. */\nexport function resetBrotliSupportCache(): void {\n cachedBrotliSupport = null\n}\n\n/**\n * Pick the compression algorithm and the corresponding format byte\n * from a user option. Throws if the user explicitly requests brotli\n * on a runtime that doesn't support it — a silent fallback would\n * make the produced bundle smaller-than-expected and confuse\n * size-bound tests.\n */\nfunction selectCompression(option: WriteNoydbBundleOptions['compression']): {\n format: CompressionAlgo\n streamFormat: CompressionFormat | null\n} {\n const choice = option ?? 'auto'\n if (choice === 'none') return { format: COMPRESSION_NONE, streamFormat: null }\n if (choice === 'gzip') return { format: COMPRESSION_GZIP, streamFormat: 'gzip' }\n if (choice === 'brotli') {\n if (!supportsBrotliCompression()) {\n throw new Error(\n `writeNoydbBundle({ compression: 'brotli' }) is not supported on this ` +\n `runtime. 
Brotli requires Node 22+, Chrome 124+, or Firefox 122+. ` +\n `Use { compression: 'auto' } to fall back to gzip silently, or ` +\n `{ compression: 'gzip' } to be explicit.`,\n )\n }\n return { format: COMPRESSION_BROTLI, streamFormat: 'br' as CompressionFormat }\n }\n // 'auto' — prefer brotli, fall back to gzip\n if (supportsBrotliCompression()) {\n return { format: COMPRESSION_BROTLI, streamFormat: 'br' as CompressionFormat }\n }\n return { format: COMPRESSION_GZIP, streamFormat: 'gzip' }\n}\n\n/**\n * Pump a Uint8Array through a CompressionStream / DecompressionStream\n * and collect the output. Both APIs are universally available in\n * Node 18+ and modern browsers; the only variance is which\n * formats they support, handled by `selectCompression` above.\n *\n * Implementation: build a single-chunk ReadableStream from the\n * input, pipe through the transform, then drain the resulting\n * ReadableStream into a single concatenated Uint8Array. This is\n * O(N) memory in the input + output sizes, which is fine for the\n * dump-sized payloads (typically <50MB) targets.\n */\nasync function pumpThroughStream(\n input: Uint8Array,\n stream: CompressionStream | DecompressionStream,\n): Promise<Uint8Array> {\n const readable = new Blob([input as BlobPart]).stream().pipeThrough(stream)\n const reader = readable.getReader()\n const chunks: Uint8Array[] = []\n let total = 0\n for (;;) {\n const { value, done } = await reader.read()\n if (done) break\n if (value) {\n chunks.push(value as Uint8Array)\n total += value.length\n }\n }\n const out = new Uint8Array(total)\n let offset = 0\n for (const chunk of chunks) {\n out.set(chunk, offset)\n offset += chunk.length\n }\n return out\n}\n\n/**\n * SHA-256 hex digest of `bytes`. Used for the bundle integrity\n * hash carried in the header. Web Crypto API only — no Node\n * crypto module, no third-party hash library.\n *\n * The output format is lowercase hex (64 chars for SHA-256). 
The\n * format validator pins this — uppercase or mixed-case digests\n * are rejected, so the writer and reader agree on canonicalization.\n */\nasync function sha256Hex(bytes: Uint8Array): Promise<string> {\n // Copy into a fresh ArrayBuffer-backed Uint8Array. The\n // underlying buffer of `bytes` may be SharedArrayBuffer (e.g.\n // from a worker), which `subtle.digest` rejects via TypeScript's\n // BufferSource type. Allocating a fresh ArrayBuffer-backed view\n // sidesteps the type narrowing and is portable across all\n // runtimes — the copy cost is O(N) but bundle bodies are\n // typically <50MB, well below the threshold where the copy\n // matters.\n const copy = new Uint8Array(bytes.length)\n copy.set(bytes)\n const digest = await crypto.subtle.digest('SHA-256', copy)\n const view = new Uint8Array(digest)\n let hex = ''\n for (let i = 0; i < view.length; i++) {\n hex += view[i]!.toString(16).padStart(2, '0')\n }\n return hex\n}\n\n/**\n * Concatenate any number of Uint8Arrays into a single new buffer.\n * Used to assemble the final bundle from its prefix + header +\n * body parts.\n */\nfunction concatBytes(parts: readonly Uint8Array[]): Uint8Array {\n let total = 0\n for (const p of parts) total += p.length\n const out = new Uint8Array(total)\n let offset = 0\n for (const p of parts) {\n out.set(p, offset)\n offset += p.length\n }\n return out\n}\n\n/**\n * Replace the bundle's keyrings with freshly built recipient slots,\n * one per supplied recipient. No-op when neither `exportPassphrase`\n * nor `recipients` is set — the source keyring is inherited as-is.\n *\n * The single-passphrase shorthand creates a one-recipient list whose\n * id, role, and permissions inherit from the source vault — useful\n * for \"back up to a different passphrase\" without changing role\n * semantics. 
The multi-recipient form wraps each slot independently\n * with its declared role + permissions.\n *\n * @internal\n */\nasync function applyRecipientRewrap(\n vault: Vault,\n dumpJson: string,\n opts: WriteNoydbBundleOptions,\n): Promise<string> {\n if (opts.exportPassphrase === undefined && opts.recipients === undefined) {\n return dumpJson\n }\n\n const recipients: readonly BundleRecipient[] =\n opts.recipients ?? [\n {\n id: vault.userId,\n passphrase: opts.exportPassphrase as string,\n role: vault.role,\n },\n ]\n\n const recipientKeyrings = await vault.buildBundleRecipientKeyrings(recipients)\n\n const backup = JSON.parse(dumpJson) as { keyrings: unknown; [k: string]: unknown }\n backup.keyrings = recipientKeyrings\n return JSON.stringify(backup)\n}\n\n/**\n * Apply opt-in slice filters to a vault dump JSON string. Filters that\n * narrow the bundle without crossing the encryption boundary — both\n * operate on metadata (collection name, envelope `_ts`) and never need\n * to decrypt records. When neither filter is set, the dump is returned\n * unchanged so the no-arg path stays a pure passthrough.\n *\n * Internal-collection filtering: when a `collections` allowlist is\n * provided, the bundle still carries `_internal` (ledger entries) and\n * the keyrings — they're necessary for the receiver to verify and\n * unlock the bundle. The allowlist applies to the user-collection\n * map only.\n *\n * @internal\n */\nfunction applySliceFilters(\n dumpJson: string,\n opts: WriteNoydbBundleOptions,\n): string {\n const collectionsFilter = opts.collections\n ? new Set(opts.collections)\n : null\n const sinceMs =\n opts.since !== undefined ? new Date(opts.since).getTime() : null\n if (collectionsFilter === null && sinceMs === null) return dumpJson\n\n // Parse, prune, re-serialize. 
The dump shape is stable\n // (VaultBackup) so this is a one-off allocation; for vaults beyond\n // the documented 1K–50K target a streaming variant would be a\n // follow-up, but the simple parse path keeps the slice path\n // type-safe and trivially auditable.\n const backup = JSON.parse(dumpJson) as {\n collections?: Record<string, Record<string, { _ts?: string }>>\n [k: string]: unknown\n }\n\n if (backup.collections && typeof backup.collections === 'object') {\n const next: Record<string, Record<string, unknown>> = {}\n for (const [name, records] of Object.entries(backup.collections)) {\n if (collectionsFilter && !collectionsFilter.has(name)) continue\n if (sinceMs === null) {\n next[name] = records\n continue\n }\n const kept: Record<string, unknown> = {}\n for (const [id, env] of Object.entries(records)) {\n const envTs = env._ts ? new Date(env._ts).getTime() : NaN\n if (Number.isFinite(envTs) && envTs >= sinceMs) {\n kept[id] = env\n }\n }\n next[name] = kept\n }\n backup.collections = next as typeof backup.collections\n }\n\n return JSON.stringify(backup)\n}\n\n/**\n * Apply opt-in plaintext-tier filters\n * to a vault dump. Operates BEFORE `applySliceFilters` so the metadata\n * pass sees the trimmed record set.\n *\n * The filter never re-encrypts: surviving records carry their original\n * envelope unchanged. Failing records are dropped from the\n * `collections` map. 
Internal collections (ledger, deltas) and the\n * keyrings map are untouched.\n *\n * @internal\n */\nasync function applyPlaintextFilters(\n vault: Vault,\n dumpJson: string,\n opts: WriteNoydbBundleOptions,\n): Promise<string> {\n if (opts.where === undefined && opts.tierAtMost === undefined) {\n return dumpJson\n }\n\n type Env = { _ts?: string; _tier?: number; _iv: string; _data: string }\n const backup = JSON.parse(dumpJson) as {\n collections?: Record<string, Record<string, Env>>\n [k: string]: unknown\n }\n if (!backup.collections || typeof backup.collections !== 'object') {\n return dumpJson\n }\n\n const tierCeiling = opts.tierAtMost\n const where = opts.where\n\n const next: Record<string, Record<string, Env>> = {}\n for (const [collName, records] of Object.entries(backup.collections)) {\n const kept: Record<string, Env> = {}\n for (const [id, env] of Object.entries(records)) {\n // Tier ceiling — runs FIRST so we don't waste a decrypt on\n // records about to be dropped anyway. Envelope tier defaults to\n // 0 when absent (matches Vault's tier-0 conventions).\n if (tierCeiling !== undefined) {\n const tier = env._tier ?? 0\n if (tier > tierCeiling) continue\n }\n // Plaintext predicate — decrypt, run, keep on truthy. Errors\n // from inside the predicate propagate (callers want to see why\n // their filter blew up rather than getting a silent passthrough).\n if (where !== undefined) {\n const record = await vault._decryptEnvelopeForBundleFilter(\n env as never,\n collName,\n )\n const ok = await where(record, { collection: collName, id })\n if (!ok) continue\n }\n kept[id] = env\n }\n next[collName] = kept\n }\n backup.collections = next\n return JSON.stringify(backup)\n}\n\n/**\n * Write a `.noydb` bundle for the given vault.\n *\n * Pipeline:\n * 1. 
Resolve or create the compartment's stable bundle handle\n * via `vault.getBundleHandle()` — same handle on\n * every export from the same vault instance, so cloud\n * adapters can use it as a primary key.\n * 2. `vault.dump()` → JSON string with encrypted records\n * inside.\n * 3. UTF-8 encode the dump string.\n * 4. Compress (brotli if available, gzip fallback by default).\n * 5. Compute SHA-256 of the compressed body for integrity.\n * 6. Build the minimum-disclosure header from format version,\n * handle, body length, body sha.\n * 7. Serialize: magic (4) + flags (1) + algo (1) + headerLen (4)\n * + header JSON (N) + compressed body (M).\n *\n * The output is a single `Uint8Array`. Consumers writing to disk\n * pass it to `fs.writeFile`; consumers uploading to cloud storage\n * pass it as the request body. The `@noy-db/file` adapter wraps\n * this with a `saveBundle(path, vault)` helper.\n */\nexport async function writeNoydbBundle(\n vault: Vault,\n opts: WriteNoydbBundleOptions = {},\n): Promise<Uint8Array> {\n if (opts.exportPassphrase !== undefined && opts.recipients !== undefined) {\n throw new Error(\n 'writeNoydbBundle: pass either exportPassphrase or recipients, not both',\n )\n }\n\n const handle = await vault.getBundleHandle()\n const dumpJson = await vault.dump()\n\n // Re-keying: when caller supplied recipients (or the single-recipient\n // shorthand), substitute the bundle's `keyrings` map with freshly\n // built recipient slots before slice filters run.\n const rekeyed = await applyRecipientRewrap(vault, dumpJson, opts)\n // Plaintext-tier filters run BEFORE\n // the metadata-only slice — that way the metadata pass sees the\n // already-trimmed record set and the two filter chains compose\n // cleanly.\n const plainFiltered = await applyPlaintextFilters(vault, rekeyed, opts)\n const filtered = applySliceFilters(plainFiltered, opts)\n const dumpBytes = new TextEncoder().encode(filtered)\n\n const { format, streamFormat } = 
selectCompression(opts.compression)\n const body = streamFormat === null\n ? dumpBytes\n : await pumpThroughStream(dumpBytes, new CompressionStream(streamFormat))\n\n const bodySha256 = await sha256Hex(body)\n const header: NoydbBundleHeader = {\n formatVersion: NOYDB_BUNDLE_FORMAT_VERSION,\n handle,\n bodyBytes: body.length,\n bodySha256,\n }\n const headerBytes = encodeBundleHeader(header)\n\n // Assemble the fixed prefix in a 10-byte buffer.\n const prefix = new Uint8Array(NOYDB_BUNDLE_PREFIX_BYTES)\n prefix.set(NOYDB_BUNDLE_MAGIC, 0)\n prefix[4] =\n (streamFormat === null ? 0 : FLAG_COMPRESSED) | FLAG_HAS_INTEGRITY_HASH\n prefix[5] = format\n writeUint32BE(prefix, 6, headerBytes.length)\n\n return concatBytes([prefix, headerBytes, body])\n}\n\n/**\n * Internal helper shared by both readers — parses just the prefix\n * + header region of a bundle without touching the body. Returns\n * the parsed header plus the offset where the body starts and the\n * compression algorithm needed to decompress it.\n *\n * Throws on any format violation: missing/invalid magic, truncated\n * prefix, header length larger than the file, or unknown\n * compression algorithm.\n */\nfunction parsePrefixAndHeader(bytes: Uint8Array): {\n header: NoydbBundleHeader\n bodyOffset: number\n algo: CompressionAlgo\n flags: number\n} {\n if (!hasNoydbBundleMagic(bytes)) {\n throw new Error(\n `Not a .noydb bundle: missing 'NDB1' magic prefix. 
The first 4 bytes ` +\n `are ${[...bytes.slice(0, 4)].map((b) => b.toString(16).padStart(2, '0')).join(' ')}.`,\n )\n }\n if (bytes.length < NOYDB_BUNDLE_PREFIX_BYTES) {\n throw new Error(\n `Truncated .noydb bundle: file is only ${bytes.length} bytes, ` +\n `which is less than the ${NOYDB_BUNDLE_PREFIX_BYTES}-byte fixed prefix.`,\n )\n }\n const flags = bytes[4]!\n const algo = bytes[5]!\n if (algo !== COMPRESSION_NONE && algo !== COMPRESSION_GZIP && algo !== COMPRESSION_BROTLI) {\n throw new Error(\n `.noydb bundle declares unknown compression algorithm ${algo}. ` +\n `Known values: 0 (none), 1 (gzip), 2 (brotli).`,\n )\n }\n const headerLength = readUint32BE(bytes, 6)\n const bodyOffset = NOYDB_BUNDLE_PREFIX_BYTES + headerLength\n if (bodyOffset > bytes.length) {\n throw new Error(\n `Truncated .noydb bundle: declared header length ${headerLength} ` +\n `would extend past end of file (${bytes.length} bytes).`,\n )\n }\n const headerBytes = bytes.slice(NOYDB_BUNDLE_PREFIX_BYTES, bodyOffset)\n const header = decodeBundleHeader(headerBytes)\n return { header, bodyOffset, algo: algo as CompressionAlgo, flags }\n}\n\n/**\n * Read just the bundle header — no body decompression, no\n * integrity verification. Fast (O(prefix + header bytes)) and\n * intended for cloud-listing UIs that want to show the handle and\n * size before downloading the full body.\n *\n * Returns the same `NoydbBundleHeader` shape as the writer, with\n * minimum-disclosure validation already applied.\n */\nexport function readNoydbBundleHeader(bytes: Uint8Array): NoydbBundleHeader {\n return parsePrefixAndHeader(bytes).header\n}\n\n/**\n * Read a full `.noydb` bundle: validate magic + header, verify\n * integrity hash over the body bytes, decompress, and return the\n * original `vault.dump()` JSON string ready to pass to\n * `vault.load()`.\n *\n * Throws `BundleIntegrityError` if the body's actual SHA-256 does\n * not match the value declared in the header. 
Distinct from a\n * format error so consumers can pattern-match in catch blocks\n * (corrupted-in-transit vs malformed-by-producer).\n *\n * Note: this function does NOT take a passphrase. The dump JSON\n * inside the body still contains encrypted records — restoring\n * the vault requires `vault.load(dumpJson, passphrase)`\n * after this call. Splitting the layers keeps the bundle module\n * free of crypto concerns and lets the same code feed format\n * inspectors that never decrypt anything.\n */\nexport async function readNoydbBundle(\n bytes: Uint8Array,\n): Promise<NoydbBundleReadResult> {\n const { header, bodyOffset, algo } = parsePrefixAndHeader(bytes)\n const body = bytes.slice(bodyOffset)\n\n // Length check before hash check — a length mismatch is the\n // cheapest tamper signal and produces a more actionable error.\n if (body.length !== header.bodyBytes) {\n throw new BundleIntegrityError(\n `body length ${body.length} does not match header.bodyBytes ` +\n `${header.bodyBytes}. The bundle was truncated or padded ` +\n `between write and read.`,\n )\n }\n\n const actualSha = await sha256Hex(body)\n if (actualSha !== header.bodySha256) {\n throw new BundleIntegrityError(\n `body sha256 ${actualSha} does not match header.bodySha256 ` +\n `${header.bodySha256}. The bundle bytes were modified between ` +\n `write and read — refuse to decompress.`,\n )\n }\n\n let dumpBytes: Uint8Array\n if (algo === COMPRESSION_NONE) {\n dumpBytes = body\n } else {\n const streamFormat: CompressionFormat =\n algo === COMPRESSION_BROTLI ? ('br' as CompressionFormat) : 'gzip'\n try {\n dumpBytes = await pumpThroughStream(body, new DecompressionStream(streamFormat))\n } catch (err) {\n throw new BundleIntegrityError(\n `decompression failed: ${(err as Error).message}. 
The bundle ` +\n `passed the integrity hash but the body is not valid ` +\n `${streamFormat} data — likely a producer bug.`,\n )\n }\n }\n\n const dumpJson = new TextDecoder('utf-8', { fatal: true }).decode(dumpBytes)\n return { header, dumpJson }\n}\n"],"mappings":";;;;;AAiDO,IAAM,qBAAqB,IAAI,WAAW,CAAC,IAAM,IAAM,IAAM,EAAI,CAAC;AAGlE,IAAM,4BAA4B;AAGlC,IAAM,8BAA8B;AASpC,IAAM,kBAAkB;AACxB,IAAM,0BAA0B;AAchC,IAAM,mBAAmB;AACzB,IAAM,mBAAmB;AACzB,IAAM,qBAAqB;AA4ClC,IAAM,sBAA2C,oBAAI,IAAI;AAAA,EACvD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAiBM,SAAS,qBACd,QACqC;AACrC,MAAI,WAAW,QAAQ,OAAO,WAAW,UAAU;AACjD,UAAM,IAAI;AAAA,MACR,mDAAmD,WAAW,OAAO,SAAS,OAAO,MAAM;AAAA,IAC7F;AAAA,EACF;AAIA,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AACrC,QAAI,CAAC,oBAAoB,IAAI,GAAG,GAAG;AACjC,YAAM,IAAI;AAAA,QACR,gDAAgD,GAAG,kDAE9C,CAAC,GAAG,mBAAmB,EAAE,KAAK,IAAI,CAAC;AAAA,MAC1C;AAAA,IACF;AAAA,EACF;AACA,QAAM,IAAI;AACV,MAAI,OAAO,EAAE,eAAe,MAAM,YAAY,EAAE,eAAe,MAAM,6BAA6B;AAChG,UAAM,IAAI;AAAA,MACR,8CAA8C,2BAA2B,SAChE,OAAO,EAAE,eAAe,CAAC,CAAC;AAAA,IAErC;AAAA,EACF;AACA,MAAI,OAAO,EAAE,QAAQ,MAAM,YAAY,CAAC,2BAA2B,KAAK,EAAE,QAAQ,CAAC,GAAG;AACpF,UAAM,IAAI;AAAA,MACR,iFACS,OAAO,EAAE,QAAQ,MAAM,WAAW,IAAI,EAAE,QAAQ,CAAC,MAAM,OAAO,EAAE,QAAQ,CAAC,CAAC;AAAA,IACrF;AAAA,EACF;AACA,MAAI,OAAO,EAAE,WAAW,MAAM,YAAY,CAAC,OAAO,UAAU,EAAE,WAAW,CAAC,KAAK,EAAE,WAAW,IAAI,GAAG;AACjG,UAAM,IAAI;AAAA,MACR,sEACS,OAAO,EAAE,WAAW,CAAC,CAAC;AAAA,IACjC;AAAA,EACF;AACA,MAAI,OAAO,EAAE,YAAY,MAAM,YAAY,CAAC,iBAAiB,KAAK,EAAE,YAAY,CAAC,GAAG;AAClF,UAAM,IAAI;AAAA,MACR,oFACS,OAAO,EAAE,YAAY,MAAM,WAAW,IAAI,EAAE,YAAY,CAAC,MAAM,OAAO,EAAE,YAAY,CAAC,CAAC;AAAA,IACjG;AAAA,EACF;AACF;AAOO,SAAS,mBAAmB,QAAuC;AACxE,uBAAqB,MAAM;AAK3B,QAAM,OAAO,KAAK,UAAU;AAAA,IAC1B,eAAe,OAAO;AAAA,IACtB,QAAQ,OAAO;AAAA,IACf,WAAW,OAAO;AAAA,IAClB,YAAY,OAAO;AAAA,EACrB,CAAC;AACD,SAAO,IAAI,YAAY,EAAE,OAAO,IAAI;AACtC;AAMO,SAAS,mBAAmB,OAAsC;AACvE,QAAM,OAAO,IAAI,YAAY,SAAS,EAAE,OAAO,KAAK,CAAC,EAAE,OAAO,KAAK;AACnE,MAAI;AACJ,MAAI;AACF,aAAS,KAAK,MAAM,IAAI;AAAA,EAC1B,SAAS,KAAK;AACZ,UAAM,IAAI;AAAA,MACR,2CAA4C,IAAc,OA
AO;AAAA,IACnE;AAAA,EACF;AACA,uBAAqB,MAAM;AAC3B,SAAO;AACT;AAQO,SAAS,aAAa,OAAmB,QAAwB;AACtE,UACG,MAAM,MAAM,KAAM,OAAO,MACzB,MAAM,SAAS,CAAC,KAAM,OACtB,MAAM,SAAS,CAAC,KAAM,KACvB,MAAM,SAAS,CAAC;AAEpB;AAMO,SAAS,cAAc,OAAmB,QAAgB,OAAqB;AACpF,QAAM,MAAM,IAAK,UAAU,KAAM;AACjC,QAAM,SAAS,CAAC,IAAK,UAAU,KAAM;AACrC,QAAM,SAAS,CAAC,IAAK,UAAU,IAAK;AACpC,QAAM,SAAS,CAAC,IAAI,QAAQ;AAC9B;AAOO,SAAS,oBAAoB,OAA4B;AAC9D,MAAI,MAAM,SAAS,mBAAmB,OAAQ,QAAO;AACrD,WAAS,IAAI,GAAG,IAAI,mBAAmB,QAAQ,KAAK;AAClD,QAAI,MAAM,CAAC,MAAM,mBAAmB,CAAC,EAAG,QAAO;AAAA,EACjD;AACA,SAAO;AACT;;;AC1HA,IAAI,sBAAsC;AAC1C,SAAS,4BAAqC;AAC5C,MAAI,wBAAwB,KAAM,QAAO;AACzC,MAAI;AACF,QAAI,kBAAkB,IAAyB;AAC/C,0BAAsB;AAAA,EACxB,QAAQ;AACN,0BAAsB;AAAA,EACxB;AACA,SAAO;AACT;AAGO,SAAS,0BAAgC;AAC9C,wBAAsB;AACxB;AASA,SAAS,kBAAkB,QAGzB;AACA,QAAM,SAAS,UAAU;AACzB,MAAI,WAAW,OAAQ,QAAO,EAAE,QAAQ,kBAAkB,cAAc,KAAK;AAC7E,MAAI,WAAW,OAAQ,QAAO,EAAE,QAAQ,kBAAkB,cAAc,OAAO;AAC/E,MAAI,WAAW,UAAU;AACvB,QAAI,CAAC,0BAA0B,GAAG;AAChC,YAAM,IAAI;AAAA,QACR;AAAA,MAIF;AAAA,IACF;AACA,WAAO,EAAE,QAAQ,oBAAoB,cAAc,KAA0B;AAAA,EAC/E;AAEA,MAAI,0BAA0B,GAAG;AAC/B,WAAO,EAAE,QAAQ,oBAAoB,cAAc,KAA0B;AAAA,EAC/E;AACA,SAAO,EAAE,QAAQ,kBAAkB,cAAc,OAAO;AAC1D;AAcA,eAAe,kBACb,OACA,QACqB;AACrB,QAAM,WAAW,IAAI,KAAK,CAAC,KAAiB,CAAC,EAAE,OAAO,EAAE,YAAY,MAAM;AAC1E,QAAM,SAAS,SAAS,UAAU;AAClC,QAAM,SAAuB,CAAC;AAC9B,MAAI,QAAQ;AACZ,aAAS;AACP,UAAM,EAAE,OAAO,KAAK,IAAI,MAAM,OAAO,KAAK;AAC1C,QAAI,KAAM;AACV,QAAI,OAAO;AACT,aAAO,KAAK,KAAmB;AAC/B,eAAS,MAAM;AAAA,IACjB;AAAA,EACF;AACA,QAAM,MAAM,IAAI,WAAW,KAAK;AAChC,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC1B,QAAI,IAAI,OAAO,MAAM;AACrB,cAAU,MAAM;AAAA,EAClB;AACA,SAAO;AACT;AAWA,eAAe,UAAU,OAAoC;AAS3D,QAAM,OAAO,IAAI,WAAW,MAAM,MAAM;AACxC,OAAK,IAAI,KAAK;AACd,QAAM,SAAS,MAAM,OAAO,OAAO,OAAO,WAAW,IAAI;AACzD,QAAM,OAAO,IAAI,WAAW,MAAM;AAClC,MAAI,MAAM;AACV,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,WAAO,KAAK,CAAC,EAAG,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAAA,EAC9C;AACA,SAAO;AACT;AAOA,SAAS,YAAY,OAA0C;AAC7D,MAAI,QAAQ;AACZ,aAAW,KAAK,MAAO,UAAS,EAAE;AAClC,QAAM,MAAM,IAAI,WAAW,KAAK;AAChC,MAAI,SAAS;AA
Cb,aAAW,KAAK,OAAO;AACrB,QAAI,IAAI,GAAG,MAAM;AACjB,cAAU,EAAE;AAAA,EACd;AACA,SAAO;AACT;AAeA,eAAe,qBACb,OACA,UACA,MACiB;AACjB,MAAI,KAAK,qBAAqB,UAAa,KAAK,eAAe,QAAW;AACxE,WAAO;AAAA,EACT;AAEA,QAAM,aACJ,KAAK,cAAc;AAAA,IACjB;AAAA,MACE,IAAI,MAAM;AAAA,MACV,YAAY,KAAK;AAAA,MACjB,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AAEF,QAAM,oBAAoB,MAAM,MAAM,6BAA6B,UAAU;AAE7E,QAAM,SAAS,KAAK,MAAM,QAAQ;AAClC,SAAO,WAAW;AAClB,SAAO,KAAK,UAAU,MAAM;AAC9B;AAiBA,SAAS,kBACP,UACA,MACQ;AACR,QAAM,oBAAoB,KAAK,cAC3B,IAAI,IAAI,KAAK,WAAW,IACxB;AACJ,QAAM,UACJ,KAAK,UAAU,SAAY,IAAI,KAAK,KAAK,KAAK,EAAE,QAAQ,IAAI;AAC9D,MAAI,sBAAsB,QAAQ,YAAY,KAAM,QAAO;AAO3D,QAAM,SAAS,KAAK,MAAM,QAAQ;AAKlC,MAAI,OAAO,eAAe,OAAO,OAAO,gBAAgB,UAAU;AAChE,UAAM,OAAgD,CAAC;AACvD,eAAW,CAAC,MAAM,OAAO,KAAK,OAAO,QAAQ,OAAO,WAAW,GAAG;AAChE,UAAI,qBAAqB,CAAC,kBAAkB,IAAI,IAAI,EAAG;AACvD,UAAI,YAAY,MAAM;AACpB,aAAK,IAAI,IAAI;AACb;AAAA,MACF;AACA,YAAM,OAAgC,CAAC;AACvC,iBAAW,CAAC,IAAI,GAAG,KAAK,OAAO,QAAQ,OAAO,GAAG;AAC/C,cAAM,QAAQ,IAAI,MAAM,IAAI,KAAK,IAAI,GAAG,EAAE,QAAQ,IAAI;AACtD,YAAI,OAAO,SAAS,KAAK,KAAK,SAAS,SAAS;AAC9C,eAAK,EAAE,IAAI;AAAA,QACb;AAAA,MACF;AACA,WAAK,IAAI,IAAI;AAAA,IACf;AACA,WAAO,cAAc;AAAA,EACvB;AAEA,SAAO,KAAK,UAAU,MAAM;AAC9B;AAcA,eAAe,sBACb,OACA,UACA,MACiB;AACjB,MAAI,KAAK,UAAU,UAAa,KAAK,eAAe,QAAW;AAC7D,WAAO;AAAA,EACT;AAGA,QAAM,SAAS,KAAK,MAAM,QAAQ;AAIlC,MAAI,CAAC,OAAO,eAAe,OAAO,OAAO,gBAAgB,UAAU;AACjE,WAAO;AAAA,EACT;AAEA,QAAM,cAAc,KAAK;AACzB,QAAM,QAAQ,KAAK;AAEnB,QAAM,OAA4C,CAAC;AACnD,aAAW,CAAC,UAAU,OAAO,KAAK,OAAO,QAAQ,OAAO,WAAW,GAAG;AACpE,UAAM,OAA4B,CAAC;AACnC,eAAW,CAAC,IAAI,GAAG,KAAK,OAAO,QAAQ,OAAO,GAAG;AAI/C,UAAI,gBAAgB,QAAW;AAC7B,cAAM,OAAO,IAAI,SAAS;AAC1B,YAAI,OAAO,YAAa;AAAA,MAC1B;AAIA,UAAI,UAAU,QAAW;AACvB,cAAM,SAAS,MAAM,MAAM;AAAA,UACzB;AAAA,UACA;AAAA,QACF;AACA,cAAM,KAAK,MAAM,MAAM,QAAQ,EAAE,YAAY,UAAU,GAAG,CAAC;AAC3D,YAAI,CAAC,GAAI;AAAA,MACX;AACA,WAAK,EAAE,IAAI;AAAA,IACb;AACA,SAAK,QAAQ,IAAI;AAAA,EACnB;AACA,SAAO,cAAc;AACrB,SAAO,KAAK,UAAU,MAAM;AAC9B;AAyBA,eAAsB,iBACpB,OACA,OAAgC,CAAC,GACZ;AACrB,MAAI,KAAK,qBAAqB,UAAa,KAAK,eAAe,QAAW;AACxE,UAAM,IAAI;AAAA,MA
CR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAS,MAAM,MAAM,gBAAgB;AAC3C,QAAM,WAAW,MAAM,MAAM,KAAK;AAKlC,QAAM,UAAU,MAAM,qBAAqB,OAAO,UAAU,IAAI;AAKhE,QAAM,gBAAgB,MAAM,sBAAsB,OAAO,SAAS,IAAI;AACtE,QAAM,WAAW,kBAAkB,eAAe,IAAI;AACtD,QAAM,YAAY,IAAI,YAAY,EAAE,OAAO,QAAQ;AAEnD,QAAM,EAAE,QAAQ,aAAa,IAAI,kBAAkB,KAAK,WAAW;AACnE,QAAM,OAAO,iBAAiB,OAC1B,YACA,MAAM,kBAAkB,WAAW,IAAI,kBAAkB,YAAY,CAAC;AAE1E,QAAM,aAAa,MAAM,UAAU,IAAI;AACvC,QAAM,SAA4B;AAAA,IAChC,eAAe;AAAA,IACf;AAAA,IACA,WAAW,KAAK;AAAA,IAChB;AAAA,EACF;AACA,QAAM,cAAc,mBAAmB,MAAM;AAG7C,QAAM,SAAS,IAAI,WAAW,yBAAyB;AACvD,SAAO,IAAI,oBAAoB,CAAC;AAChC,SAAO,CAAC,KACL,iBAAiB,OAAO,IAAI,mBAAmB;AAClD,SAAO,CAAC,IAAI;AACZ,gBAAc,QAAQ,GAAG,YAAY,MAAM;AAE3C,SAAO,YAAY,CAAC,QAAQ,aAAa,IAAI,CAAC;AAChD;AAYA,SAAS,qBAAqB,OAK5B;AACA,MAAI,CAAC,oBAAoB,KAAK,GAAG;AAC/B,UAAM,IAAI;AAAA,MACR,2EACS,CAAC,GAAG,MAAM,MAAM,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAAE,KAAK,GAAG,CAAC;AAAA,IACvF;AAAA,EACF;AACA,MAAI,MAAM,SAAS,2BAA2B;AAC5C,UAAM,IAAI;AAAA,MACR,yCAAyC,MAAM,MAAM,kCACzB,yBAAyB;AAAA,IACvD;AAAA,EACF;AACA,QAAM,QAAQ,MAAM,CAAC;AACrB,QAAM,OAAO,MAAM,CAAC;AACpB,MAAI,SAAS,oBAAoB,SAAS,oBAAoB,SAAS,oBAAoB;AACzF,UAAM,IAAI;AAAA,MACR,wDAAwD,IAAI;AAAA,IAE9D;AAAA,EACF;AACA,QAAM,eAAe,aAAa,OAAO,CAAC;AAC1C,QAAM,aAAa,4BAA4B;AAC/C,MAAI,aAAa,MAAM,QAAQ;AAC7B,UAAM,IAAI;AAAA,MACR,mDAAmD,YAAY,mCAC3B,MAAM,MAAM;AAAA,IAClD;AAAA,EACF;AACA,QAAM,cAAc,MAAM,MAAM,2BAA2B,UAAU;AACrE,QAAM,SAAS,mBAAmB,WAAW;AAC7C,SAAO,EAAE,QAAQ,YAAY,MAA+B,MAAM;AACpE;AAWO,SAAS,sBAAsB,OAAsC;AAC1E,SAAO,qBAAqB,KAAK,EAAE;AACrC;AAoBA,eAAsB,gBACpB,OACgC;AAChC,QAAM,EAAE,QAAQ,YAAY,KAAK,IAAI,qBAAqB,KAAK;AAC/D,QAAM,OAAO,MAAM,MAAM,UAAU;AAInC,MAAI,KAAK,WAAW,OAAO,WAAW;AACpC,UAAM,IAAI;AAAA,MACR,eAAe,KAAK,MAAM,oCACrB,OAAO,SAAS;AAAA,IAEvB;AAAA,EACF;AAEA,QAAM,YAAY,MAAM,UAAU,IAAI;AACtC,MAAI,cAAc,OAAO,YAAY;AACnC,UAAM,IAAI;AAAA,MACR,eAAe,SAAS,qCACnB,OAAO,UAAU;AAAA,IAExB;AAAA,EACF;AAEA,MAAI;AACJ,MAAI,SAAS,kBAAkB;AAC7B,gBAAY;AAAA,EACd,OAAO;AACL,UAAM,eACJ,SAAS,qBAAsB,OAA6B;AAC9D,QAAI;AACF,kBAAY,MAAM,kBAAkB,MAAM,IAAI
,oBAAoB,YAAY,CAAC;AAAA,IACjF,SAAS,KAAK;AACZ,YAAM,IAAI;AAAA,QACR,yBAA0B,IAAc,OAAO,oEAE1C,YAAY;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,IAAI,YAAY,SAAS,EAAE,OAAO,KAAK,CAAC,EAAE,OAAO,SAAS;AAC3E,SAAO,EAAE,QAAQ,SAAS;AAC5B;","names":[]}