lakesync 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +74 -0
- package/dist/adapter.d.ts +369 -0
- package/dist/adapter.js +39 -0
- package/dist/adapter.js.map +1 -0
- package/dist/analyst.d.ts +268 -0
- package/dist/analyst.js +495 -0
- package/dist/analyst.js.map +1 -0
- package/dist/auth-CAVutXzx.d.ts +30 -0
- package/dist/base-poller-Qo_SmCZs.d.ts +82 -0
- package/dist/catalogue.d.ts +65 -0
- package/dist/catalogue.js +17 -0
- package/dist/catalogue.js.map +1 -0
- package/dist/chunk-4ARO6KTJ.js +257 -0
- package/dist/chunk-4ARO6KTJ.js.map +1 -0
- package/dist/chunk-5YOFCJQ7.js +1115 -0
- package/dist/chunk-5YOFCJQ7.js.map +1 -0
- package/dist/chunk-7D4SUZUM.js +38 -0
- package/dist/chunk-7D4SUZUM.js.map +1 -0
- package/dist/chunk-BNJOGBYK.js +335 -0
- package/dist/chunk-BNJOGBYK.js.map +1 -0
- package/dist/chunk-ICNT7I3K.js +1180 -0
- package/dist/chunk-ICNT7I3K.js.map +1 -0
- package/dist/chunk-P5DRFKIT.js +413 -0
- package/dist/chunk-P5DRFKIT.js.map +1 -0
- package/dist/chunk-X3RO5SYJ.js +880 -0
- package/dist/chunk-X3RO5SYJ.js.map +1 -0
- package/dist/client.d.ts +428 -0
- package/dist/client.js +2048 -0
- package/dist/client.js.map +1 -0
- package/dist/compactor.d.ts +342 -0
- package/dist/compactor.js +793 -0
- package/dist/compactor.js.map +1 -0
- package/dist/coordinator-CxckTzYW.d.ts +396 -0
- package/dist/db-types-BR6Kt4uf.d.ts +29 -0
- package/dist/gateway-D5SaaMvT.d.ts +337 -0
- package/dist/gateway-server.d.ts +306 -0
- package/dist/gateway-server.js +4663 -0
- package/dist/gateway-server.js.map +1 -0
- package/dist/gateway.d.ts +196 -0
- package/dist/gateway.js +79 -0
- package/dist/gateway.js.map +1 -0
- package/dist/hlc-DiD8QNG3.d.ts +70 -0
- package/dist/index.d.ts +245 -0
- package/dist/index.js +102 -0
- package/dist/index.js.map +1 -0
- package/dist/json-dYtqiL0F.d.ts +18 -0
- package/dist/nessie-client-DrNikVXy.d.ts +160 -0
- package/dist/parquet.d.ts +78 -0
- package/dist/parquet.js +15 -0
- package/dist/parquet.js.map +1 -0
- package/dist/proto.d.ts +434 -0
- package/dist/proto.js +67 -0
- package/dist/proto.js.map +1 -0
- package/dist/react.d.ts +147 -0
- package/dist/react.js +224 -0
- package/dist/react.js.map +1 -0
- package/dist/resolver-C3Wphi6O.d.ts +10 -0
- package/dist/result-CojzlFE2.d.ts +64 -0
- package/dist/src-QU2YLPZY.js +383 -0
- package/dist/src-QU2YLPZY.js.map +1 -0
- package/dist/src-WYBF5LOI.js +102 -0
- package/dist/src-WYBF5LOI.js.map +1 -0
- package/dist/src-WZNPHANQ.js +426 -0
- package/dist/src-WZNPHANQ.js.map +1 -0
- package/dist/types-Bs-QyOe-.d.ts +143 -0
- package/dist/types-DAQL_vU_.d.ts +118 -0
- package/dist/types-DSC_EiwR.d.ts +45 -0
- package/dist/types-V_jVu2sA.d.ts +73 -0
- package/package.json +119 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../gateway/src/action-dispatcher.ts","../../gateway/src/buffer.ts","../../gateway/src/config-store.ts","../../gateway/src/constants.ts","../../gateway/src/flush.ts","../../gateway/src/gateway.ts","../../gateway/src/validation.ts","../../gateway/src/request-handler.ts","../../gateway/src/schema-manager.ts"],"sourcesContent":["import type {\n\tAction,\n\tActionDescriptor,\n\tActionDiscovery,\n\tActionExecutionError,\n\tActionHandler,\n\tActionPush,\n\tActionResponse,\n\tActionResult,\n\tActionValidationError,\n\tAuthContext,\n\tHLCTimestamp,\n\tResult,\n} from \"@lakesync/core\";\nimport { Err, Ok, validateAction } from \"@lakesync/core\";\n\n/**\n * Dispatches imperative actions to registered handlers.\n *\n * Manages idempotency via actionId deduplication and idempotencyKey mapping.\n * Completely decoupled from the HLC clock — takes a callback for timestamp generation.\n */\nexport class ActionDispatcher {\n\tprivate actionHandlers: Map<string, ActionHandler> = new Map();\n\tprivate executedActions: Set<string> = new Set();\n\tprivate idempotencyMap: Map<\n\t\tstring,\n\t\tActionResult | { actionId: string; code: string; message: string; retryable: boolean }\n\t> = new Map();\n\n\tconstructor(handlers?: Record<string, ActionHandler>) {\n\t\tif (handlers) {\n\t\t\tfor (const [name, handler] of Object.entries(handlers)) {\n\t\t\t\tthis.actionHandlers.set(name, handler);\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Dispatch an action push to registered handlers.\n\t *\n\t * Iterates over actions, dispatches each to the registered ActionHandler\n\t * by connector name. 
Supports idempotency via actionId deduplication and\n\t * idempotencyKey mapping.\n\t *\n\t * @param msg - The action push containing one or more actions.\n\t * @param hlcNow - Callback to get the current server HLC timestamp.\n\t * @param context - Optional auth context for permission checks.\n\t * @returns A `Result` containing results for each action.\n\t */\n\tasync dispatch(\n\t\tmsg: ActionPush,\n\t\thlcNow: () => HLCTimestamp,\n\t\tcontext?: AuthContext,\n\t): Promise<Result<ActionResponse, ActionValidationError>> {\n\t\tconst results: Array<\n\t\t\tActionResult | { actionId: string; code: string; message: string; retryable: boolean }\n\t\t> = [];\n\n\t\tfor (const action of msg.actions) {\n\t\t\t// Structural validation\n\t\t\tconst validation = validateAction(action);\n\t\t\tif (!validation.ok) {\n\t\t\t\treturn Err(validation.error);\n\t\t\t}\n\n\t\t\t// Idempotency — check actionId\n\t\t\tif (this.executedActions.has(action.actionId)) {\n\t\t\t\tconst cached = this.idempotencyMap.get(action.actionId);\n\t\t\t\tif (cached) {\n\t\t\t\t\tresults.push(cached);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\t// Already executed but no cached result — skip\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Idempotency — check idempotencyKey\n\t\t\tif (action.idempotencyKey) {\n\t\t\t\tconst cached = this.idempotencyMap.get(`idem:${action.idempotencyKey}`);\n\t\t\t\tif (cached) {\n\t\t\t\t\tresults.push(cached);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Resolve handler\n\t\t\tconst handler = this.actionHandlers.get(action.connector);\n\t\t\tif (!handler) {\n\t\t\t\tconst errorResult = {\n\t\t\t\t\tactionId: action.actionId,\n\t\t\t\t\tcode: \"ACTION_NOT_SUPPORTED\",\n\t\t\t\t\tmessage: `No action handler registered for connector \"${action.connector}\"`,\n\t\t\t\t\tretryable: false,\n\t\t\t\t};\n\t\t\t\tresults.push(errorResult);\n\t\t\t\tthis.cacheActionResult(action, errorResult);\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Check action type is supported\n\t\t\tconst 
supported = handler.supportedActions.some((d) => d.actionType === action.actionType);\n\t\t\tif (!supported) {\n\t\t\t\tconst errorResult = {\n\t\t\t\t\tactionId: action.actionId,\n\t\t\t\t\tcode: \"ACTION_NOT_SUPPORTED\",\n\t\t\t\t\tmessage: `Action type \"${action.actionType}\" not supported by connector \"${action.connector}\"`,\n\t\t\t\t\tretryable: false,\n\t\t\t\t};\n\t\t\t\tresults.push(errorResult);\n\t\t\t\tthis.cacheActionResult(action, errorResult);\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Execute\n\t\t\tconst execResult = await handler.executeAction(action, context);\n\t\t\tif (execResult.ok) {\n\t\t\t\tresults.push(execResult.value);\n\t\t\t\tthis.cacheActionResult(action, execResult.value);\n\t\t\t} else {\n\t\t\t\tconst err = execResult.error;\n\t\t\t\tconst errorResult = {\n\t\t\t\t\tactionId: action.actionId,\n\t\t\t\t\tcode: err.code,\n\t\t\t\t\tmessage: err.message,\n\t\t\t\t\tretryable: \"retryable\" in err ? (err as ActionExecutionError).retryable : false,\n\t\t\t\t};\n\t\t\t\tresults.push(errorResult);\n\t\t\t\t// Only cache non-retryable errors — retryable errors should be retried\n\t\t\t\tif (!errorResult.retryable) {\n\t\t\t\t\tthis.cacheActionResult(action, errorResult);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tconst serverHlc = hlcNow();\n\t\treturn Ok({ results, serverHlc });\n\t}\n\n\t/**\n\t * Register a named action handler.\n\t *\n\t * @param name - Connector name (matches `Action.connector`).\n\t * @param handler - The action handler to register.\n\t */\n\tregisterHandler(name: string, handler: ActionHandler): void {\n\t\tthis.actionHandlers.set(name, handler);\n\t}\n\n\t/**\n\t * Unregister a named action handler.\n\t *\n\t * @param name - The connector name to remove.\n\t */\n\tunregisterHandler(name: string): void {\n\t\tthis.actionHandlers.delete(name);\n\t}\n\n\t/**\n\t * List all registered action handler names.\n\t *\n\t * @returns Array of registered connector names.\n\t */\n\tlistHandlers(): string[] {\n\t\treturn 
[...this.actionHandlers.keys()];\n\t}\n\n\t/**\n\t * Describe all registered action handlers and their supported actions.\n\t *\n\t * Returns a map of connector name to its {@link ActionDescriptor} array,\n\t * enabling frontend discovery of available actions.\n\t *\n\t * @returns An {@link ActionDiscovery} object listing connectors and their actions.\n\t */\n\tdescribe(): ActionDiscovery {\n\t\tconst connectors: Record<string, ActionDescriptor[]> = {};\n\t\tfor (const [name, handler] of this.actionHandlers) {\n\t\t\tconnectors[name] = handler.supportedActions;\n\t\t}\n\t\treturn { connectors };\n\t}\n\n\t/** Cache an action result for idempotency deduplication. */\n\tprivate cacheActionResult(\n\t\taction: Action,\n\t\tresult: ActionResult | { actionId: string; code: string; message: string; retryable: boolean },\n\t): void {\n\t\tthis.executedActions.add(action.actionId);\n\t\tthis.idempotencyMap.set(action.actionId, result);\n\t\tif (action.idempotencyKey) {\n\t\t\tthis.idempotencyMap.set(`idem:${action.idempotencyKey}`, result);\n\t\t}\n\t}\n}\n","import type { HLCTimestamp, RowDelta, RowKey } from \"@lakesync/core\";\nimport { HLC, rowKey } from \"@lakesync/core\";\n\n/** Estimated base overhead per delta entry (metadata fields: deltaId, table, rowId, clientId, op + HLC bigint). 
*/\nconst BASE_DELTA_OVERHEAD = 8 + 8 + 8 + 8 + 1;\n\n/**\n * Estimate the byte size of a single column value.\n * Uses type-aware heuristics as a proxy for in-memory size.\n */\nfunction estimateValueBytes(value: unknown): number {\n\tif (value === null || value === undefined) return 4;\n\tswitch (typeof value) {\n\t\tcase \"boolean\":\n\t\t\treturn 4;\n\t\tcase \"number\":\n\t\t\treturn 8;\n\t\tcase \"bigint\":\n\t\t\treturn 8;\n\t\tcase \"string\":\n\t\t\treturn (value as string).length * 2; // UTF-16\n\t\tdefault:\n\t\t\t// Objects, arrays — use JSON.stringify as proxy\n\t\t\ttry {\n\t\t\t\treturn JSON.stringify(value).length;\n\t\t\t} catch {\n\t\t\t\treturn 100; // fallback for circular refs etc.\n\t\t\t}\n\t}\n}\n\n/** Estimate the byte size of a RowDelta. */\nfunction estimateDeltaBytes(delta: RowDelta): number {\n\tlet bytes = BASE_DELTA_OVERHEAD;\n\tbytes += delta.deltaId.length;\n\tbytes += delta.table.length * 2;\n\tbytes += delta.rowId.length * 2;\n\tbytes += delta.clientId.length * 2;\n\tfor (const col of delta.columns) {\n\t\tbytes += col.column.length * 2; // column name\n\t\tbytes += estimateValueBytes(col.value); // column value\n\t}\n\treturn bytes;\n}\n\n/**\n * Dual-structure delta buffer.\n *\n * Maintains an append-only log for event streaming (pull) and flush,\n * plus a row-level index for O(1) conflict resolution lookups.\n */\nexport class DeltaBuffer {\n\tprivate log: RowDelta[] = [];\n\tprivate index: Map<RowKey, RowDelta> = new Map();\n\tprivate deltaIds = new Set<string>();\n\tprivate estimatedBytes = 0;\n\tprivate createdAt: number = Date.now();\n\tprivate tableBytes = new Map<string, number>();\n\tprivate tableLog = new Map<string, RowDelta[]>();\n\n\t/** Append a delta to the log and upsert the index (post-conflict-resolution). 
*/\n\tappend(delta: RowDelta): void {\n\t\tthis.log.push(delta);\n\t\tconst key = rowKey(delta.table, delta.rowId);\n\t\tthis.index.set(key, delta);\n\t\tthis.deltaIds.add(delta.deltaId);\n\t\tconst bytes = estimateDeltaBytes(delta);\n\t\tthis.estimatedBytes += bytes;\n\t\t// Per-table tracking\n\t\tthis.tableBytes.set(delta.table, (this.tableBytes.get(delta.table) ?? 0) + bytes);\n\t\tconst tableEntries = this.tableLog.get(delta.table);\n\t\tif (tableEntries) {\n\t\t\ttableEntries.push(delta);\n\t\t} else {\n\t\t\tthis.tableLog.set(delta.table, [delta]);\n\t\t}\n\t}\n\n\t/** Get the current merged state for a row (for conflict resolution). */\n\tgetRow(key: RowKey): RowDelta | undefined {\n\t\treturn this.index.get(key);\n\t}\n\n\t/** Check if a delta with this ID already exists in the log (for idempotency). */\n\thasDelta(deltaId: string): boolean {\n\t\treturn this.deltaIds.has(deltaId);\n\t}\n\n\t/** Return change events from the log since a given HLC. */\n\tgetEventsSince(hlc: HLCTimestamp, limit: number): { deltas: RowDelta[]; hasMore: boolean } {\n\t\tlet lo = 0;\n\t\tlet hi = this.log.length;\n\t\twhile (lo < hi) {\n\t\t\tconst mid = (lo + hi) >>> 1;\n\t\t\tif (HLC.compare(this.log[mid]!.hlc, hlc) <= 0) {\n\t\t\t\tlo = mid + 1;\n\t\t\t} else {\n\t\t\t\thi = mid;\n\t\t\t}\n\t\t}\n\t\tconst hasMore = this.log.length - lo > limit;\n\t\treturn { deltas: this.log.slice(lo, lo + limit), hasMore };\n\t}\n\n\t/** Check if the buffer should be flushed based on size or age thresholds. */\n\tshouldFlush(config: { maxBytes: number; maxAgeMs: number }): boolean {\n\t\tif (this.log.length === 0) return false;\n\t\treturn this.estimatedBytes >= config.maxBytes || Date.now() - this.createdAt >= config.maxAgeMs;\n\t}\n\n\t/** Per-table buffer statistics. 
*/\n\ttableStats(): Array<{ table: string; byteSize: number; deltaCount: number }> {\n\t\tconst stats: Array<{ table: string; byteSize: number; deltaCount: number }> = [];\n\t\tfor (const [table, bytes] of this.tableBytes) {\n\t\t\tstats.push({\n\t\t\t\ttable,\n\t\t\t\tbyteSize: bytes,\n\t\t\t\tdeltaCount: this.tableLog.get(table)?.length ?? 0,\n\t\t\t});\n\t\t}\n\t\treturn stats;\n\t}\n\n\t/** Drain only the specified table's deltas, leaving other tables intact. */\n\tdrainTable(table: string): RowDelta[] {\n\t\tconst tableDeltas = this.tableLog.get(table) ?? [];\n\t\tif (tableDeltas.length === 0) return [];\n\n\t\t// Remove from main log\n\t\tthis.log = this.log.filter((d) => d.table !== table);\n\n\t\t// Remove from index and deltaIds\n\t\tfor (const delta of tableDeltas) {\n\t\t\tthis.index.delete(rowKey(delta.table, delta.rowId));\n\t\t\tthis.deltaIds.delete(delta.deltaId);\n\t\t}\n\n\t\t// Adjust byte tracking\n\t\tconst tableByteSize = this.tableBytes.get(table) ?? 0;\n\t\tthis.estimatedBytes -= tableByteSize;\n\t\tthis.tableBytes.delete(table);\n\t\tthis.tableLog.delete(table);\n\n\t\treturn tableDeltas;\n\t}\n\n\t/** Drain the log for flush. Returns log entries and clears both structures. */\n\tdrain(): RowDelta[] {\n\t\tconst entries = [...this.log];\n\t\tthis.log = [];\n\t\tthis.index.clear();\n\t\tthis.deltaIds.clear();\n\t\tthis.estimatedBytes = 0;\n\t\tthis.createdAt = Date.now();\n\t\tthis.tableBytes.clear();\n\t\tthis.tableLog.clear();\n\t\treturn entries;\n\t}\n\n\t/** Number of log entries */\n\tget logSize(): number {\n\t\treturn this.log.length;\n\t}\n\n\t/** Number of unique rows in the index */\n\tget indexSize(): number {\n\t\treturn this.index.size;\n\t}\n\n\t/** Estimated byte size of the buffer */\n\tget byteSize(): number {\n\t\treturn this.estimatedBytes;\n\t}\n\n\t/** Average byte size per delta in the buffer (0 if empty). */\n\tget averageDeltaBytes(): number {\n\t\treturn this.log.length === 0 ? 
0 : this.estimatedBytes / this.log.length;\n\t}\n}\n","import type { ConnectorConfig, SyncRulesConfig, TableSchema } from \"@lakesync/core\";\n\n/**\n * Platform-agnostic configuration storage interface.\n *\n * Implemented by MemoryConfigStore (tests, gateway-server) and\n * DurableStorageConfigStore (gateway-worker).\n */\nexport interface ConfigStore {\n\tgetSchema(gatewayId: string): Promise<TableSchema | undefined>;\n\tsetSchema(gatewayId: string, schema: TableSchema): Promise<void>;\n\tgetSyncRules(gatewayId: string): Promise<SyncRulesConfig | undefined>;\n\tsetSyncRules(gatewayId: string, rules: SyncRulesConfig): Promise<void>;\n\tgetConnectors(): Promise<Record<string, ConnectorConfig>>;\n\tsetConnectors(connectors: Record<string, ConnectorConfig>): Promise<void>;\n}\n\n/**\n * In-memory implementation of ConfigStore.\n * Used by tests and gateway-server.\n */\nexport class MemoryConfigStore implements ConfigStore {\n\tprivate schemas = new Map<string, TableSchema>();\n\tprivate syncRules = new Map<string, SyncRulesConfig>();\n\tprivate connectors: Record<string, ConnectorConfig> = {};\n\n\tasync getSchema(gatewayId: string): Promise<TableSchema | undefined> {\n\t\treturn this.schemas.get(gatewayId);\n\t}\n\n\tasync setSchema(gatewayId: string, schema: TableSchema): Promise<void> {\n\t\tthis.schemas.set(gatewayId, schema);\n\t}\n\n\tasync getSyncRules(gatewayId: string): Promise<SyncRulesConfig | undefined> {\n\t\treturn this.syncRules.get(gatewayId);\n\t}\n\n\tasync setSyncRules(gatewayId: string, rules: SyncRulesConfig): Promise<void> {\n\t\tthis.syncRules.set(gatewayId, rules);\n\t}\n\n\tasync getConnectors(): Promise<Record<string, ConnectorConfig>> {\n\t\treturn { ...this.connectors };\n\t}\n\n\tasync setConnectors(connectors: Record<string, ConnectorConfig>): Promise<void> {\n\t\tthis.connectors = { ...connectors };\n\t}\n}\n","/** Maximum push payload size (1 MiB). 
*/\nexport const MAX_PUSH_PAYLOAD_BYTES = 1_048_576;\n\n/** Maximum number of deltas allowed in a single push. */\nexport const MAX_DELTAS_PER_PUSH = 10_000;\n\n/** Maximum number of deltas returned in a single pull. */\nexport const MAX_PULL_LIMIT = 10_000;\n\n/** Default number of deltas returned in a pull when no limit is specified. */\nexport const DEFAULT_PULL_LIMIT = 100;\n\n/** Allowed column types for schema validation. */\nexport const VALID_COLUMN_TYPES = new Set([\"string\", \"number\", \"boolean\", \"json\", \"null\"]);\n\n/** Default maximum buffer size before triggering flush (4 MiB). */\nexport const DEFAULT_MAX_BUFFER_BYTES = 4 * 1024 * 1024;\n\n/** Default maximum buffer age before triggering flush (30 seconds). */\nexport const DEFAULT_MAX_BUFFER_AGE_MS = 30_000;\n","import { type DatabaseAdapter, isDatabaseAdapter, type LakeAdapter } from \"@lakesync/adapter\";\nimport {\n\tbuildPartitionSpec,\n\ttype DataFile,\n\tlakeSyncTableName,\n\ttype NessieCatalogueClient,\n\ttableSchemaToIceberg,\n} from \"@lakesync/catalogue\";\nimport {\n\tErr,\n\tFlushError,\n\tHLC,\n\ttype HLCTimestamp,\n\tOk,\n\ttype Result,\n\ttype RowDelta,\n\ttype TableSchema,\n\ttoError,\n} from \"@lakesync/core\";\nimport { writeDeltasToParquet } from \"@lakesync/parquet\";\nimport { bigintReplacer } from \"./json\";\nimport type { FlushEnvelope } from \"./types\";\n\n/** Configuration for flush operations. */\nexport interface FlushConfig {\n\tgatewayId: string;\n\tflushFormat?: \"json\" | \"parquet\";\n\ttableSchema?: TableSchema;\n\tcatalogue?: NessieCatalogueClient;\n}\n\n/** Dependencies injected into flush operations. */\nexport interface FlushDeps {\n\tadapter: LakeAdapter | DatabaseAdapter;\n\tconfig: FlushConfig;\n\trestoreEntries: (entries: RowDelta[]) => void;\n}\n\n/** Find the min and max HLC in a non-empty array of deltas. 
*/\nexport function hlcRange(entries: RowDelta[]): { min: HLCTimestamp; max: HLCTimestamp } {\n\tlet min = entries[0]!.hlc;\n\tlet max = entries[0]!.hlc;\n\tfor (let i = 1; i < entries.length; i++) {\n\t\tconst hlc = entries[i]!.hlc;\n\t\tif (HLC.compare(hlc, min) < 0) min = hlc;\n\t\tif (HLC.compare(hlc, max) > 0) max = hlc;\n\t}\n\treturn { min, max };\n}\n\n/**\n * Flush a set of entries to the configured adapter.\n *\n * Unifies both full-buffer flush and per-table flush. The `keyPrefix`\n * parameter, when provided, is prepended to the HLC range in the object key\n * (e.g. \"todos\" for per-table flush).\n */\nexport async function flushEntries(\n\tentries: RowDelta[],\n\tbyteSize: number,\n\tdeps: FlushDeps,\n\tkeyPrefix?: string,\n): Promise<Result<void, FlushError>> {\n\t// Database adapter path — batch INSERT deltas directly\n\tif (isDatabaseAdapter(deps.adapter)) {\n\t\ttry {\n\t\t\tconst result = await deps.adapter.insertDeltas(entries);\n\t\t\tif (!result.ok) {\n\t\t\t\tdeps.restoreEntries(entries);\n\t\t\t\treturn Err(new FlushError(`Database flush failed: ${result.error.message}`));\n\t\t\t}\n\t\t\treturn Ok(undefined);\n\t\t} catch (error: unknown) {\n\t\t\tdeps.restoreEntries(entries);\n\t\t\treturn Err(new FlushError(`Unexpected database flush failure: ${toError(error).message}`));\n\t\t}\n\t}\n\n\t// Lake adapter path — write to object storage as Parquet or JSON\n\ttry {\n\t\tconst { min, max } = hlcRange(entries);\n\t\tconst date = new Date().toISOString().split(\"T\")[0];\n\t\tconst prefix = keyPrefix ? 
`${keyPrefix}-` : \"\";\n\t\tlet objectKey: string;\n\t\tlet data: Uint8Array;\n\t\tlet contentType: string;\n\n\t\tif (deps.config.flushFormat === \"json\") {\n\t\t\tconst envelope: FlushEnvelope = {\n\t\t\t\tversion: 1,\n\t\t\t\tgatewayId: deps.config.gatewayId,\n\t\t\t\tcreatedAt: new Date().toISOString(),\n\t\t\t\thlcRange: { min, max },\n\t\t\t\tdeltaCount: entries.length,\n\t\t\t\tbyteSize,\n\t\t\t\tdeltas: entries,\n\t\t\t};\n\n\t\t\tobjectKey = `deltas/${date}/${deps.config.gatewayId}/${prefix}${min.toString()}-${max.toString()}.json`;\n\t\t\tdata = new TextEncoder().encode(JSON.stringify(envelope, bigintReplacer));\n\t\t\tcontentType = \"application/json\";\n\t\t} else {\n\t\t\t// Parquet path\n\t\t\tif (!deps.config.tableSchema) {\n\t\t\t\tdeps.restoreEntries(entries);\n\t\t\t\treturn Err(new FlushError(\"tableSchema required for Parquet flush\"));\n\t\t\t}\n\n\t\t\tconst parquetResult = await writeDeltasToParquet(entries, deps.config.tableSchema);\n\t\t\tif (!parquetResult.ok) {\n\t\t\t\tdeps.restoreEntries(entries);\n\t\t\t\treturn Err(parquetResult.error);\n\t\t\t}\n\n\t\t\tobjectKey = `deltas/${date}/${deps.config.gatewayId}/${prefix}${min.toString()}-${max.toString()}.parquet`;\n\t\t\tdata = parquetResult.value;\n\t\t\tcontentType = \"application/vnd.apache.parquet\";\n\t\t}\n\n\t\tconst result = await deps.adapter.putObject(objectKey, data, contentType);\n\t\tif (!result.ok) {\n\t\t\tdeps.restoreEntries(entries);\n\t\t\treturn Err(new FlushError(`Failed to write flush envelope: ${result.error.message}`));\n\t\t}\n\n\t\tif (deps.config.catalogue && deps.config.tableSchema) {\n\t\t\tawait commitToCatalogue(\n\t\t\t\tobjectKey,\n\t\t\t\tdata.byteLength,\n\t\t\t\tentries.length,\n\t\t\t\tdeps.config.catalogue,\n\t\t\t\tdeps.config.tableSchema,\n\t\t\t);\n\t\t}\n\n\t\treturn Ok(undefined);\n\t} catch (error: unknown) {\n\t\tdeps.restoreEntries(entries);\n\t\treturn Err(new FlushError(`Unexpected flush failure: 
${toError(error).message}`));\n\t}\n}\n\n/**\n * Best-effort catalogue commit. Registers the flushed Parquet file\n * as an Iceberg snapshot via Nessie. Errors are logged but do not\n * fail the flush — the Parquet file is the source of truth.\n */\nexport async function commitToCatalogue(\n\tobjectKey: string,\n\tfileSizeInBytes: number,\n\trecordCount: number,\n\tcatalogue: NessieCatalogueClient,\n\tschema: TableSchema,\n): Promise<void> {\n\tconst { namespace, name } = lakeSyncTableName(schema.table);\n\tconst icebergSchema = tableSchemaToIceberg(schema);\n\tconst partitionSpec = buildPartitionSpec(icebergSchema);\n\n\t// Ensure namespace exists (idempotent)\n\tawait catalogue.createNamespace(namespace);\n\n\t// Ensure table exists (idempotent — catch 409)\n\tconst createResult = await catalogue.createTable(namespace, name, icebergSchema, partitionSpec);\n\tif (!createResult.ok && createResult.error.statusCode !== 409) {\n\t\treturn;\n\t}\n\n\t// Build DataFile reference\n\tconst dataFile: DataFile = {\n\t\tcontent: \"data\",\n\t\t\"file-path\": objectKey,\n\t\t\"file-format\": \"PARQUET\",\n\t\t\"record-count\": recordCount,\n\t\t\"file-size-in-bytes\": fileSizeInBytes,\n\t};\n\n\t// Append file to table snapshot\n\tconst appendResult = await catalogue.appendFiles(namespace, name, [dataFile]);\n\tif (!appendResult.ok && appendResult.error.statusCode === 409) {\n\t\t// On 409 conflict, retry once with fresh metadata\n\t\tawait catalogue.appendFiles(namespace, name, [dataFile]);\n\t}\n}\n","import { type DatabaseAdapter, isDatabaseAdapter, type LakeAdapter } from \"@lakesync/adapter\";\nimport {\n\ttype ActionDiscovery,\n\ttype ActionHandler,\n\ttype ActionPush,\n\ttype ActionResponse,\n\ttype ActionValidationError,\n\ttype AdapterError,\n\tAdapterNotFoundError,\n\ttype AuthContext,\n\tBackpressureError,\n\ttype ClockDriftError,\n\tErr,\n\tFlushError,\n\tfilterDeltas,\n\tHLC,\n\ttype IngestTarget,\n\tOk,\n\ttype Result,\n\ttype 
RowDelta,\n\tresolveLWW,\n\trowKey,\n\ttype SchemaError,\n\ttype SyncPull,\n\ttype SyncPush,\n\ttype SyncResponse,\n\ttype SyncRulesContext,\n} from \"@lakesync/core\";\nimport { ActionDispatcher } from \"./action-dispatcher\";\nimport { DeltaBuffer } from \"./buffer\";\nimport { flushEntries } from \"./flush\";\nimport type { GatewayConfig, HandlePushResult } from \"./types\";\n\nexport type { SyncPush, SyncPull, SyncResponse };\n\n/**\n * Sync gateway -- coordinates delta ingestion, conflict resolution, and flush.\n *\n * Thin facade composing ActionDispatcher, DeltaBuffer, and flushEntries.\n */\nexport class SyncGateway implements IngestTarget {\n\tprivate hlc: HLC;\n\treadonly buffer: DeltaBuffer;\n\treadonly actions: ActionDispatcher;\n\tprivate config: GatewayConfig;\n\tprivate adapter: LakeAdapter | DatabaseAdapter | null;\n\tprivate flushing = false;\n\n\tconstructor(config: GatewayConfig, adapter?: LakeAdapter | DatabaseAdapter) {\n\t\tthis.config = { sourceAdapters: {}, ...config };\n\t\tthis.hlc = new HLC();\n\t\tthis.buffer = new DeltaBuffer();\n\t\tthis.adapter = this.config.adapter ?? adapter ?? null;\n\t\tthis.actions = new ActionDispatcher(config.actionHandlers);\n\t}\n\n\t/** Restore drained entries back to the buffer for retry. 
*/\n\tprivate restoreEntries(entries: RowDelta[]): void {\n\t\tfor (const entry of entries) {\n\t\t\tthis.buffer.append(entry);\n\t\t}\n\t}\n\n\t/**\n\t * Handle an incoming push from a client.\n\t *\n\t * Validates HLC drift, resolves conflicts via LWW, and appends to the buffer.\n\t *\n\t * @param msg - The push message containing client deltas.\n\t * @returns A `Result` with the new server HLC and accepted count,\n\t * or a `ClockDriftError` if the client clock is too far ahead.\n\t */\n\thandlePush(\n\t\tmsg: SyncPush,\n\t): Result<HandlePushResult, ClockDriftError | SchemaError | BackpressureError> {\n\t\t// Backpressure — reject when buffer exceeds threshold to prevent OOM\n\t\tconst backpressureLimit = this.config.maxBackpressureBytes ?? this.config.maxBufferBytes * 2;\n\t\tif (this.buffer.byteSize >= backpressureLimit) {\n\t\t\treturn Err(\n\t\t\t\tnew BackpressureError(\n\t\t\t\t\t`Buffer backpressure exceeded (${this.buffer.byteSize} >= ${backpressureLimit} bytes)`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\n\t\tlet accepted = 0;\n\t\tconst ingested: RowDelta[] = [];\n\n\t\tfor (const delta of msg.deltas) {\n\t\t\t// Check for idempotent re-push\n\t\t\tif (this.buffer.hasDelta(delta.deltaId)) {\n\t\t\t\taccepted++;\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Validate delta against the schema if a schema manager is configured\n\t\t\tif (this.config.schemaManager) {\n\t\t\t\tconst schemaResult = this.config.schemaManager.validateDelta(delta);\n\t\t\t\tif (!schemaResult.ok) {\n\t\t\t\t\treturn Err(schemaResult.error);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Validate HLC drift against server's physical clock\n\t\t\tconst recvResult = this.hlc.recv(delta.hlc);\n\t\t\tif (!recvResult.ok) {\n\t\t\t\treturn Err(recvResult.error);\n\t\t\t}\n\n\t\t\t// Check for conflict with existing state\n\t\t\tconst key = rowKey(delta.table, delta.rowId);\n\t\t\tconst existing = this.buffer.getRow(key);\n\n\t\t\tif (existing) {\n\t\t\t\tconst resolved = resolveLWW(existing, delta);\n\t\t\t\tif 
(resolved.ok) {\n\t\t\t\t\tthis.buffer.append(resolved.value);\n\t\t\t\t\tingested.push(resolved.value);\n\t\t\t\t}\n\t\t\t\t// If resolution fails (should not happen with LWW on same row), skip\n\t\t\t} else {\n\t\t\t\tthis.buffer.append(delta);\n\t\t\t\tingested.push(delta);\n\t\t\t}\n\n\t\t\taccepted++;\n\t\t}\n\n\t\tconst serverHlc = this.hlc.now();\n\t\treturn Ok({ serverHlc, accepted, deltas: ingested });\n\t}\n\n\t/**\n\t * Handle a pull request from a client.\n\t *\n\t * When `msg.source` is set, pulls deltas from the named source adapter\n\t * instead of the in-memory buffer. Otherwise, returns change events\n\t * from the log since the given HLC. When a {@link SyncRulesContext} is\n\t * provided, deltas are post-filtered by the client's bucket definitions\n\t * and JWT claims. The buffer path over-fetches (3x the requested limit)\n\t * and retries up to 5 times to fill the page.\n\t *\n\t * @param msg - The pull message specifying the cursor and limit.\n\t * @param context - Optional sync rules context for row-level filtering.\n\t * @returns A `Result` containing the matching deltas, server HLC, and pagination flag.\n\t */\n\thandlePull(\n\t\tmsg: SyncPull & { source: string },\n\t\tcontext?: SyncRulesContext,\n\t): Promise<Result<SyncResponse, AdapterNotFoundError | AdapterError>>;\n\thandlePull(msg: SyncPull, context?: SyncRulesContext): Result<SyncResponse, never>;\n\thandlePull(\n\t\tmsg: SyncPull,\n\t\tcontext?: SyncRulesContext,\n\t):\n\t\t| Promise<Result<SyncResponse, AdapterNotFoundError | AdapterError>>\n\t\t| Result<SyncResponse, never> {\n\t\tif (msg.source) {\n\t\t\treturn this.handleAdapterPull(msg, context);\n\t\t}\n\n\t\treturn this.handleBufferPull(msg, context);\n\t}\n\n\t/** Pull from the in-memory buffer (original path). 
*/\n\tprivate handleBufferPull(msg: SyncPull, context?: SyncRulesContext): Result<SyncResponse, never> {\n\t\tif (!context) {\n\t\t\tconst { deltas, hasMore } = this.buffer.getEventsSince(msg.sinceHlc, msg.maxDeltas);\n\t\t\tconst serverHlc = this.hlc.now();\n\t\t\treturn Ok({ deltas, serverHlc, hasMore });\n\t\t}\n\n\t\t// Over-fetch and filter with bounded retry\n\t\tconst maxRetries = 5;\n\t\tconst overFetchMultiplier = 3;\n\t\tlet cursor = msg.sinceHlc;\n\t\tconst collected: RowDelta[] = [];\n\n\t\tfor (let attempt = 0; attempt < maxRetries; attempt++) {\n\t\t\tconst fetchLimit = msg.maxDeltas * overFetchMultiplier;\n\t\t\tconst { deltas: raw, hasMore: rawHasMore } = this.buffer.getEventsSince(cursor, fetchLimit);\n\n\t\t\tif (raw.length === 0) {\n\t\t\t\t// No more data in buffer\n\t\t\t\tconst serverHlc = this.hlc.now();\n\t\t\t\treturn Ok({ deltas: collected, serverHlc, hasMore: false });\n\t\t\t}\n\n\t\t\tconst filtered = filterDeltas(raw, context);\n\t\t\tcollected.push(...filtered);\n\n\t\t\tif (collected.length >= msg.maxDeltas) {\n\t\t\t\t// Trim to exactly maxDeltas\n\t\t\t\tconst trimmed = collected.slice(0, msg.maxDeltas);\n\t\t\t\tconst serverHlc = this.hlc.now();\n\t\t\t\treturn Ok({ deltas: trimmed, serverHlc, hasMore: true });\n\t\t\t}\n\n\t\t\tif (!rawHasMore) {\n\t\t\t\t// Exhausted the buffer\n\t\t\t\tconst serverHlc = this.hlc.now();\n\t\t\t\treturn Ok({ deltas: collected, serverHlc, hasMore: false });\n\t\t\t}\n\n\t\t\t// Advance cursor past the last examined delta\n\t\t\tcursor = raw[raw.length - 1]!.hlc;\n\t\t}\n\n\t\t// Exhausted retries — return what we have\n\t\tconst serverHlc = this.hlc.now();\n\t\tconst hasMore = collected.length >= msg.maxDeltas;\n\t\tconst trimmed = collected.slice(0, msg.maxDeltas);\n\t\treturn Ok({ deltas: trimmed, serverHlc, hasMore });\n\t}\n\n\t/** Pull from a named source adapter. 
*/\n\tprivate async handleAdapterPull(\n\t\tmsg: SyncPull,\n\t\tcontext?: SyncRulesContext,\n\t): Promise<Result<SyncResponse, AdapterNotFoundError | AdapterError>> {\n\t\tconst adapter = this.config.sourceAdapters?.[msg.source!];\n\t\tif (!adapter) {\n\t\t\treturn Err(new AdapterNotFoundError(`Source adapter \"${msg.source}\" not found`));\n\t\t}\n\n\t\tconst queryResult = await adapter.queryDeltasSince(msg.sinceHlc);\n\t\tif (!queryResult.ok) {\n\t\t\treturn Err(queryResult.error);\n\t\t}\n\n\t\tlet deltas = queryResult.value;\n\n\t\t// Apply sync rules filtering if context is provided\n\t\tif (context) {\n\t\t\tdeltas = filterDeltas(deltas, context);\n\t\t}\n\n\t\t// Paginate\n\t\tconst hasMore = deltas.length > msg.maxDeltas;\n\t\tconst sliced = deltas.slice(0, msg.maxDeltas);\n\n\t\tconst serverHlc = this.hlc.now();\n\t\treturn Ok({ deltas: sliced, serverHlc, hasMore });\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Flush — delegates to flush module\n\t// -----------------------------------------------------------------------\n\n\t/**\n\t * Flush the buffer to the configured adapter.\n\t *\n\t * Writes deltas as either a Parquet file (default) or a JSON\n\t * {@link FlushEnvelope} to the adapter, depending on\n\t * `config.flushFormat`. 
If the write fails, the buffer entries\n\t * are restored so they can be retried.\n\t *\n\t * @returns A `Result` indicating success or a `FlushError`.\n\t */\n\tasync flush(): Promise<Result<void, FlushError>> {\n\t\tif (this.flushing) {\n\t\t\treturn Err(new FlushError(\"Flush already in progress\"));\n\t\t}\n\t\tif (this.buffer.logSize === 0) {\n\t\t\treturn Ok(undefined);\n\t\t}\n\t\tif (!this.adapter) {\n\t\t\treturn Err(new FlushError(\"No adapter configured\"));\n\t\t}\n\n\t\tthis.flushing = true;\n\n\t\t// Database adapter path — drain after flushing flag is set\n\t\tif (isDatabaseAdapter(this.adapter)) {\n\t\t\tconst entries = this.buffer.drain();\n\t\t\tif (entries.length === 0) {\n\t\t\t\tthis.flushing = false;\n\t\t\t\treturn Ok(undefined);\n\t\t\t}\n\n\t\t\ttry {\n\t\t\t\treturn await flushEntries(entries, 0, {\n\t\t\t\t\tadapter: this.adapter,\n\t\t\t\t\tconfig: {\n\t\t\t\t\t\tgatewayId: this.config.gatewayId,\n\t\t\t\t\t\tflushFormat: this.config.flushFormat,\n\t\t\t\t\t\ttableSchema: this.config.tableSchema,\n\t\t\t\t\t\tcatalogue: this.config.catalogue,\n\t\t\t\t\t},\n\t\t\t\t\trestoreEntries: (e) => this.restoreEntries(e),\n\t\t\t\t});\n\t\t\t} finally {\n\t\t\t\tthis.flushing = false;\n\t\t\t}\n\t\t}\n\n\t\t// Lake adapter path\n\t\tconst byteSize = this.buffer.byteSize;\n\t\tconst entries = this.buffer.drain();\n\n\t\ttry {\n\t\t\treturn await flushEntries(entries, byteSize, {\n\t\t\t\tadapter: this.adapter,\n\t\t\t\tconfig: {\n\t\t\t\t\tgatewayId: this.config.gatewayId,\n\t\t\t\t\tflushFormat: this.config.flushFormat,\n\t\t\t\t\ttableSchema: this.config.tableSchema,\n\t\t\t\t\tcatalogue: this.config.catalogue,\n\t\t\t\t},\n\t\t\t\trestoreEntries: (e) => this.restoreEntries(e),\n\t\t\t});\n\t\t} finally {\n\t\t\tthis.flushing = false;\n\t\t}\n\t}\n\n\t/**\n\t * Flush a single table's deltas from the buffer.\n\t *\n\t * Drains only the specified table's deltas and flushes them,\n\t * leaving other tables in the buffer.\n\t */\n\tasync 
flushTable(table: string): Promise<Result<void, FlushError>> {\n\t\tif (this.flushing) {\n\t\t\treturn Err(new FlushError(\"Flush already in progress\"));\n\t\t}\n\t\tif (!this.adapter) {\n\t\t\treturn Err(new FlushError(\"No adapter configured\"));\n\t\t}\n\n\t\tconst entries = this.buffer.drainTable(table);\n\t\tif (entries.length === 0) {\n\t\t\treturn Ok(undefined);\n\t\t}\n\n\t\tthis.flushing = true;\n\n\t\ttry {\n\t\t\treturn await flushEntries(\n\t\t\t\tentries,\n\t\t\t\t0,\n\t\t\t\t{\n\t\t\t\t\tadapter: this.adapter,\n\t\t\t\t\tconfig: {\n\t\t\t\t\t\tgatewayId: this.config.gatewayId,\n\t\t\t\t\t\tflushFormat: this.config.flushFormat,\n\t\t\t\t\t\ttableSchema: this.config.tableSchema,\n\t\t\t\t\t\tcatalogue: this.config.catalogue,\n\t\t\t\t\t},\n\t\t\t\t\trestoreEntries: (e) => this.restoreEntries(e),\n\t\t\t\t},\n\t\t\t\ttable,\n\t\t\t);\n\t\t} finally {\n\t\t\tthis.flushing = false;\n\t\t}\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Actions — delegates to ActionDispatcher\n\t// -----------------------------------------------------------------------\n\n\t/** Handle an incoming action push from a client. */\n\tasync handleAction(\n\t\tmsg: ActionPush,\n\t\tcontext?: AuthContext,\n\t): Promise<Result<ActionResponse, ActionValidationError>> {\n\t\treturn this.actions.dispatch(msg, () => this.hlc.now(), context);\n\t}\n\n\t/** Register a named action handler. */\n\tregisterActionHandler(name: string, handler: ActionHandler): void {\n\t\tthis.actions.registerHandler(name, handler);\n\t}\n\n\t/** Unregister a named action handler. */\n\tunregisterActionHandler(name: string): void {\n\t\tthis.actions.unregisterHandler(name);\n\t}\n\n\t/** List all registered action handler names. */\n\tlistActionHandlers(): string[] {\n\t\treturn this.actions.listHandlers();\n\t}\n\n\t/** Describe all registered action handlers and their supported actions. 
*/\n\tdescribeActions(): ActionDiscovery {\n\t\treturn this.actions.describe();\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Source adapters\n\t// -----------------------------------------------------------------------\n\n\t/**\n\t * Register a named source adapter for adapter-sourced pulls.\n\t *\n\t * @param name - Unique source name (used as the `source` parameter in pull requests).\n\t * @param adapter - The database adapter to register.\n\t */\n\tregisterSource(name: string, adapter: DatabaseAdapter): void {\n\t\tthis.config.sourceAdapters![name] = adapter;\n\t}\n\n\t/**\n\t * Unregister a named source adapter.\n\t *\n\t * @param name - The source name to remove.\n\t */\n\tunregisterSource(name: string): void {\n\t\tdelete this.config.sourceAdapters![name];\n\t}\n\n\t/**\n\t * List all registered source adapter names.\n\t *\n\t * @returns Array of registered source adapter names.\n\t */\n\tlistSources(): string[] {\n\t\treturn Object.keys(this.config.sourceAdapters!);\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Buffer queries\n\t// -----------------------------------------------------------------------\n\n\t/** Get per-table buffer statistics. */\n\tget tableStats(): Array<{ table: string; byteSize: number; deltaCount: number }> {\n\t\treturn this.buffer.tableStats();\n\t}\n\n\t/**\n\t * Get tables that exceed the per-table budget.\n\t */\n\tgetTablesExceedingBudget(): string[] {\n\t\tconst budget = this.config.perTableBudgetBytes;\n\t\tif (!budget) return [];\n\t\treturn this.buffer\n\t\t\t.tableStats()\n\t\t\t.filter((s) => s.byteSize >= budget)\n\t\t\t.map((s) => s.table);\n\t}\n\n\t/** Check if the buffer should be flushed based on config thresholds. 
*/\n\tshouldFlush(): boolean {\n\t\tlet effectiveMaxBytes = this.config.maxBufferBytes;\n\n\t\t// Reduce threshold for wide-column deltas\n\t\tconst adaptive = this.config.adaptiveBufferConfig;\n\t\tif (adaptive && this.buffer.averageDeltaBytes > adaptive.wideColumnThreshold) {\n\t\t\teffectiveMaxBytes = Math.floor(effectiveMaxBytes * adaptive.reductionFactor);\n\t\t}\n\n\t\treturn this.buffer.shouldFlush({\n\t\t\tmaxBytes: effectiveMaxBytes,\n\t\t\tmaxAgeMs: this.config.maxBufferAgeMs,\n\t\t});\n\t}\n\n\t/** Get buffer statistics for monitoring. */\n\tget bufferStats(): {\n\t\tlogSize: number;\n\t\tindexSize: number;\n\t\tbyteSize: number;\n\t} {\n\t\treturn {\n\t\t\tlogSize: this.buffer.logSize,\n\t\t\tindexSize: this.buffer.indexSize,\n\t\t\tbyteSize: this.buffer.byteSize,\n\t\t};\n\t}\n}\n","import type {\n\tActionPush,\n\tHLCTimestamp,\n\tResolvedClaims,\n\tSyncPull,\n\tSyncPush,\n\tSyncRulesConfig,\n\tSyncRulesContext,\n\tTableSchema,\n} from \"@lakesync/core\";\nimport { bigintReviver, Err, Ok, type Result } from \"@lakesync/core\";\nimport {\n\tDEFAULT_PULL_LIMIT,\n\tMAX_DELTAS_PER_PUSH,\n\tMAX_PULL_LIMIT,\n\tVALID_COLUMN_TYPES,\n} from \"./constants\";\n\n/** Validation error with HTTP status code. 
*/\nexport interface RequestError {\n\tstatus: number;\n\tmessage: string;\n}\n\n/**\n * Validate and parse a push request body.\n * Handles JSON parsing with bigint revival.\n */\nexport function validatePushBody(\n\traw: string,\n\theaderClientId?: string | null,\n): Result<SyncPush, RequestError> {\n\tlet body: SyncPush;\n\ttry {\n\t\tbody = JSON.parse(raw, bigintReviver) as SyncPush;\n\t} catch {\n\t\treturn Err({ status: 400, message: \"Invalid JSON body\" });\n\t}\n\n\tif (!body.clientId || !Array.isArray(body.deltas)) {\n\t\treturn Err({ status: 400, message: \"Missing required fields: clientId, deltas\" });\n\t}\n\n\tif (headerClientId && body.clientId !== headerClientId) {\n\t\treturn Err({\n\t\t\tstatus: 403,\n\t\t\tmessage: \"Client ID mismatch: push clientId does not match authenticated identity\",\n\t\t});\n\t}\n\n\tif (body.deltas.length > MAX_DELTAS_PER_PUSH) {\n\t\treturn Err({ status: 400, message: \"Too many deltas in a single push (max 10,000)\" });\n\t}\n\n\treturn Ok(body);\n}\n\n/**\n * Parse and validate pull query parameters.\n */\nexport function parsePullParams(params: {\n\tsince: string | null;\n\tclientId: string | null;\n\tlimit: string | null;\n\tsource: string | null;\n}): Result<SyncPull, RequestError> {\n\tif (!params.since || !params.clientId) {\n\t\treturn Err({ status: 400, message: \"Missing required query params: since, clientId\" });\n\t}\n\n\tlet sinceHlc: HLCTimestamp;\n\ttry {\n\t\tsinceHlc = BigInt(params.since) as HLCTimestamp;\n\t} catch {\n\t\treturn Err({\n\t\t\tstatus: 400,\n\t\t\tmessage: \"Invalid 'since' parameter \\u2014 must be a decimal integer\",\n\t\t});\n\t}\n\n\tconst rawLimit = params.limit ? 
Number.parseInt(params.limit, 10) : DEFAULT_PULL_LIMIT;\n\tif (Number.isNaN(rawLimit) || rawLimit < 1) {\n\t\treturn Err({\n\t\t\tstatus: 400,\n\t\t\tmessage: \"Invalid 'limit' parameter \\u2014 must be a positive integer\",\n\t\t});\n\t}\n\tconst maxDeltas = Math.min(rawLimit, MAX_PULL_LIMIT);\n\n\tconst msg: SyncPull = {\n\t\tclientId: params.clientId,\n\t\tsinceHlc,\n\t\tmaxDeltas,\n\t\t...(params.source ? { source: params.source } : {}),\n\t};\n\n\treturn Ok(msg);\n}\n\n/**\n * Validate and parse an action request body.\n */\nexport function validateActionBody(\n\traw: string,\n\theaderClientId?: string | null,\n): Result<ActionPush, RequestError> {\n\tlet body: ActionPush;\n\ttry {\n\t\tbody = JSON.parse(raw, bigintReviver) as ActionPush;\n\t} catch {\n\t\treturn Err({ status: 400, message: \"Invalid JSON body\" });\n\t}\n\n\tif (!body.clientId || !Array.isArray(body.actions)) {\n\t\treturn Err({ status: 400, message: \"Missing required fields: clientId, actions\" });\n\t}\n\n\tif (headerClientId && body.clientId !== headerClientId) {\n\t\treturn Err({\n\t\t\tstatus: 403,\n\t\t\tmessage: \"Client ID mismatch: action clientId does not match authenticated identity\",\n\t\t});\n\t}\n\n\treturn Ok(body);\n}\n\n/**\n * Validate a table schema body.\n */\nexport function validateSchemaBody(raw: string): Result<TableSchema, RequestError> {\n\tlet schema: TableSchema;\n\ttry {\n\t\tschema = JSON.parse(raw) as TableSchema;\n\t} catch {\n\t\treturn Err({ status: 400, message: \"Invalid JSON body\" });\n\t}\n\n\tif (!schema.table || !Array.isArray(schema.columns)) {\n\t\treturn Err({ status: 400, message: \"Missing required fields: table, columns\" });\n\t}\n\n\tfor (const col of schema.columns) {\n\t\tif (typeof col.name !== \"string\" || col.name.length === 0) {\n\t\t\treturn Err({ status: 400, message: \"Each column must have a non-empty 'name' string\" });\n\t\t}\n\t\tif (!VALID_COLUMN_TYPES.has(col.type)) {\n\t\t\treturn Err({\n\t\t\t\tstatus: 400,\n\t\t\t\tmessage: 
`Invalid column type \"${col.type}\" for column \"${col.name}\". Allowed: string, number, boolean, json, null`,\n\t\t\t});\n\t\t}\n\t}\n\n\treturn Ok(schema);\n}\n\n/**\n * Map a gateway push error code to an HTTP status code.\n */\nexport function pushErrorToStatus(code: string): number {\n\tswitch (code) {\n\t\tcase \"CLOCK_DRIFT\":\n\t\t\treturn 409;\n\t\tcase \"SCHEMA_MISMATCH\":\n\t\t\treturn 422;\n\t\tcase \"BACKPRESSURE\":\n\t\t\treturn 503;\n\t\tdefault:\n\t\t\treturn 500;\n\t}\n}\n\n/**\n * Build a SyncRulesContext from rules and claims.\n * Returns undefined when no rules or empty buckets.\n */\nexport function buildSyncRulesContext(\n\trules: SyncRulesConfig | undefined,\n\tclaims: ResolvedClaims,\n): SyncRulesContext | undefined {\n\tif (!rules || rules.buckets.length === 0) {\n\t\treturn undefined;\n\t}\n\treturn { claims, rules };\n}\n","import type { HLCTimestamp, ResolvedClaims, RowDelta, SyncRulesConfig } from \"@lakesync/core\";\nimport { validateConnectorConfig, validateSyncRules } from \"@lakesync/core\";\nimport type { ConfigStore } from \"./config-store\";\nimport type { SyncGateway } from \"./gateway\";\nimport {\n\tbuildSyncRulesContext,\n\tparsePullParams,\n\tpushErrorToStatus,\n\tvalidateActionBody,\n\tvalidatePushBody,\n\tvalidateSchemaBody,\n} from \"./validation\";\n\n/** Result from a request handler, ready for platform-specific serialisation. */\nexport interface HandlerResult {\n\tstatus: number;\n\tbody: unknown;\n}\n\n/**\n * Handle a push request.\n *\n * @param gateway - The SyncGateway instance.\n * @param raw - The raw request body string.\n * @param headerClientId - Client ID from auth header (for mismatch check).\n * @param opts - Optional callbacks for persistence and broadcast.\n */\nexport function handlePushRequest(\n\tgateway: SyncGateway,\n\traw: string,\n\theaderClientId?: string | null,\n\topts?: {\n\t\t/** Persist deltas before processing (WAL-style). 
*/\n\t\tpersistBatch?: (deltas: RowDelta[]) => void;\n\t\t/** Clear persisted deltas after successful push. */\n\t\tclearPersistence?: () => void;\n\t\t/** Broadcast deltas to connected clients. */\n\t\tbroadcastFn?: (\n\t\t\tdeltas: RowDelta[],\n\t\t\tserverHlc: HLCTimestamp,\n\t\t\texcludeClientId: string,\n\t\t) => void | Promise<void>;\n\t},\n): HandlerResult {\n\tconst validation = validatePushBody(raw, headerClientId);\n\tif (!validation.ok) {\n\t\treturn { status: validation.error.status, body: { error: validation.error.message } };\n\t}\n\n\tconst body = validation.value;\n\n\t// Persist before processing (WAL-style)\n\topts?.persistBatch?.(body.deltas);\n\n\tconst result = gateway.handlePush(body);\n\tif (!result.ok) {\n\t\treturn {\n\t\t\tstatus: pushErrorToStatus(result.error.code),\n\t\t\tbody: { error: result.error.message },\n\t\t};\n\t}\n\n\t// Clear persisted deltas on success\n\topts?.clearPersistence?.();\n\n\t// Broadcast to connected clients (fire and forget)\n\tif (opts?.broadcastFn && result.value.deltas.length > 0) {\n\t\topts.broadcastFn(result.value.deltas, result.value.serverHlc, body.clientId);\n\t}\n\n\treturn { status: 200, body: result.value };\n}\n\n/**\n * Handle a pull request.\n */\nexport async function handlePullRequest(\n\tgateway: SyncGateway,\n\tparams: {\n\t\tsince: string | null;\n\t\tclientId: string | null;\n\t\tlimit: string | null;\n\t\tsource: string | null;\n\t},\n\tclaims?: ResolvedClaims,\n\tsyncRules?: SyncRulesConfig,\n): Promise<HandlerResult> {\n\tconst validation = parsePullParams(params);\n\tif (!validation.ok) {\n\t\treturn { status: validation.error.status, body: { error: validation.error.message } };\n\t}\n\n\tconst msg = validation.value;\n\tconst context = buildSyncRulesContext(syncRules, claims ?? {});\n\n\tconst result = msg.source\n\t\t? 
await gateway.handlePull(\n\t\t\t\tmsg as import(\"@lakesync/core\").SyncPull & { source: string },\n\t\t\t\tcontext,\n\t\t\t)\n\t\t: gateway.handlePull(msg, context);\n\n\tif (!result.ok) {\n\t\tconst err = result.error;\n\t\tif (err.code === \"ADAPTER_NOT_FOUND\") {\n\t\t\treturn { status: 404, body: { error: err.message } };\n\t\t}\n\t\treturn { status: 500, body: { error: err.message } };\n\t}\n\n\treturn { status: 200, body: result.value };\n}\n\n/**\n * Handle an action request.\n */\nexport async function handleActionRequest(\n\tgateway: SyncGateway,\n\traw: string,\n\theaderClientId?: string | null,\n\tclaims?: ResolvedClaims,\n): Promise<HandlerResult> {\n\tconst validation = validateActionBody(raw, headerClientId);\n\tif (!validation.ok) {\n\t\treturn { status: validation.error.status, body: { error: validation.error.message } };\n\t}\n\n\tconst context = claims ? { claims } : undefined;\n\tconst result = await gateway.handleAction(validation.value, context);\n\n\tif (!result.ok) {\n\t\treturn { status: 400, body: { error: result.error.message } };\n\t}\n\n\treturn { status: 200, body: result.value };\n}\n\n/**\n * Handle a flush request.\n */\nexport async function handleFlushRequest(\n\tgateway: SyncGateway,\n\topts?: { clearPersistence?: () => void },\n): Promise<HandlerResult> {\n\tconst result = await gateway.flush();\n\tif (!result.ok) {\n\t\treturn { status: 500, body: { error: result.error.message } };\n\t}\n\n\topts?.clearPersistence?.();\n\treturn { status: 200, body: { flushed: true } };\n}\n\n/**\n * Handle saving a table schema.\n */\nexport async function handleSaveSchema(\n\traw: string,\n\tstore: ConfigStore,\n\tgatewayId: string,\n): Promise<HandlerResult> {\n\tconst validation = validateSchemaBody(raw);\n\tif (!validation.ok) {\n\t\treturn { status: validation.error.status, body: { error: validation.error.message } };\n\t}\n\n\tawait store.setSchema(gatewayId, validation.value);\n\treturn { status: 200, body: { saved: true } 
};\n}\n\n/**\n * Handle saving sync rules.\n */\nexport async function handleSaveSyncRules(\n\traw: string,\n\tstore: ConfigStore,\n\tgatewayId: string,\n): Promise<HandlerResult> {\n\tlet config: unknown;\n\ttry {\n\t\tconfig = JSON.parse(raw);\n\t} catch {\n\t\treturn { status: 400, body: { error: \"Invalid JSON body\" } };\n\t}\n\n\tconst validation = validateSyncRules(config);\n\tif (!validation.ok) {\n\t\treturn { status: 400, body: { error: validation.error.message } };\n\t}\n\n\tawait store.setSyncRules(gatewayId, config as SyncRulesConfig);\n\treturn { status: 200, body: { saved: true } };\n}\n\n/**\n * Handle registering a connector.\n */\nexport async function handleRegisterConnector(\n\traw: string,\n\tstore: ConfigStore,\n): Promise<HandlerResult> {\n\tlet body: unknown;\n\ttry {\n\t\tbody = JSON.parse(raw);\n\t} catch {\n\t\treturn { status: 400, body: { error: \"Invalid JSON body\" } };\n\t}\n\n\tconst validation = validateConnectorConfig(body);\n\tif (!validation.ok) {\n\t\treturn { status: 400, body: { error: validation.error.message } };\n\t}\n\n\tconst config = validation.value;\n\tconst connectors = await store.getConnectors();\n\n\tif (connectors[config.name]) {\n\t\treturn { status: 409, body: { error: `Connector \"${config.name}\" already exists` } };\n\t}\n\n\tconnectors[config.name] = config;\n\tawait store.setConnectors(connectors);\n\n\treturn { status: 200, body: { registered: true, name: config.name } };\n}\n\n/**\n * Handle unregistering a connector.\n */\nexport async function handleUnregisterConnector(\n\tname: string,\n\tstore: ConfigStore,\n): Promise<HandlerResult> {\n\tconst connectors = await store.getConnectors();\n\n\tif (!connectors[name]) {\n\t\treturn { status: 404, body: { error: `Connector \"${name}\" not found` } };\n\t}\n\n\tdelete connectors[name];\n\tawait store.setConnectors(connectors);\n\n\treturn { status: 200, body: { unregistered: true, name } };\n}\n\n/**\n * Handle listing connectors.\n */\nexport async 
function handleListConnectors(store: ConfigStore): Promise<HandlerResult> {\n\tconst connectors = await store.getConnectors();\n\tconst list = Object.values(connectors).map((c) => ({\n\t\tname: c.name,\n\t\ttype: c.type,\n\t\thasIngest: c.ingest !== undefined,\n\t}));\n\n\treturn { status: 200, body: list };\n}\n\n/**\n * Handle metrics request.\n */\nexport function handleMetrics(\n\tgateway: SyncGateway,\n\textra?: Record<string, unknown>,\n): HandlerResult {\n\tconst stats = gateway.bufferStats;\n\treturn { status: 200, body: { ...stats, ...extra } };\n}\n","import { Err, Ok, type Result, type RowDelta, SchemaError, type TableSchema } from \"@lakesync/core\";\n\n/**\n * Manages schema versioning and validation for the gateway.\n *\n * Validates incoming deltas against the current schema and supports\n * safe schema evolution (adding nullable columns only).\n */\nexport class SchemaManager {\n\tprivate currentSchema: TableSchema;\n\tprivate version: number;\n\tprivate allowedColumns: Set<string>;\n\n\tconstructor(schema: TableSchema, version?: number) {\n\t\tthis.currentSchema = schema;\n\t\tthis.version = version ?? 1;\n\t\tthis.allowedColumns = new Set(schema.columns.map((c) => c.name));\n\t}\n\n\t/** Get the current schema and version. */\n\tgetSchema(): { schema: TableSchema; version: number } {\n\t\treturn { schema: this.currentSchema, version: this.version };\n\t}\n\n\t/**\n\t * Validate that a delta's columns are compatible with the current schema.\n\t *\n\t * Unknown columns result in a SchemaError. 
Missing columns are fine (sparse deltas).\n\t * DELETE ops with empty columns are always valid.\n\t */\n\tvalidateDelta(delta: RowDelta): Result<void, SchemaError> {\n\t\tif (delta.op === \"DELETE\" && delta.columns.length === 0) {\n\t\t\treturn Ok(undefined);\n\t\t}\n\n\t\tfor (const col of delta.columns) {\n\t\t\tif (!this.allowedColumns.has(col.column)) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SchemaError(\n\t\t\t\t\t\t`Unknown column \"${col.column}\" in delta for table \"${delta.table}\". Schema version ${this.version} does not include this column.`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\t\treturn Ok(undefined);\n\t}\n\n\t/**\n\t * Evolve the schema by adding new nullable columns.\n\t *\n\t * Only adding columns is allowed. Removing columns or changing types\n\t * returns a SchemaError.\n\t */\n\tevolveSchema(newSchema: TableSchema): Result<{ version: number }, SchemaError> {\n\t\tif (newSchema.table !== this.currentSchema.table) {\n\t\t\treturn Err(new SchemaError(\"Cannot evolve schema: table name mismatch\"));\n\t\t}\n\n\t\tconst oldColumnMap = new Map(this.currentSchema.columns.map((c) => [c.name, c.type]));\n\t\tconst newColumnMap = new Map(newSchema.columns.map((c) => [c.name, c.type]));\n\n\t\t// Check for removed columns\n\t\tfor (const [name] of oldColumnMap) {\n\t\t\tif (!newColumnMap.has(name)) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SchemaError(\n\t\t\t\t\t\t`Cannot remove column \"${name}\" — only adding nullable columns is supported`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\t// Check for type changes\n\t\tfor (const [name, oldType] of oldColumnMap) {\n\t\t\tconst newType = newColumnMap.get(name);\n\t\t\tif (newType && newType !== oldType) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SchemaError(\n\t\t\t\t\t\t`Cannot change type of column \"${name}\" from \"${oldType}\" to \"${newType}\"`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\t// Apply evolution\n\t\tthis.currentSchema = newSchema;\n\t\tthis.version++;\n\t\tthis.allowedColumns = new 
Set(newSchema.columns.map((c) => c.name));\n\n\t\treturn Ok({ version: this.version });\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsBO,IAAM,mBAAN,MAAuB;AAAA,EACrB,iBAA6C,oBAAI,IAAI;AAAA,EACrD,kBAA+B,oBAAI,IAAI;AAAA,EACvC,iBAGJ,oBAAI,IAAI;AAAA,EAEZ,YAAY,UAA0C;AACrD,QAAI,UAAU;AACb,iBAAW,CAAC,MAAM,OAAO,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACvD,aAAK,eAAe,IAAI,MAAM,OAAO;AAAA,MACtC;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,SACL,KACA,QACA,SACyD;AACzD,UAAM,UAEF,CAAC;AAEL,eAAW,UAAU,IAAI,SAAS;AAEjC,YAAM,aAAa,eAAe,MAAM;AACxC,UAAI,CAAC,WAAW,IAAI;AACnB,eAAO,IAAI,WAAW,KAAK;AAAA,MAC5B;AAGA,UAAI,KAAK,gBAAgB,IAAI,OAAO,QAAQ,GAAG;AAC9C,cAAM,SAAS,KAAK,eAAe,IAAI,OAAO,QAAQ;AACtD,YAAI,QAAQ;AACX,kBAAQ,KAAK,MAAM;AACnB;AAAA,QACD;AAEA;AAAA,MACD;AAGA,UAAI,OAAO,gBAAgB;AAC1B,cAAM,SAAS,KAAK,eAAe,IAAI,QAAQ,OAAO,cAAc,EAAE;AACtE,YAAI,QAAQ;AACX,kBAAQ,KAAK,MAAM;AACnB;AAAA,QACD;AAAA,MACD;AAGA,YAAM,UAAU,KAAK,eAAe,IAAI,OAAO,SAAS;AACxD,UAAI,CAAC,SAAS;AACb,cAAM,cAAc;AAAA,UACnB,UAAU,OAAO;AAAA,UACjB,MAAM;AAAA,UACN,SAAS,+CAA+C,OAAO,SAAS;AAAA,UACxE,WAAW;AAAA,QACZ;AACA,gBAAQ,KAAK,WAAW;AACxB,aAAK,kBAAkB,QAAQ,WAAW;AAC1C;AAAA,MACD;AAGA,YAAM,YAAY,QAAQ,iBAAiB,KAAK,CAAC,MAAM,EAAE,eAAe,OAAO,UAAU;AACzF,UAAI,CAAC,WAAW;AACf,cAAM,cAAc;AAAA,UACnB,UAAU,OAAO;AAAA,UACjB,MAAM;AAAA,UACN,SAAS,gBAAgB,OAAO,UAAU,iCAAiC,OAAO,SAAS;AAAA,UAC3F,WAAW;AAAA,QACZ;AACA,gBAAQ,KAAK,WAAW;AACxB,aAAK,kBAAkB,QAAQ,WAAW;AAC1C;AAAA,MACD;AAGA,YAAM,aAAa,MAAM,QAAQ,cAAc,QAAQ,OAAO;AAC9D,UAAI,WAAW,IAAI;AAClB,gBAAQ,KAAK,WAAW,KAAK;AAC7B,aAAK,kBAAkB,QAAQ,WAAW,KAAK;AAAA,MAChD,OAAO;AACN,cAAM,MAAM,WAAW;AACvB,cAAM,cAAc;AAAA,UACnB,UAAU,OAAO;AAAA,UACjB,MAAM,IAAI;AAAA,UACV,SAAS,IAAI;AAAA,UACb,WAAW,eAAe,MAAO,IAA6B,YAAY;AAAA,QAC3E;AACA,gBAAQ,KAAK,WAAW;AAExB,YAAI,CAAC,YAAY,WAAW;AAC3B,eAAK,kBAAkB,QAAQ,WAAW;AAAA,QAC3C;AAAA,MACD;AAAA,IACD;AAEA,UAAM,YAAY,OAAO;AACzB,WAAO,GAAG,EAAE,SAAS,UAAU,CAAC;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,gBAAgB,MAAc,SAA8B;AAC3D,SAAK,eAAe,IAAI,MAAM,OAAO;AAAA,EACtC;AAAA;AAAA;AAAA
;AAAA;AAAA;AAAA,EAOA,kBAAkB,MAAoB;AACrC,SAAK,eAAe,OAAO,IAAI;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAyB;AACxB,WAAO,CAAC,GAAG,KAAK,eAAe,KAAK,CAAC;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,WAA4B;AAC3B,UAAM,aAAiD,CAAC;AACxD,eAAW,CAAC,MAAM,OAAO,KAAK,KAAK,gBAAgB;AAClD,iBAAW,IAAI,IAAI,QAAQ;AAAA,IAC5B;AACA,WAAO,EAAE,WAAW;AAAA,EACrB;AAAA;AAAA,EAGQ,kBACP,QACA,QACO;AACP,SAAK,gBAAgB,IAAI,OAAO,QAAQ;AACxC,SAAK,eAAe,IAAI,OAAO,UAAU,MAAM;AAC/C,QAAI,OAAO,gBAAgB;AAC1B,WAAK,eAAe,IAAI,QAAQ,OAAO,cAAc,IAAI,MAAM;AAAA,IAChE;AAAA,EACD;AACD;;;AC9LA,IAAM,sBAAsB,IAAI,IAAI,IAAI,IAAI;AAM5C,SAAS,mBAAmB,OAAwB;AACnD,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,UAAQ,OAAO,OAAO;AAAA,IACrB,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAQ,MAAiB,SAAS;AAAA;AAAA,IACnC;AAEC,UAAI;AACH,eAAO,KAAK,UAAU,KAAK,EAAE;AAAA,MAC9B,QAAQ;AACP,eAAO;AAAA,MACR;AAAA,EACF;AACD;AAGA,SAAS,mBAAmB,OAAyB;AACpD,MAAI,QAAQ;AACZ,WAAS,MAAM,QAAQ;AACvB,WAAS,MAAM,MAAM,SAAS;AAC9B,WAAS,MAAM,MAAM,SAAS;AAC9B,WAAS,MAAM,SAAS,SAAS;AACjC,aAAW,OAAO,MAAM,SAAS;AAChC,aAAS,IAAI,OAAO,SAAS;AAC7B,aAAS,mBAAmB,IAAI,KAAK;AAAA,EACtC;AACA,SAAO;AACR;AAQO,IAAM,cAAN,MAAkB;AAAA,EAChB,MAAkB,CAAC;AAAA,EACnB,QAA+B,oBAAI,IAAI;AAAA,EACvC,WAAW,oBAAI,IAAY;AAAA,EAC3B,iBAAiB;AAAA,EACjB,YAAoB,KAAK,IAAI;AAAA,EAC7B,aAAa,oBAAI,IAAoB;AAAA,EACrC,WAAW,oBAAI,IAAwB;AAAA;AAAA,EAG/C,OAAO,OAAuB;AAC7B,SAAK,IAAI,KAAK,KAAK;AACnB,UAAM,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK;AAC3C,SAAK,MAAM,IAAI,KAAK,KAAK;AACzB,SAAK,SAAS,IAAI,MAAM,OAAO;AAC/B,UAAM,QAAQ,mBAAmB,KAAK;AACtC,SAAK,kBAAkB;AAEvB,SAAK,WAAW,IAAI,MAAM,QAAQ,KAAK,WAAW,IAAI,MAAM,KAAK,KAAK,KAAK,KAAK;AAChF,UAAM,eAAe,KAAK,SAAS,IAAI,MAAM,KAAK;AAClD,QAAI,cAAc;AACjB,mBAAa,KAAK,KAAK;AAAA,IACxB,OAAO;AACN,WAAK,SAAS,IAAI,MAAM,OAAO,CAAC,KAAK,CAAC;AAAA,IACvC;AAAA,EACD;AAAA;AAAA,EAGA,OAAO,KAAmC;AACzC,WAAO,KAAK,MAAM,IAAI,GAAG;AAAA,EAC1B;AAAA;AAAA,EAGA,SAAS,SAA0B;AAClC,WAAO,KAAK,SAAS,IAAI,OAAO;AAAA,EACjC;AAAA;AAAA,EAGA,eAAe,KAAmB,OAAyD;AAC1F,QAAI,KAAK;AACT,QAAI,KAAK,KAAK,IAAI;AAClB,WAAO,KAAK,IAAI;AACf,Y
AAM,MAAO,KAAK,OAAQ;AAC1B,UAAI,IAAI,QAAQ,KAAK,IAAI,GAAG,EAAG,KAAK,GAAG,KAAK,GAAG;AAC9C,aAAK,MAAM;AAAA,MACZ,OAAO;AACN,aAAK;AAAA,MACN;AAAA,IACD;AACA,UAAM,UAAU,KAAK,IAAI,SAAS,KAAK;AACvC,WAAO,EAAE,QAAQ,KAAK,IAAI,MAAM,IAAI,KAAK,KAAK,GAAG,QAAQ;AAAA,EAC1D;AAAA;AAAA,EAGA,YAAY,QAAyD;AACpE,QAAI,KAAK,IAAI,WAAW,EAAG,QAAO;AAClC,WAAO,KAAK,kBAAkB,OAAO,YAAY,KAAK,IAAI,IAAI,KAAK,aAAa,OAAO;AAAA,EACxF;AAAA;AAAA,EAGA,aAA6E;AAC5E,UAAM,QAAwE,CAAC;AAC/E,eAAW,CAAC,OAAO,KAAK,KAAK,KAAK,YAAY;AAC7C,YAAM,KAAK;AAAA,QACV;AAAA,QACA,UAAU;AAAA,QACV,YAAY,KAAK,SAAS,IAAI,KAAK,GAAG,UAAU;AAAA,MACjD,CAAC;AAAA,IACF;AACA,WAAO;AAAA,EACR;AAAA;AAAA,EAGA,WAAW,OAA2B;AACrC,UAAM,cAAc,KAAK,SAAS,IAAI,KAAK,KAAK,CAAC;AACjD,QAAI,YAAY,WAAW,EAAG,QAAO,CAAC;AAGtC,SAAK,MAAM,KAAK,IAAI,OAAO,CAAC,MAAM,EAAE,UAAU,KAAK;AAGnD,eAAW,SAAS,aAAa;AAChC,WAAK,MAAM,OAAO,OAAO,MAAM,OAAO,MAAM,KAAK,CAAC;AAClD,WAAK,SAAS,OAAO,MAAM,OAAO;AAAA,IACnC;AAGA,UAAM,gBAAgB,KAAK,WAAW,IAAI,KAAK,KAAK;AACpD,SAAK,kBAAkB;AACvB,SAAK,WAAW,OAAO,KAAK;AAC5B,SAAK,SAAS,OAAO,KAAK;AAE1B,WAAO;AAAA,EACR;AAAA;AAAA,EAGA,QAAoB;AACnB,UAAM,UAAU,CAAC,GAAG,KAAK,GAAG;AAC5B,SAAK,MAAM,CAAC;AACZ,SAAK,MAAM,MAAM;AACjB,SAAK,SAAS,MAAM;AACpB,SAAK,iBAAiB;AACtB,SAAK,YAAY,KAAK,IAAI;AAC1B,SAAK,WAAW,MAAM;AACtB,SAAK,SAAS,MAAM;AACpB,WAAO;AAAA,EACR;AAAA;AAAA,EAGA,IAAI,UAAkB;AACrB,WAAO,KAAK,IAAI;AAAA,EACjB;AAAA;AAAA,EAGA,IAAI,YAAoB;AACvB,WAAO,KAAK,MAAM;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,WAAmB;AACtB,WAAO,KAAK;AAAA,EACb;AAAA;AAAA,EAGA,IAAI,oBAA4B;AAC/B,WAAO,KAAK,IAAI,WAAW,IAAI,IAAI,KAAK,iBAAiB,KAAK,IAAI;AAAA,EACnE;AACD;;;AC7JO,IAAM,oBAAN,MAA+C;AAAA,EAC7C,UAAU,oBAAI,IAAyB;AAAA,EACvC,YAAY,oBAAI,IAA6B;AAAA,EAC7C,aAA8C,CAAC;AAAA,EAEvD,MAAM,UAAU,WAAqD;AACpE,WAAO,KAAK,QAAQ,IAAI,SAAS;AAAA,EAClC;AAAA,EAEA,MAAM,UAAU,WAAmB,QAAoC;AACtE,SAAK,QAAQ,IAAI,WAAW,MAAM;AAAA,EACnC;AAAA,EAEA,MAAM,aAAa,WAAyD;AAC3E,WAAO,KAAK,UAAU,IAAI,SAAS;AAAA,EACpC;AAAA,EAEA,MAAM,aAAa,WAAmB,OAAuC;AAC5E,SAAK,UAAU,IAAI,WAAW,KAAK;AAAA,EACpC;AAAA,EAEA,MAAM,gBAA0D;AAC/D,WAAO,EAAE,GAAG,KAAK,WAAW;AAAA,EAC7B;AAAA,EAEA,MAAM,cAAc,YAA4D;AAC/E,SAAK,aAAa,
EAAE,GAAG,WAAW;AAAA,EACnC;AACD;;;AChDO,IAAM,yBAAyB;AAG/B,IAAM,sBAAsB;AAG5B,IAAM,iBAAiB;AAGvB,IAAM,qBAAqB;AAG3B,IAAM,qBAAqB,oBAAI,IAAI,CAAC,UAAU,UAAU,WAAW,QAAQ,MAAM,CAAC;AAGlF,IAAM,2BAA2B,IAAI,OAAO;AAG5C,IAAM,4BAA4B;;;ACoBlC,SAAS,SAAS,SAA+D;AACvF,MAAI,MAAM,QAAQ,CAAC,EAAG;AACtB,MAAI,MAAM,QAAQ,CAAC,EAAG;AACtB,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACxC,UAAM,MAAM,QAAQ,CAAC,EAAG;AACxB,QAAI,IAAI,QAAQ,KAAK,GAAG,IAAI,EAAG,OAAM;AACrC,QAAI,IAAI,QAAQ,KAAK,GAAG,IAAI,EAAG,OAAM;AAAA,EACtC;AACA,SAAO,EAAE,KAAK,IAAI;AACnB;AASA,eAAsB,aACrB,SACA,UACA,MACA,WACoC;AAEpC,MAAI,kBAAkB,KAAK,OAAO,GAAG;AACpC,QAAI;AACH,YAAM,SAAS,MAAM,KAAK,QAAQ,aAAa,OAAO;AACtD,UAAI,CAAC,OAAO,IAAI;AACf,aAAK,eAAe,OAAO;AAC3B,eAAO,IAAI,IAAI,WAAW,0BAA0B,OAAO,MAAM,OAAO,EAAE,CAAC;AAAA,MAC5E;AACA,aAAO,GAAG,MAAS;AAAA,IACpB,SAAS,OAAgB;AACxB,WAAK,eAAe,OAAO;AAC3B,aAAO,IAAI,IAAI,WAAW,sCAAsC,QAAQ,KAAK,EAAE,OAAO,EAAE,CAAC;AAAA,IAC1F;AAAA,EACD;AAGA,MAAI;AACH,UAAM,EAAE,KAAK,IAAI,IAAI,SAAS,OAAO;AACrC,UAAM,QAAO,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAClD,UAAM,SAAS,YAAY,GAAG,SAAS,MAAM;AAC7C,QAAI;AACJ,QAAI;AACJ,QAAI;AAEJ,QAAI,KAAK,OAAO,gBAAgB,QAAQ;AACvC,YAAM,WAA0B;AAAA,QAC/B,SAAS;AAAA,QACT,WAAW,KAAK,OAAO;AAAA,QACvB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,UAAU,EAAE,KAAK,IAAI;AAAA,QACrB,YAAY,QAAQ;AAAA,QACpB;AAAA,QACA,QAAQ;AAAA,MACT;AAEA,kBAAY,UAAU,IAAI,IAAI,KAAK,OAAO,SAAS,IAAI,MAAM,GAAG,IAAI,SAAS,CAAC,IAAI,IAAI,SAAS,CAAC;AAChG,aAAO,IAAI,YAAY,EAAE,OAAO,KAAK,UAAU,UAAU,cAAc,CAAC;AACxE,oBAAc;AAAA,IACf,OAAO;AAEN,UAAI,CAAC,KAAK,OAAO,aAAa;AAC7B,aAAK,eAAe,OAAO;AAC3B,eAAO,IAAI,IAAI,WAAW,wCAAwC,CAAC;AAAA,MACpE;AAEA,YAAM,gBAAgB,MAAM,qBAAqB,SAAS,KAAK,OAAO,WAAW;AACjF,UAAI,CAAC,cAAc,IAAI;AACtB,aAAK,eAAe,OAAO;AAC3B,eAAO,IAAI,cAAc,KAAK;AAAA,MAC/B;AAEA,kBAAY,UAAU,IAAI,IAAI,KAAK,OAAO,SAAS,IAAI,MAAM,GAAG,IAAI,SAAS,CAAC,IAAI,IAAI,SAAS,CAAC;AAChG,aAAO,cAAc;AACrB,oBAAc;AAAA,IACf;AAEA,UAAM,SAAS,MAAM,KAAK,QAAQ,UAAU,WAAW,MAAM,WAAW;AACxE,QAAI,CAAC,OAAO,IAAI;AACf,WAAK,eAAe,OAAO;AAC3B,aAAO,IAAI,IAAI,WAAW,mCAAmC,OAAO,MAAM,OAAO,EAAE,CAAC;AAAA,IACrF;AAEA,QAAI,KAA
K,OAAO,aAAa,KAAK,OAAO,aAAa;AACrD,YAAM;AAAA,QACL;AAAA,QACA,KAAK;AAAA,QACL,QAAQ;AAAA,QACR,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,MACb;AAAA,IACD;AAEA,WAAO,GAAG,MAAS;AAAA,EACpB,SAAS,OAAgB;AACxB,SAAK,eAAe,OAAO;AAC3B,WAAO,IAAI,IAAI,WAAW,6BAA6B,QAAQ,KAAK,EAAE,OAAO,EAAE,CAAC;AAAA,EACjF;AACD;AAOA,eAAsB,kBACrB,WACA,iBACA,aACA,WACA,QACgB;AAChB,QAAM,EAAE,WAAW,KAAK,IAAI,kBAAkB,OAAO,KAAK;AAC1D,QAAM,gBAAgB,qBAAqB,MAAM;AACjD,QAAM,gBAAgB,mBAAmB,aAAa;AAGtD,QAAM,UAAU,gBAAgB,SAAS;AAGzC,QAAM,eAAe,MAAM,UAAU,YAAY,WAAW,MAAM,eAAe,aAAa;AAC9F,MAAI,CAAC,aAAa,MAAM,aAAa,MAAM,eAAe,KAAK;AAC9D;AAAA,EACD;AAGA,QAAM,WAAqB;AAAA,IAC1B,SAAS;AAAA,IACT,aAAa;AAAA,IACb,eAAe;AAAA,IACf,gBAAgB;AAAA,IAChB,sBAAsB;AAAA,EACvB;AAGA,QAAM,eAAe,MAAM,UAAU,YAAY,WAAW,MAAM,CAAC,QAAQ,CAAC;AAC5E,MAAI,CAAC,aAAa,MAAM,aAAa,MAAM,eAAe,KAAK;AAE9D,UAAM,UAAU,YAAY,WAAW,MAAM,CAAC,QAAQ,CAAC;AAAA,EACxD;AACD;;;AC9IO,IAAM,cAAN,MAA0C;AAAA,EACxC;AAAA,EACC;AAAA,EACA;AAAA,EACD;AAAA,EACA;AAAA,EACA,WAAW;AAAA,EAEnB,YAAY,QAAuB,SAAyC;AAC3E,SAAK,SAAS,EAAE,gBAAgB,CAAC,GAAG,GAAG,OAAO;AAC9C,SAAK,MAAM,IAAI,IAAI;AACnB,SAAK,SAAS,IAAI,YAAY;AAC9B,SAAK,UAAU,KAAK,OAAO,WAAW,WAAW;AACjD,SAAK,UAAU,IAAI,iBAAiB,OAAO,cAAc;AAAA,EAC1D;AAAA;AAAA,EAGQ,eAAe,SAA2B;AACjD,eAAW,SAAS,SAAS;AAC5B,WAAK,OAAO,OAAO,KAAK;AAAA,IACzB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,WACC,KAC8E;AAE9E,UAAM,oBAAoB,KAAK,OAAO,wBAAwB,KAAK,OAAO,iBAAiB;AAC3F,QAAI,KAAK,OAAO,YAAY,mBAAmB;AAC9C,aAAO;AAAA,QACN,IAAI;AAAA,UACH,iCAAiC,KAAK,OAAO,QAAQ,OAAO,iBAAiB;AAAA,QAC9E;AAAA,MACD;AAAA,IACD;AAEA,QAAI,WAAW;AACf,UAAM,WAAuB,CAAC;AAE9B,eAAW,SAAS,IAAI,QAAQ;AAE/B,UAAI,KAAK,OAAO,SAAS,MAAM,OAAO,GAAG;AACxC;AACA;AAAA,MACD;AAGA,UAAI,KAAK,OAAO,eAAe;AAC9B,cAAM,eAAe,KAAK,OAAO,cAAc,cAAc,KAAK;AAClE,YAAI,CAAC,aAAa,IAAI;AACrB,iBAAO,IAAI,aAAa,KAAK;AAAA,QAC9B;AAAA,MACD;AAGA,YAAM,aAAa,KAAK,IAAI,KAAK,MAAM,GAAG;AAC1C,UAAI,CAAC,WAAW,IAAI;AACnB,eAAO,IAAI,WAAW,KAAK;AAAA,MAC5B;AAGA,YAAM,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK;AAC3C,YAAM,WAAW,KAAK,OAAO,OAAO,GAAG;AAEvC,UAAI,UAAU;AACb,cAAM,WAAW,WAAW,UAAU,KAAK;AAC3C,YAAI,SAAS,I
AAI;AAChB,eAAK,OAAO,OAAO,SAAS,KAAK;AACjC,mBAAS,KAAK,SAAS,KAAK;AAAA,QAC7B;AAAA,MAED,OAAO;AACN,aAAK,OAAO,OAAO,KAAK;AACxB,iBAAS,KAAK,KAAK;AAAA,MACpB;AAEA;AAAA,IACD;AAEA,UAAM,YAAY,KAAK,IAAI,IAAI;AAC/B,WAAO,GAAG,EAAE,WAAW,UAAU,QAAQ,SAAS,CAAC;AAAA,EACpD;AAAA,EAqBA,WACC,KACA,SAG8B;AAC9B,QAAI,IAAI,QAAQ;AACf,aAAO,KAAK,kBAAkB,KAAK,OAAO;AAAA,IAC3C;AAEA,WAAO,KAAK,iBAAiB,KAAK,OAAO;AAAA,EAC1C;AAAA;AAAA,EAGQ,iBAAiB,KAAe,SAAyD;AAChG,QAAI,CAAC,SAAS;AACb,YAAM,EAAE,QAAQ,SAAAA,SAAQ,IAAI,KAAK,OAAO,eAAe,IAAI,UAAU,IAAI,SAAS;AAClF,YAAMC,aAAY,KAAK,IAAI,IAAI;AAC/B,aAAO,GAAG,EAAE,QAAQ,WAAAA,YAAW,SAAAD,SAAQ,CAAC;AAAA,IACzC;AAGA,UAAM,aAAa;AACnB,UAAM,sBAAsB;AAC5B,QAAI,SAAS,IAAI;AACjB,UAAM,YAAwB,CAAC;AAE/B,aAAS,UAAU,GAAG,UAAU,YAAY,WAAW;AACtD,YAAM,aAAa,IAAI,YAAY;AACnC,YAAM,EAAE,QAAQ,KAAK,SAAS,WAAW,IAAI,KAAK,OAAO,eAAe,QAAQ,UAAU;AAE1F,UAAI,IAAI,WAAW,GAAG;AAErB,cAAMC,aAAY,KAAK,IAAI,IAAI;AAC/B,eAAO,GAAG,EAAE,QAAQ,WAAW,WAAAA,YAAW,SAAS,MAAM,CAAC;AAAA,MAC3D;AAEA,YAAM,WAAW,aAAa,KAAK,OAAO;AAC1C,gBAAU,KAAK,GAAG,QAAQ;AAE1B,UAAI,UAAU,UAAU,IAAI,WAAW;AAEtC,cAAMC,WAAU,UAAU,MAAM,GAAG,IAAI,SAAS;AAChD,cAAMD,aAAY,KAAK,IAAI,IAAI;AAC/B,eAAO,GAAG,EAAE,QAAQC,UAAS,WAAAD,YAAW,SAAS,KAAK,CAAC;AAAA,MACxD;AAEA,UAAI,CAAC,YAAY;AAEhB,cAAMA,aAAY,KAAK,IAAI,IAAI;AAC/B,eAAO,GAAG,EAAE,QAAQ,WAAW,WAAAA,YAAW,SAAS,MAAM,CAAC;AAAA,MAC3D;AAGA,eAAS,IAAI,IAAI,SAAS,CAAC,EAAG;AAAA,IAC/B;AAGA,UAAM,YAAY,KAAK,IAAI,IAAI;AAC/B,UAAM,UAAU,UAAU,UAAU,IAAI;AACxC,UAAM,UAAU,UAAU,MAAM,GAAG,IAAI,SAAS;AAChD,WAAO,GAAG,EAAE,QAAQ,SAAS,WAAW,QAAQ,CAAC;AAAA,EAClD;AAAA;AAAA,EAGA,MAAc,kBACb,KACA,SACqE;AACrE,UAAM,UAAU,KAAK,OAAO,iBAAiB,IAAI,MAAO;AACxD,QAAI,CAAC,SAAS;AACb,aAAO,IAAI,IAAI,qBAAqB,mBAAmB,IAAI,MAAM,aAAa,CAAC;AAAA,IAChF;AAEA,UAAM,cAAc,MAAM,QAAQ,iBAAiB,IAAI,QAAQ;AAC/D,QAAI,CAAC,YAAY,IAAI;AACpB,aAAO,IAAI,YAAY,KAAK;AAAA,IAC7B;AAEA,QAAI,SAAS,YAAY;AAGzB,QAAI,SAAS;AACZ,eAAS,aAAa,QAAQ,OAAO;AAAA,IACtC;AAGA,UAAM,UAAU,OAAO,SAAS,IAAI;AACpC,UAAM,SAAS,OAAO,MAAM,GAAG,IAAI,SAAS;AAE5C,UAAM,YAAY,KAAK,IAAI,IAAI;AAC/B,WAAO,GAAG,EAAE,QAAQ,QAAQ,WAAW,QAAQ,CAAC;AAAA,EACjD;AAAA;AAAA
;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,QAA2C;AAChD,QAAI,KAAK,UAAU;AAClB,aAAO,IAAI,IAAI,WAAW,2BAA2B,CAAC;AAAA,IACvD;AACA,QAAI,KAAK,OAAO,YAAY,GAAG;AAC9B,aAAO,GAAG,MAAS;AAAA,IACpB;AACA,QAAI,CAAC,KAAK,SAAS;AAClB,aAAO,IAAI,IAAI,WAAW,uBAAuB,CAAC;AAAA,IACnD;AAEA,SAAK,WAAW;AAGhB,QAAI,kBAAkB,KAAK,OAAO,GAAG;AACpC,YAAME,WAAU,KAAK,OAAO,MAAM;AAClC,UAAIA,SAAQ,WAAW,GAAG;AACzB,aAAK,WAAW;AAChB,eAAO,GAAG,MAAS;AAAA,MACpB;AAEA,UAAI;AACH,eAAO,MAAM,aAAaA,UAAS,GAAG;AAAA,UACrC,SAAS,KAAK;AAAA,UACd,QAAQ;AAAA,YACP,WAAW,KAAK,OAAO;AAAA,YACvB,aAAa,KAAK,OAAO;AAAA,YACzB,aAAa,KAAK,OAAO;AAAA,YACzB,WAAW,KAAK,OAAO;AAAA,UACxB;AAAA,UACA,gBAAgB,CAAC,MAAM,KAAK,eAAe,CAAC;AAAA,QAC7C,CAAC;AAAA,MACF,UAAE;AACD,aAAK,WAAW;AAAA,MACjB;AAAA,IACD;AAGA,UAAM,WAAW,KAAK,OAAO;AAC7B,UAAM,UAAU,KAAK,OAAO,MAAM;AAElC,QAAI;AACH,aAAO,MAAM,aAAa,SAAS,UAAU;AAAA,QAC5C,SAAS,KAAK;AAAA,QACd,QAAQ;AAAA,UACP,WAAW,KAAK,OAAO;AAAA,UACvB,aAAa,KAAK,OAAO;AAAA,UACzB,aAAa,KAAK,OAAO;AAAA,UACzB,WAAW,KAAK,OAAO;AAAA,QACxB;AAAA,QACA,gBAAgB,CAAC,MAAM,KAAK,eAAe,CAAC;AAAA,MAC7C,CAAC;AAAA,IACF,UAAE;AACD,WAAK,WAAW;AAAA,IACjB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,WAAW,OAAkD;AAClE,QAAI,KAAK,UAAU;AAClB,aAAO,IAAI,IAAI,WAAW,2BAA2B,CAAC;AAAA,IACvD;AACA,QAAI,CAAC,KAAK,SAAS;AAClB,aAAO,IAAI,IAAI,WAAW,uBAAuB,CAAC;AAAA,IACnD;AAEA,UAAM,UAAU,KAAK,OAAO,WAAW,KAAK;AAC5C,QAAI,QAAQ,WAAW,GAAG;AACzB,aAAO,GAAG,MAAS;AAAA,IACpB;AAEA,SAAK,WAAW;AAEhB,QAAI;AACH,aAAO,MAAM;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,UACC,SAAS,KAAK;AAAA,UACd,QAAQ;AAAA,YACP,WAAW,KAAK,OAAO;AAAA,YACvB,aAAa,KAAK,OAAO;AAAA,YACzB,aAAa,KAAK,OAAO;AAAA,YACzB,WAAW,KAAK,OAAO;AAAA,UACxB;AAAA,UACA,gBAAgB,CAAC,MAAM,KAAK,eAAe,CAAC;AAAA,QAC7C;AAAA,QACA;AAAA,MACD;AAAA,IACD,UAAE;AACD,WAAK,WAAW;AAAA,IACjB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,aACL,KACA,SACyD;AACzD,WAAO,KAAK,QAAQ,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAG,OAAO;AAAA,EAChE;AAAA;AAAA,EAGA,sBAAsB,MAAc,SAA8B;AACjE,SAAK,QAAQ,gBAAgB,MAAM,OAAO;AAAA,EAC3C;AAAA;AAAA,EAGA,wBAAwB,MAAoB;AAC3C,SAAK,QAAQ,kBAAkB,IAAI;AAAA,EACpC;A
AAA;AAAA,EAGA,qBAA+B;AAC9B,WAAO,KAAK,QAAQ,aAAa;AAAA,EAClC;AAAA;AAAA,EAGA,kBAAmC;AAClC,WAAO,KAAK,QAAQ,SAAS;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,eAAe,MAAc,SAAgC;AAC5D,SAAK,OAAO,eAAgB,IAAI,IAAI;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,MAAoB;AACpC,WAAO,KAAK,OAAO,eAAgB,IAAI;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAwB;AACvB,WAAO,OAAO,KAAK,KAAK,OAAO,cAAe;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAI,aAA6E;AAChF,WAAO,KAAK,OAAO,WAAW;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,2BAAqC;AACpC,UAAM,SAAS,KAAK,OAAO;AAC3B,QAAI,CAAC,OAAQ,QAAO,CAAC;AACrB,WAAO,KAAK,OACV,WAAW,EACX,OAAO,CAAC,MAAM,EAAE,YAAY,MAAM,EAClC,IAAI,CAAC,MAAM,EAAE,KAAK;AAAA,EACrB;AAAA;AAAA,EAGA,cAAuB;AACtB,QAAI,oBAAoB,KAAK,OAAO;AAGpC,UAAM,WAAW,KAAK,OAAO;AAC7B,QAAI,YAAY,KAAK,OAAO,oBAAoB,SAAS,qBAAqB;AAC7E,0BAAoB,KAAK,MAAM,oBAAoB,SAAS,eAAe;AAAA,IAC5E;AAEA,WAAO,KAAK,OAAO,YAAY;AAAA,MAC9B,UAAU;AAAA,MACV,UAAU,KAAK,OAAO;AAAA,IACvB,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,IAAI,cAIF;AACD,WAAO;AAAA,MACN,SAAS,KAAK,OAAO;AAAA,MACrB,WAAW,KAAK,OAAO;AAAA,MACvB,UAAU,KAAK,OAAO;AAAA,IACvB;AAAA,EACD;AACD;;;AC3bO,SAAS,iBACf,KACA,gBACiC;AACjC,MAAI;AACJ,MAAI;AACH,WAAO,KAAK,MAAM,KAAK,aAAa;AAAA,EACrC,QAAQ;AACP,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,oBAAoB,CAAC;AAAA,EACzD;AAEA,MAAI,CAAC,KAAK,YAAY,CAAC,MAAM,QAAQ,KAAK,MAAM,GAAG;AAClD,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,4CAA4C,CAAC;AAAA,EACjF;AAEA,MAAI,kBAAkB,KAAK,aAAa,gBAAgB;AACvD,WAAO,IAAI;AAAA,MACV,QAAQ;AAAA,MACR,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAEA,MAAI,KAAK,OAAO,SAAS,qBAAqB;AAC7C,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,gDAAgD,CAAC;AAAA,EACrF;AAEA,SAAO,GAAG,IAAI;AACf;AAKO,SAAS,gBAAgB,QAKG;AAClC,MAAI,CAAC,OAAO,SAAS,CAAC,OAAO,UAAU;AACtC,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,iDAAiD,CAAC;AAAA,EACtF;AAEA,MAAI;AACJ,MAAI;AACH,eAAW,OAAO,OAAO,KAAK;AAAA,EAC/B,QAAQ;AACP,WAAO,IAAI;AAAA,MACV,QAAQ;AAAA,MACR,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAEA,QAAM,WAAW,OAAO,QAAQ,OAAO,SAAS,OAAO,OAAO,EAAE,IAAI;AACpE,MAAI,OAAO,MAAM,QAAQ,KAAK,WAAW,GAAG;AAC3C,WAAO,IAAI;AAAA,MACV,QAAQ;AAAA,MACR,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AACA,QAA
M,YAAY,KAAK,IAAI,UAAU,cAAc;AAEnD,QAAM,MAAgB;AAAA,IACrB,UAAU,OAAO;AAAA,IACjB;AAAA,IACA;AAAA,IACA,GAAI,OAAO,SAAS,EAAE,QAAQ,OAAO,OAAO,IAAI,CAAC;AAAA,EAClD;AAEA,SAAO,GAAG,GAAG;AACd;AAKO,SAAS,mBACf,KACA,gBACmC;AACnC,MAAI;AACJ,MAAI;AACH,WAAO,KAAK,MAAM,KAAK,aAAa;AAAA,EACrC,QAAQ;AACP,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,oBAAoB,CAAC;AAAA,EACzD;AAEA,MAAI,CAAC,KAAK,YAAY,CAAC,MAAM,QAAQ,KAAK,OAAO,GAAG;AACnD,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,6CAA6C,CAAC;AAAA,EAClF;AAEA,MAAI,kBAAkB,KAAK,aAAa,gBAAgB;AACvD,WAAO,IAAI;AAAA,MACV,QAAQ;AAAA,MACR,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAEA,SAAO,GAAG,IAAI;AACf;AAKO,SAAS,mBAAmB,KAAgD;AAClF,MAAI;AACJ,MAAI;AACH,aAAS,KAAK,MAAM,GAAG;AAAA,EACxB,QAAQ;AACP,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,oBAAoB,CAAC;AAAA,EACzD;AAEA,MAAI,CAAC,OAAO,SAAS,CAAC,MAAM,QAAQ,OAAO,OAAO,GAAG;AACpD,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,0CAA0C,CAAC;AAAA,EAC/E;AAEA,aAAW,OAAO,OAAO,SAAS;AACjC,QAAI,OAAO,IAAI,SAAS,YAAY,IAAI,KAAK,WAAW,GAAG;AAC1D,aAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,kDAAkD,CAAC;AAAA,IACvF;AACA,QAAI,CAAC,mBAAmB,IAAI,IAAI,IAAI,GAAG;AACtC,aAAO,IAAI;AAAA,QACV,QAAQ;AAAA,QACR,SAAS,wBAAwB,IAAI,IAAI,iBAAiB,IAAI,IAAI;AAAA,MACnE,CAAC;AAAA,IACF;AAAA,EACD;AAEA,SAAO,GAAG,MAAM;AACjB;AAKO,SAAS,kBAAkB,MAAsB;AACvD,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR;AACC,aAAO;AAAA,EACT;AACD;AAMO,SAAS,sBACf,OACA,QAC+B;AAC/B,MAAI,CAAC,SAAS,MAAM,QAAQ,WAAW,GAAG;AACzC,WAAO;AAAA,EACR;AACA,SAAO,EAAE,QAAQ,MAAM;AACxB;;;AC9JO,SAAS,kBACf,SACA,KACA,gBACA,MAYgB;AAChB,QAAM,aAAa,iBAAiB,KAAK,cAAc;AACvD,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,WAAW,MAAM,QAAQ,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACrF;AAEA,QAAM,OAAO,WAAW;AAGxB,QAAM,eAAe,KAAK,MAAM;AAEhC,QAAM,SAAS,QAAQ,WAAW,IAAI;AACtC,MAAI,CAAC,OAAO,IAAI;AACf,WAAO;AAAA,MACN,QAAQ,kBAAkB,OAAO,MAAM,IAAI;AAAA,MAC3C,MAAM,EAAE,OAAO,OAAO,MAAM,QAAQ;AAAA,IACrC;AAAA,EACD;AAGA,QAAM,mBAAmB;AAGzB,MAAI,MAAM,eAAe,OAAO,MAAM,OAAO,SAAS,GAAG;AACxD,SAAK,YAAY,OAAO,MAAM,QAAQ,OAAO,MAAM,WAAW,KAAK,QAAQ;AAAA,EAC5E;AAEA,SAAO,EAAE,QAAQ,KAAK,MAAM,OAAO,MAAM;AAC1C;AAKA,eAAsB,
kBACrB,SACA,QAMA,QACA,WACyB;AACzB,QAAM,aAAa,gBAAgB,MAAM;AACzC,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,WAAW,MAAM,QAAQ,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACrF;AAEA,QAAM,MAAM,WAAW;AACvB,QAAM,UAAU,sBAAsB,WAAW,UAAU,CAAC,CAAC;AAE7D,QAAM,SAAS,IAAI,SAChB,MAAM,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,EACD,IACC,QAAQ,WAAW,KAAK,OAAO;AAElC,MAAI,CAAC,OAAO,IAAI;AACf,UAAM,MAAM,OAAO;AACnB,QAAI,IAAI,SAAS,qBAAqB;AACrC,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,IAAI,QAAQ,EAAE;AAAA,IACpD;AACA,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,IAAI,QAAQ,EAAE;AAAA,EACpD;AAEA,SAAO,EAAE,QAAQ,KAAK,MAAM,OAAO,MAAM;AAC1C;AAKA,eAAsB,oBACrB,SACA,KACA,gBACA,QACyB;AACzB,QAAM,aAAa,mBAAmB,KAAK,cAAc;AACzD,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,WAAW,MAAM,QAAQ,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACrF;AAEA,QAAM,UAAU,SAAS,EAAE,OAAO,IAAI;AACtC,QAAM,SAAS,MAAM,QAAQ,aAAa,WAAW,OAAO,OAAO;AAEnE,MAAI,CAAC,OAAO,IAAI;AACf,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,OAAO,MAAM,QAAQ,EAAE;AAAA,EAC7D;AAEA,SAAO,EAAE,QAAQ,KAAK,MAAM,OAAO,MAAM;AAC1C;AAKA,eAAsB,mBACrB,SACA,MACyB;AACzB,QAAM,SAAS,MAAM,QAAQ,MAAM;AACnC,MAAI,CAAC,OAAO,IAAI;AACf,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,OAAO,MAAM,QAAQ,EAAE;AAAA,EAC7D;AAEA,QAAM,mBAAmB;AACzB,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,SAAS,KAAK,EAAE;AAC/C;AAKA,eAAsB,iBACrB,KACA,OACA,WACyB;AACzB,QAAM,aAAa,mBAAmB,GAAG;AACzC,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,WAAW,MAAM,QAAQ,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACrF;AAEA,QAAM,MAAM,UAAU,WAAW,WAAW,KAAK;AACjD,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,KAAK,EAAE;AAC7C;AAKA,eAAsB,oBACrB,KACA,OACA,WACyB;AACzB,MAAI;AACJ,MAAI;AACH,aAAS,KAAK,MAAM,GAAG;AAAA,EACxB,QAAQ;AACP,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,oBAAoB,EAAE;AAAA,EAC5D;AAEA,QAAM,aAAa,kBAAkB,MAAM;AAC3C,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACjE;AAEA,QAAM,MAAM,aAAa,WAAW,MAAyB;AAC7D,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,KAAK,EAAE;AAC7C;AAKA,eAAsB,wBACrB,KACA,OACyB;AACzB,MAAI;AACJ,MAAI;AACH,WAAO,KAAK,MAAM,GAAG;AAAA,EACtB,QAAQ;AACP,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,oBAAoB,EAAE;AAAA,EAC5D;AAEA
,QAAM,aAAa,wBAAwB,IAAI;AAC/C,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACjE;AAEA,QAAM,SAAS,WAAW;AAC1B,QAAM,aAAa,MAAM,MAAM,cAAc;AAE7C,MAAI,WAAW,OAAO,IAAI,GAAG;AAC5B,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,cAAc,OAAO,IAAI,mBAAmB,EAAE;AAAA,EACpF;AAEA,aAAW,OAAO,IAAI,IAAI;AAC1B,QAAM,MAAM,cAAc,UAAU;AAEpC,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,YAAY,MAAM,MAAM,OAAO,KAAK,EAAE;AACrE;AAKA,eAAsB,0BACrB,MACA,OACyB;AACzB,QAAM,aAAa,MAAM,MAAM,cAAc;AAE7C,MAAI,CAAC,WAAW,IAAI,GAAG;AACtB,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,cAAc,IAAI,cAAc,EAAE;AAAA,EACxE;AAEA,SAAO,WAAW,IAAI;AACtB,QAAM,MAAM,cAAc,UAAU;AAEpC,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,cAAc,MAAM,KAAK,EAAE;AAC1D;AAKA,eAAsB,qBAAqB,OAA4C;AACtF,QAAM,aAAa,MAAM,MAAM,cAAc;AAC7C,QAAM,OAAO,OAAO,OAAO,UAAU,EAAE,IAAI,CAAC,OAAO;AAAA,IAClD,MAAM,EAAE;AAAA,IACR,MAAM,EAAE;AAAA,IACR,WAAW,EAAE,WAAW;AAAA,EACzB,EAAE;AAEF,SAAO,EAAE,QAAQ,KAAK,MAAM,KAAK;AAClC;AAKO,SAAS,cACf,SACA,OACgB;AAChB,QAAM,QAAQ,QAAQ;AACtB,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,GAAG,OAAO,GAAG,MAAM,EAAE;AACpD;;;ACpQO,IAAM,gBAAN,MAAoB;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,QAAqB,SAAkB;AAClD,SAAK,gBAAgB;AACrB,SAAK,UAAU,WAAW;AAC1B,SAAK,iBAAiB,IAAI,IAAI,OAAO,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;AAAA,EAChE;AAAA;AAAA,EAGA,YAAsD;AACrD,WAAO,EAAE,QAAQ,KAAK,eAAe,SAAS,KAAK,QAAQ;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,cAAc,OAA4C;AACzD,QAAI,MAAM,OAAO,YAAY,MAAM,QAAQ,WAAW,GAAG;AACxD,aAAO,GAAG,MAAS;AAAA,IACpB;AAEA,eAAW,OAAO,MAAM,SAAS;AAChC,UAAI,CAAC,KAAK,eAAe,IAAI,IAAI,MAAM,GAAG;AACzC,eAAO;AAAA,UACN,IAAI;AAAA,YACH,mBAAmB,IAAI,MAAM,yBAAyB,MAAM,KAAK,qBAAqB,KAAK,OAAO;AAAA,UACnG;AAAA,QACD;AAAA,MACD;AAAA,IACD;AACA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,WAAkE;AAC9E,QAAI,UAAU,UAAU,KAAK,cAAc,OAAO;AACjD,aAAO,IAAI,IAAI,YAAY,2CAA2C,CAAC;AAAA,IACxE;AAEA,UAAM,eAAe,IAAI,IAAI,KAAK,cAAc,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;AACpF,UAAM,eAAe,IAAI,IAAI,UAAU,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;AAG3E,eAAW,CAAC,IAAI,KAAK,cAAc;
AAClC,UAAI,CAAC,aAAa,IAAI,IAAI,GAAG;AAC5B,eAAO;AAAA,UACN,IAAI;AAAA,YACH,yBAAyB,IAAI;AAAA,UAC9B;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAGA,eAAW,CAAC,MAAM,OAAO,KAAK,cAAc;AAC3C,YAAM,UAAU,aAAa,IAAI,IAAI;AACrC,UAAI,WAAW,YAAY,SAAS;AACnC,eAAO;AAAA,UACN,IAAI;AAAA,YACH,iCAAiC,IAAI,WAAW,OAAO,SAAS,OAAO;AAAA,UACxE;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAGA,SAAK,gBAAgB;AACrB,SAAK;AACL,SAAK,iBAAiB,IAAI,IAAI,UAAU,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;AAElE,WAAO,GAAG,EAAE,SAAS,KAAK,QAAQ,CAAC;AAAA,EACpC;AACD;","names":["hasMore","serverHlc","trimmed","entries"]}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
var __create = Object.create;
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
6
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
7
|
+
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
|
|
8
|
+
get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
|
|
9
|
+
}) : x)(function(x) {
|
|
10
|
+
if (typeof require !== "undefined") return require.apply(this, arguments);
|
|
11
|
+
throw Error('Dynamic require of "' + x + '" is not supported');
|
|
12
|
+
});
|
|
13
|
+
var __commonJS = (cb, mod) => function __require2() {
|
|
14
|
+
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
|
|
15
|
+
};
|
|
16
|
+
var __copyProps = (to, from, except, desc) => {
|
|
17
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
18
|
+
for (let key of __getOwnPropNames(from))
|
|
19
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
20
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
21
|
+
}
|
|
22
|
+
return to;
|
|
23
|
+
};
|
|
24
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
25
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
26
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
27
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
28
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
29
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
30
|
+
mod
|
|
31
|
+
));
|
|
32
|
+
|
|
33
|
+
export {
|
|
34
|
+
__require,
|
|
35
|
+
__commonJS,
|
|
36
|
+
__toESM
|
|
37
|
+
};
|
|
38
|
+
//# sourceMappingURL=chunk-7D4SUZUM.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
|
@@ -0,0 +1,335 @@
|
|
|
1
|
+
import {
|
|
2
|
+
Err,
|
|
3
|
+
Ok
|
|
4
|
+
} from "./chunk-ICNT7I3K.js";
|
|
5
|
+
|
|
6
|
+
// ../proto/src/gen/lakesync_pb.ts
|
|
7
|
+
import { enumDesc, fileDesc, messageDesc } from "@bufbuild/protobuf/codegenv2";
|
|
8
|
+
var file_lakesync = /* @__PURE__ */ fileDesc("Cg5sYWtlc3luYy5wcm90bxILbGFrZXN5bmMudjEiLAoLQ29sdW1uRGVsdGESDgoGY29sdW1uGAEgASgJEg0KBXZhbHVlGAIgASgMIqgBCghSb3dEZWx0YRIgCgJvcBgBIAEoDjIULmxha2VzeW5jLnYxLkRlbHRhT3ASDQoFdGFibGUYAiABKAkSDgoGcm93X2lkGAMgASgJEikKB2NvbHVtbnMYBCADKAsyGC5sYWtlc3luYy52MS5Db2x1bW5EZWx0YRILCgNobGMYBSABKAYSEQoJY2xpZW50X2lkGAYgASgJEhAKCGRlbHRhX2lkGAcgASgJIlsKCFN5bmNQdXNoEhEKCWNsaWVudF9pZBgBIAEoCRIlCgZkZWx0YXMYAiADKAsyFS5sYWtlc3luYy52MS5Sb3dEZWx0YRIVCg1sYXN0X3NlZW5faGxjGAMgASgGIkQKCFN5bmNQdWxsEhEKCWNsaWVudF9pZBgBIAEoCRIRCglzaW5jZV9obGMYAiABKAYSEgoKbWF4X2RlbHRhcxgDIAEoDSJbCgxTeW5jUmVzcG9uc2USJQoGZGVsdGFzGAEgAygLMhUubGFrZXN5bmMudjEuUm93RGVsdGESEgoKc2VydmVyX2hsYxgCIAEoBhIQCghoYXNfbW9yZRgDIAEoCCKMAQoGQWN0aW9uEhEKCWFjdGlvbl9pZBgBIAEoCRIRCgljbGllbnRfaWQYAiABKAkSCwoDaGxjGAMgASgGEhEKCWNvbm5lY3RvchgEIAEoCRITCgthY3Rpb25fdHlwZRgFIAEoCRIOCgZwYXJhbXMYBiABKAwSFwoPaWRlbXBvdGVuY3lfa2V5GAcgASgJIkUKCkFjdGlvblB1c2gSEQoJY2xpZW50X2lkGAEgASgJEiQKB2FjdGlvbnMYAiADKAsyEy5sYWtlc3luYy52MS5BY3Rpb24iRgoPQWN0aW9uUmVzdWx0TXNnEhEKCWFjdGlvbl9pZBgBIAEoCRIMCgRkYXRhGAIgASgMEhIKCnNlcnZlcl9obGMYAyABKAYiVQoOQWN0aW9uRXJyb3JNc2cSEQoJYWN0aW9uX2lkGAEgASgJEgwKBGNvZGUYAiABKAkSDwoHbWVzc2FnZRgDIAEoCRIRCglyZXRyeWFibGUYBCABKAgifgoTQWN0aW9uUmVzcG9uc2VFbnRyeRIvCgdzdWNjZXNzGAEgASgLMhwubGFrZXN5bmMudjEuQWN0aW9uUmVzdWx0TXNnSAASLAoFZXJyb3IYAiABKAsyGy5sYWtlc3luYy52MS5BY3Rpb25FcnJvck1zZ0gAQggKBnJlc3VsdCJXCg5BY3Rpb25SZXNwb25zZRIxCgdyZXN1bHRzGAEgAygLMiAubGFrZXN5bmMudjEuQWN0aW9uUmVzcG9uc2VFbnRyeRISCgpzZXJ2ZXJfaGxjGAIgASgGKmIKB0RlbHRhT3ASGAoUREVMVEFfT1BfVU5TUEVDSUZJRUQQABITCg9ERUxUQV9PUF9JTlNFUlQQARITCg9ERUxUQV9PUF9VUERBVEUQAhITCg9ERUxUQV9PUF9ERUxFVEUQA2IGcHJvdG8z");
|
|
9
|
+
var ColumnDeltaSchema = /* @__PURE__ */ messageDesc(file_lakesync, 0);
|
|
10
|
+
var RowDeltaSchema = /* @__PURE__ */ messageDesc(file_lakesync, 1);
|
|
11
|
+
var SyncPushSchema = /* @__PURE__ */ messageDesc(file_lakesync, 2);
|
|
12
|
+
var SyncPullSchema = /* @__PURE__ */ messageDesc(file_lakesync, 3);
|
|
13
|
+
var SyncResponseSchema = /* @__PURE__ */ messageDesc(file_lakesync, 4);
|
|
14
|
+
var ActionSchema = /* @__PURE__ */ messageDesc(file_lakesync, 5);
|
|
15
|
+
var ActionPushSchema = /* @__PURE__ */ messageDesc(file_lakesync, 6);
|
|
16
|
+
var ActionResultMsgSchema = /* @__PURE__ */ messageDesc(file_lakesync, 7);
|
|
17
|
+
var ActionResponseEntrySchema = /* @__PURE__ */ messageDesc(file_lakesync, 9);
|
|
18
|
+
var ActionResponseSchema = /* @__PURE__ */ messageDesc(file_lakesync, 10);
|
|
19
|
+
var DeltaOp = /* @__PURE__ */ ((DeltaOp2) => {
|
|
20
|
+
DeltaOp2[DeltaOp2["UNSPECIFIED"] = 0] = "UNSPECIFIED";
|
|
21
|
+
DeltaOp2[DeltaOp2["INSERT"] = 1] = "INSERT";
|
|
22
|
+
DeltaOp2[DeltaOp2["UPDATE"] = 2] = "UPDATE";
|
|
23
|
+
DeltaOp2[DeltaOp2["DELETE"] = 3] = "DELETE";
|
|
24
|
+
return DeltaOp2;
|
|
25
|
+
})(DeltaOp || {});
|
|
26
|
+
var DeltaOpSchema = /* @__PURE__ */ enumDesc(file_lakesync, 0);
|
|
27
|
+
|
|
28
|
+
// ../proto/src/codec.ts
|
|
29
|
+
import { create, fromBinary, toBinary } from "@bufbuild/protobuf";
|
|
30
|
+
var CodecError = class extends Error {
|
|
31
|
+
code = "CODEC_ERROR";
|
|
32
|
+
constructor(message, cause) {
|
|
33
|
+
super(message);
|
|
34
|
+
this.name = "CodecError";
|
|
35
|
+
this.cause = cause;
|
|
36
|
+
}
|
|
37
|
+
};
|
|
38
|
+
// Run a codec operation and wrap the outcome in a Result: Ok on success,
// Err(CodecError) on any throw. Non-Error throwables are stringified into a
// real Error so `cause` is always an Error instance.
function tryCodec(label, fn) {
  try {
    return Ok(fn());
  } catch (err) {
    return Err(
      new CodecError(label, err instanceof Error ? err : new Error(String(err)))
    );
  }
}
// Shared UTF-8 codecs for JSON (de)serialisation of column values and
// action params carried as protobuf `bytes` fields.
var textEncoder = new TextEncoder();
var textDecoder = new TextDecoder();
// Serialise an arbitrary JSON-able value to UTF-8 JSON bytes.
function encodeValue(value) {
  const json = JSON.stringify(value);
  return textEncoder.encode(json);
}
// Parse UTF-8 JSON bytes back into the original value.
function decodeValue(bytes) {
  const json = textDecoder.decode(bytes);
  return JSON.parse(json);
}
// Core DeltaOp string -> proto DeltaOp enum number. UNSPECIFIED (0) has no
// core equivalent and is intentionally absent.
var CORE_OP_TO_PROTO = {
  INSERT: 1,
  UPDATE: 2,
  DELETE: 3
};
// Proto DeltaOp enum number -> core DeltaOp string. 0 (UNSPECIFIED) maps to
// undefined so protoRowToCore can reject it explicitly.
var PROTO_OP_TO_CORE = {
  0: void 0,
  1: "INSERT",
  2: "UPDATE",
  3: "DELETE"
};
// Convert a core ColumnDelta into its proto message; the value is
// JSON-encoded to bytes via encodeValue.
function coreColumnToProto(col) {
  const { column, value } = col;
  return create(ColumnDeltaSchema, { column, value: encodeValue(value) });
}
// Convert a proto ColumnDelta back to core form; the bytes payload is
// JSON-decoded via decodeValue.
function protoColumnToCore(col) {
  const { column, value } = col;
  return { column, value: decodeValue(value) };
}
// Build a proto RowDelta from a core RowDelta: the op string is mapped to the
// proto enum and each column value is JSON-encoded.
function coreRowToProto(delta) {
  const { op, table, rowId, columns, hlc, clientId, deltaId } = delta;
  return create(RowDeltaSchema, {
    op: CORE_OP_TO_PROTO[op],
    table,
    rowId,
    columns: columns.map(coreColumnToProto),
    hlc,
    clientId,
    deltaId
  });
}
// Convert a proto RowDelta to core form. Throws CodecError on an UNSPECIFIED
// or unknown op so that tryCodec-wrapped callers surface it as an Err.
function protoRowToCore(delta) {
  const op = PROTO_OP_TO_CORE[delta.op];
  if (op === void 0) {
    throw new CodecError(`Unknown or unspecified DeltaOp: ${delta.op}`);
  }
  const { table, rowId, hlc, clientId, deltaId } = delta;
  return {
    op,
    table,
    rowId,
    columns: delta.columns.map(protoColumnToCore),
    hlc,
    clientId,
    deltaId
  };
}
// Single-byte frame tags used to multiplex the different message types over
// one wire channel (see encodeBroadcastFrame / decodeBroadcastFrame).
const TAG_SYNC_PUSH = 1;
const TAG_SYNC_PULL = 2;
const TAG_BROADCAST = 3;
const TAG_ACTION_PUSH = 4;
const TAG_ACTION_RESPONSE = 5;
// Encode a SyncResponse and prefix it with the broadcast tag byte.
// An inner encode failure is propagated unchanged as the Err result.
function encodeBroadcastFrame(response) {
  const encoded = encodeSyncResponse(response);
  if (!encoded.ok) {
    return encoded;
  }
  const payload = encoded.value;
  const frame = new Uint8Array(payload.length + 1);
  frame[0] = TAG_BROADCAST;
  frame.set(payload, 1);
  return Ok(frame);
}
// Decode a tag-prefixed broadcast frame back into a SyncResponse.
//
// Fix: the previous guard (`frame.length < 2`) rejected one-byte frames, but
// a SyncResponse whose fields are all proto3 defaults serialises to zero
// bytes, so encodeBroadcastFrame can legitimately produce a tag-only frame.
// Only a completely empty buffer is malformed; a zero-length payload after
// the tag decodes to a default SyncResponse.
function decodeBroadcastFrame(frame) {
  if (frame.length === 0) {
    return Err(new CodecError("Broadcast frame too short"));
  }
  if (frame[0] !== TAG_BROADCAST) {
    return Err(
      new CodecError(
        `Expected broadcast tag 0x03, got 0x${frame[0].toString(16).padStart(2, "0")}`
      )
    );
  }
  // Payload is everything after the tag byte.
  return decodeSyncResponse(frame.subarray(1));
}
// Serialise a single core RowDelta to protobuf bytes, as a Result.
function encodeRowDelta(delta) {
  return tryCodec("Failed to encode RowDelta", () => {
    const proto = coreRowToProto(delta);
    return toBinary(RowDeltaSchema, proto);
  });
}
// Parse protobuf bytes into a single core RowDelta, as a Result.
function decodeRowDelta(bytes) {
  return tryCodec("Failed to decode RowDelta", () => {
    const proto = fromBinary(RowDeltaSchema, bytes);
    return protoRowToCore(proto);
  });
}
// Serialise a SyncPush (client id, pending deltas, last-seen HLC watermark)
// to protobuf bytes, as a Result.
function encodeSyncPush(push) {
  return tryCodec("Failed to encode SyncPush", () =>
    toBinary(
      SyncPushSchema,
      create(SyncPushSchema, {
        clientId: push.clientId,
        deltas: push.deltas.map(coreRowToProto),
        lastSeenHlc: push.lastSeenHlc
      })
    )
  );
}
// Parse protobuf bytes into a core SyncPush, as a Result.
function decodeSyncPush(bytes) {
  return tryCodec("Failed to decode SyncPush", () => {
    const { clientId, deltas, lastSeenHlc } = fromBinary(SyncPushSchema, bytes);
    return {
      clientId,
      deltas: deltas.map(protoRowToCore),
      lastSeenHlc
    };
  });
}
// Serialise a SyncPull request (cursor HLC + page size) to protobuf bytes,
// as a Result.
function encodeSyncPull(pull) {
  return tryCodec("Failed to encode SyncPull", () =>
    toBinary(
      SyncPullSchema,
      create(SyncPullSchema, {
        clientId: pull.clientId,
        sinceHlc: pull.sinceHlc,
        maxDeltas: pull.maxDeltas
      })
    )
  );
}
// Parse protobuf bytes into a core SyncPull, as a Result.
function decodeSyncPull(bytes) {
  return tryCodec("Failed to decode SyncPull", () => {
    const { clientId, sinceHlc, maxDeltas } = fromBinary(SyncPullSchema, bytes);
    return { clientId, sinceHlc, maxDeltas };
  });
}
// Serialise a SyncResponse (server deltas, HLC watermark, paging flag) to
// protobuf bytes, as a Result.
function encodeSyncResponse(response) {
  return tryCodec("Failed to encode SyncResponse", () =>
    toBinary(
      SyncResponseSchema,
      create(SyncResponseSchema, {
        deltas: response.deltas.map(coreRowToProto),
        serverHlc: response.serverHlc,
        hasMore: response.hasMore
      })
    )
  );
}
// Parse protobuf bytes into a core SyncResponse, as a Result.
function decodeSyncResponse(bytes) {
  return tryCodec("Failed to decode SyncResponse", () => {
    const { deltas, serverHlc, hasMore } = fromBinary(SyncResponseSchema, bytes);
    return {
      deltas: deltas.map(protoRowToCore),
      serverHlc,
      hasMore
    };
  });
}
// Serialise an ActionPush to protobuf bytes. Action params are JSON-encoded
// into the bytes field, and a missing idempotency key becomes the proto3
// default empty string.
function encodeActionPush(push) {
  return tryCodec("Failed to encode ActionPush", () => {
    const toProtoAction = (a) =>
      create(ActionSchema, {
        actionId: a.actionId,
        clientId: a.clientId,
        hlc: a.hlc,
        connector: a.connector,
        actionType: a.actionType,
        params: encodeValue(a.params),
        idempotencyKey: a.idempotencyKey ?? ""
      });
    const proto = create(ActionPushSchema, {
      clientId: push.clientId,
      actions: push.actions.map(toProtoAction)
    });
    return toBinary(ActionPushSchema, proto);
  });
}
// Parse ActionPush bytes into core form. An empty idempotency key (the
// proto3 default) is omitted from the core action rather than surfaced as "".
function decodeActionPush(bytes) {
  return tryCodec("Failed to decode ActionPush", () => {
    const proto = fromBinary(ActionPushSchema, bytes);
    const actions = proto.actions.map((a) => {
      const action = {
        actionId: a.actionId,
        clientId: a.clientId,
        hlc: a.hlc,
        connector: a.connector,
        actionType: a.actionType,
        params: decodeValue(a.params)
      };
      if (a.idempotencyKey) {
        action.idempotencyKey = a.idempotencyKey;
      }
      return action;
    });
    return { clientId: proto.clientId, actions };
  });
}
// Serialise an ActionResponse to protobuf bytes. Each core result becomes a
// oneof entry: a result carrying both `data` and `serverHlc` is treated as a
// success, anything else as an error (duck-typed discrimination of the core
// ActionResult union).
function encodeActionResponse(response) {
  return tryCodec("Failed to encode ActionResponse", () => {
    const toEntry = (r) => {
      if ("data" in r && "serverHlc" in r) {
        return create(ActionResponseEntrySchema, {
          result: {
            case: "success",
            value: create(ActionResultMsgSchema, {
              actionId: r.actionId,
              data: encodeValue(r.data),
              serverHlc: r.serverHlc
            })
          }
        });
      }
      // NOTE(review): the error branch passes a plain init object rather than
      // a created message (ActionErrorMsg's standalone schema is not emitted
      // in this bundle) — presumably create() converts oneof inits; verify
      // against @bufbuild/protobuf's create() semantics.
      return create(ActionResponseEntrySchema, {
        result: {
          case: "error",
          value: {
            actionId: r.actionId,
            code: r.code,
            message: r.message,
            retryable: r.retryable
          }
        }
      });
    };
    const proto = create(ActionResponseSchema, {
      results: response.results.map(toEntry),
      serverHlc: response.serverHlc
    });
    return toBinary(ActionResponseSchema, proto);
  });
}
// Parse ActionResponse bytes back into the core union of success/error
// results. An entry whose oneof is unset is treated as corrupt and throws
// (surfaced as Err by tryCodec).
function decodeActionResponse(bytes) {
  return tryCodec("Failed to decode ActionResponse", () => {
    const proto = fromBinary(ActionResponseSchema, bytes);
    const results = proto.results.map((entry) => {
      switch (entry.result.case) {
        case "success": {
          const s = entry.result.value;
          return {
            actionId: s.actionId,
            data: decodeValue(s.data),
            serverHlc: s.serverHlc
          };
        }
        case "error": {
          const e = entry.result.value;
          return {
            actionId: e.actionId,
            code: e.code,
            message: e.message,
            retryable: e.retryable
          };
        }
        default:
          throw new CodecError("ActionResponseEntry has no result");
      }
    });
    return { results, serverHlc: proto.serverHlc };
  });
}
|
|
303
|
+
export {
|
|
304
|
+
ColumnDeltaSchema,
|
|
305
|
+
RowDeltaSchema,
|
|
306
|
+
SyncPushSchema,
|
|
307
|
+
SyncPullSchema,
|
|
308
|
+
SyncResponseSchema,
|
|
309
|
+
ActionSchema,
|
|
310
|
+
ActionPushSchema,
|
|
311
|
+
ActionResponseSchema,
|
|
312
|
+
DeltaOp,
|
|
313
|
+
DeltaOpSchema,
|
|
314
|
+
CodecError,
|
|
315
|
+
TAG_SYNC_PUSH,
|
|
316
|
+
TAG_SYNC_PULL,
|
|
317
|
+
TAG_BROADCAST,
|
|
318
|
+
TAG_ACTION_PUSH,
|
|
319
|
+
TAG_ACTION_RESPONSE,
|
|
320
|
+
encodeBroadcastFrame,
|
|
321
|
+
decodeBroadcastFrame,
|
|
322
|
+
encodeRowDelta,
|
|
323
|
+
decodeRowDelta,
|
|
324
|
+
encodeSyncPush,
|
|
325
|
+
decodeSyncPush,
|
|
326
|
+
encodeSyncPull,
|
|
327
|
+
decodeSyncPull,
|
|
328
|
+
encodeSyncResponse,
|
|
329
|
+
decodeSyncResponse,
|
|
330
|
+
encodeActionPush,
|
|
331
|
+
decodeActionPush,
|
|
332
|
+
encodeActionResponse,
|
|
333
|
+
decodeActionResponse
|
|
334
|
+
};
|
|
335
|
+
//# sourceMappingURL=chunk-BNJOGBYK.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../proto/src/gen/lakesync_pb.ts","../../proto/src/codec.ts"],"sourcesContent":["// @generated by protoc-gen-es v2.11.0 with parameter \"target=ts\"\n// @generated from file lakesync.proto (package lakesync.v1, syntax proto3)\n/* eslint-disable */\n\nimport type { GenEnum, GenFile, GenMessage } from \"@bufbuild/protobuf/codegenv2\";\nimport { enumDesc, fileDesc, messageDesc } from \"@bufbuild/protobuf/codegenv2\";\nimport type { Message } from \"@bufbuild/protobuf\";\n\n/**\n * Describes the file lakesync.proto.\n */\nexport const file_lakesync: GenFile = /*@__PURE__*/\n fileDesc(\"Cg5sYWtlc3luYy5wcm90bxILbGFrZXN5bmMudjEiLAoLQ29sdW1uRGVsdGESDgoGY29sdW1uGAEgASgJEg0KBXZhbHVlGAIgASgMIqgBCghSb3dEZWx0YRIgCgJvcBgBIAEoDjIULmxha2VzeW5jLnYxLkRlbHRhT3ASDQoFdGFibGUYAiABKAkSDgoGcm93X2lkGAMgASgJEikKB2NvbHVtbnMYBCADKAsyGC5sYWtlc3luYy52MS5Db2x1bW5EZWx0YRILCgNobGMYBSABKAYSEQoJY2xpZW50X2lkGAYgASgJEhAKCGRlbHRhX2lkGAcgASgJIlsKCFN5bmNQdXNoEhEKCWNsaWVudF9pZBgBIAEoCRIlCgZkZWx0YXMYAiADKAsyFS5sYWtlc3luYy52MS5Sb3dEZWx0YRIVCg1sYXN0X3NlZW5faGxjGAMgASgGIkQKCFN5bmNQdWxsEhEKCWNsaWVudF9pZBgBIAEoCRIRCglzaW5jZV9obGMYAiABKAYSEgoKbWF4X2RlbHRhcxgDIAEoDSJbCgxTeW5jUmVzcG9uc2USJQoGZGVsdGFzGAEgAygLMhUubGFrZXN5bmMudjEuUm93RGVsdGESEgoKc2VydmVyX2hsYxgCIAEoBhIQCghoYXNfbW9yZRgDIAEoCCKMAQoGQWN0aW9uEhEKCWFjdGlvbl9pZBgBIAEoCRIRCgljbGllbnRfaWQYAiABKAkSCwoDaGxjGAMgASgGEhEKCWNvbm5lY3RvchgEIAEoCRITCgthY3Rpb25fdHlwZRgFIAEoCRIOCgZwYXJhbXMYBiABKAwSFwoPaWRlbXBvdGVuY3lfa2V5GAcgASgJIkUKCkFjdGlvblB1c2gSEQoJY2xpZW50X2lkGAEgASgJEiQKB2FjdGlvbnMYAiADKAsyEy5sYWtlc3luYy52MS5BY3Rpb24iRgoPQWN0aW9uUmVzdWx0TXNnEhEKCWFjdGlvbl9pZBgBIAEoCRIMCgRkYXRhGAIgASgMEhIKCnNlcnZlcl9obGMYAyABKAYiVQoOQWN0aW9uRXJyb3JNc2cSEQoJYWN0aW9uX2lkGAEgASgJEgwKBGNvZGUYAiABKAkSDwoHbWVzc2FnZRgDIAEoCRIRCglyZXRyeWFibGUYBCABKAgifgoTQWN0aW9uUmVzcG9uc2VFbnRyeRIvCgdzdWNjZXNzGAEgASgLMhwubGFrZXN5bmMudjEuQWN0aW9uUmVzdWx0TXNnSAASLAoFZXJyb3IYAiABKAsyGy5sYWtlc3luYy52MS5BY3Rpb25FcnJvck1zZ0gAQggKBnJlc3VsdCJXCg5BY3Rpb25SZXNwb25zZRIxCgdyZXN1bHRzGAEgAygL
MiAubGFrZXN5bmMudjEuQWN0aW9uUmVzcG9uc2VFbnRyeRISCgpzZXJ2ZXJfaGxjGAIgASgGKmIKB0RlbHRhT3ASGAoUREVMVEFfT1BfVU5TUEVDSUZJRUQQABITCg9ERUxUQV9PUF9JTlNFUlQQARITCg9ERUxUQV9PUF9VUERBVEUQAhITCg9ERUxUQV9PUF9ERUxFVEUQA2IGcHJvdG8z\");\n\n/**\n * @generated from message lakesync.v1.ColumnDelta\n */\nexport type ColumnDelta = Message<\"lakesync.v1.ColumnDelta\"> & {\n /**\n * @generated from field: string column = 1;\n */\n column: string;\n\n /**\n * @generated from field: bytes value = 2;\n */\n value: Uint8Array;\n};\n\n/**\n * Describes the message lakesync.v1.ColumnDelta.\n * Use `create(ColumnDeltaSchema)` to create a new message.\n */\nexport const ColumnDeltaSchema: GenMessage<ColumnDelta> = /*@__PURE__*/\n messageDesc(file_lakesync, 0);\n\n/**\n * @generated from message lakesync.v1.RowDelta\n */\nexport type RowDelta = Message<\"lakesync.v1.RowDelta\"> & {\n /**\n * @generated from field: lakesync.v1.DeltaOp op = 1;\n */\n op: DeltaOp;\n\n /**\n * @generated from field: string table = 2;\n */\n table: string;\n\n /**\n * @generated from field: string row_id = 3;\n */\n rowId: string;\n\n /**\n * @generated from field: repeated lakesync.v1.ColumnDelta columns = 4;\n */\n columns: ColumnDelta[];\n\n /**\n * @generated from field: fixed64 hlc = 5;\n */\n hlc: bigint;\n\n /**\n * @generated from field: string client_id = 6;\n */\n clientId: string;\n\n /**\n * @generated from field: string delta_id = 7;\n */\n deltaId: string;\n};\n\n/**\n * Describes the message lakesync.v1.RowDelta.\n * Use `create(RowDeltaSchema)` to create a new message.\n */\nexport const RowDeltaSchema: GenMessage<RowDelta> = /*@__PURE__*/\n messageDesc(file_lakesync, 1);\n\n/**\n * @generated from message lakesync.v1.SyncPush\n */\nexport type SyncPush = Message<\"lakesync.v1.SyncPush\"> & {\n /**\n * @generated from field: string client_id = 1;\n */\n clientId: string;\n\n /**\n * @generated from field: repeated lakesync.v1.RowDelta deltas = 2;\n */\n deltas: RowDelta[];\n\n /**\n * @generated from 
field: fixed64 last_seen_hlc = 3;\n */\n lastSeenHlc: bigint;\n};\n\n/**\n * Describes the message lakesync.v1.SyncPush.\n * Use `create(SyncPushSchema)` to create a new message.\n */\nexport const SyncPushSchema: GenMessage<SyncPush> = /*@__PURE__*/\n messageDesc(file_lakesync, 2);\n\n/**\n * @generated from message lakesync.v1.SyncPull\n */\nexport type SyncPull = Message<\"lakesync.v1.SyncPull\"> & {\n /**\n * @generated from field: string client_id = 1;\n */\n clientId: string;\n\n /**\n * @generated from field: fixed64 since_hlc = 2;\n */\n sinceHlc: bigint;\n\n /**\n * @generated from field: uint32 max_deltas = 3;\n */\n maxDeltas: number;\n};\n\n/**\n * Describes the message lakesync.v1.SyncPull.\n * Use `create(SyncPullSchema)` to create a new message.\n */\nexport const SyncPullSchema: GenMessage<SyncPull> = /*@__PURE__*/\n messageDesc(file_lakesync, 3);\n\n/**\n * @generated from message lakesync.v1.SyncResponse\n */\nexport type SyncResponse = Message<\"lakesync.v1.SyncResponse\"> & {\n /**\n * @generated from field: repeated lakesync.v1.RowDelta deltas = 1;\n */\n deltas: RowDelta[];\n\n /**\n * @generated from field: fixed64 server_hlc = 2;\n */\n serverHlc: bigint;\n\n /**\n * @generated from field: bool has_more = 3;\n */\n hasMore: boolean;\n};\n\n/**\n * Describes the message lakesync.v1.SyncResponse.\n * Use `create(SyncResponseSchema)` to create a new message.\n */\nexport const SyncResponseSchema: GenMessage<SyncResponse> = /*@__PURE__*/\n messageDesc(file_lakesync, 4);\n\n/**\n * @generated from message lakesync.v1.Action\n */\nexport type Action = Message<\"lakesync.v1.Action\"> & {\n /**\n * @generated from field: string action_id = 1;\n */\n actionId: string;\n\n /**\n * @generated from field: string client_id = 2;\n */\n clientId: string;\n\n /**\n * @generated from field: fixed64 hlc = 3;\n */\n hlc: bigint;\n\n /**\n * @generated from field: string connector = 4;\n */\n connector: string;\n\n /**\n * @generated from field: string 
action_type = 5;\n */\n actionType: string;\n\n /**\n * @generated from field: bytes params = 6;\n */\n params: Uint8Array;\n\n /**\n * @generated from field: string idempotency_key = 7;\n */\n idempotencyKey: string;\n};\n\n/**\n * Describes the message lakesync.v1.Action.\n * Use `create(ActionSchema)` to create a new message.\n */\nexport const ActionSchema: GenMessage<Action> = /*@__PURE__*/\n messageDesc(file_lakesync, 5);\n\n/**\n * @generated from message lakesync.v1.ActionPush\n */\nexport type ActionPush = Message<\"lakesync.v1.ActionPush\"> & {\n /**\n * @generated from field: string client_id = 1;\n */\n clientId: string;\n\n /**\n * @generated from field: repeated lakesync.v1.Action actions = 2;\n */\n actions: Action[];\n};\n\n/**\n * Describes the message lakesync.v1.ActionPush.\n * Use `create(ActionPushSchema)` to create a new message.\n */\nexport const ActionPushSchema: GenMessage<ActionPush> = /*@__PURE__*/\n messageDesc(file_lakesync, 6);\n\n/**\n * @generated from message lakesync.v1.ActionResultMsg\n */\nexport type ActionResultMsg = Message<\"lakesync.v1.ActionResultMsg\"> & {\n /**\n * @generated from field: string action_id = 1;\n */\n actionId: string;\n\n /**\n * @generated from field: bytes data = 2;\n */\n data: Uint8Array;\n\n /**\n * @generated from field: fixed64 server_hlc = 3;\n */\n serverHlc: bigint;\n};\n\n/**\n * Describes the message lakesync.v1.ActionResultMsg.\n * Use `create(ActionResultMsgSchema)` to create a new message.\n */\nexport const ActionResultMsgSchema: GenMessage<ActionResultMsg> = /*@__PURE__*/\n messageDesc(file_lakesync, 7);\n\n/**\n * @generated from message lakesync.v1.ActionErrorMsg\n */\nexport type ActionErrorMsg = Message<\"lakesync.v1.ActionErrorMsg\"> & {\n /**\n * @generated from field: string action_id = 1;\n */\n actionId: string;\n\n /**\n * @generated from field: string code = 2;\n */\n code: string;\n\n /**\n * @generated from field: string message = 3;\n */\n message: string;\n\n /**\n * 
@generated from field: bool retryable = 4;\n */\n retryable: boolean;\n};\n\n/**\n * Describes the message lakesync.v1.ActionErrorMsg.\n * Use `create(ActionErrorMsgSchema)` to create a new message.\n */\nexport const ActionErrorMsgSchema: GenMessage<ActionErrorMsg> = /*@__PURE__*/\n messageDesc(file_lakesync, 8);\n\n/**\n * @generated from message lakesync.v1.ActionResponseEntry\n */\nexport type ActionResponseEntry = Message<\"lakesync.v1.ActionResponseEntry\"> & {\n /**\n * @generated from oneof lakesync.v1.ActionResponseEntry.result\n */\n result: {\n /**\n * @generated from field: lakesync.v1.ActionResultMsg success = 1;\n */\n value: ActionResultMsg;\n case: \"success\";\n } | {\n /**\n * @generated from field: lakesync.v1.ActionErrorMsg error = 2;\n */\n value: ActionErrorMsg;\n case: \"error\";\n } | { case: undefined; value?: undefined };\n};\n\n/**\n * Describes the message lakesync.v1.ActionResponseEntry.\n * Use `create(ActionResponseEntrySchema)` to create a new message.\n */\nexport const ActionResponseEntrySchema: GenMessage<ActionResponseEntry> = /*@__PURE__*/\n messageDesc(file_lakesync, 9);\n\n/**\n * @generated from message lakesync.v1.ActionResponse\n */\nexport type ActionResponse = Message<\"lakesync.v1.ActionResponse\"> & {\n /**\n * @generated from field: repeated lakesync.v1.ActionResponseEntry results = 1;\n */\n results: ActionResponseEntry[];\n\n /**\n * @generated from field: fixed64 server_hlc = 2;\n */\n serverHlc: bigint;\n};\n\n/**\n * Describes the message lakesync.v1.ActionResponse.\n * Use `create(ActionResponseSchema)` to create a new message.\n */\nexport const ActionResponseSchema: GenMessage<ActionResponse> = /*@__PURE__*/\n messageDesc(file_lakesync, 10);\n\n/**\n * @generated from enum lakesync.v1.DeltaOp\n */\nexport enum DeltaOp {\n /**\n * @generated from enum value: DELTA_OP_UNSPECIFIED = 0;\n */\n UNSPECIFIED = 0,\n\n /**\n * @generated from enum value: DELTA_OP_INSERT = 1;\n */\n INSERT = 1,\n\n /**\n * @generated 
from enum value: DELTA_OP_UPDATE = 2;\n */\n UPDATE = 2,\n\n /**\n * @generated from enum value: DELTA_OP_DELETE = 3;\n */\n DELETE = 3,\n}\n\n/**\n * Describes the enum lakesync.v1.DeltaOp.\n */\nexport const DeltaOpSchema: GenEnum<DeltaOp> = /*@__PURE__*/\n enumDesc(file_lakesync, 0);\n\n","import { create, fromBinary, toBinary } from \"@bufbuild/protobuf\";\nimport type {\n\tActionErrorResult as CoreActionErrorResult,\n\tActionPush as CoreActionPush,\n\tActionResponse as CoreActionResponse,\n\tActionResult as CoreActionResult,\n\tColumnDelta as CoreColumnDelta,\n\tDeltaOp as CoreDeltaOp,\n\tRowDelta as CoreRowDelta,\n\tHLCTimestamp,\n\tResult,\n} from \"@lakesync/core\";\nimport { Err, Ok } from \"@lakesync/core\";\nimport {\n\tActionPushSchema,\n\tActionResponseEntrySchema,\n\tActionResponseSchema,\n\tActionResultMsgSchema,\n\tActionSchema,\n\tColumnDeltaSchema,\n\ttype ColumnDelta as ProtoColumnDelta,\n\tDeltaOp as ProtoDeltaOp,\n\ttype RowDelta as ProtoRowDelta,\n\tRowDeltaSchema,\n\tSyncPullSchema,\n\tSyncPushSchema,\n\tSyncResponseSchema,\n} from \"./gen/lakesync_pb.js\";\n\n// ---------------------------------------------------------------------------\n// Codec error\n// ---------------------------------------------------------------------------\n\n/** Error returned when encoding or decoding a protobuf message fails. 
*/\nexport class CodecError extends Error {\n\treadonly code = \"CODEC_ERROR\";\n\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message);\n\t\tthis.name = \"CodecError\";\n\t\tthis.cause = cause;\n\t}\n}\n\n// ---------------------------------------------------------------------------\n// Result helper\n// ---------------------------------------------------------------------------\n\n/**\n * Wrap a codec operation in a try/catch, returning a `Result`.\n * Centralises the error-wrapping logic shared by all encode/decode functions.\n */\nfunction tryCodec<T>(label: string, fn: () => T): Result<T, CodecError> {\n\ttry {\n\t\treturn Ok(fn());\n\t} catch (err) {\n\t\tconst cause = err instanceof Error ? err : new Error(String(err));\n\t\treturn Err(new CodecError(label, cause));\n\t}\n}\n\n// ---------------------------------------------------------------------------\n// Value serialisation helpers\n// ---------------------------------------------------------------------------\n\nconst textEncoder = new TextEncoder();\nconst textDecoder = new TextDecoder();\n\n/** Encode an arbitrary serialisable column value to UTF-8 JSON bytes. */\nfunction encodeValue(value: unknown): Uint8Array {\n\treturn textEncoder.encode(JSON.stringify(value));\n}\n\n/** Decode UTF-8 JSON bytes back to a column value. */\nfunction decodeValue(bytes: Uint8Array): unknown {\n\treturn JSON.parse(textDecoder.decode(bytes));\n}\n\n// ---------------------------------------------------------------------------\n// DeltaOp mapping\n// ---------------------------------------------------------------------------\n\n/** Map from core DeltaOp string to proto DeltaOp enum number. */\nconst CORE_OP_TO_PROTO: Record<CoreDeltaOp, ProtoDeltaOp> = {\n\tINSERT: ProtoDeltaOp.INSERT,\n\tUPDATE: ProtoDeltaOp.UPDATE,\n\tDELETE: ProtoDeltaOp.DELETE,\n};\n\n/** Map from proto DeltaOp enum number to core DeltaOp string. 
*/\nconst PROTO_OP_TO_CORE: Record<ProtoDeltaOp, CoreDeltaOp | undefined> = {\n\t[ProtoDeltaOp.UNSPECIFIED]: undefined,\n\t[ProtoDeltaOp.INSERT]: \"INSERT\",\n\t[ProtoDeltaOp.UPDATE]: \"UPDATE\",\n\t[ProtoDeltaOp.DELETE]: \"DELETE\",\n};\n\n// ---------------------------------------------------------------------------\n// Internal conversion helpers\n// ---------------------------------------------------------------------------\n\n/** Convert a core ColumnDelta to a proto ColumnDelta message. */\nfunction coreColumnToProto(col: CoreColumnDelta): ProtoColumnDelta {\n\treturn create(ColumnDeltaSchema, {\n\t\tcolumn: col.column,\n\t\tvalue: encodeValue(col.value),\n\t});\n}\n\n/** Convert a proto ColumnDelta message to a core ColumnDelta. */\nfunction protoColumnToCore(col: ProtoColumnDelta): CoreColumnDelta {\n\treturn {\n\t\tcolumn: col.column,\n\t\tvalue: decodeValue(col.value),\n\t};\n}\n\n/** Convert a core RowDelta to a proto RowDelta message. */\nfunction coreRowToProto(delta: CoreRowDelta): ProtoRowDelta {\n\treturn create(RowDeltaSchema, {\n\t\top: CORE_OP_TO_PROTO[delta.op],\n\t\ttable: delta.table,\n\t\trowId: delta.rowId,\n\t\tcolumns: delta.columns.map(coreColumnToProto),\n\t\thlc: delta.hlc as bigint,\n\t\tclientId: delta.clientId,\n\t\tdeltaId: delta.deltaId,\n\t});\n}\n\n/**\n * Convert a proto RowDelta message to a core RowDelta.\n *\n * @throws {CodecError} If the proto DeltaOp is UNSPECIFIED.\n */\nfunction protoRowToCore(delta: ProtoRowDelta): CoreRowDelta {\n\tconst op = PROTO_OP_TO_CORE[delta.op];\n\tif (op === undefined) {\n\t\tthrow new CodecError(`Unknown or unspecified DeltaOp: ${delta.op}`);\n\t}\n\treturn {\n\t\top,\n\t\ttable: delta.table,\n\t\trowId: delta.rowId,\n\t\tcolumns: delta.columns.map(protoColumnToCore),\n\t\thlc: delta.hlc as HLCTimestamp,\n\t\tclientId: delta.clientId,\n\t\tdeltaId: delta.deltaId,\n\t};\n}\n\n// ---------------------------------------------------------------------------\n// Public types\n// 
---------------------------------------------------------------------------\n\n/** Shape of a SyncPush payload using core domain types. */\nexport interface SyncPushPayload {\n\t/** Client identifier. */\n\tclientId: string;\n\t/** Deltas to push to the server. */\n\tdeltas: CoreRowDelta[];\n\t/** Last HLC timestamp seen by the client. */\n\tlastSeenHlc: HLCTimestamp;\n}\n\n/** Shape of a SyncPull request using core domain types. */\nexport interface SyncPullPayload {\n\t/** Client identifier. */\n\tclientId: string;\n\t/** Request deltas since this HLC timestamp. */\n\tsinceHlc: HLCTimestamp;\n\t/** Maximum number of deltas to return. */\n\tmaxDeltas: number;\n}\n\n/** Shape of a SyncResponse using core domain types. */\nexport interface SyncResponsePayload {\n\t/** Deltas returned by the server. */\n\tdeltas: CoreRowDelta[];\n\t/** Current server HLC timestamp. */\n\tserverHlc: HLCTimestamp;\n\t/** Whether more deltas are available. */\n\thasMore: boolean;\n}\n\n// ---------------------------------------------------------------------------\n// Wire tags for binary framing\n// ---------------------------------------------------------------------------\n\n/** Tag byte for SyncPush frames. */\nexport const TAG_SYNC_PUSH = 0x01;\n\n/** Tag byte for SyncPull frames. */\nexport const TAG_SYNC_PULL = 0x02;\n\n/** Tag byte for server-initiated broadcast frames. */\nexport const TAG_BROADCAST = 0x03;\n\n/** Tag byte for ActionPush frames. */\nexport const TAG_ACTION_PUSH = 0x04;\n\n/** Tag byte for ActionResponse frames. 
*/\nexport const TAG_ACTION_RESPONSE = 0x05;\n\n// ---------------------------------------------------------------------------\n// Broadcast frame encode / decode\n// ---------------------------------------------------------------------------\n\n/**\n * Encode a broadcast frame: tag `0x03` + SyncResponse proto bytes.\n *\n * Used by the server to push deltas to connected WebSocket clients.\n *\n * @param response - The SyncResponse payload to broadcast.\n * @returns A `Result` containing the framed binary, or a `CodecError` on failure.\n */\nexport function encodeBroadcastFrame(\n\tresponse: SyncResponsePayload,\n): Result<Uint8Array, CodecError> {\n\tconst encoded = encodeSyncResponse(response);\n\tif (!encoded.ok) return encoded;\n\tconst frame = new Uint8Array(1 + encoded.value.length);\n\tframe[0] = TAG_BROADCAST;\n\tframe.set(encoded.value, 1);\n\treturn Ok(frame);\n}\n\n/**\n * Decode a broadcast frame: strip tag `0x03` and decode the SyncResponse.\n *\n * @param frame - The full framed binary (tag byte + proto payload).\n * @returns A `Result` containing the SyncResponse payload, or a `CodecError` on failure.\n */\nexport function decodeBroadcastFrame(frame: Uint8Array): Result<SyncResponsePayload, CodecError> {\n\tif (frame.length < 2) {\n\t\treturn Err(new CodecError(\"Broadcast frame too short\"));\n\t}\n\tif (frame[0] !== TAG_BROADCAST) {\n\t\treturn Err(\n\t\t\tnew CodecError(\n\t\t\t\t`Expected broadcast tag 0x03, got 0x${frame[0]!.toString(16).padStart(2, \"0\")}`,\n\t\t\t),\n\t\t);\n\t}\n\treturn decodeSyncResponse(frame.subarray(1));\n}\n\n// ---------------------------------------------------------------------------\n// RowDelta encode / decode\n// ---------------------------------------------------------------------------\n\n/**\n * Serialise a core RowDelta to protobuf binary.\n *\n * @param delta - The core RowDelta to serialise.\n * @returns A `Result` containing the binary bytes, or a `CodecError` on failure.\n */\nexport function 
encodeRowDelta(delta: CoreRowDelta): Result<Uint8Array, CodecError> {\n\treturn tryCodec(\"Failed to encode RowDelta\", () =>\n\t\ttoBinary(RowDeltaSchema, coreRowToProto(delta)),\n\t);\n}\n\n/**\n * Deserialise protobuf binary to a core RowDelta.\n *\n * @param bytes - The protobuf binary to deserialise.\n * @returns A `Result` containing the core RowDelta, or a `CodecError` on failure.\n */\nexport function decodeRowDelta(bytes: Uint8Array): Result<CoreRowDelta, CodecError> {\n\treturn tryCodec(\"Failed to decode RowDelta\", () =>\n\t\tprotoRowToCore(fromBinary(RowDeltaSchema, bytes)),\n\t);\n}\n\n// ---------------------------------------------------------------------------\n// SyncPush encode / decode\n// ---------------------------------------------------------------------------\n\n/**\n * Serialise a SyncPush payload to protobuf binary.\n *\n * @param push - The SyncPush payload containing client ID, deltas, and last seen HLC.\n * @returns A `Result` containing the binary bytes, or a `CodecError` on failure.\n */\nexport function encodeSyncPush(push: SyncPushPayload): Result<Uint8Array, CodecError> {\n\treturn tryCodec(\"Failed to encode SyncPush\", () => {\n\t\tconst proto = create(SyncPushSchema, {\n\t\t\tclientId: push.clientId,\n\t\t\tdeltas: push.deltas.map(coreRowToProto),\n\t\t\tlastSeenHlc: push.lastSeenHlc as bigint,\n\t\t});\n\t\treturn toBinary(SyncPushSchema, proto);\n\t});\n}\n\n/**\n * Deserialise protobuf binary to a SyncPush payload.\n *\n * @param bytes - The protobuf binary to deserialise.\n * @returns A `Result` containing the SyncPush payload, or a `CodecError` on failure.\n */\nexport function decodeSyncPush(bytes: Uint8Array): Result<SyncPushPayload, CodecError> {\n\treturn tryCodec(\"Failed to decode SyncPush\", () => {\n\t\tconst proto = fromBinary(SyncPushSchema, bytes);\n\t\treturn {\n\t\t\tclientId: proto.clientId,\n\t\t\tdeltas: proto.deltas.map(protoRowToCore),\n\t\t\tlastSeenHlc: proto.lastSeenHlc as 
HLCTimestamp,\n\t\t};\n\t});\n}\n\n// ---------------------------------------------------------------------------\n// SyncPull encode / decode\n// ---------------------------------------------------------------------------\n\n/**\n * Serialise a SyncPull request to protobuf binary.\n *\n * @param pull - The SyncPull payload containing client ID, since HLC, and max deltas.\n * @returns A `Result` containing the binary bytes, or a `CodecError` on failure.\n */\nexport function encodeSyncPull(pull: SyncPullPayload): Result<Uint8Array, CodecError> {\n\treturn tryCodec(\"Failed to encode SyncPull\", () => {\n\t\tconst proto = create(SyncPullSchema, {\n\t\t\tclientId: pull.clientId,\n\t\t\tsinceHlc: pull.sinceHlc as bigint,\n\t\t\tmaxDeltas: pull.maxDeltas,\n\t\t});\n\t\treturn toBinary(SyncPullSchema, proto);\n\t});\n}\n\n/**\n * Deserialise protobuf binary to a SyncPull payload.\n *\n * @param bytes - The protobuf binary to deserialise.\n * @returns A `Result` containing the SyncPull payload, or a `CodecError` on failure.\n */\nexport function decodeSyncPull(bytes: Uint8Array): Result<SyncPullPayload, CodecError> {\n\treturn tryCodec(\"Failed to decode SyncPull\", () => {\n\t\tconst proto = fromBinary(SyncPullSchema, bytes);\n\t\treturn {\n\t\t\tclientId: proto.clientId,\n\t\t\tsinceHlc: proto.sinceHlc as HLCTimestamp,\n\t\t\tmaxDeltas: proto.maxDeltas,\n\t\t};\n\t});\n}\n\n// ---------------------------------------------------------------------------\n// SyncResponse encode / decode\n// ---------------------------------------------------------------------------\n\n/**\n * Serialise a SyncResponse to protobuf binary.\n *\n * @param response - The SyncResponse payload containing deltas, server HLC, and has_more flag.\n * @returns A `Result` containing the binary bytes, or a `CodecError` on failure.\n */\nexport function encodeSyncResponse(response: SyncResponsePayload): Result<Uint8Array, CodecError> {\n\treturn tryCodec(\"Failed to encode SyncResponse\", () => 
{\n\t\tconst proto = create(SyncResponseSchema, {\n\t\t\tdeltas: response.deltas.map(coreRowToProto),\n\t\t\tserverHlc: response.serverHlc as bigint,\n\t\t\thasMore: response.hasMore,\n\t\t});\n\t\treturn toBinary(SyncResponseSchema, proto);\n\t});\n}\n\n/**\n * Deserialise protobuf binary to a SyncResponse payload.\n *\n * @param bytes - The protobuf binary to deserialise.\n * @returns A `Result` containing the SyncResponse payload, or a `CodecError` on failure.\n */\nexport function decodeSyncResponse(bytes: Uint8Array): Result<SyncResponsePayload, CodecError> {\n\treturn tryCodec(\"Failed to decode SyncResponse\", () => {\n\t\tconst proto = fromBinary(SyncResponseSchema, bytes);\n\t\treturn {\n\t\t\tdeltas: proto.deltas.map(protoRowToCore),\n\t\t\tserverHlc: proto.serverHlc as HLCTimestamp,\n\t\t\thasMore: proto.hasMore,\n\t\t};\n\t});\n}\n\n// ---------------------------------------------------------------------------\n// ActionPush encode / decode\n// ---------------------------------------------------------------------------\n\n/**\n * Serialise an ActionPush payload to protobuf binary.\n *\n * @param push - The ActionPush payload containing client ID and actions.\n * @returns A `Result` containing the binary bytes, or a `CodecError` on failure.\n */\nexport function encodeActionPush(push: CoreActionPush): Result<Uint8Array, CodecError> {\n\treturn tryCodec(\"Failed to encode ActionPush\", () => {\n\t\tconst proto = create(ActionPushSchema, {\n\t\t\tclientId: push.clientId,\n\t\t\tactions: push.actions.map((a) =>\n\t\t\t\tcreate(ActionSchema, {\n\t\t\t\t\tactionId: a.actionId,\n\t\t\t\t\tclientId: a.clientId,\n\t\t\t\t\thlc: a.hlc as bigint,\n\t\t\t\t\tconnector: a.connector,\n\t\t\t\t\tactionType: a.actionType,\n\t\t\t\t\tparams: encodeValue(a.params),\n\t\t\t\t\tidempotencyKey: a.idempotencyKey ?? 
\"\",\n\t\t\t\t}),\n\t\t\t),\n\t\t});\n\t\treturn toBinary(ActionPushSchema, proto);\n\t});\n}\n\n/**\n * Deserialise protobuf binary to an ActionPush payload.\n *\n * @param bytes - The protobuf binary to deserialise.\n * @returns A `Result` containing the ActionPush payload, or a `CodecError` on failure.\n */\nexport function decodeActionPush(bytes: Uint8Array): Result<CoreActionPush, CodecError> {\n\treturn tryCodec(\"Failed to decode ActionPush\", () => {\n\t\tconst proto = fromBinary(ActionPushSchema, bytes);\n\t\treturn {\n\t\t\tclientId: proto.clientId,\n\t\t\tactions: proto.actions.map((a) => ({\n\t\t\t\tactionId: a.actionId,\n\t\t\t\tclientId: a.clientId,\n\t\t\t\thlc: a.hlc as HLCTimestamp,\n\t\t\t\tconnector: a.connector,\n\t\t\t\tactionType: a.actionType,\n\t\t\t\tparams: decodeValue(a.params) as Record<string, unknown>,\n\t\t\t\t...(a.idempotencyKey ? { idempotencyKey: a.idempotencyKey } : {}),\n\t\t\t})),\n\t\t};\n\t});\n}\n\n// ---------------------------------------------------------------------------\n// ActionResponse encode / decode\n// ---------------------------------------------------------------------------\n\n/**\n * Serialise an ActionResponse to protobuf binary.\n *\n * @param response - The ActionResponse payload.\n * @returns A `Result` containing the binary bytes, or a `CodecError` on failure.\n */\nexport function encodeActionResponse(response: CoreActionResponse): Result<Uint8Array, CodecError> {\n\treturn tryCodec(\"Failed to encode ActionResponse\", () => {\n\t\tconst entries = response.results.map((r) => {\n\t\t\tif (\"data\" in r && \"serverHlc\" in r) {\n\t\t\t\t// Success result\n\t\t\t\tconst success = r as CoreActionResult;\n\t\t\t\treturn create(ActionResponseEntrySchema, {\n\t\t\t\t\tresult: {\n\t\t\t\t\t\tcase: \"success\" as const,\n\t\t\t\t\t\tvalue: create(ActionResultMsgSchema, {\n\t\t\t\t\t\t\tactionId: success.actionId,\n\t\t\t\t\t\t\tdata: encodeValue(success.data),\n\t\t\t\t\t\t\tserverHlc: success.serverHlc as 
bigint,\n\t\t\t\t\t\t}),\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\t\t\t// Error result\n\t\t\tconst error = r as CoreActionErrorResult;\n\t\t\treturn create(ActionResponseEntrySchema, {\n\t\t\t\tresult: {\n\t\t\t\t\tcase: \"error\" as const,\n\t\t\t\t\tvalue: {\n\t\t\t\t\t\tactionId: error.actionId,\n\t\t\t\t\t\tcode: error.code,\n\t\t\t\t\t\tmessage: error.message,\n\t\t\t\t\t\tretryable: error.retryable,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t});\n\t\t});\n\n\t\tconst proto = create(ActionResponseSchema, {\n\t\t\tresults: entries,\n\t\t\tserverHlc: response.serverHlc as bigint,\n\t\t});\n\t\treturn toBinary(ActionResponseSchema, proto);\n\t});\n}\n\n/**\n * Deserialise protobuf binary to an ActionResponse payload.\n *\n * @param bytes - The protobuf binary to deserialise.\n * @returns A `Result` containing the ActionResponse payload, or a `CodecError` on failure.\n */\nexport function decodeActionResponse(bytes: Uint8Array): Result<CoreActionResponse, CodecError> {\n\treturn tryCodec(\"Failed to decode ActionResponse\", () => {\n\t\tconst proto = fromBinary(ActionResponseSchema, bytes);\n\t\tconst results: Array<CoreActionResult | CoreActionErrorResult> = proto.results.map((entry) => {\n\t\t\tif (entry.result.case === \"success\") {\n\t\t\t\tconst s = entry.result.value;\n\t\t\t\treturn {\n\t\t\t\t\tactionId: s.actionId,\n\t\t\t\t\tdata: decodeValue(s.data) as Record<string, unknown>,\n\t\t\t\t\tserverHlc: s.serverHlc as HLCTimestamp,\n\t\t\t\t};\n\t\t\t}\n\t\t\tif (entry.result.case === \"error\") {\n\t\t\t\tconst e = entry.result.value;\n\t\t\t\treturn {\n\t\t\t\t\tactionId: e.actionId,\n\t\t\t\t\tcode: e.code,\n\t\t\t\t\tmessage: e.message,\n\t\t\t\t\tretryable: e.retryable,\n\t\t\t\t};\n\t\t\t}\n\t\t\tthrow new CodecError(\"ActionResponseEntry has no result\");\n\t\t});\n\n\t\treturn {\n\t\t\tresults,\n\t\t\tserverHlc: proto.serverHlc as 
HLCTimestamp,\n\t\t};\n\t});\n}\n"],"mappings":";;;;;;AAKA,SAAS,UAAU,UAAU,mBAAmB;AAMzC,IAAM,gBACX,yBAAS,kkDAAkkD;AAqBtkD,IAAM,oBACX,4BAAY,eAAe,CAAC;AA8CvB,IAAM,iBACX,4BAAY,eAAe,CAAC;AA0BvB,IAAM,iBACX,4BAAY,eAAe,CAAC;AA0BvB,IAAM,iBACX,4BAAY,eAAe,CAAC;AA0BvB,IAAM,qBACX,4BAAY,eAAe,CAAC;AA8CvB,IAAM,eACX,4BAAY,eAAe,CAAC;AAqBvB,IAAM,mBACX,4BAAY,eAAe,CAAC;AA0BvB,IAAM,wBACX,4BAAY,eAAe,CAAC;AA4DvB,IAAM,4BACX,4BAAY,eAAe,CAAC;AAqBvB,IAAM,uBACX,4BAAY,eAAe,EAAE;AAKxB,IAAK,UAAL,kBAAKA,aAAL;AAIL,EAAAA,kBAAA,iBAAc,KAAd;AAKA,EAAAA,kBAAA,YAAS,KAAT;AAKA,EAAAA,kBAAA,YAAS,KAAT;AAKA,EAAAA,kBAAA,YAAS,KAAT;AAnBU,SAAAA;AAAA,GAAA;AAyBL,IAAM,gBACX,yBAAS,eAAe,CAAC;;;ACpX3B,SAAS,QAAQ,YAAY,gBAAgB;AAkCtC,IAAM,aAAN,cAAyB,MAAM;AAAA,EAC5B,OAAO;AAAA,EAEhB,YAAY,SAAiB,OAAe;AAC3C,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,QAAQ;AAAA,EACd;AACD;AAUA,SAAS,SAAY,OAAe,IAAoC;AACvE,MAAI;AACH,WAAO,GAAG,GAAG,CAAC;AAAA,EACf,SAAS,KAAK;AACb,UAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAChE,WAAO,IAAI,IAAI,WAAW,OAAO,KAAK,CAAC;AAAA,EACxC;AACD;AAMA,IAAM,cAAc,IAAI,YAAY;AACpC,IAAM,cAAc,IAAI,YAAY;AAGpC,SAAS,YAAY,OAA4B;AAChD,SAAO,YAAY,OAAO,KAAK,UAAU,KAAK,CAAC;AAChD;AAGA,SAAS,YAAY,OAA4B;AAChD,SAAO,KAAK,MAAM,YAAY,OAAO,KAAK,CAAC;AAC5C;AAOA,IAAM,mBAAsD;AAAA,EAC3D;AAAA,EACA;AAAA,EACA;AACD;AAGA,IAAM,mBAAkE;AAAA,EACvE,oBAAyB,GAAG;AAAA,EAC5B,eAAoB,GAAG;AAAA,EACvB,eAAoB,GAAG;AAAA,EACvB,eAAoB,GAAG;AACxB;AAOA,SAAS,kBAAkB,KAAwC;AAClE,SAAO,OAAO,mBAAmB;AAAA,IAChC,QAAQ,IAAI;AAAA,IACZ,OAAO,YAAY,IAAI,KAAK;AAAA,EAC7B,CAAC;AACF;AAGA,SAAS,kBAAkB,KAAwC;AAClE,SAAO;AAAA,IACN,QAAQ,IAAI;AAAA,IACZ,OAAO,YAAY,IAAI,KAAK;AAAA,EAC7B;AACD;AAGA,SAAS,eAAe,OAAoC;AAC3D,SAAO,OAAO,gBAAgB;AAAA,IAC7B,IAAI,iBAAiB,MAAM,EAAE;AAAA,IAC7B,OAAO,MAAM;AAAA,IACb,OAAO,MAAM;AAAA,IACb,SAAS,MAAM,QAAQ,IAAI,iBAAiB;AAAA,IAC5C,KAAK,MAAM;AAAA,IACX,UAAU,MAAM;AAAA,IAChB,SAAS,MAAM;AAAA,EAChB,CAAC;AACF;AAOA,SAAS,eAAe,OAAoC;AAC3D,QAAM,KAAK,iBAAiB,MAAM,EAAE;AACpC,MAAI,OAAO,QAAW;AACrB,UAAM,IAAI,WAAW,mCAAmC,MAAM,EAAE,EAAE;AAAA,EACnE;AACA,SAAO;AAAA,IACN;AAAA,IACA,OAAO,MAAM;AAAA,IACb,OAAO,MAAM;
AAAA,IACb,SAAS,MAAM,QAAQ,IAAI,iBAAiB;AAAA,IAC5C,KAAK,MAAM;AAAA,IACX,UAAU,MAAM;AAAA,IAChB,SAAS,MAAM;AAAA,EAChB;AACD;AAyCO,IAAM,gBAAgB;AAGtB,IAAM,gBAAgB;AAGtB,IAAM,gBAAgB;AAGtB,IAAM,kBAAkB;AAGxB,IAAM,sBAAsB;AAc5B,SAAS,qBACf,UACiC;AACjC,QAAM,UAAU,mBAAmB,QAAQ;AAC3C,MAAI,CAAC,QAAQ,GAAI,QAAO;AACxB,QAAM,QAAQ,IAAI,WAAW,IAAI,QAAQ,MAAM,MAAM;AACrD,QAAM,CAAC,IAAI;AACX,QAAM,IAAI,QAAQ,OAAO,CAAC;AAC1B,SAAO,GAAG,KAAK;AAChB;AAQO,SAAS,qBAAqB,OAA4D;AAChG,MAAI,MAAM,SAAS,GAAG;AACrB,WAAO,IAAI,IAAI,WAAW,2BAA2B,CAAC;AAAA,EACvD;AACA,MAAI,MAAM,CAAC,MAAM,eAAe;AAC/B,WAAO;AAAA,MACN,IAAI;AAAA,QACH,sCAAsC,MAAM,CAAC,EAAG,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC;AAAA,MAC9E;AAAA,IACD;AAAA,EACD;AACA,SAAO,mBAAmB,MAAM,SAAS,CAAC,CAAC;AAC5C;AAYO,SAAS,eAAe,OAAqD;AACnF,SAAO;AAAA,IAAS;AAAA,IAA6B,MAC5C,SAAS,gBAAgB,eAAe,KAAK,CAAC;AAAA,EAC/C;AACD;AAQO,SAAS,eAAe,OAAqD;AACnF,SAAO;AAAA,IAAS;AAAA,IAA6B,MAC5C,eAAe,WAAW,gBAAgB,KAAK,CAAC;AAAA,EACjD;AACD;AAYO,SAAS,eAAe,MAAuD;AACrF,SAAO,SAAS,6BAA6B,MAAM;AAClD,UAAM,QAAQ,OAAO,gBAAgB;AAAA,MACpC,UAAU,KAAK;AAAA,MACf,QAAQ,KAAK,OAAO,IAAI,cAAc;AAAA,MACtC,aAAa,KAAK;AAAA,IACnB,CAAC;AACD,WAAO,SAAS,gBAAgB,KAAK;AAAA,EACtC,CAAC;AACF;AAQO,SAAS,eAAe,OAAwD;AACtF,SAAO,SAAS,6BAA6B,MAAM;AAClD,UAAM,QAAQ,WAAW,gBAAgB,KAAK;AAC9C,WAAO;AAAA,MACN,UAAU,MAAM;AAAA,MAChB,QAAQ,MAAM,OAAO,IAAI,cAAc;AAAA,MACvC,aAAa,MAAM;AAAA,IACpB;AAAA,EACD,CAAC;AACF;AAYO,SAAS,eAAe,MAAuD;AACrF,SAAO,SAAS,6BAA6B,MAAM;AAClD,UAAM,QAAQ,OAAO,gBAAgB;AAAA,MACpC,UAAU,KAAK;AAAA,MACf,UAAU,KAAK;AAAA,MACf,WAAW,KAAK;AAAA,IACjB,CAAC;AACD,WAAO,SAAS,gBAAgB,KAAK;AAAA,EACtC,CAAC;AACF;AAQO,SAAS,eAAe,OAAwD;AACtF,SAAO,SAAS,6BAA6B,MAAM;AAClD,UAAM,QAAQ,WAAW,gBAAgB,KAAK;AAC9C,WAAO;AAAA,MACN,UAAU,MAAM;AAAA,MAChB,UAAU,MAAM;AAAA,MAChB,WAAW,MAAM;AAAA,IAClB;AAAA,EACD,CAAC;AACF;AAYO,SAAS,mBAAmB,UAA+D;AACjG,SAAO,SAAS,iCAAiC,MAAM;AACtD,UAAM,QAAQ,OAAO,oBAAoB;AAAA,MACxC,QAAQ,SAAS,OAAO,IAAI,cAAc;AAAA,MAC1C,WAAW,SAAS;AAAA,MACpB,SAAS,SAAS;AAAA,IACnB,CAAC;AACD,WAAO,SAAS,oBAAoB,KAAK;AAAA,EAC1C,CAAC;AACF;AAQO,SAAS,mBAAmB,OAA4D;AAC9F,SAAO,SAAS,iCAAiC,MAAM;AACtD,UAAM,
QAAQ,WAAW,oBAAoB,KAAK;AAClD,WAAO;AAAA,MACN,QAAQ,MAAM,OAAO,IAAI,cAAc;AAAA,MACvC,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM;AAAA,IAChB;AAAA,EACD,CAAC;AACF;AAYO,SAAS,iBAAiB,MAAsD;AACtF,SAAO,SAAS,+BAA+B,MAAM;AACpD,UAAM,QAAQ,OAAO,kBAAkB;AAAA,MACtC,UAAU,KAAK;AAAA,MACf,SAAS,KAAK,QAAQ;AAAA,QAAI,CAAC,MAC1B,OAAO,cAAc;AAAA,UACpB,UAAU,EAAE;AAAA,UACZ,UAAU,EAAE;AAAA,UACZ,KAAK,EAAE;AAAA,UACP,WAAW,EAAE;AAAA,UACb,YAAY,EAAE;AAAA,UACd,QAAQ,YAAY,EAAE,MAAM;AAAA,UAC5B,gBAAgB,EAAE,kBAAkB;AAAA,QACrC,CAAC;AAAA,MACF;AAAA,IACD,CAAC;AACD,WAAO,SAAS,kBAAkB,KAAK;AAAA,EACxC,CAAC;AACF;AAQO,SAAS,iBAAiB,OAAuD;AACvF,SAAO,SAAS,+BAA+B,MAAM;AACpD,UAAM,QAAQ,WAAW,kBAAkB,KAAK;AAChD,WAAO;AAAA,MACN,UAAU,MAAM;AAAA,MAChB,SAAS,MAAM,QAAQ,IAAI,CAAC,OAAO;AAAA,QAClC,UAAU,EAAE;AAAA,QACZ,UAAU,EAAE;AAAA,QACZ,KAAK,EAAE;AAAA,QACP,WAAW,EAAE;AAAA,QACb,YAAY,EAAE;AAAA,QACd,QAAQ,YAAY,EAAE,MAAM;AAAA,QAC5B,GAAI,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,eAAe,IAAI,CAAC;AAAA,MAChE,EAAE;AAAA,IACH;AAAA,EACD,CAAC;AACF;AAYO,SAAS,qBAAqB,UAA8D;AAClG,SAAO,SAAS,mCAAmC,MAAM;AACxD,UAAM,UAAU,SAAS,QAAQ,IAAI,CAAC,MAAM;AAC3C,UAAI,UAAU,KAAK,eAAe,GAAG;AAEpC,cAAM,UAAU;AAChB,eAAO,OAAO,2BAA2B;AAAA,UACxC,QAAQ;AAAA,YACP,MAAM;AAAA,YACN,OAAO,OAAO,uBAAuB;AAAA,cACpC,UAAU,QAAQ;AAAA,cAClB,MAAM,YAAY,QAAQ,IAAI;AAAA,cAC9B,WAAW,QAAQ;AAAA,YACpB,CAAC;AAAA,UACF;AAAA,QACD,CAAC;AAAA,MACF;AAEA,YAAM,QAAQ;AACd,aAAO,OAAO,2BAA2B;AAAA,QACxC,QAAQ;AAAA,UACP,MAAM;AAAA,UACN,OAAO;AAAA,YACN,UAAU,MAAM;AAAA,YAChB,MAAM,MAAM;AAAA,YACZ,SAAS,MAAM;AAAA,YACf,WAAW,MAAM;AAAA,UAClB;AAAA,QACD;AAAA,MACD,CAAC;AAAA,IACF,CAAC;AAED,UAAM,QAAQ,OAAO,sBAAsB;AAAA,MAC1C,SAAS;AAAA,MACT,WAAW,SAAS;AAAA,IACrB,CAAC;AACD,WAAO,SAAS,sBAAsB,KAAK;AAAA,EAC5C,CAAC;AACF;AAQO,SAAS,qBAAqB,OAA2D;AAC/F,SAAO,SAAS,mCAAmC,MAAM;AACxD,UAAM,QAAQ,WAAW,sBAAsB,KAAK;AACpD,UAAM,UAA2D,MAAM,QAAQ,IAAI,CAAC,UAAU;AAC7F,UAAI,MAAM,OAAO,SAAS,WAAW;AACpC,cAAM,IAAI,MAAM,OAAO;AACvB,eAAO;AAAA,UACN,UAAU,EAAE;AAAA,UACZ,MAAM,YAAY,EAAE,IAAI;AAAA,UACxB,WAAW,EAAE;AAAA,QACd;AAAA,MACD;AACA,UAAI,MAAM,OAAO,SAAS,SAAS;AAClC,cAAM,IAAI,MAAM,OAAO;AACvB,eAAO;AAAA,UACN,UAAU,EAAE;AA
AA,UACZ,MAAM,EAAE;AAAA,UACR,SAAS,EAAE;AAAA,UACX,WAAW,EAAE;AAAA,QACd;AAAA,MACD;AACA,YAAM,IAAI,WAAW,mCAAmC;AAAA,IACzD,CAAC;AAED,WAAO;AAAA,MACN;AAAA,MACA,WAAW,MAAM;AAAA,IAClB;AAAA,EACD,CAAC;AACF;","names":["DeltaOp"]}
|