lakesync 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +74 -0
- package/dist/adapter.d.ts +369 -0
- package/dist/adapter.js +39 -0
- package/dist/adapter.js.map +1 -0
- package/dist/analyst.d.ts +268 -0
- package/dist/analyst.js +495 -0
- package/dist/analyst.js.map +1 -0
- package/dist/auth-CAVutXzx.d.ts +30 -0
- package/dist/base-poller-Qo_SmCZs.d.ts +82 -0
- package/dist/catalogue.d.ts +65 -0
- package/dist/catalogue.js +17 -0
- package/dist/catalogue.js.map +1 -0
- package/dist/chunk-4ARO6KTJ.js +257 -0
- package/dist/chunk-4ARO6KTJ.js.map +1 -0
- package/dist/chunk-5YOFCJQ7.js +1115 -0
- package/dist/chunk-5YOFCJQ7.js.map +1 -0
- package/dist/chunk-7D4SUZUM.js +38 -0
- package/dist/chunk-7D4SUZUM.js.map +1 -0
- package/dist/chunk-BNJOGBYK.js +335 -0
- package/dist/chunk-BNJOGBYK.js.map +1 -0
- package/dist/chunk-ICNT7I3K.js +1180 -0
- package/dist/chunk-ICNT7I3K.js.map +1 -0
- package/dist/chunk-P5DRFKIT.js +413 -0
- package/dist/chunk-P5DRFKIT.js.map +1 -0
- package/dist/chunk-X3RO5SYJ.js +880 -0
- package/dist/chunk-X3RO5SYJ.js.map +1 -0
- package/dist/client.d.ts +428 -0
- package/dist/client.js +2048 -0
- package/dist/client.js.map +1 -0
- package/dist/compactor.d.ts +342 -0
- package/dist/compactor.js +793 -0
- package/dist/compactor.js.map +1 -0
- package/dist/coordinator-CxckTzYW.d.ts +396 -0
- package/dist/db-types-BR6Kt4uf.d.ts +29 -0
- package/dist/gateway-D5SaaMvT.d.ts +337 -0
- package/dist/gateway-server.d.ts +306 -0
- package/dist/gateway-server.js +4663 -0
- package/dist/gateway-server.js.map +1 -0
- package/dist/gateway.d.ts +196 -0
- package/dist/gateway.js +79 -0
- package/dist/gateway.js.map +1 -0
- package/dist/hlc-DiD8QNG3.d.ts +70 -0
- package/dist/index.d.ts +245 -0
- package/dist/index.js +102 -0
- package/dist/index.js.map +1 -0
- package/dist/json-dYtqiL0F.d.ts +18 -0
- package/dist/nessie-client-DrNikVXy.d.ts +160 -0
- package/dist/parquet.d.ts +78 -0
- package/dist/parquet.js +15 -0
- package/dist/parquet.js.map +1 -0
- package/dist/proto.d.ts +434 -0
- package/dist/proto.js +67 -0
- package/dist/proto.js.map +1 -0
- package/dist/react.d.ts +147 -0
- package/dist/react.js +224 -0
- package/dist/react.js.map +1 -0
- package/dist/resolver-C3Wphi6O.d.ts +10 -0
- package/dist/result-CojzlFE2.d.ts +64 -0
- package/dist/src-QU2YLPZY.js +383 -0
- package/dist/src-QU2YLPZY.js.map +1 -0
- package/dist/src-WYBF5LOI.js +102 -0
- package/dist/src-WYBF5LOI.js.map +1 -0
- package/dist/src-WZNPHANQ.js +426 -0
- package/dist/src-WZNPHANQ.js.map +1 -0
- package/dist/types-Bs-QyOe-.d.ts +143 -0
- package/dist/types-DAQL_vU_.d.ts +118 -0
- package/dist/types-DSC_EiwR.d.ts +45 -0
- package/dist/types-V_jVu2sA.d.ts +73 -0
- package/package.json +119 -0
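
Most of the added files are compiled bundles; the source-map hunk below embeds the original `client/src` files (the sql.js-backed `LocalDB`, schema registry, IndexedDB and in-memory sync queues, the delta applier, `SyncTracker`, and `SyncCoordinator`). As a rough orientation before the raw map content, here is a minimal usage sketch assembled from those embedded sources. It is a sketch only: the `lakesync/client` import path, the assumption that these names are re-exported from the client entry, and the exact `TableSchema` shape are inferred from the bundled sources, not confirmed public API.

```ts
import { HLC } from "@lakesync/core";
// Assumed entry point and re-exports; the bundled sources define these names
// under client/src, but the public export surface is not visible in this diff.
import { LocalDB, MemoryQueue, registerSchema, SyncTracker } from "lakesync/client";

async function demo(): Promise<void> {
  // Open a local sql.js database; the backend auto-detects IndexedDB when
  // available and otherwise falls back to memory.
  const opened = await LocalDB.open({ name: "demo", backend: "memory" });
  if (!opened.ok) throw opened.error;
  const db = opened.value;

  // Register a table schema; this creates the user table with a
  // `_rowId TEXT PRIMARY KEY` column plus the declared columns.
  // The exact TableSchema literal shape here is an assumption.
  const registered = await registerSchema(db, {
    table: "todos",
    columns: [
      { name: "title", type: "string" },
      { name: "done", type: "boolean" },
    ],
  });
  if (!registered.ok) throw registered.error;

  // Track a local mutation; the tracker writes to SQLite and pushes a
  // column-level RowDelta onto the sync queue for later delivery.
  const tracker = new SyncTracker(db, new MemoryQueue(), new HLC(), "client-1");
  const inserted = await tracker.insert("todos", "row-1", { title: "try lakesync", done: false });
  if (!inserted.ok) throw inserted.error;

  await db.close();
}

demo().catch(console.error);
```

For gateway-connected usage, the embedded `coordinator.ts` shows `SyncCoordinator` wiring the same database and queue to an HTTP, WebSocket, or in-process transport.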
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../client/src/db/idb-persistence.ts","../../client/src/db/local-db.ts","../../client/src/db/types.ts","../../client/src/db/schema-registry.ts","../../client/src/queue/idb-action-queue.ts","../../client/src/queue/idb-queue.ts","../../client/src/queue/memory-action-queue.ts","../../client/src/queue/memory-queue.ts","../../client/src/sync/applier.ts","../../client/src/sync/tracker.ts","../../client/src/sync/coordinator.ts","../../client/src/sync/schema-sync.ts","../../client/src/sync/transport-http.ts","../../client/src/sync/transport-local.ts","../../client/src/sync/transport-ws.ts"],"sourcesContent":["import { type IDBPDatabase, openDB } from \"idb\";\n\nconst IDB_DB_NAME = \"lakesync-snapshots\";\nconst IDB_DB_VERSION = 1;\nconst STORE_NAME = \"snapshots\";\n\n/** Cached IDB connection */\nlet cachedDb: IDBPDatabase | null = null;\n\n/** Open (or reuse) the snapshot IndexedDB database */\nasync function getDb(): Promise<IDBPDatabase> {\n\tif (cachedDb) return cachedDb;\n\tcachedDb = await openDB(IDB_DB_NAME, IDB_DB_VERSION, {\n\t\tupgrade(db) {\n\t\t\tif (!db.objectStoreNames.contains(STORE_NAME)) {\n\t\t\t\tdb.createObjectStore(STORE_NAME);\n\t\t\t}\n\t\t},\n\t});\n\treturn cachedDb;\n}\n\n/**\n * Load a database snapshot from IndexedDB.\n *\n * @param dbName - The logical database name (used as key in IndexedDB)\n * @returns The raw SQLite database bytes, or null if no snapshot exists\n */\nexport async function loadSnapshot(dbName: string): Promise<Uint8Array | null> {\n\tconst idb = await getDb();\n\tconst data = await idb.get(STORE_NAME, dbName);\n\tif (data instanceof Uint8Array) return data;\n\treturn null;\n}\n\n/**\n * Save a database snapshot to IndexedDB.\n *\n * @param dbName - The logical database name (used as key in IndexedDB)\n * @param data - The raw SQLite database bytes from sql.js `db.export()`\n */\nexport async function saveSnapshot(dbName: string, data: Uint8Array): Promise<void> {\n\tconst idb = await getDb();\n\tawait idb.put(STORE_NAME, data, dbName);\n}\n\n/**\n * Delete a database snapshot from IndexedDB.\n *\n * @param dbName - The logical database name to remove\n */\nexport async function deleteSnapshot(dbName: string): Promise<void> {\n\tconst idb = await getDb();\n\tawait idb.delete(STORE_NAME, dbName);\n}\n","import type { Result } from \"@lakesync/core\";\nimport { Err, Ok, toError } from \"@lakesync/core\";\nimport type { Database, QueryExecResult } from \"sql.js\";\nimport initSqlJs from \"sql.js\";\nimport { loadSnapshot, saveSnapshot } from \"./idb-persistence\";\nimport type { DbConfig, Transaction } from \"./types\";\nimport { DbError } from \"./types\";\n\n/** Resolved storage backend after auto-detection */\ntype ResolvedBackend = \"idb\" | \"memory\";\n\n/** Map sql.js query results into typed row objects */\nfunction mapResultRows<T>(results: QueryExecResult[]): T[] {\n\tif (results.length === 0 || !results[0]) {\n\t\treturn [];\n\t}\n\tconst { columns, values } = results[0];\n\treturn values.map((row) => {\n\t\tconst obj: Record<string, unknown> = {};\n\t\tfor (let i = 0; i < columns.length; i++) {\n\t\t\tconst col = columns[i];\n\t\t\tif (col !== undefined) {\n\t\t\t\tobj[col] = row[i];\n\t\t\t}\n\t\t}\n\t\treturn obj as T;\n\t});\n}\n\n/** Wrap a synchronous operation in a try/catch returning Result<T, DbError> */\nfunction wrapDbError<T>(label: string, fn: () => T): Result<T, DbError> {\n\ttry {\n\t\treturn Ok(fn());\n\t} catch (err) {\n\t\treturn Err(new DbError(label, toError(err)));\n\t}\n}\n\n/**\n * Local 
SQLite database backed by sql.js (SQLite compiled to WASM).\n *\n * Supports two persistence backends:\n * - `\"memory\"` — purely in-memory, data lost on close\n * - `\"idb\"` — snapshots persisted to IndexedDB between sessions\n *\n * When no backend is specified, auto-detects: uses `\"idb\"` if\n * `indexedDB` is available, otherwise falls back to `\"memory\"`.\n */\nexport class LocalDB {\n\treadonly #db: Database;\n\treadonly #config: DbConfig;\n\treadonly #backend: ResolvedBackend;\n\n\tprivate constructor(db: Database, config: DbConfig, backend: ResolvedBackend) {\n\t\tthis.#db = db;\n\t\tthis.#config = config;\n\t\tthis.#backend = backend;\n\t}\n\n\t/** The database name from configuration */\n\tget name(): string {\n\t\treturn this.#config.name;\n\t}\n\n\t/** The resolved storage backend for this instance */\n\tget backend(): ResolvedBackend {\n\t\treturn this.#backend;\n\t}\n\n\t/**\n\t * Open a new LocalDB instance.\n\t *\n\t * Initialises the sql.js WASM engine and creates a database. When the\n\t * backend is `\"idb\"`, any existing snapshot is loaded from IndexedDB.\n\t * If no backend is specified, auto-detects based on `indexedDB` availability.\n\t */\n\tstatic async open(config: DbConfig): Promise<Result<LocalDB, DbError>> {\n\t\ttry {\n\t\t\tconst backend: ResolvedBackend = resolveBackend(config.backend);\n\n\t\t\tconst SQL = await initSqlJs();\n\n\t\t\tlet data: Uint8Array | null = null;\n\t\t\tif (backend === \"idb\") {\n\t\t\t\tdata = await loadSnapshot(config.name);\n\t\t\t}\n\n\t\t\tconst db = data ? new SQL.Database(data) : new SQL.Database();\n\t\t\treturn Ok(new LocalDB(db, config, backend));\n\t\t} catch (err) {\n\t\t\treturn Err(new DbError(`Failed to open database \"${config.name}\"`, toError(err)));\n\t\t}\n\t}\n\n\t/**\n\t * Execute a SQL statement (INSERT, UPDATE, DELETE, CREATE, etc.).\n\t *\n\t * Returns `Ok(void)` on success, or `Err(DbError)` on failure.\n\t */\n\tasync exec(sql: string, params?: unknown[]): Promise<Result<void, DbError>> {\n\t\treturn wrapDbError(`Failed to execute SQL: ${sql}`, () => {\n\t\t\tthis.#db.run(sql, params as Parameters<Database[\"run\"]>[1]);\n\t\t});\n\t}\n\n\t/**\n\t * Query the database and return typed rows as an array of objects.\n\t *\n\t * Each row is mapped from sql.js column-array format into a keyed object.\n\t */\n\tasync query<T>(sql: string, params?: unknown[]): Promise<Result<T[], DbError>> {\n\t\treturn wrapDbError(`Failed to query SQL: ${sql}`, () => {\n\t\t\tconst results = this.#db.exec(sql, params as Parameters<Database[\"exec\"]>[1]);\n\t\t\treturn mapResultRows<T>(results);\n\t\t});\n\t}\n\n\t/**\n\t * Execute a function within a database transaction.\n\t *\n\t * Begins a transaction, executes the callback with a `Transaction` object,\n\t * commits on success, or rolls back if the callback throws.\n\t */\n\tasync transaction<T>(fn: (tx: Transaction) => T): Promise<Result<T, DbError>> {\n\t\tconst tx = this.#createTransaction();\n\n\t\tconst beginResult = wrapDbError(\"Failed to begin transaction\", () => {\n\t\t\tthis.#db.run(\"BEGIN\");\n\t\t});\n\t\tif (!beginResult.ok) return beginResult;\n\n\t\ttry {\n\t\t\tconst result = fn(tx);\n\t\t\tthis.#db.run(\"COMMIT\");\n\t\t\treturn Ok(result);\n\t\t} catch (err) {\n\t\t\ttry {\n\t\t\t\tthis.#db.run(\"ROLLBACK\");\n\t\t\t} catch (_rollbackErr) {\n\t\t\t\t// Rollback failure is secondary; report the original error\n\t\t\t}\n\t\t\treturn Err(new DbError(\"Transaction failed\", toError(err)));\n\t\t}\n\t}\n\n\t/**\n\t * Export the current database state and 
persist it to IndexedDB.\n\t *\n\t * No-op when the backend is `\"memory\"`.\n\t */\n\tasync save(): Promise<Result<void, DbError>> {\n\t\tif (this.#backend !== \"idb\") {\n\t\t\treturn Ok(undefined);\n\t\t}\n\t\ttry {\n\t\t\tconst data = this.#db.export();\n\t\t\tawait saveSnapshot(this.#config.name, data);\n\t\t\treturn Ok(undefined);\n\t\t} catch (err) {\n\t\t\treturn Err(\n\t\t\t\tnew DbError(`Failed to save database \"${this.#config.name}\" to IndexedDB`, toError(err)),\n\t\t\t);\n\t\t}\n\t}\n\n\t/**\n\t * Close the database and release resources.\n\t *\n\t * When the backend is `\"idb\"`, the database snapshot is persisted\n\t * to IndexedDB before closing.\n\t */\n\tasync close(): Promise<void> {\n\t\tif (this.#backend === \"idb\") {\n\t\t\tawait this.save();\n\t\t}\n\t\tthis.#db.close();\n\t}\n\n\t#createTransaction(): Transaction {\n\t\tconst db = this.#db;\n\t\treturn {\n\t\t\texec(sql: string, params?: unknown[]): Result<void, DbError> {\n\t\t\t\treturn wrapDbError(`Transaction exec failed: ${sql}`, () => {\n\t\t\t\t\tdb.run(sql, params as Parameters<Database[\"run\"]>[1]);\n\t\t\t\t});\n\t\t\t},\n\t\t\tquery<T>(sql: string, params?: unknown[]): Result<T[], DbError> {\n\t\t\t\treturn wrapDbError(`Transaction query failed: ${sql}`, () => {\n\t\t\t\t\tconst results = db.exec(sql, params as Parameters<Database[\"exec\"]>[1]);\n\t\t\t\t\treturn mapResultRows<T>(results);\n\t\t\t\t});\n\t\t\t},\n\t\t};\n\t}\n}\n\n/**\n * Resolve the storage backend from configuration.\n *\n * When no backend is specified, auto-detects: uses `\"idb\"` if\n * the `indexedDB` global is available, otherwise `\"memory\"`.\n */\nfunction resolveBackend(configured?: DbConfig[\"backend\"]): ResolvedBackend {\n\tif (configured === \"memory\") return \"memory\";\n\tif (configured === \"idb\") return \"idb\";\n\t// Auto-detect: prefer IndexedDB when available\n\tif (typeof indexedDB !== \"undefined\") return \"idb\";\n\treturn \"memory\";\n}\n","import type { Result } from \"@lakesync/core\";\nimport { LakeSyncError } from \"@lakesync/core\";\n\n/** Configuration for opening a local database */\nexport interface DbConfig {\n\t/** Database name (used for identification and future persistence) */\n\tname: string;\n\t/** Storage backend — auto-detected if not set */\n\tbackend?: \"idb\" | \"memory\";\n}\n\n/** Error type for database operations */\nexport class DbError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"DB_ERROR\", cause);\n\t}\n}\n\n/** Synchronous transaction interface wrapping sql.js operations */\nexport interface Transaction {\n\t/** Execute a SQL statement with optional parameters */\n\texec(sql: string, params?: unknown[]): Result<void, DbError>;\n\t/** Query rows from the database with optional parameters */\n\tquery<T>(sql: string, params?: unknown[]): Result<T[], DbError>;\n}\n","import {\n\tassertValidIdentifier,\n\tErr,\n\tOk,\n\tquoteIdentifier,\n\ttype Result,\n\tSchemaError,\n\ttype TableSchema,\n\tunwrapOrThrow,\n} from \"@lakesync/core\";\nimport type { LocalDB } from \"./local-db\";\nimport { DbError } from \"./types\";\n\n/** SQL column type mapping from LakeSync column types */\nconst COLUMN_TYPE_MAP: Record<TableSchema[\"columns\"][number][\"type\"], string> = {\n\tstring: \"TEXT\",\n\tnumber: \"REAL\",\n\tboolean: \"INTEGER\",\n\tjson: \"TEXT\",\n\tnull: \"TEXT\",\n};\n\n/**\n * Ensures the `_lakesync_meta` table exists in the database.\n * This table stores schema metadata for all registered tables.\n *\n * @param db - The LocalDB 
instance to initialise the meta table in\n * @returns A Result indicating success or a DbError\n */\nasync function ensureMetaTable(db: LocalDB): Promise<Result<void, DbError>> {\n\treturn db.exec(`\n\t\tCREATE TABLE IF NOT EXISTS _lakesync_meta (\n\t\t\ttable_name TEXT PRIMARY KEY,\n\t\t\tschema_version INTEGER NOT NULL DEFAULT 1,\n\t\t\tschema_json TEXT NOT NULL,\n\t\t\tupdated_at TEXT NOT NULL\n\t\t)\n\t`);\n}\n\n/**\n * Registers a table schema in the local database.\n *\n * Creates the `_lakesync_meta` metadata table if it does not already exist,\n * then inserts or updates the schema entry and creates the corresponding\n * user table with columns derived from the provided TableSchema.\n *\n * The user table always includes a `_rowId TEXT PRIMARY KEY` column in\n * addition to the columns specified in the schema.\n *\n * This operation is idempotent — calling it twice with the same schema\n * produces no error.\n *\n * @param db - The LocalDB instance to register the schema in\n * @param schema - The TableSchema describing the table and its columns\n * @returns A Result indicating success or a DbError\n */\nexport async function registerSchema(\n\tdb: LocalDB,\n\tschema: TableSchema,\n): Promise<Result<void, DbError | SchemaError>> {\n\tconst tableCheck = assertValidIdentifier(schema.table);\n\tif (!tableCheck.ok) return tableCheck;\n\tfor (const col of schema.columns) {\n\t\tconst colCheck = assertValidIdentifier(col.name);\n\t\tif (!colCheck.ok) return colCheck;\n\t}\n\n\tconst metaResult = await ensureMetaTable(db);\n\tif (!metaResult.ok) return metaResult;\n\n\treturn db.transaction((tx) => {\n\t\tconst now = new Date().toISOString();\n\t\tconst schemaJson = JSON.stringify(schema);\n\n\t\t// Insert or update schema metadata\n\t\tunwrapOrThrow(\n\t\t\ttx.exec(\n\t\t\t\t`INSERT INTO _lakesync_meta (table_name, schema_version, schema_json, updated_at)\n\t\t\t\t VALUES (?, 1, ?, ?)\n\t\t\t\t ON CONFLICT(table_name) DO UPDATE SET\n\t\t\t\t schema_json = excluded.schema_json,\n\t\t\t\t updated_at = excluded.updated_at`,\n\t\t\t\t[schema.table, schemaJson, now],\n\t\t\t),\n\t\t);\n\n\t\t// Build the CREATE TABLE statement with _rowId as primary key\n\t\tconst quotedTable = quoteIdentifier(schema.table);\n\t\tconst columnDefs = schema.columns\n\t\t\t.map((col) => `${quoteIdentifier(col.name)} ${COLUMN_TYPE_MAP[col.type]}`)\n\t\t\t.join(\", \");\n\n\t\tconst createSql = columnDefs\n\t\t\t? 
`CREATE TABLE IF NOT EXISTS ${quotedTable} (_rowId TEXT PRIMARY KEY, ${columnDefs})`\n\t\t\t: `CREATE TABLE IF NOT EXISTS ${quotedTable} (_rowId TEXT PRIMARY KEY)`;\n\n\t\tunwrapOrThrow(tx.exec(createSql));\n\t});\n}\n\n/**\n * Retrieves a previously registered table schema from the database.\n *\n * Ensures the `_lakesync_meta` table exists before querying, so this\n * function is safe to call even on a freshly created database.\n *\n * @param db - The LocalDB instance to query\n * @param table - The table name to look up\n * @returns A Result containing the TableSchema if found, null if the\n * table has not been registered, or a DbError on failure\n */\nexport async function getSchema(\n\tdb: LocalDB,\n\ttable: string,\n): Promise<Result<TableSchema | null, DbError>> {\n\tconst metaResult = await ensureMetaTable(db);\n\tif (!metaResult.ok) return metaResult;\n\n\tconst queryResult = await db.query<{ schema_json: string }>(\n\t\t\"SELECT schema_json FROM _lakesync_meta WHERE table_name = ?\",\n\t\t[table],\n\t);\n\n\tif (!queryResult.ok) return queryResult;\n\n\tconst firstRow = queryResult.value[0];\n\tif (!firstRow) {\n\t\treturn Ok(null);\n\t}\n\n\ttry {\n\t\tconst schema = JSON.parse(firstRow.schema_json) as TableSchema;\n\t\treturn Ok(schema);\n\t} catch (err) {\n\t\treturn Err(\n\t\t\tnew DbError(\n\t\t\t\t`Failed to parse schema JSON for table \"${table}\"`,\n\t\t\t\terr instanceof Error ? err : undefined,\n\t\t\t),\n\t\t);\n\t}\n}\n\n/**\n * Migrates an existing table schema to a new version.\n *\n * Only additive migrations are supported — new columns can be added\n * (they will be nullable). Removing columns or changing column types\n * will result in a SchemaError.\n *\n * Both schemas must reference the same table name; mismatched table\n * names will produce a SchemaError.\n *\n * On success, the `_lakesync_meta` entry is updated with the new schema\n * and the schema version is incremented by one.\n *\n * @param db - The LocalDB instance to migrate\n * @param oldSchema - The current table schema\n * @param newSchema - The desired table schema after migration\n * @returns A Result indicating success, or a SchemaError/DbError on failure\n */\nexport async function migrateSchema(\n\tdb: LocalDB,\n\toldSchema: TableSchema,\n\tnewSchema: TableSchema,\n): Promise<Result<void, DbError | SchemaError>> {\n\t// Validate both schemas reference the same table\n\tif (oldSchema.table !== newSchema.table) {\n\t\treturn Err(\n\t\t\tnew SchemaError(\n\t\t\t\t`Table name mismatch: old schema references \"${oldSchema.table}\" but new schema references \"${newSchema.table}\"`,\n\t\t\t),\n\t\t);\n\t}\n\n\tconst tableName = newSchema.table;\n\n\tconst tableCheck = assertValidIdentifier(tableName);\n\tif (!tableCheck.ok) return tableCheck;\n\tfor (const col of newSchema.columns) {\n\t\tconst colCheck = assertValidIdentifier(col.name);\n\t\tif (!colCheck.ok) return colCheck;\n\t}\n\n\t// Build lookup maps for comparison\n\tconst oldColumnMap = new Map<string, string>();\n\tfor (const col of oldSchema.columns) {\n\t\toldColumnMap.set(col.name, col.type);\n\t}\n\n\tconst newColumnMap = new Map<string, string>();\n\tfor (const col of newSchema.columns) {\n\t\tnewColumnMap.set(col.name, col.type);\n\t}\n\n\t// Detect removed columns\n\tfor (const col of oldSchema.columns) {\n\t\tif (!newColumnMap.has(col.name)) {\n\t\t\treturn Err(\n\t\t\t\tnew SchemaError(\n\t\t\t\t\t`Cannot remove column \"${col.name}\" from table \"${tableName}\". 
Only additive migrations are supported.`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\t// Detect type changes\n\tfor (const col of newSchema.columns) {\n\t\tconst oldType = oldColumnMap.get(col.name);\n\t\tif (oldType !== undefined && oldType !== col.type) {\n\t\t\treturn Err(\n\t\t\t\tnew SchemaError(\n\t\t\t\t\t`Cannot change type of column \"${col.name}\" in table \"${tableName}\" from \"${oldType}\" to \"${col.type}\". Type changes are not supported.`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\t// Find added columns\n\tconst addedColumns = newSchema.columns.filter((col) => !oldColumnMap.has(col.name));\n\n\tconst metaResult = await ensureMetaTable(db);\n\tif (!metaResult.ok) return metaResult;\n\n\treturn db.transaction((tx) => {\n\t\t// Add new columns via ALTER TABLE\n\t\tconst quotedTable = quoteIdentifier(tableName);\n\t\tfor (const col of addedColumns) {\n\t\t\tunwrapOrThrow(\n\t\t\t\ttx.exec(\n\t\t\t\t\t`ALTER TABLE ${quotedTable} ADD COLUMN ${quoteIdentifier(col.name)} ${COLUMN_TYPE_MAP[col.type]}`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\n\t\t// Update schema metadata with incremented version\n\t\tconst now = new Date().toISOString();\n\t\tconst schemaJson = JSON.stringify(newSchema);\n\n\t\tunwrapOrThrow(\n\t\t\ttx.exec(\n\t\t\t\t`UPDATE _lakesync_meta\n\t\t\t\t SET schema_json = ?,\n\t\t\t\t schema_version = schema_version + 1,\n\t\t\t\t updated_at = ?\n\t\t\t\t WHERE table_name = ?`,\n\t\t\t\t[schemaJson, now, tableName],\n\t\t\t),\n\t\t);\n\t});\n}\n","import type { Action, HLCTimestamp, Result } from \"@lakesync/core\";\nimport { Err, LakeSyncError, Ok } from \"@lakesync/core\";\nimport { type IDBPDatabase, openDB } from \"idb\";\nimport type { ActionQueue, ActionQueueEntry } from \"./action-types\";\n\nconst DB_NAME = \"lakesync-action-queue\";\nconst DB_VERSION = 1;\nconst STORE_NAME = \"entries\";\n\n/**\n * Serialised form of an Action where the HLC bigint is stored as a string.\n * IndexedDB uses structuredClone internally which cannot handle bigint values.\n */\ntype SerialisedAction = Omit<Action, \"hlc\"> & { hlc: string };\n\n/** Serialised queue entry stored in IndexedDB. */\ntype SerialisedActionQueueEntry = Omit<ActionQueueEntry, \"action\"> & {\n\taction: SerialisedAction;\n};\n\n/** Convert an Action to its serialised form for IDB storage. */\nfunction serialiseAction(action: Action): SerialisedAction {\n\treturn { ...action, hlc: action.hlc.toString() };\n}\n\n/** Convert a serialised action back to an Action with bigint HLC. */\nfunction deserialiseAction(serialised: SerialisedAction): Action {\n\treturn { ...serialised, hlc: BigInt(serialised.hlc) as HLCTimestamp };\n}\n\n/** Convert an ActionQueueEntry to its serialised form. */\nfunction serialiseEntry(entry: ActionQueueEntry): SerialisedActionQueueEntry {\n\treturn { ...entry, action: serialiseAction(entry.action) };\n}\n\n/** Convert a serialised entry back to an ActionQueueEntry. */\nfunction deserialiseEntry(serialised: SerialisedActionQueueEntry): ActionQueueEntry {\n\treturn { ...serialised, action: deserialiseAction(serialised.action) };\n}\n\n/** Wrap an async IDB operation, catching errors into a QUEUE_ERROR Result. */\nasync function wrapIdbOp<T>(\n\toperation: string,\n\tfn: () => Promise<T>,\n): Promise<Result<T, LakeSyncError>> {\n\ttry {\n\t\treturn Ok(await fn());\n\t} catch (error) {\n\t\tconst message = error instanceof Error ? 
error.message : String(error);\n\t\treturn Err(new LakeSyncError(`Failed to ${operation}: ${message}`, \"QUEUE_ERROR\"));\n\t}\n}\n\n/**\n * IndexedDB-backed action queue implementation.\n *\n * HLC timestamps (branded bigints) are serialised to strings for storage,\n * as IndexedDB's structuredClone cannot handle bigint values.\n */\nexport class IDBActionQueue implements ActionQueue {\n\tprivate dbPromise: Promise<IDBPDatabase>;\n\tprivate counter = 0;\n\n\t/**\n\t * Create a new IDB-backed action queue.\n\t *\n\t * @param dbName - Optional database name. Defaults to `'lakesync-action-queue'`.\n\t */\n\tconstructor(dbName: string = DB_NAME) {\n\t\tthis.dbPromise = openDB(dbName, DB_VERSION, {\n\t\t\tupgrade(db) {\n\t\t\t\tconst store = db.createObjectStore(STORE_NAME, { keyPath: \"id\" });\n\t\t\t\tstore.createIndex(\"status\", \"status\");\n\t\t\t\tstore.createIndex(\"createdAt\", \"createdAt\");\n\t\t\t},\n\t\t});\n\t}\n\n\t/** Add an action to the queue. */\n\tasync push(action: Action): Promise<Result<ActionQueueEntry, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"push to action queue\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst entry: ActionQueueEntry = {\n\t\t\t\tid: `idb-action-${Date.now()}-${++this.counter}`,\n\t\t\t\taction,\n\t\t\t\tstatus: \"pending\",\n\t\t\t\tcreatedAt: Date.now(),\n\t\t\t\tretryCount: 0,\n\t\t\t};\n\t\t\tawait db.put(STORE_NAME, serialiseEntry(entry));\n\t\t\treturn entry;\n\t\t});\n\t}\n\n\t/** Peek at pending entries (ordered by createdAt). */\n\tasync peek(limit: number): Promise<Result<ActionQueueEntry[], LakeSyncError>> {\n\t\treturn wrapIdbOp(\"peek action queue\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst tx = db.transaction(STORE_NAME, \"readonly\");\n\t\t\tconst index = tx.objectStore(STORE_NAME).index(\"createdAt\");\n\t\t\tconst results: ActionQueueEntry[] = [];\n\n\t\t\tlet cursor = await index.openCursor();\n\t\t\twhile (cursor && results.length < limit) {\n\t\t\t\tconst serialised = cursor.value as SerialisedActionQueueEntry;\n\t\t\t\tif (serialised.status === \"pending\") {\n\t\t\t\t\tconst entry = deserialiseEntry(serialised);\n\t\t\t\t\tif (entry.retryAfter === undefined || entry.retryAfter <= Date.now()) {\n\t\t\t\t\t\tresults.push(entry);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tcursor = await cursor.continue();\n\t\t\t}\n\n\t\t\treturn results;\n\t\t});\n\t}\n\n\t/** Mark entries as currently being sent. */\n\tasync markSending(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"mark sending\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst tx = db.transaction(STORE_NAME, \"readwrite\");\n\t\t\tconst store = tx.objectStore(STORE_NAME);\n\n\t\t\tfor (const id of ids) {\n\t\t\t\tconst serialised = (await store.get(id)) as SerialisedActionQueueEntry | undefined;\n\t\t\t\tif (serialised?.status === \"pending\") {\n\t\t\t\t\tserialised.status = \"sending\";\n\t\t\t\t\tawait store.put(serialised);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tawait tx.done;\n\t\t});\n\t}\n\n\t/** Acknowledge successful delivery (removes entries). */\n\tasync ack(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"ack\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst tx = db.transaction(STORE_NAME, \"readwrite\");\n\t\t\tfor (const id of ids) {\n\t\t\t\tawait tx.objectStore(STORE_NAME).delete(id);\n\t\t\t}\n\t\t\tawait tx.done;\n\t\t});\n\t}\n\n\t/** Negative acknowledge — reset to pending with incremented retryCount and exponential backoff. 
*/\n\tasync nack(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"nack\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst tx = db.transaction(STORE_NAME, \"readwrite\");\n\t\t\tconst store = tx.objectStore(STORE_NAME);\n\n\t\t\tfor (const id of ids) {\n\t\t\t\tconst serialised = (await store.get(id)) as SerialisedActionQueueEntry | undefined;\n\t\t\t\tif (serialised) {\n\t\t\t\t\tserialised.status = \"pending\";\n\t\t\t\t\tserialised.retryCount++;\n\t\t\t\t\tconst backoffMs = Math.min(1000 * 2 ** serialised.retryCount, 30_000);\n\t\t\t\t\t(serialised as Record<string, unknown>).retryAfter = Date.now() + backoffMs;\n\t\t\t\t\tawait store.put(serialised);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tawait tx.done;\n\t\t});\n\t}\n\n\t/** Get the number of pending + sending entries. */\n\tasync depth(): Promise<Result<number, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"get depth\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst all = (await db.getAll(STORE_NAME)) as SerialisedActionQueueEntry[];\n\t\t\treturn all.filter((e) => e.status === \"pending\" || e.status === \"sending\").length;\n\t\t});\n\t}\n\n\t/** Remove all entries. */\n\tasync clear(): Promise<Result<void, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"clear action queue\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tawait db.clear(STORE_NAME);\n\t\t});\n\t}\n}\n","import type { HLCTimestamp, Result, RowDelta } from \"@lakesync/core\";\nimport { Err, LakeSyncError, Ok } from \"@lakesync/core\";\nimport { type IDBPDatabase, openDB } from \"idb\";\nimport type { QueueEntry, SyncQueue } from \"./types\";\n\nconst DB_NAME = \"lakesync-queue\";\nconst DB_VERSION = 1;\nconst STORE_NAME = \"entries\";\n\n/**\n * Serialised form of a RowDelta where the HLC bigint is stored as a string.\n * IndexedDB uses structuredClone internally which cannot handle bigint values,\n * so we convert to/from string representation for storage.\n */\ntype SerialisedRowDelta = Omit<RowDelta, \"hlc\"> & { hlc: string };\n\n/** Serialised queue entry stored in IndexedDB */\ntype SerialisedQueueEntry = Omit<QueueEntry, \"delta\"> & {\n\tdelta: SerialisedRowDelta;\n};\n\n/** Convert a RowDelta to its serialised form for IDB storage */\nfunction serialiseDelta(delta: RowDelta): SerialisedRowDelta {\n\treturn { ...delta, hlc: delta.hlc.toString() };\n}\n\n/** Convert a serialised delta back to a RowDelta with bigint HLC */\nfunction deserialiseDelta(serialised: SerialisedRowDelta): RowDelta {\n\treturn { ...serialised, hlc: BigInt(serialised.hlc) as HLCTimestamp };\n}\n\n/** Convert a QueueEntry to its serialised form */\nfunction serialiseEntry(entry: QueueEntry): SerialisedQueueEntry {\n\treturn { ...entry, delta: serialiseDelta(entry.delta) };\n}\n\n/** Convert a serialised entry back to a QueueEntry */\nfunction deserialiseEntry(serialised: SerialisedQueueEntry): QueueEntry {\n\treturn { ...serialised, delta: deserialiseDelta(serialised.delta) };\n}\n\n/** Wrap an async IDB operation, catching errors into a QUEUE_ERROR Result */\nasync function wrapIdbOp<T>(\n\toperation: string,\n\tfn: () => Promise<T>,\n): Promise<Result<T, LakeSyncError>> {\n\ttry {\n\t\treturn Ok(await fn());\n\t} catch (error) {\n\t\tconst message = error instanceof Error ? 
error.message : String(error);\n\t\treturn Err(new LakeSyncError(`Failed to ${operation}: ${message}`, \"QUEUE_ERROR\"));\n\t}\n}\n\n/**\n * IndexedDB-backed sync queue implementation.\n * Uses a single readwrite transaction for atomic claim operations.\n *\n * HLC timestamps (branded bigints) are serialised to strings for storage,\n * as IndexedDB's structuredClone cannot handle bigint values.\n */\nexport class IDBQueue implements SyncQueue {\n\tprivate dbPromise: Promise<IDBPDatabase>;\n\tprivate counter = 0;\n\n\t/**\n\t * Create a new IDB-backed sync queue.\n\t *\n\t * @param dbName - Optional database name. Defaults to `'lakesync-queue'`.\n\t * Useful for tests or running multiple independent queues.\n\t */\n\tconstructor(dbName: string = DB_NAME) {\n\t\tthis.dbPromise = openDB(dbName, DB_VERSION, {\n\t\t\tupgrade(db) {\n\t\t\t\tconst store = db.createObjectStore(STORE_NAME, { keyPath: \"id\" });\n\t\t\t\tstore.createIndex(\"status\", \"status\");\n\t\t\t\tstore.createIndex(\"createdAt\", \"createdAt\");\n\t\t\t},\n\t\t});\n\t}\n\n\t/** Add a delta to the queue */\n\tasync push(delta: RowDelta): Promise<Result<QueueEntry, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"push to queue\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst entry: QueueEntry = {\n\t\t\t\tid: `idb-${Date.now()}-${++this.counter}`,\n\t\t\t\tdelta,\n\t\t\t\tstatus: \"pending\",\n\t\t\t\tcreatedAt: Date.now(),\n\t\t\t\tretryCount: 0,\n\t\t\t};\n\t\t\tawait db.put(STORE_NAME, serialiseEntry(entry));\n\t\t\treturn entry;\n\t\t});\n\t}\n\n\t/** Peek at pending entries (ordered by createdAt) */\n\tasync peek(limit: number): Promise<Result<QueueEntry[], LakeSyncError>> {\n\t\treturn wrapIdbOp(\"peek queue\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst tx = db.transaction(STORE_NAME, \"readonly\");\n\t\t\tconst index = tx.objectStore(STORE_NAME).index(\"createdAt\");\n\t\t\tconst results: QueueEntry[] = [];\n\n\t\t\tlet cursor = await index.openCursor();\n\t\t\twhile (cursor && results.length < limit) {\n\t\t\t\tconst serialised = cursor.value as SerialisedQueueEntry;\n\t\t\t\tif (serialised.status === \"pending\") {\n\t\t\t\t\tconst entry = deserialiseEntry(serialised);\n\t\t\t\t\tif (entry.retryAfter === undefined || entry.retryAfter <= Date.now()) {\n\t\t\t\t\t\tresults.push(entry);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tcursor = await cursor.continue();\n\t\t\t}\n\n\t\t\treturn results;\n\t\t});\n\t}\n\n\t/** Mark entries as currently being sent */\n\tasync markSending(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"mark sending\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst tx = db.transaction(STORE_NAME, \"readwrite\");\n\t\t\tconst store = tx.objectStore(STORE_NAME);\n\n\t\t\tfor (const id of ids) {\n\t\t\t\tconst serialised = (await store.get(id)) as SerialisedQueueEntry | undefined;\n\t\t\t\tif (serialised?.status === \"pending\") {\n\t\t\t\t\tserialised.status = \"sending\";\n\t\t\t\t\tawait store.put(serialised);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tawait tx.done;\n\t\t});\n\t}\n\n\t/** Acknowledge successful delivery (removes entries) */\n\tasync ack(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"ack\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst tx = db.transaction(STORE_NAME, \"readwrite\");\n\t\t\tfor (const id of ids) {\n\t\t\t\tawait tx.objectStore(STORE_NAME).delete(id);\n\t\t\t}\n\t\t\tawait tx.done;\n\t\t});\n\t}\n\n\t/** Negative acknowledge — reset to pending with 
incremented retryCount and exponential backoff */\n\tasync nack(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"nack\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst tx = db.transaction(STORE_NAME, \"readwrite\");\n\t\t\tconst store = tx.objectStore(STORE_NAME);\n\n\t\t\tfor (const id of ids) {\n\t\t\t\tconst serialised = (await store.get(id)) as SerialisedQueueEntry | undefined;\n\t\t\t\tif (serialised) {\n\t\t\t\t\tserialised.status = \"pending\";\n\t\t\t\t\tserialised.retryCount++;\n\t\t\t\t\tconst backoffMs = Math.min(1000 * 2 ** serialised.retryCount, 30_000);\n\t\t\t\t\t(serialised as Record<string, unknown>).retryAfter = Date.now() + backoffMs;\n\t\t\t\t\tawait store.put(serialised);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tawait tx.done;\n\t\t});\n\t}\n\n\t/** Get the number of pending + sending entries */\n\tasync depth(): Promise<Result<number, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"get depth\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tconst all = (await db.getAll(STORE_NAME)) as SerialisedQueueEntry[];\n\t\t\treturn all.filter((e) => e.status !== \"acked\").length;\n\t\t});\n\t}\n\n\t/** Remove all entries */\n\tasync clear(): Promise<Result<void, LakeSyncError>> {\n\t\treturn wrapIdbOp(\"clear queue\", async () => {\n\t\t\tconst db = await this.dbPromise;\n\t\t\tawait db.clear(STORE_NAME);\n\t\t});\n\t}\n}\n","import type { Action, LakeSyncError, Result } from \"@lakesync/core\";\nimport { Ok } from \"@lakesync/core\";\nimport type { ActionQueue, ActionQueueEntry } from \"./action-types\";\n\n/**\n * In-memory action queue implementation.\n * Suitable for testing and server-side use.\n */\nexport class MemoryActionQueue implements ActionQueue {\n\tprivate entries: Map<string, ActionQueueEntry> = new Map();\n\tprivate counter = 0;\n\n\t/** Add an action to the queue. */\n\tasync push(action: Action): Promise<Result<ActionQueueEntry, LakeSyncError>> {\n\t\tconst entry: ActionQueueEntry = {\n\t\t\tid: `mem-action-${++this.counter}`,\n\t\t\taction,\n\t\t\tstatus: \"pending\",\n\t\t\tcreatedAt: Date.now(),\n\t\t\tretryCount: 0,\n\t\t};\n\t\tthis.entries.set(entry.id, entry);\n\t\treturn Ok(entry);\n\t}\n\n\t/** Peek at pending entries (ordered by createdAt), skipping entries with future retryAfter. */\n\tasync peek(limit: number): Promise<Result<ActionQueueEntry[], LakeSyncError>> {\n\t\tconst now = Date.now();\n\t\tconst pending = [...this.entries.values()]\n\t\t\t.filter((e) => e.status === \"pending\" && (e.retryAfter === undefined || e.retryAfter <= now))\n\t\t\t.sort((a, b) => a.createdAt - b.createdAt)\n\t\t\t.slice(0, limit);\n\t\treturn Ok(pending);\n\t}\n\n\t/** Mark entries as currently being sent. */\n\tasync markSending(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\tfor (const id of ids) {\n\t\t\tconst entry = this.entries.get(id);\n\t\t\tif (entry?.status === \"pending\") {\n\t\t\t\tentry.status = \"sending\";\n\t\t\t}\n\t\t}\n\t\treturn Ok(undefined);\n\t}\n\n\t/** Acknowledge successful delivery (removes entries). */\n\tasync ack(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\tfor (const id of ids) {\n\t\t\tthis.entries.delete(id);\n\t\t}\n\t\treturn Ok(undefined);\n\t}\n\n\t/** Negative acknowledge — reset to pending with incremented retryCount and exponential backoff. 
*/\n\tasync nack(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\tfor (const id of ids) {\n\t\t\tconst entry = this.entries.get(id);\n\t\t\tif (entry) {\n\t\t\t\tentry.status = \"pending\";\n\t\t\t\tentry.retryCount++;\n\t\t\t\tconst backoffMs = Math.min(1000 * 2 ** entry.retryCount, 30_000);\n\t\t\t\tentry.retryAfter = Date.now() + backoffMs;\n\t\t\t}\n\t\t}\n\t\treturn Ok(undefined);\n\t}\n\n\t/** Get the number of pending + sending entries. */\n\tasync depth(): Promise<Result<number, LakeSyncError>> {\n\t\tconst count = [...this.entries.values()].filter(\n\t\t\t(e) => e.status === \"pending\" || e.status === \"sending\",\n\t\t).length;\n\t\treturn Ok(count);\n\t}\n\n\t/** Remove all entries. */\n\tasync clear(): Promise<Result<void, LakeSyncError>> {\n\t\tthis.entries.clear();\n\t\treturn Ok(undefined);\n\t}\n}\n","import type { LakeSyncError, Result, RowDelta } from \"@lakesync/core\";\nimport { Ok } from \"@lakesync/core\";\nimport type { QueueEntry, SyncQueue } from \"./types\";\n\n/**\n * In-memory sync queue implementation.\n * Suitable for testing and server-side use.\n */\nexport class MemoryQueue implements SyncQueue {\n\tprivate entries: Map<string, QueueEntry> = new Map();\n\tprivate counter = 0;\n\n\t/** Add a delta to the queue */\n\tasync push(delta: RowDelta): Promise<Result<QueueEntry, LakeSyncError>> {\n\t\tconst entry: QueueEntry = {\n\t\t\tid: `mem-${++this.counter}`,\n\t\t\tdelta,\n\t\t\tstatus: \"pending\",\n\t\t\tcreatedAt: Date.now(),\n\t\t\tretryCount: 0,\n\t\t};\n\t\tthis.entries.set(entry.id, entry);\n\t\treturn Ok(entry);\n\t}\n\n\t/** Peek at pending entries (ordered by createdAt), skipping entries with future retryAfter */\n\tasync peek(limit: number): Promise<Result<QueueEntry[], LakeSyncError>> {\n\t\tconst now = Date.now();\n\t\tconst pending = [...this.entries.values()]\n\t\t\t.filter((e) => e.status === \"pending\" && (e.retryAfter === undefined || e.retryAfter <= now))\n\t\t\t.sort((a, b) => a.createdAt - b.createdAt)\n\t\t\t.slice(0, limit);\n\t\treturn Ok(pending);\n\t}\n\n\t/** Mark entries as currently being sent */\n\tasync markSending(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\tfor (const id of ids) {\n\t\t\tconst entry = this.entries.get(id);\n\t\t\tif (entry?.status === \"pending\") {\n\t\t\t\tentry.status = \"sending\";\n\t\t\t}\n\t\t}\n\t\treturn Ok(undefined);\n\t}\n\n\t/** Acknowledge successful delivery (removes entries) */\n\tasync ack(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\tfor (const id of ids) {\n\t\t\tthis.entries.delete(id);\n\t\t}\n\t\treturn Ok(undefined);\n\t}\n\n\t/** Negative acknowledge — reset to pending with incremented retryCount and exponential backoff */\n\tasync nack(ids: string[]): Promise<Result<void, LakeSyncError>> {\n\t\tfor (const id of ids) {\n\t\t\tconst entry = this.entries.get(id);\n\t\t\tif (entry) {\n\t\t\t\tentry.status = \"pending\";\n\t\t\t\tentry.retryCount++;\n\t\t\t\tconst backoffMs = Math.min(1000 * 2 ** entry.retryCount, 30_000);\n\t\t\t\tentry.retryAfter = Date.now() + backoffMs;\n\t\t\t}\n\t\t}\n\t\treturn Ok(undefined);\n\t}\n\n\t/** Get the number of pending + sending entries */\n\tasync depth(): Promise<Result<number, LakeSyncError>> {\n\t\tconst count = [...this.entries.values()].filter((e) => e.status !== \"acked\").length;\n\t\treturn Ok(count);\n\t}\n\n\t/** Remove all entries */\n\tasync clear(): Promise<Result<void, LakeSyncError>> {\n\t\tthis.entries.clear();\n\t\treturn Ok(undefined);\n\t}\n}\n","import type { ConflictResolver, HLCTimestamp, 
Result, RowDelta } from \"@lakesync/core\";\nimport {\n\tassertValidIdentifier,\n\tErr,\n\tHLC,\n\tLakeSyncError,\n\tOk,\n\tquoteIdentifier,\n} from \"@lakesync/core\";\nimport type { LocalDB } from \"../db/local-db\";\nimport type { QueueEntry, SyncQueue } from \"../queue/types\";\n\n/**\n * Apply remote deltas to the local SQLite database.\n *\n * For each remote delta:\n * 1. Check if the same rowId has a pending local delta in the queue\n * 2. If conflict: use the resolver to determine winner\n * - Remote wins: apply to SQLite, remove local from queue\n * - Local wins: skip remote, keep local in queue\n * 3. If no conflict: apply remote delta directly\n *\n * After the batch, the `_sync_cursor` table is updated with the maximum\n * HLC from applied deltas so that subsequent syncs can resume from the\n * correct position.\n *\n * @param db - The local SQLite database\n * @param deltas - Remote deltas to apply\n * @param resolver - Conflict resolution strategy\n * @param pendingQueue - The local sync queue to check for conflicts\n * @returns The number of applied deltas, or an error\n */\nexport async function applyRemoteDeltas(\n\tdb: LocalDB,\n\tdeltas: RowDelta[],\n\tresolver: ConflictResolver,\n\tpendingQueue: SyncQueue,\n): Promise<Result<number, LakeSyncError>> {\n\tif (deltas.length === 0) {\n\t\treturn Ok(0);\n\t}\n\n\t// Ensure the _sync_cursor table exists\n\tconst cursorTableResult = await db.exec(`\n\t\tCREATE TABLE IF NOT EXISTS _sync_cursor (\n\t\t\ttable_name TEXT PRIMARY KEY,\n\t\t\tlast_synced_hlc TEXT NOT NULL\n\t\t)\n\t`);\n\tif (!cursorTableResult.ok) {\n\t\treturn Err(\n\t\t\tnew LakeSyncError(\n\t\t\t\t\"Failed to create _sync_cursor table\",\n\t\t\t\t\"APPLY_ERROR\",\n\t\t\t\tcursorTableResult.error,\n\t\t\t),\n\t\t);\n\t}\n\n\t// Begin a manual transaction for atomicity\n\tconst beginResult = await db.exec(\"BEGIN\");\n\tif (!beginResult.ok) {\n\t\treturn Err(\n\t\t\tnew LakeSyncError(\n\t\t\t\t\"Failed to begin transaction for remote delta application\",\n\t\t\t\t\"APPLY_ERROR\",\n\t\t\t\tbeginResult.error,\n\t\t\t),\n\t\t);\n\t}\n\n\tconst peekResult = await pendingQueue.peek(Number.MAX_SAFE_INTEGER);\n\tif (!peekResult.ok) {\n\t\tawait db.exec(\"ROLLBACK\");\n\t\treturn Err(\n\t\t\tnew LakeSyncError(\n\t\t\t\t\"Failed to peek pending queue for conflict detection\",\n\t\t\t\t\"APPLY_ERROR\",\n\t\t\t\tpeekResult.error,\n\t\t\t),\n\t\t);\n\t}\n\tconst pendingMap = new Map<string, QueueEntry>();\n\tfor (const entry of peekResult.value) {\n\t\tpendingMap.set(`${entry.delta.table}:${entry.delta.rowId}`, entry);\n\t}\n\n\tlet appliedCount = 0;\n\t/** Track the maximum HLC per table for cursor advancement */\n\tconst maxHlcPerTable = new Map<string, HLCTimestamp>();\n\n\tfor (const remoteDelta of deltas) {\n\t\tconst result = await applyOneDelta(db, remoteDelta, resolver, pendingQueue, pendingMap);\n\t\tif (!result.ok) {\n\t\t\t// Rollback on any failure\n\t\t\tawait db.exec(\"ROLLBACK\");\n\t\t\treturn result;\n\t\t}\n\n\t\tif (result.value) {\n\t\t\tappliedCount++;\n\t\t}\n\n\t\t// Track the maximum HLC regardless of whether we applied (cursor should still advance)\n\t\tconst currentMax = maxHlcPerTable.get(remoteDelta.table);\n\t\tif (currentMax === undefined || HLC.compare(remoteDelta.hlc, currentMax) > 0) {\n\t\t\tmaxHlcPerTable.set(remoteDelta.table, remoteDelta.hlc);\n\t\t}\n\t}\n\n\t// Update the sync cursor for each table\n\tfor (const [tableName, hlc] of maxHlcPerTable) {\n\t\tconst cursorResult = await db.exec(\n\t\t\t\"INSERT OR REPLACE INTO _sync_cursor 
(table_name, last_synced_hlc) VALUES (?, ?)\",\n\t\t\t[tableName, hlc.toString()],\n\t\t);\n\t\tif (!cursorResult.ok) {\n\t\t\tawait db.exec(\"ROLLBACK\");\n\t\t\treturn Err(\n\t\t\t\tnew LakeSyncError(\n\t\t\t\t\t`Failed to update sync cursor for table \"${tableName}\"`,\n\t\t\t\t\t\"APPLY_ERROR\",\n\t\t\t\t\tcursorResult.error,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\t// Commit the transaction\n\tconst commitResult = await db.exec(\"COMMIT\");\n\tif (!commitResult.ok) {\n\t\tawait db.exec(\"ROLLBACK\");\n\t\treturn Err(\n\t\t\tnew LakeSyncError(\n\t\t\t\t\"Failed to commit transaction for remote delta application\",\n\t\t\t\t\"APPLY_ERROR\",\n\t\t\t\tcommitResult.error,\n\t\t\t),\n\t\t);\n\t}\n\n\treturn Ok(appliedCount);\n}\n\n/**\n * Apply a single remote delta, checking for conflicts with local pending deltas.\n *\n * @param db - The local SQLite database\n * @param remoteDelta - The remote delta to apply\n * @param resolver - Conflict resolution strategy\n * @param pendingQueue - The local sync queue to check for conflicts\n * @returns Ok(true) if the delta was applied, Ok(false) if skipped (local wins), or Err on failure\n */\nasync function applyOneDelta(\n\tdb: LocalDB,\n\tremoteDelta: RowDelta,\n\tresolver: ConflictResolver,\n\tpendingQueue: SyncQueue,\n\tpendingMap: Map<string, QueueEntry>,\n): Promise<Result<boolean, LakeSyncError>> {\n\tconst conflictingEntry = pendingMap.get(`${remoteDelta.table}:${remoteDelta.rowId}`);\n\n\tif (conflictingEntry) {\n\t\tconst localDelta = conflictingEntry.delta;\n\t\tconst resolveResult = resolver.resolve(localDelta, remoteDelta);\n\t\tif (!resolveResult.ok) {\n\t\t\treturn Err(\n\t\t\t\tnew LakeSyncError(\n\t\t\t\t\t`Conflict resolution failed for row \"${remoteDelta.rowId}\" in table \"${remoteDelta.table}\"`,\n\t\t\t\t\t\"APPLY_ERROR\",\n\t\t\t\t\tresolveResult.error,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\n\t\tconst resolved = resolveResult.value;\n\n\t\t// Determine who won by comparing the resolved delta's clientId and hlc to the remote\n\t\tconst remoteWon =\n\t\t\tresolved.clientId === remoteDelta.clientId && resolved.hlc === remoteDelta.hlc;\n\n\t\tif (remoteWon) {\n\t\t\t// Remote wins: apply the resolved delta to SQLite and ack the local entry\n\t\t\tconst applyResult = await applySqlDelta(db, resolved);\n\t\t\tif (!applyResult.ok) {\n\t\t\t\treturn applyResult;\n\t\t\t}\n\n\t\t\tconst ackResult = await pendingQueue.ack([conflictingEntry.id]);\n\t\t\tif (!ackResult.ok) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew LakeSyncError(\n\t\t\t\t\t\t`Failed to ack local queue entry \"${conflictingEntry.id}\" after remote win`,\n\t\t\t\t\t\t\"APPLY_ERROR\",\n\t\t\t\t\t\tackResult.error,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\treturn Ok(true);\n\t\t}\n\n\t\t// Local wins: skip this remote delta, keep local in queue\n\t\treturn Ok(false);\n\t}\n\n\t// No conflict: apply the remote delta directly\n\tconst applyResult = await applySqlDelta(db, remoteDelta);\n\tif (!applyResult.ok) {\n\t\treturn applyResult;\n\t}\n\n\treturn Ok(true);\n}\n\n/** Validate that a delta's table name and all column names are safe identifiers */\nfunction validateDeltaIdentifiers(delta: RowDelta): Result<void, LakeSyncError> {\n\tconst tableCheck = assertValidIdentifier(delta.table);\n\tif (!tableCheck.ok) {\n\t\treturn Err(new LakeSyncError(tableCheck.error.message, \"APPLY_ERROR\"));\n\t}\n\tfor (const col of delta.columns) {\n\t\tconst colCheck = assertValidIdentifier(col.column);\n\t\tif (!colCheck.ok) {\n\t\t\treturn Err(new LakeSyncError(colCheck.error.message, 
\"APPLY_ERROR\"));\n\t\t}\n\t}\n\treturn Ok(undefined);\n}\n\n/**\n * Apply a single delta as SQL against the local database.\n *\n * - INSERT: creates a new row with the given columns\n * - UPDATE: sets the specified columns on an existing row\n * - DELETE: removes the row from the table\n *\n * @param db - The local SQLite database\n * @param delta - The delta to apply\n * @returns Ok(true) on success, or Err on failure\n */\nasync function applySqlDelta(\n\tdb: LocalDB,\n\tdelta: RowDelta,\n): Promise<Result<boolean, LakeSyncError>> {\n\tconst identifierCheck = validateDeltaIdentifiers(delta);\n\tif (!identifierCheck.ok) return identifierCheck;\n\n\tconst quotedTable = quoteIdentifier(delta.table);\n\n\tswitch (delta.op) {\n\t\tcase \"INSERT\": {\n\t\t\tconst colNames = delta.columns.map((c) => quoteIdentifier(c.column));\n\t\t\tconst allColumns = [\"_rowId\", ...colNames];\n\t\t\tconst placeholders = allColumns.map(() => \"?\").join(\", \");\n\t\t\tconst values = [delta.rowId, ...delta.columns.map((c) => c.value)];\n\t\t\tconst sql = `INSERT INTO ${quotedTable} (${allColumns.join(\", \")}) VALUES (${placeholders})`;\n\n\t\t\tconst result = await db.exec(sql, values);\n\t\t\tif (!result.ok) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew LakeSyncError(\n\t\t\t\t\t\t`Failed to apply INSERT for row \"${delta.rowId}\" in table \"${delta.table}\"`,\n\t\t\t\t\t\t\"APPLY_ERROR\",\n\t\t\t\t\t\tresult.error,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t\treturn Ok(true);\n\t\t}\n\n\t\tcase \"UPDATE\": {\n\t\t\tif (delta.columns.length === 0) {\n\t\t\t\t// No columns to update — nothing to do\n\t\t\t\treturn Ok(true);\n\t\t\t}\n\n\t\t\tconst setClauses = delta.columns.map((c) => `${quoteIdentifier(c.column)} = ?`).join(\", \");\n\t\t\tconst values = [...delta.columns.map((c) => c.value), delta.rowId];\n\t\t\tconst sql = `UPDATE ${quotedTable} SET ${setClauses} WHERE _rowId = ?`;\n\n\t\t\tconst result = await db.exec(sql, values);\n\t\t\tif (!result.ok) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew LakeSyncError(\n\t\t\t\t\t\t`Failed to apply UPDATE for row \"${delta.rowId}\" in table \"${delta.table}\"`,\n\t\t\t\t\t\t\"APPLY_ERROR\",\n\t\t\t\t\t\tresult.error,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t\treturn Ok(true);\n\t\t}\n\n\t\tcase \"DELETE\": {\n\t\t\tconst sql = `DELETE FROM ${quotedTable} WHERE _rowId = ?`;\n\t\t\tconst result = await db.exec(sql, [delta.rowId]);\n\t\t\tif (!result.ok) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew LakeSyncError(\n\t\t\t\t\t\t`Failed to apply DELETE for row \"${delta.rowId}\" in table \"${delta.table}\"`,\n\t\t\t\t\t\t\"APPLY_ERROR\",\n\t\t\t\t\t\tresult.error,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t\treturn Ok(true);\n\t\t}\n\t}\n}\n","import type { HLC, Result, TableSchema } from \"@lakesync/core\";\nimport {\n\tassertValidIdentifier,\n\tErr,\n\textractDelta,\n\tLakeSyncError,\n\tOk,\n\tquoteIdentifier,\n} from \"@lakesync/core\";\nimport type { LocalDB } from \"../db/local-db\";\nimport { getSchema } from \"../db/schema-registry\";\nimport type { DbError } from \"../db/types\";\nimport type { SyncQueue } from \"../queue/types\";\n\n/** Extract column values from a row, excluding the internal `_rowId` key. 
*/\nfunction rowWithoutId(row: Record<string, unknown>): Record<string, unknown> {\n\tconst result: Record<string, unknown> = {};\n\tfor (const [key, value] of Object.entries(row)) {\n\t\tif (key !== \"_rowId\") {\n\t\t\tresult[key] = value;\n\t\t}\n\t}\n\treturn result;\n}\n\n/**\n * Tracks local mutations (insert, update, delete) and produces\n * column-level deltas that are pushed to a SyncQueue.\n *\n * Each write operation:\n * 1. Applies the change to the local SQLite database\n * 2. Extracts a RowDelta describing the change\n * 3. Pushes the delta to the sync queue for eventual upstream delivery\n */\nexport class SyncTracker {\n\tprivate schemaCache = new Map<string, TableSchema | null>();\n\n\tconstructor(\n\t\tprivate readonly db: LocalDB,\n\t\tprivate readonly queue: SyncQueue,\n\t\tprivate readonly hlc: HLC,\n\t\tprivate readonly clientId: string,\n\t) {}\n\n\tprivate async getCachedSchema(table: string): Promise<Result<TableSchema | undefined, DbError>> {\n\t\tif (this.schemaCache.has(table)) {\n\t\t\tconst cached = this.schemaCache.get(table);\n\t\t\treturn Ok(cached ?? undefined);\n\t\t}\n\t\tconst result = await getSchema(this.db, table);\n\t\tif (result.ok) {\n\t\t\tthis.schemaCache.set(table, result.value);\n\t\t\treturn Ok(result.value ?? undefined);\n\t\t}\n\t\treturn result;\n\t}\n\n\t/**\n\t * Insert a new row into the specified table.\n\t *\n\t * Writes the row to SQLite and pushes an INSERT delta to the queue.\n\t *\n\t * @param table - The target table name\n\t * @param rowId - The unique row identifier\n\t * @param data - Column name/value pairs for the new row\n\t * @returns Ok on success, or Err with a LakeSyncError on failure\n\t */\n\tasync insert(\n\t\ttable: string,\n\t\trowId: string,\n\t\tdata: Record<string, unknown>,\n\t): Promise<Result<void, LakeSyncError>> {\n\t\tconst tableCheck = assertValidIdentifier(table);\n\t\tif (!tableCheck.ok) return tableCheck;\n\t\tfor (const col of Object.keys(data)) {\n\t\t\tconst colCheck = assertValidIdentifier(col);\n\t\t\tif (!colCheck.ok) return colCheck;\n\t\t}\n\n\t\t// Fetch schema for delta extraction filtering\n\t\tconst schemaResult = await this.getCachedSchema(table);\n\t\tif (!schemaResult.ok) return schemaResult;\n\t\tconst schema = schemaResult.value;\n\n\t\t// Build the INSERT SQL from data keys\n\t\tconst columns = Object.keys(data);\n\t\tconst allColumns = [\"_rowId\", ...columns.map((c) => quoteIdentifier(c))];\n\t\tconst placeholders = allColumns.map(() => \"?\").join(\", \");\n\t\tconst columnList = allColumns.join(\", \");\n\t\tconst values = [rowId, ...columns.map((col) => data[col])];\n\n\t\tconst sql = `INSERT INTO ${quoteIdentifier(table)} (${columnList}) VALUES (${placeholders})`;\n\t\tconst execResult = await this.db.exec(sql, values);\n\t\tif (!execResult.ok) return execResult;\n\n\t\t// Extract delta: null -> data means INSERT\n\t\tconst hlc = this.hlc.now();\n\t\tconst delta = await extractDelta(null, data, {\n\t\t\ttable,\n\t\t\trowId,\n\t\t\tclientId: this.clientId,\n\t\t\thlc,\n\t\t\tschema,\n\t\t});\n\n\t\tif (delta) {\n\t\t\tconst pushResult = await this.queue.push(delta);\n\t\t\tif (!pushResult.ok) return pushResult;\n\t\t}\n\n\t\treturn Ok(undefined);\n\t}\n\n\t/**\n\t * Update an existing row in the specified table.\n\t *\n\t * Reads the current row state, applies partial updates, and pushes\n\t * an UPDATE delta containing only the changed columns.\n\t *\n\t * @param table - The target table name\n\t * @param rowId - The unique row identifier\n\t * @param data - Column name/value 
pairs to update (partial)\n\t * @returns Ok on success, Err if the row is not found or on failure\n\t */\n\tasync update(\n\t\ttable: string,\n\t\trowId: string,\n\t\tdata: Record<string, unknown>,\n\t): Promise<Result<void, LakeSyncError>> {\n\t\tconst tableCheck = assertValidIdentifier(table);\n\t\tif (!tableCheck.ok) return tableCheck;\n\t\tfor (const col of Object.keys(data)) {\n\t\t\tconst colCheck = assertValidIdentifier(col);\n\t\t\tif (!colCheck.ok) return colCheck;\n\t\t}\n\n\t\t// Fetch schema for delta extraction filtering\n\t\tconst schemaResult = await this.getCachedSchema(table);\n\t\tif (!schemaResult.ok) return schemaResult;\n\t\tconst schema = schemaResult.value;\n\n\t\t// Read current row\n\t\tconst queryResult = await this.db.query<Record<string, unknown>>(\n\t\t\t`SELECT * FROM ${quoteIdentifier(table)} WHERE _rowId = ?`,\n\t\t\t[rowId],\n\t\t);\n\t\tif (!queryResult.ok) return queryResult;\n\n\t\tconst rows = queryResult.value;\n\t\tif (rows.length === 0 || !rows[0]) {\n\t\t\treturn Err(\n\t\t\t\tnew LakeSyncError(`Row \"${rowId}\" not found in table \"${table}\"`, \"ROW_NOT_FOUND\"),\n\t\t\t);\n\t\t}\n\n\t\tconst before = rowWithoutId(rows[0]);\n\n\t\t// Build SET clause from data keys\n\t\tconst columns = Object.keys(data);\n\t\tconst setClauses = columns.map((col) => `${quoteIdentifier(col)} = ?`).join(\", \");\n\t\tconst values = [...columns.map((col) => data[col]), rowId];\n\n\t\tconst sql = `UPDATE ${quoteIdentifier(table)} SET ${setClauses} WHERE _rowId = ?`;\n\t\tconst execResult = await this.db.exec(sql, values);\n\t\tif (!execResult.ok) return execResult;\n\n\t\t// Build the after state: merge current row with updates\n\t\tconst after: Record<string, unknown> = { ...before, ...data };\n\n\t\t// Extract delta: only changed columns\n\t\tconst hlc = this.hlc.now();\n\t\tconst delta = await extractDelta(before, after, {\n\t\t\ttable,\n\t\t\trowId,\n\t\t\tclientId: this.clientId,\n\t\t\thlc,\n\t\t\tschema,\n\t\t});\n\n\t\tif (delta) {\n\t\t\tconst pushResult = await this.queue.push(delta);\n\t\t\tif (!pushResult.ok) return pushResult;\n\t\t}\n\n\t\treturn Ok(undefined);\n\t}\n\n\t/**\n\t * Delete a row from the specified table.\n\t *\n\t * Reads the current row state for delta extraction, removes the row\n\t * from SQLite, and pushes a DELETE delta to the queue.\n\t *\n\t * @param table - The target table name\n\t * @param rowId - The unique row identifier\n\t * @returns Ok on success, Err if the row is not found or on failure\n\t */\n\tasync delete(table: string, rowId: string): Promise<Result<void, LakeSyncError>> {\n\t\tconst tableCheck = assertValidIdentifier(table);\n\t\tif (!tableCheck.ok) return tableCheck;\n\n\t\t// Fetch schema for delta extraction filtering\n\t\tconst schemaResult = await this.getCachedSchema(table);\n\t\tif (!schemaResult.ok) return schemaResult;\n\t\tconst schema = schemaResult.value;\n\n\t\t// Read current row for delta extraction\n\t\tconst queryResult = await this.db.query<Record<string, unknown>>(\n\t\t\t`SELECT * FROM ${quoteIdentifier(table)} WHERE _rowId = ?`,\n\t\t\t[rowId],\n\t\t);\n\t\tif (!queryResult.ok) return queryResult;\n\n\t\tconst rows = queryResult.value;\n\t\tif (rows.length === 0 || !rows[0]) {\n\t\t\treturn Err(\n\t\t\t\tnew LakeSyncError(`Row \"${rowId}\" not found in table \"${table}\"`, \"ROW_NOT_FOUND\"),\n\t\t\t);\n\t\t}\n\n\t\tconst before = rowWithoutId(rows[0]);\n\n\t\t// Delete the row\n\t\tconst execResult = await this.db.exec(\n\t\t\t`DELETE FROM ${quoteIdentifier(table)} WHERE _rowId = 
?`,\n\t\t\t[rowId],\n\t\t);\n\t\tif (!execResult.ok) return execResult;\n\n\t\t// Extract delta: data -> null means DELETE\n\t\tconst hlc = this.hlc.now();\n\t\tconst delta = await extractDelta(before, null, {\n\t\t\ttable,\n\t\t\trowId,\n\t\t\tclientId: this.clientId,\n\t\t\thlc,\n\t\t\tschema,\n\t\t});\n\n\t\tif (delta) {\n\t\t\tconst pushResult = await this.queue.push(delta);\n\t\t\tif (!pushResult.ok) return pushResult;\n\t\t}\n\n\t\treturn Ok(undefined);\n\t}\n\n\t/**\n\t * Query the local database.\n\t *\n\t * Pass-through to the underlying LocalDB query method.\n\t *\n\t * @param sql - The SQL query to execute\n\t * @param params - Optional bind parameters\n\t * @returns The query results as typed rows, or a DbError on failure\n\t */\n\tasync query<T>(sql: string, params?: unknown[]): Promise<Result<T[], DbError>> {\n\t\treturn this.db.query<T>(sql, params);\n\t}\n}\n","import {\n\ttype Action,\n\ttype ActionDiscovery,\n\ttype ActionErrorResult,\n\ttype ActionResult,\n\tHLC,\n\ttype HLCTimestamp,\n\tisActionError,\n\ttype LakeSyncError,\n\tLWWResolver,\n\ttype Result,\n\ttype RowDelta,\n} from \"@lakesync/core\";\nimport type { LocalDB } from \"../db/local-db\";\nimport type { ActionQueue } from \"../queue/action-types\";\nimport { IDBQueue } from \"../queue/idb-queue\";\nimport type { SyncQueue } from \"../queue/types\";\nimport { applyRemoteDeltas } from \"./applier\";\nimport { SyncTracker } from \"./tracker\";\nimport type { SyncTransport } from \"./transport\";\n\n/** Controls which operations syncOnce() / startAutoSync() performs */\nexport type SyncMode = \"full\" | \"pushOnly\" | \"pullOnly\";\n\n/** Events emitted by SyncCoordinator */\nexport interface SyncEvents {\n\t/** Fired after remote deltas are applied locally. Count is the number of deltas applied. */\n\tonChange: (count: number) => void;\n\t/** Fired after a successful sync cycle (push + pull) completes. */\n\tonSyncComplete: () => void;\n\t/** Fired when a sync error occurs. */\n\tonError: (error: Error) => void;\n\t/** Fired when an action completes (success or non-retryable failure). */\n\tonActionComplete: (actionId: string, result: ActionResult | ActionErrorResult) => void;\n}\n\n/** Optional configuration for dependency injection (useful for testing) */\nexport interface SyncCoordinatorConfig {\n\t/** Sync queue implementation. Defaults to IDBQueue. */\n\tqueue?: SyncQueue;\n\t/** HLC instance. Defaults to a new HLC(). */\n\thlc?: HLC;\n\t/** Client identifier. Defaults to a random UUID. */\n\tclientId?: string;\n\t/** Maximum retries before dead-lettering an entry. Defaults to 10. */\n\tmaxRetries?: number;\n\t/** Sync mode. Defaults to \"full\" (push + pull). */\n\tsyncMode?: SyncMode;\n\t/** Auto-sync interval in milliseconds. Defaults to 10000 (10 seconds). */\n\tautoSyncIntervalMs?: number;\n\t/** Polling interval when realtime transport is active (heartbeat). Defaults to 60000 (60 seconds). */\n\trealtimeHeartbeatMs?: number;\n\t/** Action queue for imperative command execution. */\n\tactionQueue?: ActionQueue;\n\t/** Maximum retries for actions before dead-lettering. Defaults to 5. 
*/\n\tmaxActionRetries?: number;\n}\n\n/** Auto-sync interval in milliseconds (every 10 seconds) */\nconst AUTO_SYNC_INTERVAL_MS = 10_000;\n\n/** Auto-sync heartbeat interval when realtime is active (every 60 seconds) */\nconst REALTIME_HEARTBEAT_MS = 60_000;\n\n/**\n * Coordinates local mutations (via SyncTracker) with gateway push/pull.\n *\n * Uses a {@link SyncTransport} abstraction to communicate with the gateway,\n * allowing both in-process (LocalTransport) and remote (HttpTransport) usage.\n */\nexport class SyncCoordinator {\n\treadonly tracker: SyncTracker;\n\tprivate readonly queue: SyncQueue;\n\tprivate readonly hlc: HLC;\n\tprivate readonly transport: SyncTransport;\n\tprivate readonly db: LocalDB;\n\tprivate readonly resolver = new LWWResolver();\n\tprivate readonly _clientId: string;\n\tprivate readonly maxRetries: number;\n\tprivate readonly syncMode: SyncMode;\n\tprivate readonly autoSyncIntervalMs: number;\n\tprivate readonly realtimeHeartbeatMs: number;\n\tprivate lastSyncedHlc = HLC.encode(0, 0);\n\tprivate _lastSyncTime: Date | null = null;\n\tprivate syncIntervalId: ReturnType<typeof setInterval> | null = null;\n\tprivate visibilityHandler: (() => void) | null = null;\n\tprivate syncing = false;\n\tprivate readonly actionQueue: ActionQueue | null;\n\tprivate readonly maxActionRetries: number;\n\tprivate listeners: { [K in keyof SyncEvents]: Array<SyncEvents[K]> } = {\n\t\tonChange: [],\n\t\tonSyncComplete: [],\n\t\tonError: [],\n\t\tonActionComplete: [],\n\t};\n\n\tconstructor(db: LocalDB, transport: SyncTransport, config?: SyncCoordinatorConfig) {\n\t\tthis.db = db;\n\t\tthis.transport = transport;\n\t\tthis.hlc = config?.hlc ?? new HLC();\n\t\tthis.queue = config?.queue ?? new IDBQueue();\n\t\tthis._clientId = config?.clientId ?? `client-${crypto.randomUUID()}`;\n\t\tthis.maxRetries = config?.maxRetries ?? 10;\n\t\tthis.syncMode = config?.syncMode ?? \"full\";\n\t\tthis.autoSyncIntervalMs = config?.autoSyncIntervalMs ?? AUTO_SYNC_INTERVAL_MS;\n\t\tthis.realtimeHeartbeatMs = config?.realtimeHeartbeatMs ?? REALTIME_HEARTBEAT_MS;\n\t\tthis.actionQueue = config?.actionQueue ?? null;\n\t\tthis.maxActionRetries = config?.maxActionRetries ?? 
5;\n\t\tthis.tracker = new SyncTracker(db, this.queue, this.hlc, this._clientId);\n\n\t\t// Register broadcast handler for realtime transports\n\t\tif (this.transport.onBroadcast) {\n\t\t\tthis.transport.onBroadcast((deltas, serverHlc) => {\n\t\t\t\tvoid this.handleBroadcast(deltas, serverHlc);\n\t\t\t});\n\t\t}\n\t}\n\n\t/** Register an event listener */\n\ton<K extends keyof SyncEvents>(event: K, listener: SyncEvents[K]): void {\n\t\tthis.listeners[event].push(listener);\n\t}\n\n\t/** Remove an event listener */\n\toff<K extends keyof SyncEvents>(event: K, listener: SyncEvents[K]): void {\n\t\tconst arr = this.listeners[event];\n\t\tconst idx = arr.indexOf(listener);\n\t\tif (idx !== -1) arr.splice(idx, 1);\n\t}\n\n\tprivate emit<K extends keyof SyncEvents>(event: K, ...args: Parameters<SyncEvents[K]>): void {\n\t\tfor (const fn of this.listeners[event]) {\n\t\t\ttry {\n\t\t\t\t(fn as (...a: Parameters<SyncEvents[K]>) => void)(...args);\n\t\t\t} catch {\n\t\t\t\t// Swallow listener errors to avoid breaking sync\n\t\t\t}\n\t\t}\n\t}\n\n\t/** Push pending deltas to the gateway via the transport */\n\tasync pushToGateway(): Promise<void> {\n\t\tconst peekResult = await this.queue.peek(100);\n\t\tif (!peekResult.ok || peekResult.value.length === 0) return;\n\n\t\t// Dead-letter entries that exceeded max retries\n\t\tconst deadLettered = peekResult.value.filter((e) => e.retryCount >= this.maxRetries);\n\t\tconst entries = peekResult.value.filter((e) => e.retryCount < this.maxRetries);\n\n\t\tif (deadLettered.length > 0) {\n\t\t\tconsole.warn(\n\t\t\t\t`[SyncCoordinator] Dead-lettering ${deadLettered.length} entries after ${this.maxRetries} retries`,\n\t\t\t);\n\t\t\tawait this.queue.ack(deadLettered.map((e) => e.id));\n\t\t\tthis.emit(\n\t\t\t\t\"onError\",\n\t\t\t\tnew Error(`Dead-lettered ${deadLettered.length} entries after ${this.maxRetries} retries`),\n\t\t\t);\n\t\t}\n\n\t\tif (entries.length === 0) return;\n\n\t\tconst ids = entries.map((e) => e.id);\n\t\tawait this.queue.markSending(ids);\n\n\t\tconst pushResult = await this.transport.push({\n\t\t\tclientId: this._clientId,\n\t\t\tdeltas: entries.map((e) => e.delta),\n\t\t\tlastSeenHlc: this.hlc.now(),\n\t\t});\n\n\t\tif (pushResult.ok) {\n\t\t\tawait this.queue.ack(ids);\n\t\t\tthis.lastSyncedHlc = pushResult.value.serverHlc;\n\t\t\tthis._lastSyncTime = new Date();\n\t\t} else {\n\t\t\tawait this.queue.nack(ids);\n\t\t}\n\t}\n\n\t/**\n\t * Pull deltas from a named adapter source.\n\t *\n\t * Convenience wrapper around {@link pullFromGateway} that passes the\n\t * `source` field through to the gateway, triggering an adapter-sourced\n\t * pull instead of a buffer pull.\n\t */\n\tasync pullFrom(source: string): Promise<number> {\n\t\treturn this.pullFromGateway(source);\n\t}\n\n\t/** Pull remote deltas from the gateway and apply them */\n\tasync pullFromGateway(source?: string): Promise<number> {\n\t\tconst pullResult = await this.transport.pull({\n\t\t\tclientId: this._clientId,\n\t\t\tsinceHlc: this.lastSyncedHlc,\n\t\t\tmaxDeltas: 1000,\n\t\t\tsource,\n\t\t});\n\n\t\tif (!pullResult.ok || pullResult.value.deltas.length === 0) return 0;\n\n\t\tconst { deltas, serverHlc } = pullResult.value;\n\t\tconst applyResult = await applyRemoteDeltas(this.db, deltas, this.resolver, this.queue);\n\n\t\tif (applyResult.ok) {\n\t\t\tthis.lastSyncedHlc = serverHlc;\n\t\t\tthis._lastSyncTime = new Date();\n\t\t\tif (applyResult.value > 0) {\n\t\t\t\tthis.emit(\"onChange\", applyResult.value);\n\t\t\t}\n\t\t\treturn 
applyResult.value;\n\t\t}\n\t\treturn 0;\n\t}\n\n\t/**\n\t * Handle a server-initiated broadcast of deltas.\n\t *\n\t * Applies the deltas using the same conflict resolution and idempotency\n\t * logic as a regular pull. Advances `lastSyncedHlc` and emits `onChange`.\n\t */\n\tprivate async handleBroadcast(deltas: RowDelta[], serverHlc: HLCTimestamp): Promise<void> {\n\t\tif (deltas.length === 0) return;\n\n\t\ttry {\n\t\t\tconst applyResult = await applyRemoteDeltas(this.db, deltas, this.resolver, this.queue);\n\n\t\t\tif (applyResult.ok) {\n\t\t\t\tif (HLC.compare(serverHlc, this.lastSyncedHlc) > 0) {\n\t\t\t\t\tthis.lastSyncedHlc = serverHlc;\n\t\t\t\t}\n\t\t\t\tthis._lastSyncTime = new Date();\n\t\t\t\tif (applyResult.value > 0) {\n\t\t\t\t\tthis.emit(\"onChange\", applyResult.value);\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (err) {\n\t\t\tthis.emit(\"onError\", err instanceof Error ? err : new Error(String(err)));\n\t\t}\n\t}\n\n\t/** Get the queue depth */\n\tasync queueDepth(): Promise<number> {\n\t\tconst result = await this.queue.depth();\n\t\treturn result.ok ? result.value : 0;\n\t}\n\n\t/** Get the client identifier */\n\tget clientId(): string {\n\t\treturn this._clientId;\n\t}\n\n\t/** Get the last successful sync time, or null if never synced */\n\tget lastSyncTime(): Date | null {\n\t\treturn this._lastSyncTime;\n\t}\n\n\t/**\n\t * Start auto-sync: periodic interval + visibility change handler.\n\t * Synchronises (push + pull) on tab focus and every 10 seconds.\n\t */\n\tstartAutoSync(): void {\n\t\tthis.transport.connect?.();\n\n\t\tconst intervalMs = this.transport.supportsRealtime\n\t\t\t? this.realtimeHeartbeatMs\n\t\t\t: this.autoSyncIntervalMs;\n\n\t\tthis.syncIntervalId = setInterval(() => {\n\t\t\tvoid this.syncOnce();\n\t\t}, intervalMs);\n\n\t\tthis.setupVisibilitySync();\n\t}\n\n\t/** Register a visibility change listener to sync on tab focus. */\n\tprivate setupVisibilitySync(): void {\n\t\tthis.visibilityHandler = () => {\n\t\t\tif (typeof document !== \"undefined\" && document.visibilityState === \"visible\") {\n\t\t\t\tvoid this.syncOnce();\n\t\t\t}\n\t\t};\n\t\tif (typeof document !== \"undefined\") {\n\t\t\tdocument.addEventListener(\"visibilitychange\", this.visibilityHandler);\n\t\t}\n\t}\n\n\t/**\n\t * Perform initial sync via checkpoint download.\n\t *\n\t * Called on first sync when `lastSyncedHlc` is zero. Downloads the\n\t * server's checkpoint (which is pre-filtered by JWT claims server-side),\n\t * applies the deltas locally, and advances the sync cursor to the\n\t * snapshot's HLC. If no checkpoint is available or the transport does\n\t * not support checkpoints, falls back to incremental pull.\n\t */\n\tprivate async initialSync(): Promise<void> {\n\t\tif (!this.transport.checkpoint) return;\n\t\tconst result = await this.transport.checkpoint();\n\t\tif (!result.ok || result.value === null) return;\n\t\tconst { deltas, snapshotHlc } = result.value;\n\t\tif (deltas.length > 0) {\n\t\t\tawait applyRemoteDeltas(this.db, deltas, this.resolver, this.queue);\n\t\t}\n\t\tthis.lastSyncedHlc = snapshotHlc;\n\t\tthis._lastSyncTime = new Date();\n\t}\n\n\t/** Perform a single sync cycle (push + pull + actions, depending on syncMode). 
*/\n\tasync syncOnce(): Promise<void> {\n\t\tif (this.syncing) return;\n\t\tthis.syncing = true;\n\t\ttry {\n\t\t\tif (this.syncMode !== \"pushOnly\") {\n\t\t\t\tif (this.lastSyncedHlc === HLC.encode(0, 0)) {\n\t\t\t\t\tawait this.initialSync();\n\t\t\t\t}\n\t\t\t\tawait this.pullFromGateway();\n\t\t\t}\n\t\t\tif (this.syncMode !== \"pullOnly\") {\n\t\t\t\tawait this.pushToGateway();\n\t\t\t}\n\t\t\t// Process pending actions after push\n\t\t\tawait this.processActionQueue();\n\t\t\tthis.emit(\"onSyncComplete\");\n\t\t} catch (err) {\n\t\t\tthis.emit(\"onError\", err instanceof Error ? err : new Error(String(err)));\n\t\t} finally {\n\t\t\tthis.syncing = false;\n\t\t}\n\t}\n\n\t/**\n\t * Submit an action for execution.\n\t *\n\t * Pushes the action to the ActionQueue and triggers immediate processing.\n\t * The action will be sent to the gateway on the next sync cycle or\n\t * immediately if not currently syncing.\n\t *\n\t * @param params - Partial action (connector, actionType, params). ActionId and HLC are generated.\n\t */\n\tasync executeAction(params: {\n\t\tconnector: string;\n\t\tactionType: string;\n\t\tparams: Record<string, unknown>;\n\t\tidempotencyKey?: string;\n\t}): Promise<void> {\n\t\tif (!this.actionQueue) {\n\t\t\tthis.emit(\"onError\", new Error(\"No action queue configured\"));\n\t\t\treturn;\n\t\t}\n\n\t\tconst hlc = this.hlc.now();\n\t\tconst { generateActionId } = await import(\"@lakesync/core\");\n\t\tconst actionId = await generateActionId({\n\t\t\tclientId: this._clientId,\n\t\t\thlc,\n\t\t\tconnector: params.connector,\n\t\t\tactionType: params.actionType,\n\t\t\tparams: params.params,\n\t\t});\n\n\t\tconst action: Action = {\n\t\t\tactionId,\n\t\t\tclientId: this._clientId,\n\t\t\thlc,\n\t\t\tconnector: params.connector,\n\t\t\tactionType: params.actionType,\n\t\t\tparams: params.params,\n\t\t\tidempotencyKey: params.idempotencyKey,\n\t\t};\n\n\t\tawait this.actionQueue.push(action);\n\t\t// Trigger immediate processing\n\t\tvoid this.processActionQueue();\n\t}\n\n\t/**\n\t * Process pending actions from the action queue.\n\t *\n\t * Peeks at pending entries, sends them to the gateway via\n\t * `transport.executeAction()`, and acks/nacks based on the result.\n\t * Dead-letters entries after `maxActionRetries` failures.\n\t * Triggers an immediate `syncOnce()` on success to pull fresh state.\n\t */\n\tasync processActionQueue(): Promise<void> {\n\t\tif (!this.actionQueue || !this.transport.executeAction) return;\n\n\t\tconst peekResult = await this.actionQueue.peek(100);\n\t\tif (!peekResult.ok || peekResult.value.length === 0) return;\n\n\t\t// Dead-letter entries that exceeded max retries\n\t\tconst deadLettered = peekResult.value.filter((e) => e.retryCount >= this.maxActionRetries);\n\t\tconst entries = peekResult.value.filter((e) => e.retryCount < this.maxActionRetries);\n\n\t\tif (deadLettered.length > 0) {\n\t\t\tconsole.warn(\n\t\t\t\t`[SyncCoordinator] Dead-lettering ${deadLettered.length} actions after ${this.maxActionRetries} retries`,\n\t\t\t);\n\t\t\tawait this.actionQueue.ack(deadLettered.map((e) => e.id));\n\t\t\tfor (const entry of deadLettered) {\n\t\t\t\tthis.emit(\"onActionComplete\", entry.action.actionId, {\n\t\t\t\t\tactionId: entry.action.actionId,\n\t\t\t\t\tcode: \"DEAD_LETTERED\",\n\t\t\t\t\tmessage: `Action dead-lettered after ${this.maxActionRetries} retries`,\n\t\t\t\t\tretryable: false,\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\n\t\tif (entries.length === 0) return;\n\n\t\tconst ids = entries.map((e) => e.id);\n\t\tawait 
this.actionQueue.markSending(ids);\n\n\t\tconst transportResult = await this.transport.executeAction({\n\t\t\tclientId: this._clientId,\n\t\t\tactions: entries.map((e) => e.action),\n\t\t});\n\n\t\tif (transportResult.ok) {\n\t\t\tawait this.actionQueue.ack(ids);\n\n\t\t\t// Emit events for each result\n\t\t\tfor (const result of transportResult.value.results) {\n\t\t\t\tthis.emit(\"onActionComplete\", result.actionId, result);\n\t\t\t}\n\n\t\t\t// Check if any results were retryable errors — nack those\n\t\t\tconst retryableIds: string[] = [];\n\t\t\tconst ackableIds: string[] = [];\n\t\t\tfor (let i = 0; i < transportResult.value.results.length; i++) {\n\t\t\t\tconst result = transportResult.value.results[i]!;\n\t\t\t\tif (isActionError(result) && result.retryable) {\n\t\t\t\t\tretryableIds.push(ids[i]!);\n\t\t\t\t} else {\n\t\t\t\t\tackableIds.push(ids[i]!);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Note: we already acked all above. For retryable errors in a batch,\n\t\t\t// the client should re-submit. This is handled by the action queue entry\n\t\t\t// being consumed and the event listener deciding to retry.\n\t\t} else {\n\t\t\t// Transport-level failure — nack all for retry\n\t\t\tawait this.actionQueue.nack(ids);\n\t\t}\n\t}\n\n\t/**\n\t * Discover available connectors and their supported action types.\n\t *\n\t * Delegates to the transport's `describeActions()` method. Returns\n\t * empty connectors when the transport does not support discovery.\n\t */\n\tasync describeActions(): Promise<Result<ActionDiscovery, LakeSyncError>> {\n\t\tif (!this.transport.describeActions) {\n\t\t\treturn { ok: true, value: { connectors: {} } };\n\t\t}\n\t\treturn this.transport.describeActions();\n\t}\n\n\t/** Stop auto-sync and clean up listeners */\n\tstopAutoSync(): void {\n\t\tif (this.syncIntervalId !== null) {\n\t\t\tclearInterval(this.syncIntervalId);\n\t\t\tthis.syncIntervalId = null;\n\t\t}\n\t\tif (this.visibilityHandler) {\n\t\t\tif (typeof document !== \"undefined\") {\n\t\t\t\tdocument.removeEventListener(\"visibilitychange\", this.visibilityHandler);\n\t\t\t}\n\t\t\tthis.visibilityHandler = null;\n\t\t}\n\t\t// Disconnect persistent transport (e.g. WebSocket)\n\t\tthis.transport.disconnect?.();\n\t}\n}\n","import {\n\tErr,\n\ttype LakeSyncError,\n\tOk,\n\ttype Result,\n\tSchemaError,\n\ttype TableSchema,\n} from \"@lakesync/core\";\nimport type { LocalDB } from \"../db/local-db\";\nimport { getSchema, migrateSchema } from \"../db/schema-registry\";\nimport type { DbError } from \"../db/types\";\n\n/**\n * Synchronises local table schemas with server-provided schema versions.\n *\n * Compares the locally stored schema version against the server's version\n * and applies additive migrations (ALTER TABLE ... ADD COLUMN) when the\n * client is behind.\n */\nexport class SchemaSynchroniser {\n\tconstructor(private readonly db: LocalDB) {}\n\n\t/**\n\t * Compare local schema version with server and apply migrations if behind.\n\t *\n\t * If the local version is already equal to or ahead of the server version,\n\t * this is a no-op. Otherwise, the local schema is migrated to match the\n\t * server schema via `migrateSchema()`, which runs ALTER TABLE ... 
ADD COLUMN\n\t * for each new column.\n\t *\n\t * @param table - The table name to synchronise\n\t * @param serverSchema - The server's current TableSchema\n\t * @param serverVersion - The server's schema version number\n\t * @returns Ok on success, or Err with a LakeSyncError on failure\n\t */\n\tasync synchronise(\n\t\ttable: string,\n\t\tserverSchema: TableSchema,\n\t\tserverVersion: number,\n\t): Promise<Result<void, LakeSyncError>> {\n\t\t// Fetch the current local schema via getSchema (ensures _lakesync_meta exists)\n\t\tconst localSchemaResult = await getSchema(this.db, table);\n\t\tif (!localSchemaResult.ok) return localSchemaResult;\n\n\t\tconst localSchema = localSchemaResult.value;\n\n\t\t// If no local schema is registered, we cannot migrate\n\t\tif (!localSchema) {\n\t\t\treturn Err(\n\t\t\t\tnew SchemaError(\n\t\t\t\t\t`Cannot synchronise schema for table \"${table}\": no local schema registered`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\n\t\t// Fetch the local schema version from _lakesync_meta\n\t\tconst localVersionResult = await this.getLocalVersion(table);\n\t\tif (!localVersionResult.ok) return localVersionResult;\n\n\t\tconst localVersion = localVersionResult.value;\n\n\t\t// If local is already at or ahead of server, nothing to do\n\t\tif (localVersion >= serverVersion) {\n\t\t\treturn Ok(undefined);\n\t\t}\n\n\t\t// Migrate the schema (diffs and runs ALTER TABLE for new columns)\n\t\tconst migrateResult = await migrateSchema(this.db, localSchema, serverSchema);\n\t\tif (!migrateResult.ok) return migrateResult;\n\n\t\t// Update the version in _lakesync_meta to match the server version.\n\t\t// migrateSchema() increments by 1, but we need to set the exact server version\n\t\t// in case the server is more than one version ahead.\n\t\tconst updateResult = await this.setLocalVersion(table, serverVersion);\n\t\tif (!updateResult.ok) return updateResult;\n\n\t\treturn Ok(undefined);\n\t}\n\n\t/**\n\t * Retrieve the local schema version for a given table from `_lakesync_meta`.\n\t *\n\t * @param table - The table name to look up\n\t * @returns The schema version number, or 0 if the table is not registered\n\t */\n\tprivate async getLocalVersion(table: string): Promise<Result<number, DbError>> {\n\t\tconst result = await this.db.query<{ schema_version: number }>(\n\t\t\t\"SELECT schema_version FROM _lakesync_meta WHERE table_name = ?\",\n\t\t\t[table],\n\t\t);\n\n\t\tif (!result.ok) return result;\n\n\t\tconst rows = result.value;\n\t\tif (rows.length === 0 || !rows[0]) {\n\t\t\treturn Ok(0);\n\t\t}\n\n\t\treturn Ok(rows[0].schema_version);\n\t}\n\n\t/**\n\t * Set the local schema version for a given table in `_lakesync_meta`.\n\t *\n\t * @param table - The table name to update\n\t * @param version - The version number to set\n\t * @returns Ok on success, or Err with a DbError on failure\n\t */\n\tprivate async setLocalVersion(table: string, version: number): Promise<Result<void, DbError>> {\n\t\treturn this.db.exec(\"UPDATE _lakesync_meta SET schema_version = ? 
WHERE table_name = ?\", [\n\t\t\tversion,\n\t\t\ttable,\n\t\t]);\n\t}\n}\n","import type {\n\tActionDiscovery,\n\tActionPush,\n\tActionResponse,\n\tHLCTimestamp,\n\tLakeSyncError,\n\tResult,\n\tRowDelta,\n\tSyncPull,\n\tSyncPush,\n\tSyncResponse,\n} from \"@lakesync/core\";\nimport {\n\tbigintReplacer,\n\tbigintReviver,\n\tErr,\n\tLakeSyncError as LSError,\n\tOk,\n\ttoError,\n} from \"@lakesync/core\";\nimport { decodeSyncResponse } from \"@lakesync/proto\";\nimport type { CheckpointResponse, SyncTransport } from \"./transport\";\n\n/** Configuration for the HTTP sync transport */\nexport interface HttpTransportConfig {\n\t/** Base URL of the gateway (e.g. \"https://gateway.example.com\") */\n\tbaseUrl: string;\n\t/** Gateway identifier */\n\tgatewayId: string;\n\t/** Bearer token for authentication */\n\ttoken: string;\n\t/** Optional custom fetch implementation (useful for testing) */\n\tfetch?: typeof globalThis.fetch;\n}\n\n/**\n * HTTP-based sync transport for communicating with a remote gateway.\n *\n * Sends push requests via POST and pull requests via GET, using\n * BigInt-safe JSON serialisation for HLC timestamps.\n */\nexport class HttpTransport implements SyncTransport {\n\tprivate readonly baseUrl: string;\n\tprivate readonly gatewayId: string;\n\tprivate readonly token: string;\n\tprivate readonly _fetch: typeof globalThis.fetch;\n\n\tconstructor(config: HttpTransportConfig) {\n\t\tthis.baseUrl = config.baseUrl.replace(/\\/+$/, \"\");\n\t\tthis.gatewayId = config.gatewayId;\n\t\tthis.token = config.token;\n\t\tthis._fetch = config.fetch ?? globalThis.fetch.bind(globalThis);\n\t}\n\n\t/**\n\t * Push local deltas to the remote gateway.\n\t *\n\t * Sends a POST request with the push payload as BigInt-safe JSON.\n\t */\n\tasync push(\n\t\tmsg: SyncPush,\n\t): Promise<Result<{ serverHlc: HLCTimestamp; accepted: number }, LakeSyncError>> {\n\t\tconst url = `${this.baseUrl}/sync/${this.gatewayId}/push`;\n\n\t\ttry {\n\t\t\tconst response = await this._fetch(url, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders: {\n\t\t\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t\t\tAuthorization: `Bearer ${this.token}`,\n\t\t\t\t},\n\t\t\t\tbody: JSON.stringify(msg, bigintReplacer),\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst text = await response.text().catch(() => \"Unknown error\");\n\t\t\t\treturn Err(new LSError(`Push failed (${response.status}): ${text}`, \"TRANSPORT_ERROR\"));\n\t\t\t}\n\n\t\t\tconst raw = await response.text();\n\t\t\tconst data = JSON.parse(raw, bigintReviver) as {\n\t\t\t\tserverHlc: HLCTimestamp;\n\t\t\t\taccepted: number;\n\t\t\t};\n\t\t\treturn Ok(data);\n\t\t} catch (error) {\n\t\t\tconst cause = toError(error);\n\t\t\treturn Err(new LSError(`Push request failed: ${cause.message}`, \"TRANSPORT_ERROR\", cause));\n\t\t}\n\t}\n\n\t/**\n\t * Pull remote deltas from the gateway.\n\t *\n\t * Sends a GET request with query parameters for the pull cursor.\n\t */\n\tasync pull(msg: SyncPull): Promise<Result<SyncResponse, LakeSyncError>> {\n\t\tconst params = new URLSearchParams({\n\t\t\tsince: msg.sinceHlc.toString(),\n\t\t\tclientId: msg.clientId,\n\t\t\tlimit: msg.maxDeltas.toString(),\n\t\t});\n\t\tif (msg.source) {\n\t\t\tparams.set(\"source\", msg.source);\n\t\t}\n\t\tconst url = `${this.baseUrl}/sync/${this.gatewayId}/pull?${params}`;\n\n\t\ttry {\n\t\t\tconst response = await this._fetch(url, {\n\t\t\t\tmethod: \"GET\",\n\t\t\t\theaders: {\n\t\t\t\t\tAuthorization: `Bearer ${this.token}`,\n\t\t\t\t},\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst 
text = await response.text().catch(() => \"Unknown error\");\n\t\t\t\treturn Err(new LSError(`Pull failed (${response.status}): ${text}`, \"TRANSPORT_ERROR\"));\n\t\t\t}\n\n\t\t\tconst raw = await response.text();\n\t\t\tconst data = JSON.parse(raw, bigintReviver) as SyncResponse;\n\t\t\treturn Ok(data);\n\t\t} catch (error) {\n\t\t\tconst cause = toError(error);\n\t\t\treturn Err(new LSError(`Pull request failed: ${cause.message}`, \"TRANSPORT_ERROR\", cause));\n\t\t}\n\t}\n\n\t/**\n\t * Execute imperative actions against external systems via the gateway.\n\t *\n\t * Sends a POST request with the action payload as BigInt-safe JSON.\n\t */\n\tasync executeAction(msg: ActionPush): Promise<Result<ActionResponse, LakeSyncError>> {\n\t\tconst url = `${this.baseUrl}/sync/${this.gatewayId}/action`;\n\n\t\ttry {\n\t\t\tconst response = await this._fetch(url, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders: {\n\t\t\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t\t\tAuthorization: `Bearer ${this.token}`,\n\t\t\t\t},\n\t\t\t\tbody: JSON.stringify(msg, bigintReplacer),\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst text = await response.text().catch(() => \"Unknown error\");\n\t\t\t\treturn Err(new LSError(`Action failed (${response.status}): ${text}`, \"TRANSPORT_ERROR\"));\n\t\t\t}\n\n\t\t\tconst raw = await response.text();\n\t\t\tconst data = JSON.parse(raw, bigintReviver) as ActionResponse;\n\t\t\treturn Ok(data);\n\t\t} catch (error) {\n\t\t\tconst cause = toError(error);\n\t\t\treturn Err(new LSError(`Action request failed: ${cause.message}`, \"TRANSPORT_ERROR\", cause));\n\t\t}\n\t}\n\n\t/**\n\t * Discover available connectors and their supported action types.\n\t *\n\t * Sends a GET request to the actions discovery endpoint.\n\t */\n\tasync describeActions(): Promise<Result<ActionDiscovery, LakeSyncError>> {\n\t\tconst url = `${this.baseUrl}/sync/${this.gatewayId}/actions`;\n\n\t\ttry {\n\t\t\tconst response = await this._fetch(url, {\n\t\t\t\tmethod: \"GET\",\n\t\t\t\theaders: {\n\t\t\t\t\tAuthorization: `Bearer ${this.token}`,\n\t\t\t\t},\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst text = await response.text().catch(() => \"Unknown error\");\n\t\t\t\treturn Err(\n\t\t\t\t\tnew LSError(`Describe actions failed (${response.status}): ${text}`, \"TRANSPORT_ERROR\"),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst data = (await response.json()) as ActionDiscovery;\n\t\t\treturn Ok(data);\n\t\t} catch (error) {\n\t\t\tconst cause = toError(error);\n\t\t\treturn Err(\n\t\t\t\tnew LSError(`Describe actions request failed: ${cause.message}`, \"TRANSPORT_ERROR\", cause),\n\t\t\t);\n\t\t}\n\t}\n\n\t/**\n\t * Download checkpoint for initial sync.\n\t *\n\t * Requests the streaming checkpoint format via Accept header and reads\n\t * length-prefixed proto frames from the response body.\n\t */\n\tasync checkpoint(): Promise<Result<CheckpointResponse | null, LakeSyncError>> {\n\t\tconst url = `${this.baseUrl}/sync/${this.gatewayId}/checkpoint`;\n\n\t\ttry {\n\t\t\tconst response = await this._fetch(url, {\n\t\t\t\tmethod: \"GET\",\n\t\t\t\theaders: {\n\t\t\t\t\tAuthorization: `Bearer ${this.token}`,\n\t\t\t\t\tAccept: \"application/x-lakesync-checkpoint-stream\",\n\t\t\t\t},\n\t\t\t});\n\n\t\t\tif (response.status === 404) {\n\t\t\t\treturn Ok(null);\n\t\t\t}\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst text = await response.text().catch(() => \"Unknown error\");\n\t\t\t\treturn Err(\n\t\t\t\t\tnew LSError(`Checkpoint failed (${response.status}): ${text}`, 
\"TRANSPORT_ERROR\"),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst deltas = await readStreamingCheckpointDeltas(response);\n\t\t\tconst hlcHeader = response.headers.get(\"X-Checkpoint-Hlc\");\n\t\t\tconst snapshotHlc = hlcHeader ? (BigInt(hlcHeader) as HLCTimestamp) : (0n as HLCTimestamp);\n\t\t\treturn Ok({ deltas, snapshotHlc });\n\t\t} catch (error) {\n\t\t\tconst cause = toError(error);\n\t\t\treturn Err(\n\t\t\t\tnew LSError(`Checkpoint request failed: ${cause.message}`, \"TRANSPORT_ERROR\", cause),\n\t\t\t);\n\t\t}\n\t}\n}\n\n/**\n * Read length-prefixed proto frames from a streaming checkpoint response.\n *\n * Each frame is: 4-byte big-endian length prefix + proto-encoded SyncResponse.\n * Collects all deltas across frames.\n */\nasync function readStreamingCheckpointDeltas(response: Response): Promise<RowDelta[]> {\n\tconst reader = response.body!.getReader();\n\tconst allDeltas: RowDelta[] = [];\n\tlet buffer = new Uint8Array(0);\n\n\tfor (;;) {\n\t\tconst { done, value } = await reader.read();\n\t\tif (done) break;\n\n\t\tconst newBuffer = new Uint8Array(buffer.length + value.length);\n\t\tnewBuffer.set(buffer);\n\t\tnewBuffer.set(value, buffer.length);\n\t\tbuffer = newBuffer;\n\n\t\twhile (buffer.length >= 4) {\n\t\t\tconst frameLength = new DataView(buffer.buffer, buffer.byteOffset).getUint32(0, false);\n\t\t\tif (buffer.length < 4 + frameLength) break;\n\n\t\t\tconst frameData = buffer.slice(4, 4 + frameLength);\n\t\t\tbuffer = buffer.slice(4 + frameLength);\n\n\t\t\tconst decoded = decodeSyncResponse(frameData);\n\t\t\tif (decoded.ok) {\n\t\t\t\tallDeltas.push(...decoded.value.deltas);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn allDeltas;\n}\n","import type {\n\tActionDiscovery,\n\tActionPush,\n\tActionResponse,\n\tActionValidationError,\n\tAuthContext,\n\tHLCTimestamp,\n\tLakeSyncError,\n\tResult,\n\tSyncPull,\n\tSyncPush,\n\tSyncResponse,\n} from \"@lakesync/core\";\nimport { Err, LakeSyncError as LSError, Ok } from \"@lakesync/core\";\nimport type { CheckpointResponse, SyncTransport } from \"./transport\";\n\n/**\n * Gateway-like interface used by LocalTransport.\n *\n * Matches the shape of SyncGateway's push/pull/action methods without\n * requiring a direct dependency on `@lakesync/gateway`.\n */\nexport interface LocalGateway {\n\t/** Handle an incoming push from a client */\n\thandlePush(msg: SyncPush): Result<{ serverHlc: HLCTimestamp; accepted: number }, LakeSyncError>;\n\t/** Handle a pull request from a client */\n\thandlePull(msg: SyncPull): Result<SyncResponse, LakeSyncError>;\n\t/** Handle an action push from a client */\n\thandleAction?(\n\t\tmsg: ActionPush,\n\t\tcontext?: AuthContext,\n\t): Promise<Result<ActionResponse, ActionValidationError>>;\n\t/** Describe available action handlers and their supported actions. 
*/\n\tdescribeActions?(): ActionDiscovery;\n}\n\n/**\n * In-process transport that wraps a local SyncGateway instance.\n *\n * Useful for testing and single-tab offline demos where the client\n * and gateway run in the same process.\n */\nexport class LocalTransport implements SyncTransport {\n\tconstructor(private readonly gateway: LocalGateway) {}\n\n\t/** Push local deltas to the in-process gateway */\n\tasync push(\n\t\tmsg: SyncPush,\n\t): Promise<Result<{ serverHlc: HLCTimestamp; accepted: number }, LakeSyncError>> {\n\t\treturn this.gateway.handlePush(msg);\n\t}\n\n\t/** Pull remote deltas from the in-process gateway */\n\tasync pull(msg: SyncPull): Promise<Result<SyncResponse, LakeSyncError>> {\n\t\tconst result = this.gateway.handlePull(msg);\n\t\treturn result instanceof Promise ? result : result;\n\t}\n\n\t/** Local transport has no checkpoint — returns null */\n\tasync checkpoint(): Promise<Result<CheckpointResponse | null, LakeSyncError>> {\n\t\treturn Ok(null);\n\t}\n\n\t/** Execute actions against the in-process gateway. */\n\tasync executeAction(msg: ActionPush): Promise<Result<ActionResponse, LakeSyncError>> {\n\t\tif (!this.gateway.handleAction) {\n\t\t\treturn Err(new LSError(\"Local gateway does not support actions\", \"TRANSPORT_ERROR\"));\n\t\t}\n\t\tconst result = await this.gateway.handleAction(msg);\n\t\tif (!result.ok) {\n\t\t\treturn Err(new LSError(result.error.message, result.error.code));\n\t\t}\n\t\treturn Ok(result.value);\n\t}\n\n\t/** Discover available connectors and their supported action types. */\n\tasync describeActions(): Promise<Result<ActionDiscovery, LakeSyncError>> {\n\t\tif (!this.gateway.describeActions) {\n\t\t\treturn Ok({ connectors: {} });\n\t\t}\n\t\treturn Ok(this.gateway.describeActions());\n\t}\n}\n","import type {\n\tHLCTimestamp,\n\tLakeSyncError,\n\tResult,\n\tRowDelta,\n\tSyncPull,\n\tSyncPush,\n\tSyncResponse,\n} from \"@lakesync/core\";\nimport { Err, LakeSyncError as LSError, Ok, toError } from \"@lakesync/core\";\nimport {\n\tdecodeBroadcastFrame,\n\tdecodeSyncResponse,\n\tencodeSyncPull,\n\tencodeSyncPush,\n\tTAG_BROADCAST,\n\tTAG_SYNC_PULL,\n\tTAG_SYNC_PUSH,\n} from \"@lakesync/proto\";\nimport type { CheckpointResponse, SyncTransport } from \"./transport\";\nimport { HttpTransport, type HttpTransportConfig } from \"./transport-http\";\n\n/** Configuration for the WebSocket sync transport. */\nexport interface WebSocketTransportConfig {\n\t/** WebSocket URL, e.g. \"wss://gateway.example.com/sync/my-gw/ws\" */\n\turl: string;\n\t/** Bearer token (passed as ?token= query param for browser compat). */\n\ttoken: string;\n\t/** Called when server broadcasts deltas. */\n\tonBroadcast?: (deltas: RowDelta[], serverHlc: HLCTimestamp) => void;\n\t/** Reconnect base delay in ms (default 1000). */\n\treconnectBaseMs?: number;\n\t/** Max reconnect delay in ms (default 30000). */\n\treconnectMaxMs?: number;\n\t/** HTTP transport config for checkpoint fallback. */\n\thttpConfig?: HttpTransportConfig;\n}\n\n/** Default reconnect base delay. */\nconst DEFAULT_RECONNECT_BASE_MS = 1000;\n\n/** Default max reconnect delay. 
*/\nconst DEFAULT_RECONNECT_MAX_MS = 30_000;\n\n/**\n * WebSocket-based sync transport for real-time delta synchronisation.\n *\n * Uses the binary protobuf protocol with tag-based framing:\n * - `0x01` = SyncPush (client → server)\n * - `0x02` = SyncPull (client → server)\n * - `0x03` = Broadcast (server → client)\n *\n * Automatically reconnects on disconnect with exponential backoff.\n * Checkpoints are delegated to an internal {@link HttpTransport} (large\n * binary payloads are better suited to HTTP).\n */\nexport class WebSocketTransport implements SyncTransport {\n\tprivate readonly config: WebSocketTransportConfig;\n\tprivate readonly reconnectBaseMs: number;\n\tprivate readonly reconnectMaxMs: number;\n\tprivate readonly httpTransport: HttpTransport | null;\n\n\tprivate ws: WebSocket | null = null;\n\tprivate reconnectTimer: ReturnType<typeof setTimeout> | null = null;\n\tprivate reconnectAttempts = 0;\n\tprivate _connected = false;\n\tprivate intentionalClose = false;\n\n\t/** Pending request/response promise (push or pull). */\n\tprivate pending: {\n\t\tresolve: (value: Result<SyncResponse, LakeSyncError>) => void;\n\t\treject: (reason: Error) => void;\n\t} | null = null;\n\n\t/** Broadcast callback registered by the SyncCoordinator. */\n\tprivate broadcastCallback: ((deltas: RowDelta[], serverHlc: HLCTimestamp) => void) | null = null;\n\n\tconstructor(config: WebSocketTransportConfig) {\n\t\tthis.config = config;\n\t\tthis.reconnectBaseMs = config.reconnectBaseMs ?? DEFAULT_RECONNECT_BASE_MS;\n\t\tthis.reconnectMaxMs = config.reconnectMaxMs ?? DEFAULT_RECONNECT_MAX_MS;\n\t\tthis.httpTransport = config.httpConfig ? new HttpTransport(config.httpConfig) : null;\n\n\t\tif (config.onBroadcast) {\n\t\t\tthis.broadcastCallback = config.onBroadcast;\n\t\t}\n\t}\n\n\t/** Whether the WebSocket is currently connected. */\n\tget connected(): boolean {\n\t\treturn this._connected;\n\t}\n\n\t/** Whether this transport supports real-time server push. */\n\tget supportsRealtime(): boolean {\n\t\treturn true;\n\t}\n\n\t/** Register callback for server-initiated broadcasts. */\n\tonBroadcast(callback: (deltas: RowDelta[], serverHlc: HLCTimestamp) => void): void {\n\t\tthis.broadcastCallback = callback;\n\t}\n\n\t/** Open the WebSocket connection. */\n\tconnect(): void {\n\t\tif (this.ws) return;\n\t\tthis.intentionalClose = false;\n\t\tthis.openWebSocket();\n\t}\n\n\t/** Close the WebSocket connection and stop reconnecting. 
*/\n\tdisconnect(): void {\n\t\tthis.intentionalClose = true;\n\t\tif (this.reconnectTimer !== null) {\n\t\t\tclearTimeout(this.reconnectTimer);\n\t\t\tthis.reconnectTimer = null;\n\t\t}\n\t\tif (this.ws) {\n\t\t\tthis.ws.close(1000, \"Client disconnect\");\n\t\t\tthis.ws = null;\n\t\t}\n\t\tthis._connected = false;\n\t\tthis.reconnectAttempts = 0;\n\n\t\t// Reject any pending request\n\t\tif (this.pending) {\n\t\t\tthis.pending.resolve(Err(new LSError(\"WebSocket disconnected\", \"TRANSPORT_ERROR\")));\n\t\t\tthis.pending = null;\n\t\t}\n\t}\n\n\t/**\n\t * Push local deltas to the gateway via WebSocket.\n\t */\n\tasync push(\n\t\tmsg: SyncPush,\n\t): Promise<Result<{ serverHlc: HLCTimestamp; accepted: number }, LakeSyncError>> {\n\t\tconst encoded = encodeSyncPush({\n\t\t\tclientId: msg.clientId,\n\t\t\tdeltas: msg.deltas,\n\t\t\tlastSeenHlc: msg.lastSeenHlc,\n\t\t});\n\t\tif (!encoded.ok) {\n\t\t\treturn Err(new LSError(`Failed to encode push: ${encoded.error.message}`, \"TRANSPORT_ERROR\"));\n\t\t}\n\n\t\tconst frame = new Uint8Array(1 + encoded.value.length);\n\t\tframe[0] = TAG_SYNC_PUSH;\n\t\tframe.set(encoded.value, 1);\n\n\t\tconst response = await this.sendAndAwaitResponse(frame);\n\t\tif (!response.ok) return response;\n\n\t\treturn Ok({\n\t\t\tserverHlc: response.value.serverHlc,\n\t\t\taccepted:\n\t\t\t\tresponse.value.deltas.length === 0 ? msg.deltas.length : response.value.deltas.length,\n\t\t});\n\t}\n\n\t/**\n\t * Pull remote deltas from the gateway via WebSocket.\n\t */\n\tasync pull(msg: SyncPull): Promise<Result<SyncResponse, LakeSyncError>> {\n\t\tconst encoded = encodeSyncPull({\n\t\t\tclientId: msg.clientId,\n\t\t\tsinceHlc: msg.sinceHlc,\n\t\t\tmaxDeltas: msg.maxDeltas,\n\t\t});\n\t\tif (!encoded.ok) {\n\t\t\treturn Err(new LSError(`Failed to encode pull: ${encoded.error.message}`, \"TRANSPORT_ERROR\"));\n\t\t}\n\n\t\tconst frame = new Uint8Array(1 + encoded.value.length);\n\t\tframe[0] = TAG_SYNC_PULL;\n\t\tframe.set(encoded.value, 1);\n\n\t\treturn this.sendAndAwaitResponse(frame);\n\t}\n\n\t/**\n\t * Download checkpoint via HTTP (large binary payloads are better over HTTP).\n\t */\n\tasync checkpoint(): Promise<Result<CheckpointResponse | null, LakeSyncError>> {\n\t\tif (!this.httpTransport) {\n\t\t\treturn Ok(null);\n\t\t}\n\t\treturn this.httpTransport.checkpoint();\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Internal\n\t// -----------------------------------------------------------------------\n\n\tprivate openWebSocket(): void {\n\t\tconst url = `${this.config.url}?token=${encodeURIComponent(this.config.token)}`;\n\t\tthis.ws = new WebSocket(url);\n\t\tthis.ws.binaryType = \"arraybuffer\";\n\n\t\tthis.ws.onopen = () => {\n\t\t\tthis._connected = true;\n\t\t\tthis.reconnectAttempts = 0;\n\t\t};\n\n\t\tthis.ws.onmessage = (event: MessageEvent) => {\n\t\t\tif (!(event.data instanceof ArrayBuffer)) return;\n\t\t\tconst bytes = new Uint8Array(event.data);\n\t\t\tif (bytes.length < 2) return;\n\n\t\t\tconst tag = bytes[0];\n\n\t\t\tif (tag === TAG_BROADCAST) {\n\t\t\t\t// Server-initiated broadcast\n\t\t\t\tconst decoded = decodeBroadcastFrame(bytes);\n\t\t\t\tif (decoded.ok && this.broadcastCallback) {\n\t\t\t\t\tthis.broadcastCallback(decoded.value.deltas, decoded.value.serverHlc);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// Response to a pending push/pull request\n\t\t\t\tconst decoded = decodeSyncResponse(bytes);\n\t\t\t\tif (this.pending) {\n\t\t\t\t\tif (decoded.ok) 
{\n\t\t\t\t\t\tthis.pending.resolve(Ok(decoded.value));\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthis.pending.resolve(\n\t\t\t\t\t\t\tErr(\n\t\t\t\t\t\t\t\tnew LSError(\n\t\t\t\t\t\t\t\t\t`Failed to decode response: ${decoded.error.message}`,\n\t\t\t\t\t\t\t\t\t\"TRANSPORT_ERROR\",\n\t\t\t\t\t\t\t\t),\n\t\t\t\t\t\t\t),\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\t\t\t\t\tthis.pending = null;\n\t\t\t\t}\n\t\t\t}\n\t\t};\n\n\t\tthis.ws.onclose = () => {\n\t\t\tthis._connected = false;\n\t\t\tthis.ws = null;\n\n\t\t\t// Reject pending request\n\t\t\tif (this.pending) {\n\t\t\t\tthis.pending.resolve(\n\t\t\t\t\tErr(new LSError(\"WebSocket closed before response\", \"TRANSPORT_ERROR\")),\n\t\t\t\t);\n\t\t\t\tthis.pending = null;\n\t\t\t}\n\n\t\t\tif (!this.intentionalClose) {\n\t\t\t\tthis.scheduleReconnect();\n\t\t\t}\n\t\t};\n\n\t\tthis.ws.onerror = () => {\n\t\t\t// onclose will fire after onerror — reconnect handled there\n\t\t};\n\t}\n\n\tprivate scheduleReconnect(): void {\n\t\tconst delay = Math.min(this.reconnectBaseMs * 2 ** this.reconnectAttempts, this.reconnectMaxMs);\n\t\tthis.reconnectAttempts++;\n\t\tthis.reconnectTimer = setTimeout(() => {\n\t\t\tthis.reconnectTimer = null;\n\t\t\tthis.openWebSocket();\n\t\t}, delay);\n\t}\n\n\tprivate sendAndAwaitResponse(frame: Uint8Array): Promise<Result<SyncResponse, LakeSyncError>> {\n\t\treturn new Promise((resolve) => {\n\t\t\tif (!this.ws || !this._connected) {\n\t\t\t\tresolve(Err(new LSError(\"WebSocket not connected\", \"TRANSPORT_ERROR\")));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// Reject any existing pending request\n\t\t\tif (this.pending) {\n\t\t\t\tthis.pending.resolve(\n\t\t\t\t\tErr(new LSError(\"New request superseded pending request\", \"TRANSPORT_ERROR\")),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tthis.pending = {\n\t\t\t\tresolve,\n\t\t\t\treject: (reason: Error) => {\n\t\t\t\t\tresolve(Err(new LSError(reason.message, \"TRANSPORT_ERROR\")));\n\t\t\t\t},\n\t\t\t};\n\n\t\t\ttry {\n\t\t\t\tthis.ws.send(frame);\n\t\t\t} catch (error) {\n\t\t\t\tconst cause = toError(error);\n\t\t\t\tthis.pending = null;\n\t\t\t\tresolve(Err(new LSError(`WebSocket send failed: ${cause.message}`, 
\"TRANSPORT_ERROR\")));\n\t\t\t}\n\t\t});\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,SAA4B,cAAc;AAE1C,IAAM,cAAc;AACpB,IAAM,iBAAiB;AACvB,IAAM,aAAa;AAGnB,IAAI,WAAgC;AAGpC,eAAe,QAA+B;AAC7C,MAAI,SAAU,QAAO;AACrB,aAAW,MAAM,OAAO,aAAa,gBAAgB;AAAA,IACpD,QAAQ,IAAI;AACX,UAAI,CAAC,GAAG,iBAAiB,SAAS,UAAU,GAAG;AAC9C,WAAG,kBAAkB,UAAU;AAAA,MAChC;AAAA,IACD;AAAA,EACD,CAAC;AACD,SAAO;AACR;AAQA,eAAsB,aAAa,QAA4C;AAC9E,QAAM,MAAM,MAAM,MAAM;AACxB,QAAM,OAAO,MAAM,IAAI,IAAI,YAAY,MAAM;AAC7C,MAAI,gBAAgB,WAAY,QAAO;AACvC,SAAO;AACR;AAQA,eAAsB,aAAa,QAAgB,MAAiC;AACnF,QAAM,MAAM,MAAM,MAAM;AACxB,QAAM,IAAI,IAAI,YAAY,MAAM,MAAM;AACvC;AAOA,eAAsB,eAAe,QAA+B;AACnE,QAAM,MAAM,MAAM,MAAM;AACxB,QAAM,IAAI,OAAO,YAAY,MAAM;AACpC;;;ACnDA,OAAO,eAAe;;;ACSf,IAAM,UAAN,cAAsB,cAAc;AAAA,EAC1C,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,YAAY,KAAK;AAAA,EACjC;AACD;;;ADJA,SAAS,cAAiB,SAAiC;AAC1D,MAAI,QAAQ,WAAW,KAAK,CAAC,QAAQ,CAAC,GAAG;AACxC,WAAO,CAAC;AAAA,EACT;AACA,QAAM,EAAE,SAAS,OAAO,IAAI,QAAQ,CAAC;AACrC,SAAO,OAAO,IAAI,CAAC,QAAQ;AAC1B,UAAM,MAA+B,CAAC;AACtC,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACxC,YAAM,MAAM,QAAQ,CAAC;AACrB,UAAI,QAAQ,QAAW;AACtB,YAAI,GAAG,IAAI,IAAI,CAAC;AAAA,MACjB;AAAA,IACD;AACA,WAAO;AAAA,EACR,CAAC;AACF;AAGA,SAAS,YAAe,OAAe,IAAiC;AACvE,MAAI;AACH,WAAO,GAAG,GAAG,CAAC;AAAA,EACf,SAAS,KAAK;AACb,WAAO,IAAI,IAAI,QAAQ,OAAO,QAAQ,GAAG,CAAC,CAAC;AAAA,EAC5C;AACD;AAYO,IAAM,UAAN,MAAM,SAAQ;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,EAED,YAAY,IAAc,QAAkB,SAA0B;AAC7E,SAAK,MAAM;AACX,SAAK,UAAU;AACf,SAAK,WAAW;AAAA,EACjB;AAAA;AAAA,EAGA,IAAI,OAAe;AAClB,WAAO,KAAK,QAAQ;AAAA,EACrB;AAAA;AAAA,EAGA,IAAI,UAA2B;AAC9B,WAAO,KAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,aAAa,KAAK,QAAqD;AACtE,QAAI;AACH,YAAM,UAA2B,eAAe,OAAO,OAAO;AAE9D,YAAM,MAAM,MAAM,UAAU;AAE5B,UAAI,OAA0B;AAC9B,UAAI,YAAY,OAAO;AACtB,eAAO,MAAM,aAAa,OAAO,IAAI;AAAA,MACtC;AAEA,YAAM,KAAK,OAAO,IAAI,IAAI,SAAS,IAAI,IAAI,IAAI,IAAI,SAAS;AAC5D,aAAO,GAAG,IAAI,SAAQ,IAAI,QAAQ,OAAO,CAAC;AAAA,IAC3C,SAAS,KAAK;AACb,aAAO,IAAI,IAAI,QAAQ,4BAA4B,OAAO,IAAI,KAAK,QAAQ,GAAG,CAAC,CAAC;AAAA,IACjF;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,KAAK,KAAa,QAAoD;AAC3E,WAAO,YAAY,0BAA0B,GAAG,IAAI,MAAM;AACzD,WAAK,IAAI,IAAI,KAAK,MAAwC;AAAA,IAC3D,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,MAAS,KAAa,QAAmD;AAC9E,WAAO,YAAY,wBAAwB,GAAG,IAAI,MAAM;AACvD,YAAM,UAAU,KAAK,IAAI,KAAK,KAAK,MAAyC;AAC5E,aAAO,cAAiB,OAAO;AAAA,IAChC,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,YAAe,IAAyD;AAC7E,UAAM,KAAK,KAAK,mBAAmB;AAEnC,UAAM,cAAc,YAAY,+BAA+B,MAAM;AACpE,WAAK,IAAI,IAAI,OAAO;AAAA,IACrB,CAAC;AACD,QAAI,CAAC,YAAY,GAAI,QAAO;AAE5B,QAAI;AACH,YAAM,SAAS,GAAG,EAAE;AACpB,WAAK,IAAI,IAAI,QAAQ;AACrB,aAAO,GAAG,MAAM;AAAA,IACjB,SAAS,KAAK;AACb,UAAI;AACH,aAAK,IAAI,IAAI,UAAU;AAAA,MACxB,SAAS,cAAc;AAAA,MAEvB;AACA,aAAO,IAAI,IAAI,QAAQ,sBAAsB,QAAQ,GAAG,CAAC,CAAC;AAAA,IAC3D;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,OAAuC;AAC5C,QAAI,KAAK,aAAa,OAAO;AAC5B,aAAO,GAAG,MAAS;AAAA,IACpB;AACA,QAAI;AACH,YAAM,OAAO,KAAK,IAAI,OAAO;AAC7B,YAAM,aAAa,KAAK,QAAQ,MAAM,IAAI;AAC1C,aAAO,GAAG,MAAS;AAAA,IACpB,SAAS,KAAK;AACb,aAAO;AAAA,QACN,IAAI,QAAQ,4BAA4B,KAAK,QAAQ,IAAI,kBAAkB,QAAQ,GAAG,CAAC;AAAA,MACxF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAAuB;AAC5B,QAAI,KAAK,aAAa,OAAO;AAC5B,YAAM,KAAK,KAAK;AAAA,IACjB;AACA,SAAK,IAAI,MAAM;AAAA,EAChB;AAAA,EAEA,qBAAkC;AACjC,UAAM,KAAK,KAAK;AAChB,WAAO;AAAA,MACN,KAAK,KAAa,QAA2C;AAC5D,eAAO,YAAY,4BAA4B,GAAG,IAAI,MAAM;AAC3D,aAAG,IAAI,KAAK,MAAwC;AAAA,QACrD,CAAC;AAAA,MACF;AAAA,MACA,MAAS,KAAa,QAA0C;AAC/D,eAAO,YAAY,6BAA6B,GAAG,IAAI,MAAM;AAC5D,gBAAM,UAAU,GAAG,KAAK,KAAK,MAAyC;AACtE,iBAAO,cAAiB,OAAO;AAAA,QAChC,CAAC;AAAA,MACF;AAAA
,IACD;AAAA,EACD;AACD;AAQA,SAAS,eAAe,YAAmD;AAC1E,MAAI,eAAe,SAAU,QAAO;AACpC,MAAI,eAAe,MAAO,QAAO;AAEjC,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,SAAO;AACR;;;AElMA,IAAM,kBAA0E;AAAA,EAC/E,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,MAAM;AAAA,EACN,MAAM;AACP;AASA,eAAe,gBAAgB,IAA6C;AAC3E,SAAO,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOd;AACF;AAmBA,eAAsB,eACrB,IACA,QAC+C;AAC/C,QAAM,aAAa,sBAAsB,OAAO,KAAK;AACrD,MAAI,CAAC,WAAW,GAAI,QAAO;AAC3B,aAAW,OAAO,OAAO,SAAS;AACjC,UAAM,WAAW,sBAAsB,IAAI,IAAI;AAC/C,QAAI,CAAC,SAAS,GAAI,QAAO;AAAA,EAC1B;AAEA,QAAM,aAAa,MAAM,gBAAgB,EAAE;AAC3C,MAAI,CAAC,WAAW,GAAI,QAAO;AAE3B,SAAO,GAAG,YAAY,CAAC,OAAO;AAC7B,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,UAAM,aAAa,KAAK,UAAU,MAAM;AAGxC;AAAA,MACC,GAAG;AAAA,QACF;AAAA;AAAA;AAAA;AAAA;AAAA,QAKA,CAAC,OAAO,OAAO,YAAY,GAAG;AAAA,MAC/B;AAAA,IACD;AAGA,UAAM,cAAc,gBAAgB,OAAO,KAAK;AAChD,UAAM,aAAa,OAAO,QACxB,IAAI,CAAC,QAAQ,GAAG,gBAAgB,IAAI,IAAI,CAAC,IAAI,gBAAgB,IAAI,IAAI,CAAC,EAAE,EACxE,KAAK,IAAI;AAEX,UAAM,YAAY,aACf,8BAA8B,WAAW,8BAA8B,UAAU,MACjF,8BAA8B,WAAW;AAE5C,kBAAc,GAAG,KAAK,SAAS,CAAC;AAAA,EACjC,CAAC;AACF;AAaA,eAAsB,UACrB,IACA,OAC+C;AAC/C,QAAM,aAAa,MAAM,gBAAgB,EAAE;AAC3C,MAAI,CAAC,WAAW,GAAI,QAAO;AAE3B,QAAM,cAAc,MAAM,GAAG;AAAA,IAC5B;AAAA,IACA,CAAC,KAAK;AAAA,EACP;AAEA,MAAI,CAAC,YAAY,GAAI,QAAO;AAE5B,QAAM,WAAW,YAAY,MAAM,CAAC;AACpC,MAAI,CAAC,UAAU;AACd,WAAO,GAAG,IAAI;AAAA,EACf;AAEA,MAAI;AACH,UAAM,SAAS,KAAK,MAAM,SAAS,WAAW;AAC9C,WAAO,GAAG,MAAM;AAAA,EACjB,SAAS,KAAK;AACb,WAAO;AAAA,MACN,IAAI;AAAA,QACH,0CAA0C,KAAK;AAAA,QAC/C,eAAe,QAAQ,MAAM;AAAA,MAC9B;AAAA,IACD;AAAA,EACD;AACD;AAoBA,eAAsB,cACrB,IACA,WACA,WAC+C;AAE/C,MAAI,UAAU,UAAU,UAAU,OAAO;AACxC,WAAO;AAAA,MACN,IAAI;AAAA,QACH,+CAA+C,UAAU,KAAK,gCAAgC,UAAU,KAAK;AAAA,MAC9G;AAAA,IACD;AAAA,EACD;AAEA,QAAM,YAAY,UAAU;AAE5B,QAAM,aAAa,sBAAsB,SAAS;AAClD,MAAI,CAAC,WAAW,GAAI,QAAO;AAC3B,aAAW,OAAO,UAAU,SAAS;AACpC,UAAM,WAAW,sBAAsB,IAAI,IAAI;AAC/C,QAAI,CAAC,SAAS,GAAI,QAAO;AAAA,EAC1B;AAGA,QAAM,eAAe,oBAAI,IAAoB;AAC7C,aAAW,OAAO,UAAU,SAAS;AACpC,iBAAa,IAAI,IAAI,MAAM,IAAI,IAAI;AAAA,EACpC;AAEA,QAAM,eAAe,oBAAI,IAAoB;AAC7C,aAAW,OAAO,UAAU,SAAS;AACpC,iBAAa,IAAI,IAAI,MAAM,IAAI,IAAI;AAAA,EACpC;AAGA,aAAW,OAAO,UAAU,SAAS;AACpC,QAAI,CAAC,aAAa,IAAI,IAAI,IAAI,GAAG;AAChC,aAAO;AAAA,QACN,IAAI;AAAA,UACH,yBAAyB,IAAI,IAAI,iBAAiB,SAAS;AAAA,QAC5D;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAGA,aAAW,OAAO,UAAU,SAAS;AACpC,UAAM,UAAU,aAAa,IAAI,IAAI,IAAI;AACzC,QAAI,YAAY,UAAa,YAAY,IAAI,MAAM;AAClD,aAAO;AAAA,QACN,IAAI;AAAA,UACH,iCAAiC,IAAI,IAAI,eAAe,SAAS,WAAW,OAAO,SAAS,IAAI,IAAI;AAAA,QACrG;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAGA,QAAM,eAAe,UAAU,QAAQ,OAAO,CAAC,QAAQ,CAAC,aAAa,IAAI,IAAI,IAAI,CAAC;AAElF,QAAM,aAAa,MAAM,gBAAgB,EAAE;AAC3C,MAAI,CAAC,WAAW,GAAI,QAAO;AAE3B,SAAO,GAAG,YAAY,CAAC,OAAO;AAE7B,UAAM,cAAc,gBAAgB,SAAS;AAC7C,eAAW,OAAO,cAAc;AAC/B;AAAA,QACC,GAAG;AAAA,UACF,eAAe,WAAW,eAAe,gBAAgB,IAAI,IAAI,CAAC,IAAI,gBAAgB,IAAI,IAAI,CAAC;AAAA,QAChG;AAAA,MACD;AAAA,IACD;AAGA,UAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,UAAM,aAAa,KAAK,UAAU,SAAS;AAE3C;AAAA,MACC,GAAG;AAAA,QACF;AAAA;AAAA;AAAA;AAAA;AAAA,QAKA,CAAC,YAAY,KAAK,SAAS;AAAA,MAC5B;AAAA,IACD;AAAA,EACD,CAAC;AACF;;;ACzPA,SAA4B,UAAAA,eAAc;AAG1C,IAAM,UAAU;AAChB,IAAM,aAAa;AACnB,IAAMC,cAAa;AAcnB,SAAS,gBAAgB,QAAkC;AAC1D,SAAO,EAAE,GAAG,QAAQ,KAAK,OAAO,IAAI,SAAS,EAAE;AAChD;AAGA,SAAS,kBAAkB,YAAsC;AAChE,SAAO,EAAE,GAAG,YAAY,KAAK,OAAO,WAAW,GAAG,EAAkB;AACrE;AAGA,SAAS,eAAe,OAAqD;AAC5E,SAAO,EAAE,GAAG,OAAO,QAAQ,gBAAgB,MAAM,MAAM,EAAE;AAC1D;AAGA,SAAS,iBAAiB,YAA0D;AACnF,SAAO,EAAE,GAAG,YAAY,QAAQ,kBAAkB,WAAW,MAAM,EAAE;AACtE;AAGA,eAAe,UACd,WACA,IACoC;AACpC,MAAI;AACH,WAAO,GAAG,MAAM,GAAG,CAAC;AAAA,EACrB,SAAS,OAAO;AACf,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,WAAO,IAAI,IAAI,c
AAc,aAAa,SAAS,KAAK,OAAO,IAAI,aAAa,CAAC;AAAA,EAClF;AACD;AAQO,IAAM,iBAAN,MAA4C;AAAA,EAC1C;AAAA,EACA,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOlB,YAAY,SAAiB,SAAS;AACrC,SAAK,YAAYD,QAAO,QAAQ,YAAY;AAAA,MAC3C,QAAQ,IAAI;AACX,cAAM,QAAQ,GAAG,kBAAkBC,aAAY,EAAE,SAAS,KAAK,CAAC;AAChE,cAAM,YAAY,UAAU,QAAQ;AACpC,cAAM,YAAY,aAAa,WAAW;AAAA,MAC3C;AAAA,IACD,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,KAAK,QAAkE;AAC5E,WAAO,UAAU,wBAAwB,YAAY;AACpD,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,QAA0B;AAAA,QAC/B,IAAI,cAAc,KAAK,IAAI,CAAC,IAAI,EAAE,KAAK,OAAO;AAAA,QAC9C;AAAA,QACA,QAAQ;AAAA,QACR,WAAW,KAAK,IAAI;AAAA,QACpB,YAAY;AAAA,MACb;AACA,YAAM,GAAG,IAAIA,aAAY,eAAe,KAAK,CAAC;AAC9C,aAAO;AAAA,IACR,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,KAAK,OAAmE;AAC7E,WAAO,UAAU,qBAAqB,YAAY;AACjD,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,KAAK,GAAG,YAAYA,aAAY,UAAU;AAChD,YAAM,QAAQ,GAAG,YAAYA,WAAU,EAAE,MAAM,WAAW;AAC1D,YAAM,UAA8B,CAAC;AAErC,UAAI,SAAS,MAAM,MAAM,WAAW;AACpC,aAAO,UAAU,QAAQ,SAAS,OAAO;AACxC,cAAM,aAAa,OAAO;AAC1B,YAAI,WAAW,WAAW,WAAW;AACpC,gBAAM,QAAQ,iBAAiB,UAAU;AACzC,cAAI,MAAM,eAAe,UAAa,MAAM,cAAc,KAAK,IAAI,GAAG;AACrE,oBAAQ,KAAK,KAAK;AAAA,UACnB;AAAA,QACD;AACA,iBAAS,MAAM,OAAO,SAAS;AAAA,MAChC;AAEA,aAAO;AAAA,IACR,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,YAAY,KAAqD;AACtE,WAAO,UAAU,gBAAgB,YAAY;AAC5C,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,KAAK,GAAG,YAAYA,aAAY,WAAW;AACjD,YAAM,QAAQ,GAAG,YAAYA,WAAU;AAEvC,iBAAW,MAAM,KAAK;AACrB,cAAM,aAAc,MAAM,MAAM,IAAI,EAAE;AACtC,YAAI,YAAY,WAAW,WAAW;AACrC,qBAAW,SAAS;AACpB,gBAAM,MAAM,IAAI,UAAU;AAAA,QAC3B;AAAA,MACD;AAEA,YAAM,GAAG;AAAA,IACV,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,IAAI,KAAqD;AAC9D,WAAO,UAAU,OAAO,YAAY;AACnC,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,KAAK,GAAG,YAAYA,aAAY,WAAW;AACjD,iBAAW,MAAM,KAAK;AACrB,cAAM,GAAG,YAAYA,WAAU,EAAE,OAAO,EAAE;AAAA,MAC3C;AACA,YAAM,GAAG;AAAA,IACV,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,KAAK,KAAqD;AAC/D,WAAO,UAAU,QAAQ,YAAY;AACpC,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,KAAK,GAAG,YAAYA,aAAY,WAAW;AACjD,YAAM,QAAQ,GAAG,YAAYA,WAAU;AAEvC,iBAAW,MAAM,KAAK;AACrB,cAAM,aAAc,MAAM,MAAM,IAAI,EAAE;AACtC,YAAI,YAAY;AACf,qBAAW,SAAS;AACpB,qBAAW;AACX,gBAAM,YAAY,KAAK,IAAI,MAAO,KAAK,WAAW,YAAY,GAAM;AACpE,UAAC,WAAuC,aAAa,KAAK,IAAI,IAAI;AAClE,gBAAM,MAAM,IAAI,UAAU;AAAA,QAC3B;AAAA,MACD;AAEA,YAAM,GAAG;AAAA,IACV,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,QAAgD;AACrD,WAAO,UAAU,aAAa,YAAY;AACzC,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,MAAO,MAAM,GAAG,OAAOA,WAAU;AACvC,aAAO,IAAI,OAAO,CAAC,MAAM,EAAE,WAAW,aAAa,EAAE,WAAW,SAAS,EAAE;AAAA,IAC5E,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,QAA8C;AACnD,WAAO,UAAU,sBAAsB,YAAY;AAClD,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,GAAG,MAAMA,WAAU;AAAA,IAC1B,CAAC;AAAA,EACF;AACD;;;ACzLA,SAA4B,UAAAC,eAAc;AAG1C,IAAMC,WAAU;AAChB,IAAMC,cAAa;AACnB,IAAMC,cAAa;AAenB,SAAS,eAAe,OAAqC;AAC5D,SAAO,EAAE,GAAG,OAAO,KAAK,MAAM,IAAI,SAAS,EAAE;AAC9C;AAGA,SAAS,iBAAiB,YAA0C;AACnE,SAAO,EAAE,GAAG,YAAY,KAAK,OAAO,WAAW,GAAG,EAAkB;AACrE;AAGA,SAASC,gBAAe,OAAyC;AAChE,SAAO,EAAE,GAAG,OAAO,OAAO,eAAe,MAAM,KAAK,EAAE;AACvD;AAGA,SAASC,kBAAiB,YAA8C;AACvE,SAAO,EAAE,GAAG,YAAY,OAAO,iBAAiB,WAAW,KAAK,EAAE;AACnE;AAGA,eAAeC,WACd,WACA,IACoC;AACpC,MAAI;AACH,WAAO,GAAG,MAAM,GAAG,CAAC;AAAA,EACrB,SAAS,OAAO;AACf,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,WAAO,IAAI,IAAI,cAAc,aAAa,SAAS,KAAK,OAAO,IAAI,aAAa,CAAC;AAAA,EAClF;AACD;AASO,IAAM,WAAN,MAAoC;AAAA,EAClC;AAAA,EACA,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQlB,YAAY,SAAiBL,UAAS;AACrC,SAAK,YAAYD,QAAO,QAAQE,aAAY;AAAA,MAC3C,QAAQ,IAAI;AACX,cAAM,QAAQ,GAAG,kBAAkBC,aAAY,EAAE,SAAS,KAAK,CAAC;AAChE,cAAM,YAAY,UAAU,QAAQ;AACpC,cAAM,YAAY,aAAa,WAAW;AAAA,MAC3C;AAAA,IACD,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,KAAK,OAA6D;AACvE,WAAOG,WAAU,iBAAiB,YAAY;AAC7C,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,QAAoB;AAAA,QACzB,IAAI,OAAO,KAAK,IAAI,
CAAC,IAAI,EAAE,KAAK,OAAO;AAAA,QACvC;AAAA,QACA,QAAQ;AAAA,QACR,WAAW,KAAK,IAAI;AAAA,QACpB,YAAY;AAAA,MACb;AACA,YAAM,GAAG,IAAIH,aAAYC,gBAAe,KAAK,CAAC;AAC9C,aAAO;AAAA,IACR,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,KAAK,OAA6D;AACvE,WAAOE,WAAU,cAAc,YAAY;AAC1C,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,KAAK,GAAG,YAAYH,aAAY,UAAU;AAChD,YAAM,QAAQ,GAAG,YAAYA,WAAU,EAAE,MAAM,WAAW;AAC1D,YAAM,UAAwB,CAAC;AAE/B,UAAI,SAAS,MAAM,MAAM,WAAW;AACpC,aAAO,UAAU,QAAQ,SAAS,OAAO;AACxC,cAAM,aAAa,OAAO;AAC1B,YAAI,WAAW,WAAW,WAAW;AACpC,gBAAM,QAAQE,kBAAiB,UAAU;AACzC,cAAI,MAAM,eAAe,UAAa,MAAM,cAAc,KAAK,IAAI,GAAG;AACrE,oBAAQ,KAAK,KAAK;AAAA,UACnB;AAAA,QACD;AACA,iBAAS,MAAM,OAAO,SAAS;AAAA,MAChC;AAEA,aAAO;AAAA,IACR,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,YAAY,KAAqD;AACtE,WAAOC,WAAU,gBAAgB,YAAY;AAC5C,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,KAAK,GAAG,YAAYH,aAAY,WAAW;AACjD,YAAM,QAAQ,GAAG,YAAYA,WAAU;AAEvC,iBAAW,MAAM,KAAK;AACrB,cAAM,aAAc,MAAM,MAAM,IAAI,EAAE;AACtC,YAAI,YAAY,WAAW,WAAW;AACrC,qBAAW,SAAS;AACpB,gBAAM,MAAM,IAAI,UAAU;AAAA,QAC3B;AAAA,MACD;AAEA,YAAM,GAAG;AAAA,IACV,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,IAAI,KAAqD;AAC9D,WAAOG,WAAU,OAAO,YAAY;AACnC,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,KAAK,GAAG,YAAYH,aAAY,WAAW;AACjD,iBAAW,MAAM,KAAK;AACrB,cAAM,GAAG,YAAYA,WAAU,EAAE,OAAO,EAAE;AAAA,MAC3C;AACA,YAAM,GAAG;AAAA,IACV,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,KAAK,KAAqD;AAC/D,WAAOG,WAAU,QAAQ,YAAY;AACpC,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,KAAK,GAAG,YAAYH,aAAY,WAAW;AACjD,YAAM,QAAQ,GAAG,YAAYA,WAAU;AAEvC,iBAAW,MAAM,KAAK;AACrB,cAAM,aAAc,MAAM,MAAM,IAAI,EAAE;AACtC,YAAI,YAAY;AACf,qBAAW,SAAS;AACpB,qBAAW;AACX,gBAAM,YAAY,KAAK,IAAI,MAAO,KAAK,WAAW,YAAY,GAAM;AACpE,UAAC,WAAuC,aAAa,KAAK,IAAI,IAAI;AAClE,gBAAM,MAAM,IAAI,UAAU;AAAA,QAC3B;AAAA,MACD;AAEA,YAAM,GAAG;AAAA,IACV,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,QAAgD;AACrD,WAAOG,WAAU,aAAa,YAAY;AACzC,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,MAAO,MAAM,GAAG,OAAOH,WAAU;AACvC,aAAO,IAAI,OAAO,CAAC,MAAM,EAAE,WAAW,OAAO,EAAE;AAAA,IAChD,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,QAA8C;AACnD,WAAOG,WAAU,eAAe,YAAY;AAC3C,YAAM,KAAK,MAAM,KAAK;AACtB,YAAM,GAAG,MAAMH,WAAU;AAAA,IAC1B,CAAC;AAAA,EACF;AACD;;;ACtLO,IAAM,oBAAN,MAA+C;AAAA,EAC7C,UAAyC,oBAAI,IAAI;AAAA,EACjD,UAAU;AAAA;AAAA,EAGlB,MAAM,KAAK,QAAkE;AAC5E,UAAM,QAA0B;AAAA,MAC/B,IAAI,cAAc,EAAE,KAAK,OAAO;AAAA,MAChC;AAAA,MACA,QAAQ;AAAA,MACR,WAAW,KAAK,IAAI;AAAA,MACpB,YAAY;AAAA,IACb;AACA,SAAK,QAAQ,IAAI,MAAM,IAAI,KAAK;AAChC,WAAO,GAAG,KAAK;AAAA,EAChB;AAAA;AAAA,EAGA,MAAM,KAAK,OAAmE;AAC7E,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,UAAU,CAAC,GAAG,KAAK,QAAQ,OAAO,CAAC,EACvC,OAAO,CAAC,MAAM,EAAE,WAAW,cAAc,EAAE,eAAe,UAAa,EAAE,cAAc,IAAI,EAC3F,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS,EACxC,MAAM,GAAG,KAAK;AAChB,WAAO,GAAG,OAAO;AAAA,EAClB;AAAA;AAAA,EAGA,MAAM,YAAY,KAAqD;AACtE,eAAW,MAAM,KAAK;AACrB,YAAM,QAAQ,KAAK,QAAQ,IAAI,EAAE;AACjC,UAAI,OAAO,WAAW,WAAW;AAChC,cAAM,SAAS;AAAA,MAChB;AAAA,IACD;AACA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA,EAGA,MAAM,IAAI,KAAqD;AAC9D,eAAW,MAAM,KAAK;AACrB,WAAK,QAAQ,OAAO,EAAE;AAAA,IACvB;AACA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA,EAGA,MAAM,KAAK,KAAqD;AAC/D,eAAW,MAAM,KAAK;AACrB,YAAM,QAAQ,KAAK,QAAQ,IAAI,EAAE;AACjC,UAAI,OAAO;AACV,cAAM,SAAS;AACf,cAAM;AACN,cAAM,YAAY,KAAK,IAAI,MAAO,KAAK,MAAM,YAAY,GAAM;AAC/D,cAAM,aAAa,KAAK,IAAI,IAAI;AAAA,MACjC;AAAA,IACD;AACA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA,EAGA,MAAM,QAAgD;AACrD,UAAM,QAAQ,CAAC,GAAG,KAAK,QAAQ,OAAO,CAAC,EAAE;AAAA,MACxC,CAAC,MAAM,EAAE,WAAW,aAAa,EAAE,WAAW;AAAA,IAC/C,EAAE;AACF,WAAO,GAAG,KAAK;AAAA,EAChB;AAAA;AAAA,EAGA,MAAM,QAA8C;AACnD,SAAK,QAAQ,MAAM;AACnB,WAAO,GAAG,MAAS;AAAA,EACpB;AACD;;;ACzEO,IAAM,cAAN,MAAuC;AAAA,EACrC,UAAmC,oBAAI,IAAI;AAAA,EAC3C,UAAU;AAAA;AAAA,EAGlB,MAAM,KAAK,OAA6D;AACvE,UAAM,QAAoB;AAAA,MACzB,IAAI,OAAO,EAAE,KAAK,OAAO;A
AAA,MACzB;AAAA,MACA,QAAQ;AAAA,MACR,WAAW,KAAK,IAAI;AAAA,MACpB,YAAY;AAAA,IACb;AACA,SAAK,QAAQ,IAAI,MAAM,IAAI,KAAK;AAChC,WAAO,GAAG,KAAK;AAAA,EAChB;AAAA;AAAA,EAGA,MAAM,KAAK,OAA6D;AACvE,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,UAAU,CAAC,GAAG,KAAK,QAAQ,OAAO,CAAC,EACvC,OAAO,CAAC,MAAM,EAAE,WAAW,cAAc,EAAE,eAAe,UAAa,EAAE,cAAc,IAAI,EAC3F,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS,EACxC,MAAM,GAAG,KAAK;AAChB,WAAO,GAAG,OAAO;AAAA,EAClB;AAAA;AAAA,EAGA,MAAM,YAAY,KAAqD;AACtE,eAAW,MAAM,KAAK;AACrB,YAAM,QAAQ,KAAK,QAAQ,IAAI,EAAE;AACjC,UAAI,OAAO,WAAW,WAAW;AAChC,cAAM,SAAS;AAAA,MAChB;AAAA,IACD;AACA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA,EAGA,MAAM,IAAI,KAAqD;AAC9D,eAAW,MAAM,KAAK;AACrB,WAAK,QAAQ,OAAO,EAAE;AAAA,IACvB;AACA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA,EAGA,MAAM,KAAK,KAAqD;AAC/D,eAAW,MAAM,KAAK;AACrB,YAAM,QAAQ,KAAK,QAAQ,IAAI,EAAE;AACjC,UAAI,OAAO;AACV,cAAM,SAAS;AACf,cAAM;AACN,cAAM,YAAY,KAAK,IAAI,MAAO,KAAK,MAAM,YAAY,GAAM;AAC/D,cAAM,aAAa,KAAK,IAAI,IAAI;AAAA,MACjC;AAAA,IACD;AACA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA,EAGA,MAAM,QAAgD;AACrD,UAAM,QAAQ,CAAC,GAAG,KAAK,QAAQ,OAAO,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,WAAW,OAAO,EAAE;AAC7E,WAAO,GAAG,KAAK;AAAA,EAChB;AAAA;AAAA,EAGA,MAAM,QAA8C;AACnD,SAAK,QAAQ,MAAM;AACnB,WAAO,GAAG,MAAS;AAAA,EACpB;AACD;;;AC/CA,eAAsB,kBACrB,IACA,QACA,UACA,cACyC;AACzC,MAAI,OAAO,WAAW,GAAG;AACxB,WAAO,GAAG,CAAC;AAAA,EACZ;AAGA,QAAM,oBAAoB,MAAM,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA,EAKvC;AACD,MAAI,CAAC,kBAAkB,IAAI;AAC1B,WAAO;AAAA,MACN,IAAI;AAAA,QACH;AAAA,QACA;AAAA,QACA,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,EACD;AAGA,QAAM,cAAc,MAAM,GAAG,KAAK,OAAO;AACzC,MAAI,CAAC,YAAY,IAAI;AACpB,WAAO;AAAA,MACN,IAAI;AAAA,QACH;AAAA,QACA;AAAA,QACA,YAAY;AAAA,MACb;AAAA,IACD;AAAA,EACD;AAEA,QAAM,aAAa,MAAM,aAAa,KAAK,OAAO,gBAAgB;AAClE,MAAI,CAAC,WAAW,IAAI;AACnB,UAAM,GAAG,KAAK,UAAU;AACxB,WAAO;AAAA,MACN,IAAI;AAAA,QACH;AAAA,QACA;AAAA,QACA,WAAW;AAAA,MACZ;AAAA,IACD;AAAA,EACD;AACA,QAAM,aAAa,oBAAI,IAAwB;AAC/C,aAAW,SAAS,WAAW,OAAO;AACrC,eAAW,IAAI,GAAG,MAAM,MAAM,KAAK,IAAI,MAAM,MAAM,KAAK,IAAI,KAAK;AAAA,EAClE;AAEA,MAAI,eAAe;AAEnB,QAAM,iBAAiB,oBAAI,IAA0B;AAErD,aAAW,eAAe,QAAQ;AACjC,UAAM,SAAS,MAAM,cAAc,IAAI,aAAa,UAAU,cAAc,UAAU;AACtF,QAAI,CAAC,OAAO,IAAI;AAEf,YAAM,GAAG,KAAK,UAAU;AACxB,aAAO;AAAA,IACR;AAEA,QAAI,OAAO,OAAO;AACjB;AAAA,IACD;AAGA,UAAM,aAAa,eAAe,IAAI,YAAY,KAAK;AACvD,QAAI,eAAe,UAAa,IAAI,QAAQ,YAAY,KAAK,UAAU,IAAI,GAAG;AAC7E,qBAAe,IAAI,YAAY,OAAO,YAAY,GAAG;AAAA,IACtD;AAAA,EACD;AAGA,aAAW,CAAC,WAAW,GAAG,KAAK,gBAAgB;AAC9C,UAAM,eAAe,MAAM,GAAG;AAAA,MAC7B;AAAA,MACA,CAAC,WAAW,IAAI,SAAS,CAAC;AAAA,IAC3B;AACA,QAAI,CAAC,aAAa,IAAI;AACrB,YAAM,GAAG,KAAK,UAAU;AACxB,aAAO;AAAA,QACN,IAAI;AAAA,UACH,2CAA2C,SAAS;AAAA,UACpD;AAAA,UACA,aAAa;AAAA,QACd;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAGA,QAAM,eAAe,MAAM,GAAG,KAAK,QAAQ;AAC3C,MAAI,CAAC,aAAa,IAAI;AACrB,UAAM,GAAG,KAAK,UAAU;AACxB,WAAO;AAAA,MACN,IAAI;AAAA,QACH;AAAA,QACA;AAAA,QACA,aAAa;AAAA,MACd;AAAA,IACD;AAAA,EACD;AAEA,SAAO,GAAG,YAAY;AACvB;AAWA,eAAe,cACd,IACA,aACA,UACA,cACA,YAC0C;AAC1C,QAAM,mBAAmB,WAAW,IAAI,GAAG,YAAY,KAAK,IAAI,YAAY,KAAK,EAAE;AAEnF,MAAI,kBAAkB;AACrB,UAAM,aAAa,iBAAiB;AACpC,UAAM,gBAAgB,SAAS,QAAQ,YAAY,WAAW;AAC9D,QAAI,CAAC,cAAc,IAAI;AACtB,aAAO;AAAA,QACN,IAAI;AAAA,UACH,uCAAuC,YAAY,KAAK,eAAe,YAAY,KAAK;AAAA,UACxF;AAAA,UACA,cAAc;AAAA,QACf;AAAA,MACD;AAAA,IACD;AAEA,UAAM,WAAW,cAAc;AAG/B,UAAM,YACL,SAAS,aAAa,YAAY,YAAY,SAAS,QAAQ,YAAY;AAE5E,QAAI,WAAW;AAEd,YAAMI,eAAc,MAAM,cAAc,IAAI,QAAQ;AACpD,UAAI,CAACA,aAAY,IAAI;AACpB,eAAOA;AAAA,MACR;AAEA,YAAM,YAAY,MAAM,aAAa,IAAI,CAAC,iBAAiB,EAAE,CAAC;AAC9D,UAAI,CAAC,UAAU,IAAI;AAClB,eAAO;AAAA,UACN,IAAI;AAAA,YACH,oCAAoC,iBAAiB,EAAE;AAAA,YACvD;AAAA,YACA,UAAU;AAAA,UACX;AAAA,QACD;AAAA,MACD;AAEA,aAAO,GAAG,IAAI;AAAA,IACf;AAGA,WAAO,GAAG,
KAAK;AAAA,EAChB;AAGA,QAAM,cAAc,MAAM,cAAc,IAAI,WAAW;AACvD,MAAI,CAAC,YAAY,IAAI;AACpB,WAAO;AAAA,EACR;AAEA,SAAO,GAAG,IAAI;AACf;AAGA,SAAS,yBAAyB,OAA8C;AAC/E,QAAM,aAAa,sBAAsB,MAAM,KAAK;AACpD,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,IAAI,IAAI,cAAc,WAAW,MAAM,SAAS,aAAa,CAAC;AAAA,EACtE;AACA,aAAW,OAAO,MAAM,SAAS;AAChC,UAAM,WAAW,sBAAsB,IAAI,MAAM;AACjD,QAAI,CAAC,SAAS,IAAI;AACjB,aAAO,IAAI,IAAI,cAAc,SAAS,MAAM,SAAS,aAAa,CAAC;AAAA,IACpE;AAAA,EACD;AACA,SAAO,GAAG,MAAS;AACpB;AAaA,eAAe,cACd,IACA,OAC0C;AAC1C,QAAM,kBAAkB,yBAAyB,KAAK;AACtD,MAAI,CAAC,gBAAgB,GAAI,QAAO;AAEhC,QAAM,cAAc,gBAAgB,MAAM,KAAK;AAE/C,UAAQ,MAAM,IAAI;AAAA,IACjB,KAAK,UAAU;AACd,YAAM,WAAW,MAAM,QAAQ,IAAI,CAAC,MAAM,gBAAgB,EAAE,MAAM,CAAC;AACnE,YAAM,aAAa,CAAC,UAAU,GAAG,QAAQ;AACzC,YAAM,eAAe,WAAW,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AACxD,YAAM,SAAS,CAAC,MAAM,OAAO,GAAG,MAAM,QAAQ,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AACjE,YAAM,MAAM,eAAe,WAAW,KAAK,WAAW,KAAK,IAAI,CAAC,aAAa,YAAY;AAEzF,YAAM,SAAS,MAAM,GAAG,KAAK,KAAK,MAAM;AACxC,UAAI,CAAC,OAAO,IAAI;AACf,eAAO;AAAA,UACN,IAAI;AAAA,YACH,mCAAmC,MAAM,KAAK,eAAe,MAAM,KAAK;AAAA,YACxE;AAAA,YACA,OAAO;AAAA,UACR;AAAA,QACD;AAAA,MACD;AACA,aAAO,GAAG,IAAI;AAAA,IACf;AAAA,IAEA,KAAK,UAAU;AACd,UAAI,MAAM,QAAQ,WAAW,GAAG;AAE/B,eAAO,GAAG,IAAI;AAAA,MACf;AAEA,YAAM,aAAa,MAAM,QAAQ,IAAI,CAAC,MAAM,GAAG,gBAAgB,EAAE,MAAM,CAAC,MAAM,EAAE,KAAK,IAAI;AACzF,YAAM,SAAS,CAAC,GAAG,MAAM,QAAQ,IAAI,CAAC,MAAM,EAAE,KAAK,GAAG,MAAM,KAAK;AACjE,YAAM,MAAM,UAAU,WAAW,QAAQ,UAAU;AAEnD,YAAM,SAAS,MAAM,GAAG,KAAK,KAAK,MAAM;AACxC,UAAI,CAAC,OAAO,IAAI;AACf,eAAO;AAAA,UACN,IAAI;AAAA,YACH,mCAAmC,MAAM,KAAK,eAAe,MAAM,KAAK;AAAA,YACxE;AAAA,YACA,OAAO;AAAA,UACR;AAAA,QACD;AAAA,MACD;AACA,aAAO,GAAG,IAAI;AAAA,IACf;AAAA,IAEA,KAAK,UAAU;AACd,YAAM,MAAM,eAAe,WAAW;AACtC,YAAM,SAAS,MAAM,GAAG,KAAK,KAAK,CAAC,MAAM,KAAK,CAAC;AAC/C,UAAI,CAAC,OAAO,IAAI;AACf,eAAO;AAAA,UACN,IAAI;AAAA,YACH,mCAAmC,MAAM,KAAK,eAAe,MAAM,KAAK;AAAA,YACxE;AAAA,YACA,OAAO;AAAA,UACR;AAAA,QACD;AAAA,MACD;AACA,aAAO,GAAG,IAAI;AAAA,IACf;AAAA,EACD;AACD;;;ACtSA,SAAS,aAAa,KAAuD;AAC5E,QAAM,SAAkC,CAAC;AACzC,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC/C,QAAI,QAAQ,UAAU;AACrB,aAAO,GAAG,IAAI;AAAA,IACf;AAAA,EACD;AACA,SAAO;AACR;AAWO,IAAM,cAAN,MAAkB;AAAA,EAGxB,YACkB,IACA,OACA,KACA,UAChB;AAJgB;AACA;AACA;AACA;AAAA,EACf;AAAA,EAPK,cAAc,oBAAI,IAAgC;AAAA,EAS1D,MAAc,gBAAgB,OAAkE;AAC/F,QAAI,KAAK,YAAY,IAAI,KAAK,GAAG;AAChC,YAAM,SAAS,KAAK,YAAY,IAAI,KAAK;AACzC,aAAO,GAAG,UAAU,MAAS;AAAA,IAC9B;AACA,UAAM,SAAS,MAAM,UAAU,KAAK,IAAI,KAAK;AAC7C,QAAI,OAAO,IAAI;AACd,WAAK,YAAY,IAAI,OAAO,OAAO,KAAK;AACxC,aAAO,GAAG,OAAO,SAAS,MAAS;AAAA,IACpC;AACA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,OACL,OACA,OACA,MACuC;AACvC,UAAM,aAAa,sBAAsB,KAAK;AAC9C,QAAI,CAAC,WAAW,GAAI,QAAO;AAC3B,eAAW,OAAO,OAAO,KAAK,IAAI,GAAG;AACpC,YAAM,WAAW,sBAAsB,GAAG;AAC1C,UAAI,CAAC,SAAS,GAAI,QAAO;AAAA,IAC1B;AAGA,UAAM,eAAe,MAAM,KAAK,gBAAgB,KAAK;AACrD,QAAI,CAAC,aAAa,GAAI,QAAO;AAC7B,UAAM,SAAS,aAAa;AAG5B,UAAM,UAAU,OAAO,KAAK,IAAI;AAChC,UAAM,aAAa,CAAC,UAAU,GAAG,QAAQ,IAAI,CAAC,MAAM,gBAAgB,CAAC,CAAC,CAAC;AACvE,UAAM,eAAe,WAAW,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AACxD,UAAM,aAAa,WAAW,KAAK,IAAI;AACvC,UAAM,SAAS,CAAC,OAAO,GAAG,QAAQ,IAAI,CAAC,QAAQ,KAAK,GAAG,CAAC,CAAC;AAEzD,UAAM,MAAM,eAAe,gBAAgB,KAAK,CAAC,KAAK,UAAU,aAAa,YAAY;AACzF,UAAM,aAAa,MAAM,KAAK,GAAG,KAAK,KAAK,MAAM;AACjD,QAAI,CAAC,WAAW,GAAI,QAAO;AAG3B,UAAM,MAAM,KAAK,IAAI,IAAI;AACzB,UAAM,QAAQ,MAAM,aAAa,MAAM,MAAM;AAAA,MAC5C;AAAA,MACA;AAAA,MACA,UAAU,KAAK;AAAA,MACf;AAAA,MACA;AAAA,IACD,CAAC;AAED,QAAI,OAAO;AACV,YAAM,aAAa,MAAM,KAAK,MAAM,KAAK,KAAK;AAC9C,UAAI,CAAC,WAAW,GAAI,QAAO;AAAA,IAC5B;AAEA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA;AAAA,EAaA,MAAM,OACL,OACA,OACA,MACuC;AACvC,UAAM,aAAa,sBAAsB,KAAK;AAC9C,QAAI,CAAC,WAAW,GAAI,QAAO;AAC3B,eAAW,OAAO,OAAO,KAAK,IAAI,GAAG;AACpC,YAAM,WAAW,sBAAsB,GAAG;AAC1C,UAAI,CAAC,SAAS,GAAI,QAAO;AAAA,IAC1B;AAGA,UAAM,eAAe,MAAM,KAAK,gBAAgB,KAAK;AACrD,QAAI,CAAC,aAAa,GAAI,QAAO;AAC7B,UAAM,SAAS,aAAa;AAG5B,UAAM,cAAc,MAAM,KAAK,GAAG;AAAA,MACjC,iBAAiB,gBAAgB,KAAK,CAAC;AAAA,MACvC,CAAC,KAAK;AAAA,IACP;AACA,QAAI,CAAC,YAAY,GAAI,QAAO;AAE5B,UAAM,OAAO,YAAY;AACzB,QAAI,KAAK,WAAW,KAAK,CAAC,KAAK,CAAC,GAAG;AAClC,aAAO;AAAA,QACN,IAAI,cAAc,QAAQ,KAAK,yBAAyB,KAAK,KAAK,eAAe;AAAA,MAClF;AAAA,IACD;AAEA,UAAM,SAAS,aAAa,KAAK,CAAC,CAAC;AAGnC,UAAM,UAAU,OAAO,KAAK,IAAI;AAChC,UAAM,aAAa,QAAQ,IAAI,CAAC,QAAQ,GAAG,gBAAgB,GAAG,CAAC,MAAM,EAAE,KAAK,IAAI;AAChF,UAAM,SAAS,CAAC,GAAG,QAAQ,IAAI,CAAC,QAAQ,KAAK,GAAG,CAAC,GAAG,KAAK;AAEzD,UAAM,MAAM,UAAU,gBAAgB,KAAK,CAAC,QAAQ,UAAU;AAC9D,UAAM,aAAa,MAAM,KAAK,GAAG,KAAK,KAAK,MAAM;AACjD,QAAI,CAAC,WAAW,GAAI,QAAO;AAG3B,UAAM,QAAiC,EAAE,GAAG,QAAQ,GAAG,KAAK;AAG5D,UAAM,MAAM,KAAK,IAAI,IAAI;AACzB,UAAM,QAAQ,MAAM,aAAa,QAAQ,OAAO;AAAA,MAC/C;AAAA,MACA;AAAA,MACA,UAAU,KAAK;AAAA,MACf;AAAA,MACA;AAAA,IACD,CAAC;AAED,QAAI,OAAO;AACV,YAAM,aAAa,MAAM,KAAK,MAAM,KAAK,KAAK;AAC9C,UAAI,CAAC,WAAW,GAAI,QAAO;AAAA,IAC5B;AAEA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,OAAO,OAAe,OAAqD;AAChF,UAAM,aAAa,sBAAsB,KAAK;AAC9C,QAAI,CAAC,WAAW,GAAI,QAAO;AAG3B,UAAM,eAAe,MAAM,KAAK,gBAAgB,KAAK;AACrD,QAAI,CAAC,aAAa,GAAI,QAAO;AAC7B,UAAM,SAAS,aAAa;AAG5B,UAAM,cAAc,MAAM,KAAK,GAAG;AAAA,MACjC,iBAAiB,gBAAgB,KAAK,CAAC;AAAA,MACvC,CAAC,KAAK;AAAA,IACP;AACA,QAAI,CAAC,YAAY,GAAI,QAAO;AAE5B,UAAM,OAAO,YAAY;AACzB,QAAI,KAAK,WAAW,KAAK,CAAC,KAAK,CAAC,GAAG;AAClC,aAAO;AAAA,QACN,IAAI,cAAc,QAAQ,KAAK,yBAAyB,KAAK,KAAK,eAAe;AAAA,MAClF;AAAA,IACD;AAEA,UAAM,SAAS,aAAa,KAAK,CAAC,CAAC;AAGnC,UAAM,aAAa,MAAM,KAAK,GAAG;AAAA,MAChC,eAAe,gBAAgB,KAAK,CAAC;AAAA,MACrC,CAAC,KAAK;AAAA,IACP;AACA,QAAI,CAAC,WAAW,GAAI,QAAO;AAG3B,UAAM,MAAM,KAAK,IAAI,IAAI;AACzB,UAAM,QAAQ,MAAM,aAAa,QAAQ,MAAM;AAAA,MAC9C;AAAA,MACA;AAAA,MACA,UAAU,KAAK;AAAA,MACf;AAAA,MACA;AAAA,IACD,CAAC;AAED,QAAI,OAAO;AACV,YAAM,aAAa,MAAM,KAAK,MAAM,KAAK,KAAK;AAC9C,UAAI,CAAC,WAAW,GAAI,QAAO;AAAA,IAC5B;AAEA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,MAAS,KAAa,QAAmD;AAC9E,WAAO,KAAK,GAAG,MAAS,KAAK,MAAM;AAAA,EACpC;AACD;;;ACxMA,IAAM,wBAAwB;AAG9B,IAAM,wBAAwB;AAQvB,IAAM,kBAAN,MAAsB;AAAA,EACnB;AAAA,EACQ;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,WAAW,IAAI,YAAY;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT,gBAAgB,IAAI,OAAO,GAAG,CAAC;AAAA,EAC/B,gBAA6B;AAAA,EAC7B,iBAAwD;AAAA,EACxD,oBAAyC;AAAA,EACzC,UAAU;AAAA,EACD;AAAA,EACA;AAAA,EACT,YAA+D;AAAA,IACtE,UAAU,CAAC;AAAA,IACX,gBAAgB,CAAC;AAAA,IACjB,SAAS,CAAC;AAAA,IACV,kBAAkB,CAAC;AAAA,EACpB;AAAA,EAEA,YAAY,IAAa,WAA0B,QAAgC;AAClF,SAAK,KAAK;AACV,SAAK,YAAY;AACjB,SAAK,MAAM,QAAQ,OAAO,IAAI,IAAI;AAClC,SAAK,QAAQ,QAAQ,SAAS,IAAI,SAAS;AAC3C,SAAK,YAAY,QAAQ,YAAY,UAAU,OAAO,WAAW,CAAC;AAClE,SAAK,aAAa,QAAQ,cAAc;AACxC,SAAK,WAAW,QAAQ,YAAY;AACpC,SAAK,qBAAqB,QAAQ,sBAAsB;AACxD,SAAK,sBAAsB,QAAQ,uBAAuB;AAC1D,SAAK,cAAc,QAAQ,eAAe;AAC1C,SAAK,mBAAmB,QAAQ,oBAAoB;AACpD,SAAK,UAAU,IAAI,YAAY,IAAI,KAAK,OAAO,KAAK,KAAK,KAAK,SAAS;AAGvE,QAAI,KAAK,UAAU,aAAa;AAC/B,WAAK,UAAU,YAAY,CAAC,QAAQ,cAAc;AACjD,aAAK,KAAK,gBAAgB,QAAQ,SAAS;AAAA,MAC5C,CAAC;AAAA,IACF;AAAA,EACD;AAAA;AAAA,EAGA,GAA+B,OAAU,UAA+B;AACvE,SAAK,UAAU,KAAK,EAAE,KAAK,QAAQ;AAAA,EACpC;AAAA;AAAA,EAGA,IAAgC,OAAU,UAA+B;AACxE,UAAM,MAAM,KAAK,UAAU,KAAK;AAChC,UAAM,MAAM,IAAI,QAAQ,QAAQ;AAChC,QAAI,QAAQ,GAAI,KAAI,OAAO,KAAK,CAAC;AAAA,EAClC;AAAA,EAEQ,KAAiC,UAAa,MAAuC;AAC5F,eAAW,MAAM,KAAK,UAAU,KAAK,GAAG;AACvC,
UAAI;AACH,QAAC,GAAiD,GAAG,IAAI;AAAA,MAC1D,QAAQ;AAAA,MAER;AAAA,IACD;AAAA,EACD;AAAA;AAAA,EAGA,MAAM,gBAA+B;AACpC,UAAM,aAAa,MAAM,KAAK,MAAM,KAAK,GAAG;AAC5C,QAAI,CAAC,WAAW,MAAM,WAAW,MAAM,WAAW,EAAG;AAGrD,UAAM,eAAe,WAAW,MAAM,OAAO,CAAC,MAAM,EAAE,cAAc,KAAK,UAAU;AACnF,UAAM,UAAU,WAAW,MAAM,OAAO,CAAC,MAAM,EAAE,aAAa,KAAK,UAAU;AAE7E,QAAI,aAAa,SAAS,GAAG;AAC5B,cAAQ;AAAA,QACP,oCAAoC,aAAa,MAAM,kBAAkB,KAAK,UAAU;AAAA,MACzF;AACA,YAAM,KAAK,MAAM,IAAI,aAAa,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AAClD,WAAK;AAAA,QACJ;AAAA,QACA,IAAI,MAAM,iBAAiB,aAAa,MAAM,kBAAkB,KAAK,UAAU,UAAU;AAAA,MAC1F;AAAA,IACD;AAEA,QAAI,QAAQ,WAAW,EAAG;AAE1B,UAAM,MAAM,QAAQ,IAAI,CAAC,MAAM,EAAE,EAAE;AACnC,UAAM,KAAK,MAAM,YAAY,GAAG;AAEhC,UAAM,aAAa,MAAM,KAAK,UAAU,KAAK;AAAA,MAC5C,UAAU,KAAK;AAAA,MACf,QAAQ,QAAQ,IAAI,CAAC,MAAM,EAAE,KAAK;AAAA,MAClC,aAAa,KAAK,IAAI,IAAI;AAAA,IAC3B,CAAC;AAED,QAAI,WAAW,IAAI;AAClB,YAAM,KAAK,MAAM,IAAI,GAAG;AACxB,WAAK,gBAAgB,WAAW,MAAM;AACtC,WAAK,gBAAgB,oBAAI,KAAK;AAAA,IAC/B,OAAO;AACN,YAAM,KAAK,MAAM,KAAK,GAAG;AAAA,IAC1B;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,SAAS,QAAiC;AAC/C,WAAO,KAAK,gBAAgB,MAAM;AAAA,EACnC;AAAA;AAAA,EAGA,MAAM,gBAAgB,QAAkC;AACvD,UAAM,aAAa,MAAM,KAAK,UAAU,KAAK;AAAA,MAC5C,UAAU,KAAK;AAAA,MACf,UAAU,KAAK;AAAA,MACf,WAAW;AAAA,MACX;AAAA,IACD,CAAC;AAED,QAAI,CAAC,WAAW,MAAM,WAAW,MAAM,OAAO,WAAW,EAAG,QAAO;AAEnE,UAAM,EAAE,QAAQ,UAAU,IAAI,WAAW;AACzC,UAAM,cAAc,MAAM,kBAAkB,KAAK,IAAI,QAAQ,KAAK,UAAU,KAAK,KAAK;AAEtF,QAAI,YAAY,IAAI;AACnB,WAAK,gBAAgB;AACrB,WAAK,gBAAgB,oBAAI,KAAK;AAC9B,UAAI,YAAY,QAAQ,GAAG;AAC1B,aAAK,KAAK,YAAY,YAAY,KAAK;AAAA,MACxC;AACA,aAAO,YAAY;AAAA,IACpB;AACA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,gBAAgB,QAAoB,WAAwC;AACzF,QAAI,OAAO,WAAW,EAAG;AAEzB,QAAI;AACH,YAAM,cAAc,MAAM,kBAAkB,KAAK,IAAI,QAAQ,KAAK,UAAU,KAAK,KAAK;AAEtF,UAAI,YAAY,IAAI;AACnB,YAAI,IAAI,QAAQ,WAAW,KAAK,aAAa,IAAI,GAAG;AACnD,eAAK,gBAAgB;AAAA,QACtB;AACA,aAAK,gBAAgB,oBAAI,KAAK;AAC9B,YAAI,YAAY,QAAQ,GAAG;AAC1B,eAAK,KAAK,YAAY,YAAY,KAAK;AAAA,QACxC;AAAA,MACD;AAAA,IACD,SAAS,KAAK;AACb,WAAK,KAAK,WAAW,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,IACzE;AAAA,EACD;AAAA;AAAA,EAGA,MAAM,aAA8B;AACnC,UAAM,SAAS,MAAM,KAAK,MAAM,MAAM;AACtC,WAAO,OAAO,KAAK,OAAO,QAAQ;AAAA,EACnC;AAAA;AAAA,EAGA,IAAI,WAAmB;AACtB,WAAO,KAAK;AAAA,EACb;AAAA;AAAA,EAGA,IAAI,eAA4B;AAC/B,WAAO,KAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,gBAAsB;AACrB,SAAK,UAAU,UAAU;AAEzB,UAAM,aAAa,KAAK,UAAU,mBAC/B,KAAK,sBACL,KAAK;AAER,SAAK,iBAAiB,YAAY,MAAM;AACvC,WAAK,KAAK,SAAS;AAAA,IACpB,GAAG,UAAU;AAEb,SAAK,oBAAoB;AAAA,EAC1B;AAAA;AAAA,EAGQ,sBAA4B;AACnC,SAAK,oBAAoB,MAAM;AAC9B,UAAI,OAAO,aAAa,eAAe,SAAS,oBAAoB,WAAW;AAC9E,aAAK,KAAK,SAAS;AAAA,MACpB;AAAA,IACD;AACA,QAAI,OAAO,aAAa,aAAa;AACpC,eAAS,iBAAiB,oBAAoB,KAAK,iBAAiB;AAAA,IACrE;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,cAA6B;AAC1C,QAAI,CAAC,KAAK,UAAU,WAAY;AAChC,UAAM,SAAS,MAAM,KAAK,UAAU,WAAW;AAC/C,QAAI,CAAC,OAAO,MAAM,OAAO,UAAU,KAAM;AACzC,UAAM,EAAE,QAAQ,YAAY,IAAI,OAAO;AACvC,QAAI,OAAO,SAAS,GAAG;AACtB,YAAM,kBAAkB,KAAK,IAAI,QAAQ,KAAK,UAAU,KAAK,KAAK;AAAA,IACnE;AACA,SAAK,gBAAgB;AACrB,SAAK,gBAAgB,oBAAI,KAAK;AAAA,EAC/B;AAAA;AAAA,EAGA,MAAM,WAA0B;AAC/B,QAAI,KAAK,QAAS;AAClB,SAAK,UAAU;AACf,QAAI;AACH,UAAI,KAAK,aAAa,YAAY;AACjC,YAAI,KAAK,kBAAkB,IAAI,OAAO,GAAG,CAAC,GAAG;AAC5C,gBAAM,KAAK,YAAY;AAAA,QACxB;AACA,cAAM,KAAK,gBAAgB;AAAA,MAC5B;AACA,UAAI,KAAK,aAAa,YAAY;AACjC,cAAM,KAAK,cAAc;AAAA,MAC1B;AAEA,YAAM,KAAK,mBAAmB;AAC9B,WAAK,KAAK,gBAAgB;AAAA,IAC3B,SAAS,KAAK;AACb,WAAK,KAAK,WAAW,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,IACzE,UAAE;AACD,WAAK,UAAU;AAAA,IAChB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,cAAc,QAKF;AA
CjB,QAAI,CAAC,KAAK,aAAa;AACtB,WAAK,KAAK,WAAW,IAAI,MAAM,4BAA4B,CAAC;AAC5D;AAAA,IACD;AAEA,UAAM,MAAM,KAAK,IAAI,IAAI;AACzB,UAAM,EAAE,iBAAiB,IAAI,MAAM,OAAO,mBAAgB;AAC1D,UAAM,WAAW,MAAM,iBAAiB;AAAA,MACvC,UAAU,KAAK;AAAA,MACf;AAAA,MACA,WAAW,OAAO;AAAA,MAClB,YAAY,OAAO;AAAA,MACnB,QAAQ,OAAO;AAAA,IAChB,CAAC;AAED,UAAM,SAAiB;AAAA,MACtB;AAAA,MACA,UAAU,KAAK;AAAA,MACf;AAAA,MACA,WAAW,OAAO;AAAA,MAClB,YAAY,OAAO;AAAA,MACnB,QAAQ,OAAO;AAAA,MACf,gBAAgB,OAAO;AAAA,IACxB;AAEA,UAAM,KAAK,YAAY,KAAK,MAAM;AAElC,SAAK,KAAK,mBAAmB;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,qBAAoC;AACzC,QAAI,CAAC,KAAK,eAAe,CAAC,KAAK,UAAU,cAAe;AAExD,UAAM,aAAa,MAAM,KAAK,YAAY,KAAK,GAAG;AAClD,QAAI,CAAC,WAAW,MAAM,WAAW,MAAM,WAAW,EAAG;AAGrD,UAAM,eAAe,WAAW,MAAM,OAAO,CAAC,MAAM,EAAE,cAAc,KAAK,gBAAgB;AACzF,UAAM,UAAU,WAAW,MAAM,OAAO,CAAC,MAAM,EAAE,aAAa,KAAK,gBAAgB;AAEnF,QAAI,aAAa,SAAS,GAAG;AAC5B,cAAQ;AAAA,QACP,oCAAoC,aAAa,MAAM,kBAAkB,KAAK,gBAAgB;AAAA,MAC/F;AACA,YAAM,KAAK,YAAY,IAAI,aAAa,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AACxD,iBAAW,SAAS,cAAc;AACjC,aAAK,KAAK,oBAAoB,MAAM,OAAO,UAAU;AAAA,UACpD,UAAU,MAAM,OAAO;AAAA,UACvB,MAAM;AAAA,UACN,SAAS,8BAA8B,KAAK,gBAAgB;AAAA,UAC5D,WAAW;AAAA,QACZ,CAAC;AAAA,MACF;AAAA,IACD;AAEA,QAAI,QAAQ,WAAW,EAAG;AAE1B,UAAM,MAAM,QAAQ,IAAI,CAAC,MAAM,EAAE,EAAE;AACnC,UAAM,KAAK,YAAY,YAAY,GAAG;AAEtC,UAAM,kBAAkB,MAAM,KAAK,UAAU,cAAc;AAAA,MAC1D,UAAU,KAAK;AAAA,MACf,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,MAAM;AAAA,IACrC,CAAC;AAED,QAAI,gBAAgB,IAAI;AACvB,YAAM,KAAK,YAAY,IAAI,GAAG;AAG9B,iBAAW,UAAU,gBAAgB,MAAM,SAAS;AACnD,aAAK,KAAK,oBAAoB,OAAO,UAAU,MAAM;AAAA,MACtD;AAGA,YAAM,eAAyB,CAAC;AAChC,YAAM,aAAuB,CAAC;AAC9B,eAAS,IAAI,GAAG,IAAI,gBAAgB,MAAM,QAAQ,QAAQ,KAAK;AAC9D,cAAM,SAAS,gBAAgB,MAAM,QAAQ,CAAC;AAC9C,YAAI,cAAc,MAAM,KAAK,OAAO,WAAW;AAC9C,uBAAa,KAAK,IAAI,CAAC,CAAE;AAAA,QAC1B,OAAO;AACN,qBAAW,KAAK,IAAI,CAAC,CAAE;AAAA,QACxB;AAAA,MACD;AAAA,IAKD,OAAO;AAEN,YAAM,KAAK,YAAY,KAAK,GAAG;AAAA,IAChC;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,kBAAmE;AACxE,QAAI,CAAC,KAAK,UAAU,iBAAiB;AACpC,aAAO,EAAE,IAAI,MAAM,OAAO,EAAE,YAAY,CAAC,EAAE,EAAE;AAAA,IAC9C;AACA,WAAO,KAAK,UAAU,gBAAgB;AAAA,EACvC;AAAA;AAAA,EAGA,eAAqB;AACpB,QAAI,KAAK,mBAAmB,MAAM;AACjC,oBAAc,KAAK,cAAc;AACjC,WAAK,iBAAiB;AAAA,IACvB;AACA,QAAI,KAAK,mBAAmB;AAC3B,UAAI,OAAO,aAAa,aAAa;AACpC,iBAAS,oBAAoB,oBAAoB,KAAK,iBAAiB;AAAA,MACxE;AACA,WAAK,oBAAoB;AAAA,IAC1B;AAEA,SAAK,UAAU,aAAa;AAAA,EAC7B;AACD;;;AC3cO,IAAM,qBAAN,MAAyB;AAAA,EAC/B,YAA6B,IAAa;AAAb;AAAA,EAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAe3C,MAAM,YACL,OACA,cACA,eACuC;AAEvC,UAAM,oBAAoB,MAAM,UAAU,KAAK,IAAI,KAAK;AACxD,QAAI,CAAC,kBAAkB,GAAI,QAAO;AAElC,UAAM,cAAc,kBAAkB;AAGtC,QAAI,CAAC,aAAa;AACjB,aAAO;AAAA,QACN,IAAI;AAAA,UACH,wCAAwC,KAAK;AAAA,QAC9C;AAAA,MACD;AAAA,IACD;AAGA,UAAM,qBAAqB,MAAM,KAAK,gBAAgB,KAAK;AAC3D,QAAI,CAAC,mBAAmB,GAAI,QAAO;AAEnC,UAAM,eAAe,mBAAmB;AAGxC,QAAI,gBAAgB,eAAe;AAClC,aAAO,GAAG,MAAS;AAAA,IACpB;AAGA,UAAM,gBAAgB,MAAM,cAAc,KAAK,IAAI,aAAa,YAAY;AAC5E,QAAI,CAAC,cAAc,GAAI,QAAO;AAK9B,UAAM,eAAe,MAAM,KAAK,gBAAgB,OAAO,aAAa;AACpE,QAAI,CAAC,aAAa,GAAI,QAAO;AAE7B,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,gBAAgB,OAAiD;AAC9E,UAAM,SAAS,MAAM,KAAK,GAAG;AAAA,MAC5B;AAAA,MACA,CAAC,KAAK;AAAA,IACP;AAEA,QAAI,CAAC,OAAO,GAAI,QAAO;AAEvB,UAAM,OAAO,OAAO;AACpB,QAAI,KAAK,WAAW,KAAK,CAAC,KAAK,CAAC,GAAG;AAClC,aAAO,GAAG,CAAC;AAAA,IACZ;AAEA,WAAO,GAAG,KAAK,CAAC,EAAE,cAAc;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,gBAAgB,OAAe,SAAiD;AAC7F,WAAO,KAAK,GAAG,KAAK,qEAAqE;AAAA,MACxF;AAAA,MACA;AAAA,IACD,CAAC;AAAA,EACF;AACD;;;ACzEO,IAAM,gBAAN,MAA6C;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEjB,YAAY,QA
A6B;AACxC,SAAK,UAAU,OAAO,QAAQ,QAAQ,QAAQ,EAAE;AAChD,SAAK,YAAY,OAAO;AACxB,SAAK,QAAQ,OAAO;AACpB,SAAK,SAAS,OAAO,SAAS,WAAW,MAAM,KAAK,UAAU;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,KACL,KACgF;AAChF,UAAM,MAAM,GAAG,KAAK,OAAO,SAAS,KAAK,SAAS;AAElD,QAAI;AACH,YAAM,WAAW,MAAM,KAAK,OAAO,KAAK;AAAA,QACvC,QAAQ;AAAA,QACR,SAAS;AAAA,UACR,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK,KAAK;AAAA,QACpC;AAAA,QACA,MAAM,KAAK,UAAU,KAAK,cAAc;AAAA,MACzC,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AACjB,cAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,eAAe;AAC9D,eAAO,IAAI,IAAI,cAAQ,gBAAgB,SAAS,MAAM,MAAM,IAAI,IAAI,iBAAiB,CAAC;AAAA,MACvF;AAEA,YAAM,MAAM,MAAM,SAAS,KAAK;AAChC,YAAM,OAAO,KAAK,MAAM,KAAK,aAAa;AAI1C,aAAO,GAAG,IAAI;AAAA,IACf,SAAS,OAAO;AACf,YAAM,QAAQ,QAAQ,KAAK;AAC3B,aAAO,IAAI,IAAI,cAAQ,wBAAwB,MAAM,OAAO,IAAI,mBAAmB,KAAK,CAAC;AAAA,IAC1F;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,KAAK,KAA6D;AACvE,UAAM,SAAS,IAAI,gBAAgB;AAAA,MAClC,OAAO,IAAI,SAAS,SAAS;AAAA,MAC7B,UAAU,IAAI;AAAA,MACd,OAAO,IAAI,UAAU,SAAS;AAAA,IAC/B,CAAC;AACD,QAAI,IAAI,QAAQ;AACf,aAAO,IAAI,UAAU,IAAI,MAAM;AAAA,IAChC;AACA,UAAM,MAAM,GAAG,KAAK,OAAO,SAAS,KAAK,SAAS,SAAS,MAAM;AAEjE,QAAI;AACH,YAAM,WAAW,MAAM,KAAK,OAAO,KAAK;AAAA,QACvC,QAAQ;AAAA,QACR,SAAS;AAAA,UACR,eAAe,UAAU,KAAK,KAAK;AAAA,QACpC;AAAA,MACD,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AACjB,cAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,eAAe;AAC9D,eAAO,IAAI,IAAI,cAAQ,gBAAgB,SAAS,MAAM,MAAM,IAAI,IAAI,iBAAiB,CAAC;AAAA,MACvF;AAEA,YAAM,MAAM,MAAM,SAAS,KAAK;AAChC,YAAM,OAAO,KAAK,MAAM,KAAK,aAAa;AAC1C,aAAO,GAAG,IAAI;AAAA,IACf,SAAS,OAAO;AACf,YAAM,QAAQ,QAAQ,KAAK;AAC3B,aAAO,IAAI,IAAI,cAAQ,wBAAwB,MAAM,OAAO,IAAI,mBAAmB,KAAK,CAAC;AAAA,IAC1F;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,cAAc,KAAiE;AACpF,UAAM,MAAM,GAAG,KAAK,OAAO,SAAS,KAAK,SAAS;AAElD,QAAI;AACH,YAAM,WAAW,MAAM,KAAK,OAAO,KAAK;AAAA,QACvC,QAAQ;AAAA,QACR,SAAS;AAAA,UACR,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK,KAAK;AAAA,QACpC;AAAA,QACA,MAAM,KAAK,UAAU,KAAK,cAAc;AAAA,MACzC,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AACjB,cAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,eAAe;AAC9D,eAAO,IAAI,IAAI,cAAQ,kBAAkB,SAAS,MAAM,MAAM,IAAI,IAAI,iBAAiB,CAAC;AAAA,MACzF;AAEA,YAAM,MAAM,MAAM,SAAS,KAAK;AAChC,YAAM,OAAO,KAAK,MAAM,KAAK,aAAa;AAC1C,aAAO,GAAG,IAAI;AAAA,IACf,SAAS,OAAO;AACf,YAAM,QAAQ,QAAQ,KAAK;AAC3B,aAAO,IAAI,IAAI,cAAQ,0BAA0B,MAAM,OAAO,IAAI,mBAAmB,KAAK,CAAC;AAAA,IAC5F;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,kBAAmE;AACxE,UAAM,MAAM,GAAG,KAAK,OAAO,SAAS,KAAK,SAAS;AAElD,QAAI;AACH,YAAM,WAAW,MAAM,KAAK,OAAO,KAAK;AAAA,QACvC,QAAQ;AAAA,QACR,SAAS;AAAA,UACR,eAAe,UAAU,KAAK,KAAK;AAAA,QACpC;AAAA,MACD,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AACjB,cAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,eAAe;AAC9D,eAAO;AAAA,UACN,IAAI,cAAQ,4BAA4B,SAAS,MAAM,MAAM,IAAI,IAAI,iBAAiB;AAAA,QACvF;AAAA,MACD;AAEA,YAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,aAAO,GAAG,IAAI;AAAA,IACf,SAAS,OAAO;AACf,YAAM,QAAQ,QAAQ,KAAK;AAC3B,aAAO;AAAA,QACN,IAAI,cAAQ,oCAAoC,MAAM,OAAO,IAAI,mBAAmB,KAAK;AAAA,MAC1F;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,aAAwE;AAC7E,UAAM,MAAM,GAAG,KAAK,OAAO,SAAS,KAAK,SAAS;AAElD,QAAI;AACH,YAAM,WAAW,MAAM,KAAK,OAAO,KAAK;AAAA,QACvC,QAAQ;AAAA,QACR,SAAS;AAAA,UACR,eAAe,UAAU,KAAK,KAAK;AAAA,UACnC,QAAQ;AAAA,QACT;AAAA,MACD,CAAC;AAED,UAAI,SAAS,WAAW,KAAK;AAC5B,eAAO,GAAG,IAAI;AAAA,MACf;AAEA,UAAI,CAAC,SAAS,IAAI;AACjB,cAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,eAAe;AAC9D,eAAO;AAAA,UACN,IAAI,cAAQ,sBAAsB,SAAS,MAAM,MAAM,IAAI,IAAI,iBAAiB;AAAA,QACjF;AAAA,MACD;AAEA,YAAM,SAAS,MAAM,8BAA8B,QAAQ;AAC3D,YAAM,YAAY,SAAS,QAAQ,IAAI,kBAAkB;AACzD,YAAM,cAAc,YAAa,OAAO,SAAS,IAAsB;AACvE,aAAO,GAAG,EAAE,QAAQ,YAAY,CAAC;AAAA,IAClC,SAAS,OAAO;AACf,YAAM,QAAQ,QAAQ,KAAK;AAC3B,aAAO;AAAA,QACN,IAAI,cAAQ,8BAA8B,MAAM,OAAO,IA
AI,mBAAmB,KAAK;AAAA,MACpF;AAAA,IACD;AAAA,EACD;AACD;AAQA,eAAe,8BAA8B,UAAyC;AACrF,QAAM,SAAS,SAAS,KAAM,UAAU;AACxC,QAAM,YAAwB,CAAC;AAC/B,MAAI,SAAS,IAAI,WAAW,CAAC;AAE7B,aAAS;AACR,UAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,QAAI,KAAM;AAEV,UAAM,YAAY,IAAI,WAAW,OAAO,SAAS,MAAM,MAAM;AAC7D,cAAU,IAAI,MAAM;AACpB,cAAU,IAAI,OAAO,OAAO,MAAM;AAClC,aAAS;AAET,WAAO,OAAO,UAAU,GAAG;AAC1B,YAAM,cAAc,IAAI,SAAS,OAAO,QAAQ,OAAO,UAAU,EAAE,UAAU,GAAG,KAAK;AACrF,UAAI,OAAO,SAAS,IAAI,YAAa;AAErC,YAAM,YAAY,OAAO,MAAM,GAAG,IAAI,WAAW;AACjD,eAAS,OAAO,MAAM,IAAI,WAAW;AAErC,YAAM,UAAU,mBAAmB,SAAS;AAC5C,UAAI,QAAQ,IAAI;AACf,kBAAU,KAAK,GAAG,QAAQ,MAAM,MAAM;AAAA,MACvC;AAAA,IACD;AAAA,EACD;AAEA,SAAO;AACR;;;ACrOO,IAAM,iBAAN,MAA8C;AAAA,EACpD,YAA6B,SAAuB;AAAvB;AAAA,EAAwB;AAAA;AAAA,EAGrD,MAAM,KACL,KACgF;AAChF,WAAO,KAAK,QAAQ,WAAW,GAAG;AAAA,EACnC;AAAA;AAAA,EAGA,MAAM,KAAK,KAA6D;AACvE,UAAM,SAAS,KAAK,QAAQ,WAAW,GAAG;AAC1C,WAAO,kBAAkB,UAAU,SAAS;AAAA,EAC7C;AAAA;AAAA,EAGA,MAAM,aAAwE;AAC7E,WAAO,GAAG,IAAI;AAAA,EACf;AAAA;AAAA,EAGA,MAAM,cAAc,KAAiE;AACpF,QAAI,CAAC,KAAK,QAAQ,cAAc;AAC/B,aAAO,IAAI,IAAI,cAAQ,0CAA0C,iBAAiB,CAAC;AAAA,IACpF;AACA,UAAM,SAAS,MAAM,KAAK,QAAQ,aAAa,GAAG;AAClD,QAAI,CAAC,OAAO,IAAI;AACf,aAAO,IAAI,IAAI,cAAQ,OAAO,MAAM,SAAS,OAAO,MAAM,IAAI,CAAC;AAAA,IAChE;AACA,WAAO,GAAG,OAAO,KAAK;AAAA,EACvB;AAAA;AAAA,EAGA,MAAM,kBAAmE;AACxE,QAAI,CAAC,KAAK,QAAQ,iBAAiB;AAClC,aAAO,GAAG,EAAE,YAAY,CAAC,EAAE,CAAC;AAAA,IAC7B;AACA,WAAO,GAAG,KAAK,QAAQ,gBAAgB,CAAC;AAAA,EACzC;AACD;;;AC3CA,IAAM,4BAA4B;AAGlC,IAAM,2BAA2B;AAc1B,IAAM,qBAAN,MAAkD;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,KAAuB;AAAA,EACvB,iBAAuD;AAAA,EACvD,oBAAoB;AAAA,EACpB,aAAa;AAAA,EACb,mBAAmB;AAAA;AAAA,EAGnB,UAGG;AAAA;AAAA,EAGH,oBAAoF;AAAA,EAE5F,YAAY,QAAkC;AAC7C,SAAK,SAAS;AACd,SAAK,kBAAkB,OAAO,mBAAmB;AACjD,SAAK,iBAAiB,OAAO,kBAAkB;AAC/C,SAAK,gBAAgB,OAAO,aAAa,IAAI,cAAc,OAAO,UAAU,IAAI;AAEhF,QAAI,OAAO,aAAa;AACvB,WAAK,oBAAoB,OAAO;AAAA,IACjC;AAAA,EACD;AAAA;AAAA,EAGA,IAAI,YAAqB;AACxB,WAAO,KAAK;AAAA,EACb;AAAA;AAAA,EAGA,IAAI,mBAA4B;AAC/B,WAAO;AAAA,EACR;AAAA;AAAA,EAGA,YAAY,UAAuE;AAClF,SAAK,oBAAoB;AAAA,EAC1B;AAAA;AAAA,EAGA,UAAgB;AACf,QAAI,KAAK,GAAI;AACb,SAAK,mBAAmB;AACxB,SAAK,cAAc;AAAA,EACpB;AAAA;AAAA,EAGA,aAAmB;AAClB,SAAK,mBAAmB;AACxB,QAAI,KAAK,mBAAmB,MAAM;AACjC,mBAAa,KAAK,cAAc;AAChC,WAAK,iBAAiB;AAAA,IACvB;AACA,QAAI,KAAK,IAAI;AACZ,WAAK,GAAG,MAAM,KAAM,mBAAmB;AACvC,WAAK,KAAK;AAAA,IACX;AACA,SAAK,aAAa;AAClB,SAAK,oBAAoB;AAGzB,QAAI,KAAK,SAAS;AACjB,WAAK,QAAQ,QAAQ,IAAI,IAAI,cAAQ,0BAA0B,iBAAiB,CAAC,CAAC;AAClF,WAAK,UAAU;AAAA,IAChB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KACL,KACgF;AAChF,UAAM,UAAU,eAAe;AAAA,MAC9B,UAAU,IAAI;AAAA,MACd,QAAQ,IAAI;AAAA,MACZ,aAAa,IAAI;AAAA,IAClB,CAAC;AACD,QAAI,CAAC,QAAQ,IAAI;AAChB,aAAO,IAAI,IAAI,cAAQ,0BAA0B,QAAQ,MAAM,OAAO,IAAI,iBAAiB,CAAC;AAAA,IAC7F;AAEA,UAAM,QAAQ,IAAI,WAAW,IAAI,QAAQ,MAAM,MAAM;AACrD,UAAM,CAAC,IAAI;AACX,UAAM,IAAI,QAAQ,OAAO,CAAC;AAE1B,UAAM,WAAW,MAAM,KAAK,qBAAqB,KAAK;AACtD,QAAI,CAAC,SAAS,GAAI,QAAO;AAEzB,WAAO,GAAG;AAAA,MACT,WAAW,SAAS,MAAM;AAAA,MAC1B,UACC,SAAS,MAAM,OAAO,WAAW,IAAI,IAAI,OAAO,SAAS,SAAS,MAAM,OAAO;AAAA,IACjF,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,KAA6D;AACvE,UAAM,UAAU,eAAe;AAAA,MAC9B,UAAU,IAAI;AAAA,MACd,UAAU,IAAI;AAAA,MACd,WAAW,IAAI;AAAA,IAChB,CAAC;AACD,QAAI,CAAC,QAAQ,IAAI;AAChB,aAAO,IAAI,IAAI,cAAQ,0BAA0B,QAAQ,MAAM,OAAO,IAAI,iBAAiB,CAAC;AAAA,IAC7F;AAEA,UAAM,QAAQ,IAAI,WAAW,IAAI,QAAQ,MAAM,MAAM;AACrD,UAAM,CAAC,IAAI;AACX,UAAM,IAAI,QAAQ,OAAO,CAAC;AAE1B,WAAO,KAAK,qBAAqB,KAAK;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAwE;AAC7E,QAAI,CAAC,KAAK,eAAe;AACxB,aAAO,GAAG,IAAI;AAAA,IACf;AACA,WAAO,KAAK,cAAc,WAAW;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAMQ,gBAAsB;AAC7B,UAAM,MAAM,GAAG,KA
AK,OAAO,GAAG,UAAU,mBAAmB,KAAK,OAAO,KAAK,CAAC;AAC7E,SAAK,KAAK,IAAI,UAAU,GAAG;AAC3B,SAAK,GAAG,aAAa;AAErB,SAAK,GAAG,SAAS,MAAM;AACtB,WAAK,aAAa;AAClB,WAAK,oBAAoB;AAAA,IAC1B;AAEA,SAAK,GAAG,YAAY,CAAC,UAAwB;AAC5C,UAAI,EAAE,MAAM,gBAAgB,aAAc;AAC1C,YAAM,QAAQ,IAAI,WAAW,MAAM,IAAI;AACvC,UAAI,MAAM,SAAS,EAAG;AAEtB,YAAM,MAAM,MAAM,CAAC;AAEnB,UAAI,QAAQ,eAAe;AAE1B,cAAM,UAAU,qBAAqB,KAAK;AAC1C,YAAI,QAAQ,MAAM,KAAK,mBAAmB;AACzC,eAAK,kBAAkB,QAAQ,MAAM,QAAQ,QAAQ,MAAM,SAAS;AAAA,QACrE;AAAA,MACD,OAAO;AAEN,cAAM,UAAU,mBAAmB,KAAK;AACxC,YAAI,KAAK,SAAS;AACjB,cAAI,QAAQ,IAAI;AACf,iBAAK,QAAQ,QAAQ,GAAG,QAAQ,KAAK,CAAC;AAAA,UACvC,OAAO;AACN,iBAAK,QAAQ;AAAA,cACZ;AAAA,gBACC,IAAI;AAAA,kBACH,8BAA8B,QAAQ,MAAM,OAAO;AAAA,kBACnD;AAAA,gBACD;AAAA,cACD;AAAA,YACD;AAAA,UACD;AACA,eAAK,UAAU;AAAA,QAChB;AAAA,MACD;AAAA,IACD;AAEA,SAAK,GAAG,UAAU,MAAM;AACvB,WAAK,aAAa;AAClB,WAAK,KAAK;AAGV,UAAI,KAAK,SAAS;AACjB,aAAK,QAAQ;AAAA,UACZ,IAAI,IAAI,cAAQ,oCAAoC,iBAAiB,CAAC;AAAA,QACvE;AACA,aAAK,UAAU;AAAA,MAChB;AAEA,UAAI,CAAC,KAAK,kBAAkB;AAC3B,aAAK,kBAAkB;AAAA,MACxB;AAAA,IACD;AAEA,SAAK,GAAG,UAAU,MAAM;AAAA,IAExB;AAAA,EACD;AAAA,EAEQ,oBAA0B;AACjC,UAAM,QAAQ,KAAK,IAAI,KAAK,kBAAkB,KAAK,KAAK,mBAAmB,KAAK,cAAc;AAC9F,SAAK;AACL,SAAK,iBAAiB,WAAW,MAAM;AACtC,WAAK,iBAAiB;AACtB,WAAK,cAAc;AAAA,IACpB,GAAG,KAAK;AAAA,EACT;AAAA,EAEQ,qBAAqB,OAAiE;AAC7F,WAAO,IAAI,QAAQ,CAAC,YAAY;AAC/B,UAAI,CAAC,KAAK,MAAM,CAAC,KAAK,YAAY;AACjC,gBAAQ,IAAI,IAAI,cAAQ,2BAA2B,iBAAiB,CAAC,CAAC;AACtE;AAAA,MACD;AAGA,UAAI,KAAK,SAAS;AACjB,aAAK,QAAQ;AAAA,UACZ,IAAI,IAAI,cAAQ,0CAA0C,iBAAiB,CAAC;AAAA,QAC7E;AAAA,MACD;AAEA,WAAK,UAAU;AAAA,QACd;AAAA,QACA,QAAQ,CAAC,WAAkB;AAC1B,kBAAQ,IAAI,IAAI,cAAQ,OAAO,SAAS,iBAAiB,CAAC,CAAC;AAAA,QAC5D;AAAA,MACD;AAEA,UAAI;AACH,aAAK,GAAG,KAAK,KAAK;AAAA,MACnB,SAAS,OAAO;AACf,cAAM,QAAQ,QAAQ,KAAK;AAC3B,aAAK,UAAU;AACf,gBAAQ,IAAI,IAAI,cAAQ,0BAA0B,MAAM,OAAO,IAAI,iBAAiB,CAAC,CAAC;AAAA,MACvF;AAAA,IACD,CAAC;AAAA,EACF;AACD;","names":["openDB","STORE_NAME","openDB","DB_NAME","DB_VERSION","STORE_NAME","serialiseEntry","deserialiseEntry","wrapIdbOp","applyResult"]}