lakesync 0.1.3 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapter.d.ts +87 -9
- package/dist/adapter.js +8 -2
- package/dist/analyst.js +1 -1
- package/dist/{base-poller-Dfaj05py.d.ts → base-poller-CBvhdvcj.d.ts} +1 -1
- package/dist/catalogue.d.ts +1 -1
- package/dist/catalogue.js +2 -2
- package/dist/{chunk-HAR3YPCY.js → chunk-4LP2EWSC.js} +27 -9
- package/dist/chunk-4LP2EWSC.js.map +1 -0
- package/dist/{chunk-E7ZDOJCP.js → chunk-6OCFE42A.js} +2 -2
- package/dist/{chunk-C7ECMNQ4.js → chunk-B257DXIS.js} +1 -1
- package/dist/{chunk-C7ECMNQ4.js.map → chunk-B257DXIS.js.map} +1 -1
- package/dist/{chunk-265CMYJH.js → chunk-D7KSRAWK.js} +2 -2
- package/dist/{chunk-TIPMVLIG.js → chunk-H3BD4SMD.js} +2 -2
- package/dist/{chunk-SXQB6JT6.js → chunk-HJ2MOKJ5.js} +2 -2
- package/dist/{chunk-NCMXLWEW.js → chunk-LZ6R74PT.js} +337 -20
- package/dist/chunk-LZ6R74PT.js.map +1 -0
- package/dist/{chunk-L6LTCXJ4.js → chunk-VDBZ2AOS.js} +2 -2
- package/dist/client.d.ts +4 -4
- package/dist/client.js +3 -3
- package/dist/compactor.d.ts +1 -1
- package/dist/compactor.js +3 -3
- package/dist/connector-jira.d.ts +2 -2
- package/dist/connector-jira.js +2 -2
- package/dist/connector-salesforce.d.ts +2 -2
- package/dist/connector-salesforce.js +2 -2
- package/dist/{coordinator-CSbsqp5C.d.ts → coordinator-DFbyrQEU.d.ts} +1 -1
- package/dist/{db-types-CPAPw8Ws.d.ts → db-types-B6_JKQWK.d.ts} +1 -1
- package/dist/{gateway-Cej8JUh9.d.ts → gateway-CvO7Xy3T.d.ts} +5 -3
- package/dist/gateway-server.d.ts +4 -4
- package/dist/gateway-server.js +8 -8
- package/dist/gateway.d.ts +7 -6
- package/dist/gateway.js +5 -5
- package/dist/index.d.ts +5 -5
- package/dist/index.js +1 -1
- package/dist/parquet.d.ts +1 -1
- package/dist/parquet.js +2 -2
- package/dist/proto.d.ts +1 -1
- package/dist/proto.js +2 -2
- package/dist/react.d.ts +2 -2
- package/dist/{resolver-B10tk8Er.d.ts → resolver-BZURzdlL.d.ts} +1 -1
- package/dist/{src-VVCNNYND.js → src-2LUI4O6N.js} +3 -3
- package/dist/{src-TLTET7JZ.js → src-5ABL6A7J.js} +2 -2
- package/dist/{src-PPKRY5GD.js → src-CLFH5JSA.js} +3 -3
- package/dist/{types-BUzzVRD6.d.ts → types-GGBfZBKQ.d.ts} +3 -0
- package/package.json +1 -1
- package/dist/chunk-HAR3YPCY.js.map +0 -1
- package/dist/chunk-NCMXLWEW.js.map +0 -1
- /package/dist/{chunk-E7ZDOJCP.js.map → chunk-6OCFE42A.js.map} +0 -0
- /package/dist/{chunk-265CMYJH.js.map → chunk-D7KSRAWK.js.map} +0 -0
- /package/dist/{chunk-TIPMVLIG.js.map → chunk-H3BD4SMD.js.map} +0 -0
- /package/dist/{chunk-SXQB6JT6.js.map → chunk-HJ2MOKJ5.js.map} +0 -0
- /package/dist/{chunk-L6LTCXJ4.js.map → chunk-VDBZ2AOS.js.map} +0 -0
- /package/dist/{src-PPKRY5GD.js.map → src-2LUI4O6N.js.map} +0 -0
- /package/dist/{src-TLTET7JZ.js.map → src-5ABL6A7J.js.map} +0 -0
- /package/dist/{src-VVCNNYND.js.map → src-CLFH5JSA.js.map} +0 -0
package/dist/adapter.d.ts
CHANGED
@@ -1,13 +1,62 @@
   import { BigQuery } from '@google-cloud/bigquery';
   import { R as Result, A as AdapterError, H as HLCTimestamp } from './result-CojzlFE2.js';
- import { R as RowDelta, T as TableSchema, C as ColumnDelta } from './types-
- import { D as DatabaseAdapter, a as DatabaseAdapterConfig } from './db-types-
- export { i as isDatabaseAdapter, l as lakeSyncTypeToBigQuery } from './db-types-
+ import { R as RowDelta, T as TableSchema, C as ColumnDelta } from './types-GGBfZBKQ.js';
+ import { D as DatabaseAdapter, a as DatabaseAdapterConfig } from './db-types-B6_JKQWK.js';
+ export { i as isDatabaseAdapter, l as lakeSyncTypeToBigQuery } from './db-types-B6_JKQWK.js';
   import { C as ConnectorConfig } from './types-D-E0VrfS.js';
   import { L as LakeAdapter, A as AdapterConfig, O as ObjectInfo } from './types-DSC_EiwR.js';
   import mysql from 'mysql2/promise';
   import { Pool } from 'pg';

+ /**
+  * Opt-in capability for adapters that can materialise deltas into destination tables.
+  *
+  * Materialisation is a separate concern from delta storage — adapters that store
+  * deltas (via `DatabaseAdapter.insertDeltas`) may also materialise them into
+  * queryable destination tables by implementing this interface.
+  *
+  * Destination tables follow the hybrid column model:
+  * - Synced columns (written by materialiser, derived from `TableSchema.columns`)
+  * - `props JSONB DEFAULT '{}'` — consumer-extensible, never touched by materialiser
+  * - `synced_at` — updated on every materialise cycle
+  */
+ interface Materialisable {
+     /**
+      * Materialise deltas into destination tables.
+      *
+      * For each table with a matching schema, merges delta history into the
+      * latest row state and upserts into the destination table. Tombstoned
+      * rows are deleted. The `props` column is never touched.
+      *
+      * @param deltas - The deltas that were just flushed.
+      * @param schemas - Table schemas defining destination tables and column mappings.
+      */
+     materialise(deltas: RowDelta[], schemas: ReadonlyArray<TableSchema>): Promise<Result<void, AdapterError>>;
+ }
+ /**
+  * Type guard to check if an adapter supports materialisation.
+  *
+  * Uses duck-typing (same pattern as `isDatabaseAdapter`).
+  */
+ declare function isMaterialisable(adapter: unknown): adapter is Materialisable;
+ /**
+  * Group deltas by their table name, collecting the set of affected row IDs per table.
+  *
+  * @param deltas - The deltas to group.
+  * @returns A map from table name to the set of affected row IDs.
+  */
+ declare function groupDeltasByTable(deltas: ReadonlyArray<RowDelta>): Map<string, Set<string>>;
+ /**
+  * Build an index from source table name to schema.
+  *
+  * Keys are `schema.sourceTable ?? schema.table`, so deltas can be matched
+  * by their `table` field to find the correct destination schema.
+  *
+  * @param schemas - The table schemas to index.
+  * @returns A map from source table name to schema.
+  */
+ declare function buildSchemaIndex(schemas: ReadonlyArray<TableSchema>): Map<string, TableSchema>;
+
   /**
    * Configuration for the BigQuery adapter.
    * Unlike SQL adapters, BigQuery is HTTP-based — no connection string needed.
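The new guard is duck-typed, so any adapter object that exposes a `materialise` method opts in without extending a base class. A minimal TypeScript sketch; the `lakesync/adapter` import path, the structural `Result` literal, and the assumption that the guard simply probes for a `materialise` function (the declaration only promises the same pattern as `isDatabaseAdapter`) are mine, not the package's:

// Sketch: a hand-rolled adapter opting in to materialisation.
import { isMaterialisable } from "lakesync/adapter";

const customAdapter = {
  // real signature: (deltas: RowDelta[], schemas: ReadonlyArray<TableSchema>)
  async materialise(_deltas: unknown, _schemas: unknown) {
    // merge delta history to latest row state, upsert live rows,
    // delete tombstones, and never touch the consumer-owned `props` column
    return { ok: true as const, value: undefined };
  },
};

if (isMaterialisable(customAdapter)) {
  // narrowed to Materialisable, so safe to invoke after a successful flush
  await customAdapter.materialise([], []);
}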
@@ -33,7 +82,7 @@ interface BigQueryAdapterConfig {
    * on standard (non-partitioned) tables. Query latency is seconds, not
    * milliseconds — this adapter is designed for the analytics tier.
    */
- declare class BigQueryAdapter implements DatabaseAdapter {
+ declare class BigQueryAdapter implements DatabaseAdapter, Materialisable {
      /** @internal */
      readonly client: BigQuery;
      /** @internal */
@@ -61,6 +110,15 @@ declare class BigQueryAdapter implements DatabaseAdapter {
      * internal table structure is fixed (deltas store column data as JSON).
      */
     ensureSchema(_schema: TableSchema): Promise<Result<void, AdapterError>>;
+    /**
+     * Materialise deltas into destination tables.
+     *
+     * For each affected table, queries the full delta history for touched rows,
+     * merges to latest state via column-level LWW, then upserts live rows and
+     * deletes tombstoned rows. The consumer-owned `props` column is never
+     * touched on UPDATE.
+     */
+    materialise(deltas: RowDelta[], schemas: ReadonlyArray<TableSchema>): Promise<Result<void, AdapterError>>;
     /**
      * No-op — BigQuery client is HTTP-based with no persistent connections.
      */
@@ -128,7 +186,7 @@ interface FanOutAdapterConfig {
    * Secondary failures are silently caught and never affect the return value.
    * Use case: write to Postgres (fast, operational), replicate to BigQuery (analytics).
    */
- declare class FanOutAdapter implements DatabaseAdapter {
+ declare class FanOutAdapter implements DatabaseAdapter, Materialisable {
     private readonly primary;
     private readonly secondaries;
     constructor(config: FanOutAdapterConfig);
@@ -140,6 +198,8 @@ declare class FanOutAdapter implements DatabaseAdapter {
     getLatestState(table: string, rowId: string): Promise<Result<Record<string, unknown> | null, AdapterError>>;
     /** Ensure schema on the primary first, then best-effort on secondaries. */
     ensureSchema(schema: TableSchema): Promise<Result<void, AdapterError>>;
+    /** Materialise via primary, then replicate to materialisable secondaries (fire-and-forget). */
+    materialise(deltas: RowDelta[], schemas: ReadonlyArray<TableSchema>): Promise<Result<void, AdapterError>>;
     /** Close primary and all secondary adapters. */
     close(): Promise<void>;
  }
@@ -168,7 +228,7 @@ interface LifecycleAdapterConfig {
    * Use {@link migrateToTier} as a background job to copy aged-out deltas
    * from hot to cold.
    */
- declare class LifecycleAdapter implements DatabaseAdapter {
+ declare class LifecycleAdapter implements DatabaseAdapter, Materialisable {
     private readonly hot;
     private readonly cold;
     private readonly maxAgeMs;
@@ -186,6 +246,8 @@ declare class LifecycleAdapter implements DatabaseAdapter {
     getLatestState(table: string, rowId: string): Promise<Result<Record<string, unknown> | null, AdapterError>>;
     /** Ensure schema exists on both hot and cold adapters. */
     ensureSchema(schema: TableSchema): Promise<Result<void, AdapterError>>;
+    /** Materialise via hot tier only — cold tier stores archived deltas, not destination tables. */
+    materialise(deltas: RowDelta[], schemas: ReadonlyArray<TableSchema>): Promise<Result<void, AdapterError>>;
     /** Close both hot and cold adapters. */
     close(): Promise<void>;
  }
@@ -276,7 +338,7 @@ declare class MinIOAdapter implements LakeAdapter {
    * idempotent writes. All public methods return `Result` and never throw.
    * Uses mysql2/promise connection pool for async operations.
    */
- declare class MySQLAdapter implements DatabaseAdapter {
+ declare class MySQLAdapter implements DatabaseAdapter, Materialisable {
     /** @internal */
     readonly pool: mysql.Pool;
     constructor(config: DatabaseAdapterConfig);
@@ -300,6 +362,14 @@ declare class MySQLAdapter implements DatabaseAdapter {
      * and a user table matching the given TableSchema definition.
      */
     ensureSchema(schema: TableSchema): Promise<Result<void, AdapterError>>;
+    /**
+     * Materialise deltas into destination tables.
+     *
+     * For each table with a matching schema, merges delta history into the
+     * latest row state and upserts into the destination table. Tombstoned
+     * rows are deleted. The `props` column is never touched.
+     */
+    materialise(deltas: RowDelta[], schemas: ReadonlyArray<TableSchema>): Promise<Result<void, AdapterError>>;
     /** Close the database connection pool and release resources. */
     close(): Promise<void>;
  }
@@ -310,7 +380,7 @@ declare class MySQLAdapter implements DatabaseAdapter {
    * Stores deltas in a `lakesync_deltas` table using pg Pool.
    * All public methods return `Result` and never throw.
    */
- declare class PostgresAdapter implements DatabaseAdapter {
+ declare class PostgresAdapter implements DatabaseAdapter, Materialisable {
     /** @internal */
     readonly pool: Pool;
     constructor(config: DatabaseAdapterConfig);
@@ -334,6 +404,14 @@ declare class PostgresAdapter implements DatabaseAdapter {
      * internal table structure is fixed (deltas store column data as JSONB).
      */
     ensureSchema(_schema: TableSchema): Promise<Result<void, AdapterError>>;
+    /**
+     * Materialise deltas into destination tables.
+     *
+     * For each table with a matching schema, merges delta history into the
+     * latest row state and upserts into the destination table. Tombstoned
+     * rows are deleted. The `props` column is never touched.
+     */
+    materialise(deltas: RowDelta[], schemas: ReadonlyArray<TableSchema>): Promise<Result<void, AdapterError>>;
     /** Close the database connection pool and release resources. */
     close(): Promise<void>;
  }
@@ -366,4 +444,4 @@ declare function mergeLatestState(rows: Array<{
     op: string;
  }>): Record<string, unknown> | null;

- export { AdapterConfig, BigQueryAdapter, type BigQueryAdapterConfig, CompositeAdapter, type CompositeAdapterConfig, type CompositeRoute, DatabaseAdapter, DatabaseAdapterConfig, FanOutAdapter, type FanOutAdapterConfig, LakeAdapter, LifecycleAdapter, type LifecycleAdapterConfig, type MigrateOptions, type MigrateProgress, type MigrateResult, MinIOAdapter, MySQLAdapter, ObjectInfo, PostgresAdapter, type QueryFn, createDatabaseAdapter, createQueryFn, mergeLatestState, migrateAdapter, migrateToTier, toCause, wrapAsync };
+ export { AdapterConfig, BigQueryAdapter, type BigQueryAdapterConfig, CompositeAdapter, type CompositeAdapterConfig, type CompositeRoute, DatabaseAdapter, DatabaseAdapterConfig, FanOutAdapter, type FanOutAdapterConfig, LakeAdapter, LifecycleAdapter, type LifecycleAdapterConfig, type Materialisable, type MigrateOptions, type MigrateProgress, type MigrateResult, MinIOAdapter, MySQLAdapter, ObjectInfo, PostgresAdapter, type QueryFn, buildSchemaIndex, createDatabaseAdapter, createQueryFn, groupDeltasByTable, isMaterialisable, mergeLatestState, migrateAdapter, migrateToTier, toCause, wrapAsync };
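Putting the new surface together: a hedged sketch of driving materialisation by hand after a flush. The `connectionString` config key, the `lakesync/adapter` import path, and the placeholder values are assumptions; the `materialise` signature, the `sourceTable` matching rule, and the merge semantics come from the declarations above.

import { PostgresAdapter } from "lakesync/adapter";

const pg = new PostgresAdapter({ connectionString: "postgres://…" }); // config key assumed

// Stand-in for the batch that insertDeltas just stored.
declare const justFlushed: Parameters<PostgresAdapter["materialise"]>[0];

// Deltas arriving with table === "todos" match via sourceTable and are
// LWW-merged into the todos_materialised destination table.
const res = await pg.materialise(justFlushed, [
  {
    table: "todos_materialised",
    sourceTable: "todos",
    columns: [
      { name: "id", type: "string" },
      { name: "title", type: "string" },
      { name: "done", type: "boolean" },
    ],
  },
]);
if (!res.ok) console.warn(res.error.message); // recoverable: the deltas are already stored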
package/dist/adapter.js
CHANGED
@@ -6,17 +6,20 @@ import {
    MinIOAdapter,
    MySQLAdapter,
    PostgresAdapter,
+   buildSchemaIndex,
    createDatabaseAdapter,
    createQueryFn,
+   groupDeltasByTable,
    isDatabaseAdapter,
+   isMaterialisable,
    lakeSyncTypeToBigQuery,
    mergeLatestState,
    migrateAdapter,
    migrateToTier,
    toCause,
    wrapAsync
- } from "./chunk-
- import "./chunk-
+ } from "./chunk-LZ6R74PT.js";
+ import "./chunk-B257DXIS.js";
   import "./chunk-7D4SUZUM.js";
   export {
     BigQueryAdapter,
@@ -26,9 +29,12 @@ export {
     MinIOAdapter,
     MySQLAdapter,
     PostgresAdapter,
+    buildSchemaIndex,
     createDatabaseAdapter,
     createQueryFn,
+    groupDeltasByTable,
     isDatabaseAdapter,
+    isMaterialisable,
     lakeSyncTypeToBigQuery,
     mergeLatestState,
     migrateAdapter,
package/dist/analyst.js
CHANGED
package/dist/catalogue.d.ts
CHANGED
@@ -1,6 +1,6 @@
   import { I as IcebergSchema, P as PartitionSpec } from './nessie-client-DrNikVXy.js';
   export { C as CatalogueConfig, a as CatalogueError, D as DataFile, b as IcebergField, N as NessieCatalogueClient, S as Snapshot, T as TableMetadata } from './nessie-client-DrNikVXy.js';
- import { T as TableSchema } from './types-
+ import { T as TableSchema } from './types-GGBfZBKQ.js';
   import './result-CojzlFE2.js';

   /**
package/dist/catalogue.js
CHANGED
@@ -4,8 +4,8 @@ import {
    buildPartitionSpec,
    lakeSyncTableName,
    tableSchemaToIceberg
- } from "./chunk-
- import "./chunk-
+ } from "./chunk-H3BD4SMD.js";
+ import "./chunk-B257DXIS.js";
   import "./chunk-7D4SUZUM.js";
   export {
     CatalogueError,
package/dist/{chunk-HAR3YPCY.js → chunk-4LP2EWSC.js}
CHANGED

@@ -1,14 +1,15 @@
   import {
-   isDatabaseAdapter
- } from "./chunk-
+   isDatabaseAdapter,
+   isMaterialisable
+ } from "./chunk-LZ6R74PT.js";
   import {
     buildPartitionSpec,
     lakeSyncTableName,
     tableSchemaToIceberg
- } from "./chunk-
+ } from "./chunk-H3BD4SMD.js";
   import {
     writeDeltasToParquet
- } from "./chunk-
+ } from "./chunk-D7KSRAWK.js";
   import {
     AdapterNotFoundError,
     BackpressureError,
@@ -26,7 +27,7 @@ import {
     validateAction,
     validateConnectorConfig,
     validateSyncRules
- } from "./chunk-
+ } from "./chunk-B257DXIS.js";

   // ../gateway/src/action-dispatcher.ts
   var ActionDispatcher = class {
@@ -365,6 +366,20 @@ async function flushEntries(entries, byteSize, deps, keyPrefix) {
       deps.restoreEntries(entries);
       return Err(new FlushError(`Database flush failed: ${result.error.message}`));
     }
+    if (deps.schemas && deps.schemas.length > 0 && isMaterialisable(deps.adapter)) {
+      try {
+        const matResult = await deps.adapter.materialise(entries, deps.schemas);
+        if (!matResult.ok) {
+          console.warn(
+            `[lakesync] Materialisation failed (${entries.length} deltas): ${matResult.error.message}`
+          );
+        }
+      } catch (error) {
+        console.warn(
+          `[lakesync] Materialisation error (${entries.length} deltas): ${error instanceof Error ? error.message : String(error)}`
+        );
+      }
+    }
     return Ok(void 0);
   } catch (error) {
     deps.restoreEntries(entries);
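Worth flagging on the hunk above: the hook runs only after `insertDeltas` has already succeeded, and both `Err` results and thrown exceptions are downgraded to `console.warn`, so a broken destination table can never fail or re-buffer a flush; the stored deltas remain the source of truth. For reference, the dependency shape the hook consumes (a type-level sketch only; `flushEntries` lives in a private chunk and is not a public API):

import type { Materialisable, PostgresAdapter } from "lakesync/adapter";

type Deltas = Parameters<Materialisable["materialise"]>[0];
type Schemas = Parameters<Materialisable["materialise"]>[1];

// FlushDeps as extended in this diff: `schemas` is the only new field.
interface FlushDeps {
  adapter: PostgresAdapter;                  // any DatabaseAdapter or LakeAdapter
  config: { gatewayId: string };             // flushFormat / tableSchema / catalogue optional
  restoreEntries: (entries: Deltas) => void; // re-buffer path on hard failure
  schemas?: Schemas;                         // present + non-empty means the hook runs
}

declare function flushEntries(
  entries: Deltas,
  byteSize: number,
  deps: FlushDeps,
  keyPrefix?: string, // set for per-table flushes
): Promise<unknown>;  // Result<void, FlushError> in the sources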
@@ -620,7 +635,8 @@ var SyncGateway = class {
           tableSchema: this.config.tableSchema,
           catalogue: this.config.catalogue
         },
-        restoreEntries: (e) => this.restoreEntries(e)
+        restoreEntries: (e) => this.restoreEntries(e),
+        schemas: this.config.schemas
       });
     } finally {
       this.flushing = false;
@@ -637,7 +653,8 @@ var SyncGateway = class {
           tableSchema: this.config.tableSchema,
           catalogue: this.config.catalogue
         },
-        restoreEntries: (e) => this.restoreEntries(e)
+        restoreEntries: (e) => this.restoreEntries(e),
+        schemas: this.config.schemas
       });
     } finally {
       this.flushing = false;
@@ -673,7 +690,8 @@ var SyncGateway = class {
           tableSchema: this.config.tableSchema,
           catalogue: this.config.catalogue
         },
-        restoreEntries: (e) => this.restoreEntries(e)
+        restoreEntries: (e) => this.restoreEntries(e),
+        schemas: this.config.schemas
       },
       table
     );
@@ -1112,4 +1130,4 @@ export {
     handleMetrics,
     SchemaManager
   };
- //# sourceMappingURL=chunk-
+ //# sourceMappingURL=chunk-4LP2EWSC.js.map
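End to end, enabling materialisation is one new config field on the gateway. A hedged sketch: the `lakesync/gateway` entry point and the adapter config key are assumptions, while `gatewayId`, the buffer thresholds, `schemas`, and the two-argument constructor all appear in the sources above.

import { PostgresAdapter } from "lakesync/adapter";
import { SyncGateway } from "lakesync/gateway"; // entry point assumed

const gateway = new SyncGateway(
  {
    gatewayId: "gw-1",
    maxBufferBytes: 4 * 1024 * 1024, // DEFAULT_MAX_BUFFER_BYTES
    maxBufferAgeMs: 30_000,          // DEFAULT_MAX_BUFFER_AGE_MS
    schemas: [],                     // new in 0.1.4: destination TableSchemas go here
  },
  new PostgresAdapter({ connectionString: "postgres://…" }), // config key assumed
);

// flush() drains the buffer and calls insertDeltas(); because `schemas`
// is set and PostgresAdapter is now Materialisable, materialise() then
// runs, non-fatally, before the flush resolves Ok.
await gateway.flush();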
package/dist/chunk-4LP2EWSC.js.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"sources":["../../gateway/src/action-dispatcher.ts","../../gateway/src/buffer.ts","../../gateway/src/config-store.ts","../../gateway/src/constants.ts","../../gateway/src/flush.ts","../../gateway/src/gateway.ts","../../gateway/src/validation.ts","../../gateway/src/request-handler.ts","../../gateway/src/schema-manager.ts"],"sourcesContent":["import type {\n\tAction,\n\tActionDescriptor,\n\tActionDiscovery,\n\tActionExecutionError,\n\tActionHandler,\n\tActionPush,\n\tActionResponse,\n\tActionResult,\n\tActionValidationError,\n\tAuthContext,\n\tHLCTimestamp,\n\tResult,\n} from \"@lakesync/core\";\nimport { Err, Ok, validateAction } from \"@lakesync/core\";\n\n/**\n * Dispatches imperative actions to registered handlers.\n *\n * Manages idempotency via actionId deduplication and idempotencyKey mapping.\n * Completely decoupled from the HLC clock — takes a callback for timestamp generation.\n */\nexport class ActionDispatcher {\n\tprivate actionHandlers: Map<string, ActionHandler> = new Map();\n\tprivate executedActions: Set<string> = new Set();\n\tprivate idempotencyMap: Map<\n\t\tstring,\n\t\tActionResult | { actionId: string; code: string; message: string; retryable: boolean }\n\t> = new Map();\n\n\tconstructor(handlers?: Record<string, ActionHandler>) {\n\t\tif (handlers) {\n\t\t\tfor (const [name, handler] of Object.entries(handlers)) {\n\t\t\t\tthis.actionHandlers.set(name, handler);\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Dispatch an action push to registered handlers.\n\t *\n\t * Iterates over actions, dispatches each to the registered ActionHandler\n\t * by connector name. Supports idempotency via actionId deduplication and\n\t * idempotencyKey mapping.\n\t *\n\t * @param msg - The action push containing one or more actions.\n\t * @param hlcNow - Callback to get the current server HLC timestamp.\n\t * @param context - Optional auth context for permission checks.\n\t * @returns A `Result` containing results for each action.\n\t */\n\tasync dispatch(\n\t\tmsg: ActionPush,\n\t\thlcNow: () => HLCTimestamp,\n\t\tcontext?: AuthContext,\n\t): Promise<Result<ActionResponse, ActionValidationError>> {\n\t\tconst results: Array<\n\t\t\tActionResult | { actionId: string; code: string; message: string; retryable: boolean }\n\t\t> = [];\n\n\t\tfor (const action of msg.actions) {\n\t\t\t// Structural validation\n\t\t\tconst validation = validateAction(action);\n\t\t\tif (!validation.ok) {\n\t\t\t\treturn Err(validation.error);\n\t\t\t}\n\n\t\t\t// Idempotency — check actionId\n\t\t\tif (this.executedActions.has(action.actionId)) {\n\t\t\t\tconst cached = this.idempotencyMap.get(action.actionId);\n\t\t\t\tif (cached) {\n\t\t\t\t\tresults.push(cached);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\t// Already executed but no cached result — skip\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Idempotency — check idempotencyKey\n\t\t\tif (action.idempotencyKey) {\n\t\t\t\tconst cached = this.idempotencyMap.get(`idem:${action.idempotencyKey}`);\n\t\t\t\tif (cached) {\n\t\t\t\t\tresults.push(cached);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Resolve handler\n\t\t\tconst handler = this.actionHandlers.get(action.connector);\n\t\t\tif (!handler) {\n\t\t\t\tconst errorResult = {\n\t\t\t\t\tactionId: action.actionId,\n\t\t\t\t\tcode: \"ACTION_NOT_SUPPORTED\",\n\t\t\t\t\tmessage: `No action handler registered for connector \"${action.connector}\"`,\n\t\t\t\t\tretryable: false,\n\t\t\t\t};\n\t\t\t\tresults.push(errorResult);\n\t\t\t\tthis.cacheActionResult(action, 
errorResult);\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Check action type is supported\n\t\t\tconst supported = handler.supportedActions.some((d) => d.actionType === action.actionType);\n\t\t\tif (!supported) {\n\t\t\t\tconst errorResult = {\n\t\t\t\t\tactionId: action.actionId,\n\t\t\t\t\tcode: \"ACTION_NOT_SUPPORTED\",\n\t\t\t\t\tmessage: `Action type \"${action.actionType}\" not supported by connector \"${action.connector}\"`,\n\t\t\t\t\tretryable: false,\n\t\t\t\t};\n\t\t\t\tresults.push(errorResult);\n\t\t\t\tthis.cacheActionResult(action, errorResult);\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Execute\n\t\t\tconst execResult = await handler.executeAction(action, context);\n\t\t\tif (execResult.ok) {\n\t\t\t\tresults.push(execResult.value);\n\t\t\t\tthis.cacheActionResult(action, execResult.value);\n\t\t\t} else {\n\t\t\t\tconst err = execResult.error;\n\t\t\t\tconst errorResult = {\n\t\t\t\t\tactionId: action.actionId,\n\t\t\t\t\tcode: err.code,\n\t\t\t\t\tmessage: err.message,\n\t\t\t\t\tretryable: \"retryable\" in err ? (err as ActionExecutionError).retryable : false,\n\t\t\t\t};\n\t\t\t\tresults.push(errorResult);\n\t\t\t\t// Only cache non-retryable errors — retryable errors should be retried\n\t\t\t\tif (!errorResult.retryable) {\n\t\t\t\t\tthis.cacheActionResult(action, errorResult);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tconst serverHlc = hlcNow();\n\t\treturn Ok({ results, serverHlc });\n\t}\n\n\t/**\n\t * Register a named action handler.\n\t *\n\t * @param name - Connector name (matches `Action.connector`).\n\t * @param handler - The action handler to register.\n\t */\n\tregisterHandler(name: string, handler: ActionHandler): void {\n\t\tthis.actionHandlers.set(name, handler);\n\t}\n\n\t/**\n\t * Unregister a named action handler.\n\t *\n\t * @param name - The connector name to remove.\n\t */\n\tunregisterHandler(name: string): void {\n\t\tthis.actionHandlers.delete(name);\n\t}\n\n\t/**\n\t * List all registered action handler names.\n\t *\n\t * @returns Array of registered connector names.\n\t */\n\tlistHandlers(): string[] {\n\t\treturn [...this.actionHandlers.keys()];\n\t}\n\n\t/**\n\t * Describe all registered action handlers and their supported actions.\n\t *\n\t * Returns a map of connector name to its {@link ActionDescriptor} array,\n\t * enabling frontend discovery of available actions.\n\t *\n\t * @returns An {@link ActionDiscovery} object listing connectors and their actions.\n\t */\n\tdescribe(): ActionDiscovery {\n\t\tconst connectors: Record<string, ActionDescriptor[]> = {};\n\t\tfor (const [name, handler] of this.actionHandlers) {\n\t\t\tconnectors[name] = handler.supportedActions;\n\t\t}\n\t\treturn { connectors };\n\t}\n\n\t/** Cache an action result for idempotency deduplication. */\n\tprivate cacheActionResult(\n\t\taction: Action,\n\t\tresult: ActionResult | { actionId: string; code: string; message: string; retryable: boolean },\n\t): void {\n\t\tthis.executedActions.add(action.actionId);\n\t\tthis.idempotencyMap.set(action.actionId, result);\n\t\tif (action.idempotencyKey) {\n\t\t\tthis.idempotencyMap.set(`idem:${action.idempotencyKey}`, result);\n\t\t}\n\t}\n}\n","import type { HLCTimestamp, RowDelta, RowKey } from \"@lakesync/core\";\nimport { HLC, rowKey } from \"@lakesync/core\";\n\n/** Estimated base overhead per delta entry (metadata fields: deltaId, table, rowId, clientId, op + HLC bigint). 
*/\nconst BASE_DELTA_OVERHEAD = 8 + 8 + 8 + 8 + 1;\n\n/**\n * Estimate the byte size of a single column value.\n * Uses type-aware heuristics as a proxy for in-memory size.\n */\nfunction estimateValueBytes(value: unknown): number {\n\tif (value === null || value === undefined) return 4;\n\tswitch (typeof value) {\n\t\tcase \"boolean\":\n\t\t\treturn 4;\n\t\tcase \"number\":\n\t\t\treturn 8;\n\t\tcase \"bigint\":\n\t\t\treturn 8;\n\t\tcase \"string\":\n\t\t\treturn (value as string).length * 2; // UTF-16\n\t\tdefault:\n\t\t\t// Objects, arrays — use JSON.stringify as proxy\n\t\t\ttry {\n\t\t\t\treturn JSON.stringify(value).length;\n\t\t\t} catch {\n\t\t\t\treturn 100; // fallback for circular refs etc.\n\t\t\t}\n\t}\n}\n\n/** Estimate the byte size of a RowDelta. */\nfunction estimateDeltaBytes(delta: RowDelta): number {\n\tlet bytes = BASE_DELTA_OVERHEAD;\n\tbytes += delta.deltaId.length;\n\tbytes += delta.table.length * 2;\n\tbytes += delta.rowId.length * 2;\n\tbytes += delta.clientId.length * 2;\n\tfor (const col of delta.columns) {\n\t\tbytes += col.column.length * 2; // column name\n\t\tbytes += estimateValueBytes(col.value); // column value\n\t}\n\treturn bytes;\n}\n\n/**\n * Dual-structure delta buffer.\n *\n * Maintains an append-only log for event streaming (pull) and flush,\n * plus a row-level index for O(1) conflict resolution lookups.\n */\nexport class DeltaBuffer {\n\tprivate log: RowDelta[] = [];\n\tprivate index: Map<RowKey, RowDelta> = new Map();\n\tprivate deltaIds = new Set<string>();\n\tprivate estimatedBytes = 0;\n\tprivate createdAt: number = Date.now();\n\tprivate tableBytes = new Map<string, number>();\n\tprivate tableLog = new Map<string, RowDelta[]>();\n\n\t/** Append a delta to the log and upsert the index (post-conflict-resolution). */\n\tappend(delta: RowDelta): void {\n\t\tthis.log.push(delta);\n\t\tconst key = rowKey(delta.table, delta.rowId);\n\t\tthis.index.set(key, delta);\n\t\tthis.deltaIds.add(delta.deltaId);\n\t\tconst bytes = estimateDeltaBytes(delta);\n\t\tthis.estimatedBytes += bytes;\n\t\t// Per-table tracking\n\t\tthis.tableBytes.set(delta.table, (this.tableBytes.get(delta.table) ?? 0) + bytes);\n\t\tconst tableEntries = this.tableLog.get(delta.table);\n\t\tif (tableEntries) {\n\t\t\ttableEntries.push(delta);\n\t\t} else {\n\t\t\tthis.tableLog.set(delta.table, [delta]);\n\t\t}\n\t}\n\n\t/** Get the current merged state for a row (for conflict resolution). */\n\tgetRow(key: RowKey): RowDelta | undefined {\n\t\treturn this.index.get(key);\n\t}\n\n\t/** Check if a delta with this ID already exists in the log (for idempotency). */\n\thasDelta(deltaId: string): boolean {\n\t\treturn this.deltaIds.has(deltaId);\n\t}\n\n\t/** Return change events from the log since a given HLC. */\n\tgetEventsSince(hlc: HLCTimestamp, limit: number): { deltas: RowDelta[]; hasMore: boolean } {\n\t\tlet lo = 0;\n\t\tlet hi = this.log.length;\n\t\twhile (lo < hi) {\n\t\t\tconst mid = (lo + hi) >>> 1;\n\t\t\tif (HLC.compare(this.log[mid]!.hlc, hlc) <= 0) {\n\t\t\t\tlo = mid + 1;\n\t\t\t} else {\n\t\t\t\thi = mid;\n\t\t\t}\n\t\t}\n\t\tconst hasMore = this.log.length - lo > limit;\n\t\treturn { deltas: this.log.slice(lo, lo + limit), hasMore };\n\t}\n\n\t/** Check if the buffer should be flushed based on size or age thresholds. 
*/\n\tshouldFlush(config: { maxBytes: number; maxAgeMs: number }): boolean {\n\t\tif (this.log.length === 0) return false;\n\t\treturn this.estimatedBytes >= config.maxBytes || Date.now() - this.createdAt >= config.maxAgeMs;\n\t}\n\n\t/** Per-table buffer statistics. */\n\ttableStats(): Array<{ table: string; byteSize: number; deltaCount: number }> {\n\t\tconst stats: Array<{ table: string; byteSize: number; deltaCount: number }> = [];\n\t\tfor (const [table, bytes] of this.tableBytes) {\n\t\t\tstats.push({\n\t\t\t\ttable,\n\t\t\t\tbyteSize: bytes,\n\t\t\t\tdeltaCount: this.tableLog.get(table)?.length ?? 0,\n\t\t\t});\n\t\t}\n\t\treturn stats;\n\t}\n\n\t/** Drain only the specified table's deltas, leaving other tables intact. */\n\tdrainTable(table: string): RowDelta[] {\n\t\tconst tableDeltas = this.tableLog.get(table) ?? [];\n\t\tif (tableDeltas.length === 0) return [];\n\n\t\t// Remove from main log\n\t\tthis.log = this.log.filter((d) => d.table !== table);\n\n\t\t// Remove from index and deltaIds\n\t\tfor (const delta of tableDeltas) {\n\t\t\tthis.index.delete(rowKey(delta.table, delta.rowId));\n\t\t\tthis.deltaIds.delete(delta.deltaId);\n\t\t}\n\n\t\t// Adjust byte tracking\n\t\tconst tableByteSize = this.tableBytes.get(table) ?? 0;\n\t\tthis.estimatedBytes -= tableByteSize;\n\t\tthis.tableBytes.delete(table);\n\t\tthis.tableLog.delete(table);\n\n\t\treturn tableDeltas;\n\t}\n\n\t/** Drain the log for flush. Returns log entries and clears both structures. */\n\tdrain(): RowDelta[] {\n\t\tconst entries = [...this.log];\n\t\tthis.log = [];\n\t\tthis.index.clear();\n\t\tthis.deltaIds.clear();\n\t\tthis.estimatedBytes = 0;\n\t\tthis.createdAt = Date.now();\n\t\tthis.tableBytes.clear();\n\t\tthis.tableLog.clear();\n\t\treturn entries;\n\t}\n\n\t/** Number of log entries */\n\tget logSize(): number {\n\t\treturn this.log.length;\n\t}\n\n\t/** Number of unique rows in the index */\n\tget indexSize(): number {\n\t\treturn this.index.size;\n\t}\n\n\t/** Estimated byte size of the buffer */\n\tget byteSize(): number {\n\t\treturn this.estimatedBytes;\n\t}\n\n\t/** Average byte size per delta in the buffer (0 if empty). */\n\tget averageDeltaBytes(): number {\n\t\treturn this.log.length === 0 ? 
0 : this.estimatedBytes / this.log.length;\n\t}\n}\n","import type { ConnectorConfig, SyncRulesConfig, TableSchema } from \"@lakesync/core\";\n\n/**\n * Platform-agnostic configuration storage interface.\n *\n * Implemented by MemoryConfigStore (tests, gateway-server) and\n * DurableStorageConfigStore (gateway-worker).\n */\nexport interface ConfigStore {\n\tgetSchema(gatewayId: string): Promise<TableSchema | undefined>;\n\tsetSchema(gatewayId: string, schema: TableSchema): Promise<void>;\n\tgetSyncRules(gatewayId: string): Promise<SyncRulesConfig | undefined>;\n\tsetSyncRules(gatewayId: string, rules: SyncRulesConfig): Promise<void>;\n\tgetConnectors(): Promise<Record<string, ConnectorConfig>>;\n\tsetConnectors(connectors: Record<string, ConnectorConfig>): Promise<void>;\n}\n\n/**\n * In-memory implementation of ConfigStore.\n * Used by tests and gateway-server.\n */\nexport class MemoryConfigStore implements ConfigStore {\n\tprivate schemas = new Map<string, TableSchema>();\n\tprivate syncRules = new Map<string, SyncRulesConfig>();\n\tprivate connectors: Record<string, ConnectorConfig> = {};\n\n\tasync getSchema(gatewayId: string): Promise<TableSchema | undefined> {\n\t\treturn this.schemas.get(gatewayId);\n\t}\n\n\tasync setSchema(gatewayId: string, schema: TableSchema): Promise<void> {\n\t\tthis.schemas.set(gatewayId, schema);\n\t}\n\n\tasync getSyncRules(gatewayId: string): Promise<SyncRulesConfig | undefined> {\n\t\treturn this.syncRules.get(gatewayId);\n\t}\n\n\tasync setSyncRules(gatewayId: string, rules: SyncRulesConfig): Promise<void> {\n\t\tthis.syncRules.set(gatewayId, rules);\n\t}\n\n\tasync getConnectors(): Promise<Record<string, ConnectorConfig>> {\n\t\treturn { ...this.connectors };\n\t}\n\n\tasync setConnectors(connectors: Record<string, ConnectorConfig>): Promise<void> {\n\t\tthis.connectors = { ...connectors };\n\t}\n}\n","/** Maximum push payload size (1 MiB). */\nexport const MAX_PUSH_PAYLOAD_BYTES = 1_048_576;\n\n/** Maximum number of deltas allowed in a single push. */\nexport const MAX_DELTAS_PER_PUSH = 10_000;\n\n/** Maximum number of deltas returned in a single pull. */\nexport const MAX_PULL_LIMIT = 10_000;\n\n/** Default number of deltas returned in a pull when no limit is specified. */\nexport const DEFAULT_PULL_LIMIT = 100;\n\n/** Allowed column types for schema validation. */\nexport const VALID_COLUMN_TYPES = new Set([\"string\", \"number\", \"boolean\", \"json\", \"null\"]);\n\n/** Default maximum buffer size before triggering flush (4 MiB). */\nexport const DEFAULT_MAX_BUFFER_BYTES = 4 * 1024 * 1024;\n\n/** Default maximum buffer age before triggering flush (30 seconds). */\nexport const DEFAULT_MAX_BUFFER_AGE_MS = 30_000;\n","import {\n\ttype DatabaseAdapter,\n\tisDatabaseAdapter,\n\tisMaterialisable,\n\ttype LakeAdapter,\n} from \"@lakesync/adapter\";\nimport {\n\tbuildPartitionSpec,\n\ttype DataFile,\n\tlakeSyncTableName,\n\ttype NessieCatalogueClient,\n\ttableSchemaToIceberg,\n} from \"@lakesync/catalogue\";\nimport {\n\tErr,\n\tFlushError,\n\tHLC,\n\ttype HLCTimestamp,\n\tOk,\n\ttype Result,\n\ttype RowDelta,\n\ttype TableSchema,\n\ttoError,\n} from \"@lakesync/core\";\nimport { writeDeltasToParquet } from \"@lakesync/parquet\";\nimport { bigintReplacer } from \"./json\";\nimport type { FlushEnvelope } from \"./types\";\n\n/** Configuration for flush operations. 
*/\nexport interface FlushConfig {\n\tgatewayId: string;\n\tflushFormat?: \"json\" | \"parquet\";\n\ttableSchema?: TableSchema;\n\tcatalogue?: NessieCatalogueClient;\n}\n\n/** Dependencies injected into flush operations. */\nexport interface FlushDeps {\n\tadapter: LakeAdapter | DatabaseAdapter;\n\tconfig: FlushConfig;\n\trestoreEntries: (entries: RowDelta[]) => void;\n\tschemas?: ReadonlyArray<TableSchema>;\n}\n\n/** Find the min and max HLC in a non-empty array of deltas. */\nexport function hlcRange(entries: RowDelta[]): { min: HLCTimestamp; max: HLCTimestamp } {\n\tlet min = entries[0]!.hlc;\n\tlet max = entries[0]!.hlc;\n\tfor (let i = 1; i < entries.length; i++) {\n\t\tconst hlc = entries[i]!.hlc;\n\t\tif (HLC.compare(hlc, min) < 0) min = hlc;\n\t\tif (HLC.compare(hlc, max) > 0) max = hlc;\n\t}\n\treturn { min, max };\n}\n\n/**\n * Flush a set of entries to the configured adapter.\n *\n * Unifies both full-buffer flush and per-table flush. The `keyPrefix`\n * parameter, when provided, is prepended to the HLC range in the object key\n * (e.g. \"todos\" for per-table flush).\n */\nexport async function flushEntries(\n\tentries: RowDelta[],\n\tbyteSize: number,\n\tdeps: FlushDeps,\n\tkeyPrefix?: string,\n): Promise<Result<void, FlushError>> {\n\t// Database adapter path — batch INSERT deltas directly\n\tif (isDatabaseAdapter(deps.adapter)) {\n\t\ttry {\n\t\t\tconst result = await deps.adapter.insertDeltas(entries);\n\t\t\tif (!result.ok) {\n\t\t\t\tdeps.restoreEntries(entries);\n\t\t\t\treturn Err(new FlushError(`Database flush failed: ${result.error.message}`));\n\t\t\t}\n\n\t\t\t// Materialise after successful delta insertion (non-fatal)\n\t\t\tif (deps.schemas && deps.schemas.length > 0 && isMaterialisable(deps.adapter)) {\n\t\t\t\ttry {\n\t\t\t\t\tconst matResult = await deps.adapter.materialise(entries, deps.schemas);\n\t\t\t\t\tif (!matResult.ok) {\n\t\t\t\t\t\tconsole.warn(\n\t\t\t\t\t\t\t`[lakesync] Materialisation failed (${entries.length} deltas): ${matResult.error.message}`,\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\t\t\t\t} catch (error: unknown) {\n\t\t\t\t\tconsole.warn(\n\t\t\t\t\t\t`[lakesync] Materialisation error (${entries.length} deltas): ${error instanceof Error ? error.message : String(error)}`,\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn Ok(undefined);\n\t\t} catch (error: unknown) {\n\t\t\tdeps.restoreEntries(entries);\n\t\t\treturn Err(new FlushError(`Unexpected database flush failure: ${toError(error).message}`));\n\t\t}\n\t}\n\n\t// Lake adapter path — write to object storage as Parquet or JSON\n\ttry {\n\t\tconst { min, max } = hlcRange(entries);\n\t\tconst date = new Date().toISOString().split(\"T\")[0];\n\t\tconst prefix = keyPrefix ? 
`${keyPrefix}-` : \"\";\n\t\tlet objectKey: string;\n\t\tlet data: Uint8Array;\n\t\tlet contentType: string;\n\n\t\tif (deps.config.flushFormat === \"json\") {\n\t\t\tconst envelope: FlushEnvelope = {\n\t\t\t\tversion: 1,\n\t\t\t\tgatewayId: deps.config.gatewayId,\n\t\t\t\tcreatedAt: new Date().toISOString(),\n\t\t\t\thlcRange: { min, max },\n\t\t\t\tdeltaCount: entries.length,\n\t\t\t\tbyteSize,\n\t\t\t\tdeltas: entries,\n\t\t\t};\n\n\t\t\tobjectKey = `deltas/${date}/${deps.config.gatewayId}/${prefix}${min.toString()}-${max.toString()}.json`;\n\t\t\tdata = new TextEncoder().encode(JSON.stringify(envelope, bigintReplacer));\n\t\t\tcontentType = \"application/json\";\n\t\t} else {\n\t\t\t// Parquet path\n\t\t\tif (!deps.config.tableSchema) {\n\t\t\t\tdeps.restoreEntries(entries);\n\t\t\t\treturn Err(new FlushError(\"tableSchema required for Parquet flush\"));\n\t\t\t}\n\n\t\t\tconst parquetResult = await writeDeltasToParquet(entries, deps.config.tableSchema);\n\t\t\tif (!parquetResult.ok) {\n\t\t\t\tdeps.restoreEntries(entries);\n\t\t\t\treturn Err(parquetResult.error);\n\t\t\t}\n\n\t\t\tobjectKey = `deltas/${date}/${deps.config.gatewayId}/${prefix}${min.toString()}-${max.toString()}.parquet`;\n\t\t\tdata = parquetResult.value;\n\t\t\tcontentType = \"application/vnd.apache.parquet\";\n\t\t}\n\n\t\tconst result = await deps.adapter.putObject(objectKey, data, contentType);\n\t\tif (!result.ok) {\n\t\t\tdeps.restoreEntries(entries);\n\t\t\treturn Err(new FlushError(`Failed to write flush envelope: ${result.error.message}`));\n\t\t}\n\n\t\tif (deps.config.catalogue && deps.config.tableSchema) {\n\t\t\tawait commitToCatalogue(\n\t\t\t\tobjectKey,\n\t\t\t\tdata.byteLength,\n\t\t\t\tentries.length,\n\t\t\t\tdeps.config.catalogue,\n\t\t\t\tdeps.config.tableSchema,\n\t\t\t);\n\t\t}\n\n\t\treturn Ok(undefined);\n\t} catch (error: unknown) {\n\t\tdeps.restoreEntries(entries);\n\t\treturn Err(new FlushError(`Unexpected flush failure: ${toError(error).message}`));\n\t}\n}\n\n/**\n * Best-effort catalogue commit. Registers the flushed Parquet file\n * as an Iceberg snapshot via Nessie. 
Errors are logged but do not\n * fail the flush — the Parquet file is the source of truth.\n */\nexport async function commitToCatalogue(\n\tobjectKey: string,\n\tfileSizeInBytes: number,\n\trecordCount: number,\n\tcatalogue: NessieCatalogueClient,\n\tschema: TableSchema,\n): Promise<void> {\n\tconst { namespace, name } = lakeSyncTableName(schema.table);\n\tconst icebergSchema = tableSchemaToIceberg(schema);\n\tconst partitionSpec = buildPartitionSpec(icebergSchema);\n\n\t// Ensure namespace exists (idempotent)\n\tawait catalogue.createNamespace(namespace);\n\n\t// Ensure table exists (idempotent — catch 409)\n\tconst createResult = await catalogue.createTable(namespace, name, icebergSchema, partitionSpec);\n\tif (!createResult.ok && createResult.error.statusCode !== 409) {\n\t\treturn;\n\t}\n\n\t// Build DataFile reference\n\tconst dataFile: DataFile = {\n\t\tcontent: \"data\",\n\t\t\"file-path\": objectKey,\n\t\t\"file-format\": \"PARQUET\",\n\t\t\"record-count\": recordCount,\n\t\t\"file-size-in-bytes\": fileSizeInBytes,\n\t};\n\n\t// Append file to table snapshot\n\tconst appendResult = await catalogue.appendFiles(namespace, name, [dataFile]);\n\tif (!appendResult.ok && appendResult.error.statusCode === 409) {\n\t\t// On 409 conflict, retry once with fresh metadata\n\t\tawait catalogue.appendFiles(namespace, name, [dataFile]);\n\t}\n}\n","import { type DatabaseAdapter, isDatabaseAdapter, type LakeAdapter } from \"@lakesync/adapter\";\nimport {\n\ttype ActionDiscovery,\n\ttype ActionHandler,\n\ttype ActionPush,\n\ttype ActionResponse,\n\ttype ActionValidationError,\n\ttype AdapterError,\n\tAdapterNotFoundError,\n\ttype AuthContext,\n\tBackpressureError,\n\ttype ClockDriftError,\n\tErr,\n\tFlushError,\n\tfilterDeltas,\n\tHLC,\n\ttype IngestTarget,\n\tOk,\n\ttype Result,\n\ttype RowDelta,\n\tresolveLWW,\n\trowKey,\n\ttype SchemaError,\n\ttype SyncPull,\n\ttype SyncPush,\n\ttype SyncResponse,\n\ttype SyncRulesContext,\n} from \"@lakesync/core\";\nimport { ActionDispatcher } from \"./action-dispatcher\";\nimport { DeltaBuffer } from \"./buffer\";\nimport { flushEntries } from \"./flush\";\nimport type { GatewayConfig, HandlePushResult } from \"./types\";\n\nexport type { SyncPush, SyncPull, SyncResponse };\n\n/**\n * Sync gateway -- coordinates delta ingestion, conflict resolution, and flush.\n *\n * Thin facade composing ActionDispatcher, DeltaBuffer, and flushEntries.\n */\nexport class SyncGateway implements IngestTarget {\n\tprivate hlc: HLC;\n\treadonly buffer: DeltaBuffer;\n\treadonly actions: ActionDispatcher;\n\tprivate config: GatewayConfig;\n\tprivate adapter: LakeAdapter | DatabaseAdapter | null;\n\tprivate flushing = false;\n\n\tconstructor(config: GatewayConfig, adapter?: LakeAdapter | DatabaseAdapter) {\n\t\tthis.config = { sourceAdapters: {}, ...config };\n\t\tthis.hlc = new HLC();\n\t\tthis.buffer = new DeltaBuffer();\n\t\tthis.adapter = this.config.adapter ?? adapter ?? null;\n\t\tthis.actions = new ActionDispatcher(config.actionHandlers);\n\t}\n\n\t/** Restore drained entries back to the buffer for retry. 
*/\n\tprivate restoreEntries(entries: RowDelta[]): void {\n\t\tfor (const entry of entries) {\n\t\t\tthis.buffer.append(entry);\n\t\t}\n\t}\n\n\t/**\n\t * Handle an incoming push from a client.\n\t *\n\t * Validates HLC drift, resolves conflicts via LWW, and appends to the buffer.\n\t *\n\t * @param msg - The push message containing client deltas.\n\t * @returns A `Result` with the new server HLC and accepted count,\n\t * or a `ClockDriftError` if the client clock is too far ahead.\n\t */\n\thandlePush(\n\t\tmsg: SyncPush,\n\t): Result<HandlePushResult, ClockDriftError | SchemaError | BackpressureError> {\n\t\t// Backpressure — reject when buffer exceeds threshold to prevent OOM\n\t\tconst backpressureLimit = this.config.maxBackpressureBytes ?? this.config.maxBufferBytes * 2;\n\t\tif (this.buffer.byteSize >= backpressureLimit) {\n\t\t\treturn Err(\n\t\t\t\tnew BackpressureError(\n\t\t\t\t\t`Buffer backpressure exceeded (${this.buffer.byteSize} >= ${backpressureLimit} bytes)`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\n\t\tlet accepted = 0;\n\t\tconst ingested: RowDelta[] = [];\n\n\t\tfor (const delta of msg.deltas) {\n\t\t\t// Check for idempotent re-push\n\t\t\tif (this.buffer.hasDelta(delta.deltaId)) {\n\t\t\t\taccepted++;\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Validate delta against the schema if a schema manager is configured\n\t\t\tif (this.config.schemaManager) {\n\t\t\t\tconst schemaResult = this.config.schemaManager.validateDelta(delta);\n\t\t\t\tif (!schemaResult.ok) {\n\t\t\t\t\treturn Err(schemaResult.error);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Validate HLC drift against server's physical clock\n\t\t\tconst recvResult = this.hlc.recv(delta.hlc);\n\t\t\tif (!recvResult.ok) {\n\t\t\t\treturn Err(recvResult.error);\n\t\t\t}\n\n\t\t\t// Check for conflict with existing state\n\t\t\tconst key = rowKey(delta.table, delta.rowId);\n\t\t\tconst existing = this.buffer.getRow(key);\n\n\t\t\tif (existing) {\n\t\t\t\tconst resolved = resolveLWW(existing, delta);\n\t\t\t\tif (resolved.ok) {\n\t\t\t\t\tthis.buffer.append(resolved.value);\n\t\t\t\t\tingested.push(resolved.value);\n\t\t\t\t}\n\t\t\t\t// If resolution fails (should not happen with LWW on same row), skip\n\t\t\t} else {\n\t\t\t\tthis.buffer.append(delta);\n\t\t\t\tingested.push(delta);\n\t\t\t}\n\n\t\t\taccepted++;\n\t\t}\n\n\t\tconst serverHlc = this.hlc.now();\n\t\treturn Ok({ serverHlc, accepted, deltas: ingested });\n\t}\n\n\t/**\n\t * Handle a pull request from a client.\n\t *\n\t * When `msg.source` is set, pulls deltas from the named source adapter\n\t * instead of the in-memory buffer. Otherwise, returns change events\n\t * from the log since the given HLC. When a {@link SyncRulesContext} is\n\t * provided, deltas are post-filtered by the client's bucket definitions\n\t * and JWT claims. 
The buffer path over-fetches (3x the requested limit)\n\t * and retries up to 5 times to fill the page.\n\t *\n\t * @param msg - The pull message specifying the cursor and limit.\n\t * @param context - Optional sync rules context for row-level filtering.\n\t * @returns A `Result` containing the matching deltas, server HLC, and pagination flag.\n\t */\n\thandlePull(\n\t\tmsg: SyncPull & { source: string },\n\t\tcontext?: SyncRulesContext,\n\t): Promise<Result<SyncResponse, AdapterNotFoundError | AdapterError>>;\n\thandlePull(msg: SyncPull, context?: SyncRulesContext): Result<SyncResponse, never>;\n\thandlePull(\n\t\tmsg: SyncPull,\n\t\tcontext?: SyncRulesContext,\n\t):\n\t\t| Promise<Result<SyncResponse, AdapterNotFoundError | AdapterError>>\n\t\t| Result<SyncResponse, never> {\n\t\tif (msg.source) {\n\t\t\treturn this.handleAdapterPull(msg, context);\n\t\t}\n\n\t\treturn this.handleBufferPull(msg, context);\n\t}\n\n\t/** Pull from the in-memory buffer (original path). */\n\tprivate handleBufferPull(msg: SyncPull, context?: SyncRulesContext): Result<SyncResponse, never> {\n\t\tif (!context) {\n\t\t\tconst { deltas, hasMore } = this.buffer.getEventsSince(msg.sinceHlc, msg.maxDeltas);\n\t\t\tconst serverHlc = this.hlc.now();\n\t\t\treturn Ok({ deltas, serverHlc, hasMore });\n\t\t}\n\n\t\t// Over-fetch and filter with bounded retry\n\t\tconst maxRetries = 5;\n\t\tconst overFetchMultiplier = 3;\n\t\tlet cursor = msg.sinceHlc;\n\t\tconst collected: RowDelta[] = [];\n\n\t\tfor (let attempt = 0; attempt < maxRetries; attempt++) {\n\t\t\tconst fetchLimit = msg.maxDeltas * overFetchMultiplier;\n\t\t\tconst { deltas: raw, hasMore: rawHasMore } = this.buffer.getEventsSince(cursor, fetchLimit);\n\n\t\t\tif (raw.length === 0) {\n\t\t\t\t// No more data in buffer\n\t\t\t\tconst serverHlc = this.hlc.now();\n\t\t\t\treturn Ok({ deltas: collected, serverHlc, hasMore: false });\n\t\t\t}\n\n\t\t\tconst filtered = filterDeltas(raw, context);\n\t\t\tcollected.push(...filtered);\n\n\t\t\tif (collected.length >= msg.maxDeltas) {\n\t\t\t\t// Trim to exactly maxDeltas\n\t\t\t\tconst trimmed = collected.slice(0, msg.maxDeltas);\n\t\t\t\tconst serverHlc = this.hlc.now();\n\t\t\t\treturn Ok({ deltas: trimmed, serverHlc, hasMore: true });\n\t\t\t}\n\n\t\t\tif (!rawHasMore) {\n\t\t\t\t// Exhausted the buffer\n\t\t\t\tconst serverHlc = this.hlc.now();\n\t\t\t\treturn Ok({ deltas: collected, serverHlc, hasMore: false });\n\t\t\t}\n\n\t\t\t// Advance cursor past the last examined delta\n\t\t\tcursor = raw[raw.length - 1]!.hlc;\n\t\t}\n\n\t\t// Exhausted retries — return what we have\n\t\tconst serverHlc = this.hlc.now();\n\t\tconst hasMore = collected.length >= msg.maxDeltas;\n\t\tconst trimmed = collected.slice(0, msg.maxDeltas);\n\t\treturn Ok({ deltas: trimmed, serverHlc, hasMore });\n\t}\n\n\t/** Pull from a named source adapter. 
*/\n\tprivate async handleAdapterPull(\n\t\tmsg: SyncPull,\n\t\tcontext?: SyncRulesContext,\n\t): Promise<Result<SyncResponse, AdapterNotFoundError | AdapterError>> {\n\t\tconst adapter = this.config.sourceAdapters?.[msg.source!];\n\t\tif (!adapter) {\n\t\t\treturn Err(new AdapterNotFoundError(`Source adapter \"${msg.source}\" not found`));\n\t\t}\n\n\t\tconst queryResult = await adapter.queryDeltasSince(msg.sinceHlc);\n\t\tif (!queryResult.ok) {\n\t\t\treturn Err(queryResult.error);\n\t\t}\n\n\t\tlet deltas = queryResult.value;\n\n\t\t// Apply sync rules filtering if context is provided\n\t\tif (context) {\n\t\t\tdeltas = filterDeltas(deltas, context);\n\t\t}\n\n\t\t// Paginate\n\t\tconst hasMore = deltas.length > msg.maxDeltas;\n\t\tconst sliced = deltas.slice(0, msg.maxDeltas);\n\n\t\tconst serverHlc = this.hlc.now();\n\t\treturn Ok({ deltas: sliced, serverHlc, hasMore });\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Flush — delegates to flush module\n\t// -----------------------------------------------------------------------\n\n\t/**\n\t * Flush the buffer to the configured adapter.\n\t *\n\t * Writes deltas as either a Parquet file (default) or a JSON\n\t * {@link FlushEnvelope} to the adapter, depending on\n\t * `config.flushFormat`. If the write fails, the buffer entries\n\t * are restored so they can be retried.\n\t *\n\t * @returns A `Result` indicating success or a `FlushError`.\n\t */\n\tasync flush(): Promise<Result<void, FlushError>> {\n\t\tif (this.flushing) {\n\t\t\treturn Err(new FlushError(\"Flush already in progress\"));\n\t\t}\n\t\tif (this.buffer.logSize === 0) {\n\t\t\treturn Ok(undefined);\n\t\t}\n\t\tif (!this.adapter) {\n\t\t\treturn Err(new FlushError(\"No adapter configured\"));\n\t\t}\n\n\t\tthis.flushing = true;\n\n\t\t// Database adapter path — drain after flushing flag is set\n\t\tif (isDatabaseAdapter(this.adapter)) {\n\t\t\tconst entries = this.buffer.drain();\n\t\t\tif (entries.length === 0) {\n\t\t\t\tthis.flushing = false;\n\t\t\t\treturn Ok(undefined);\n\t\t\t}\n\n\t\t\ttry {\n\t\t\t\treturn await flushEntries(entries, 0, {\n\t\t\t\t\tadapter: this.adapter,\n\t\t\t\t\tconfig: {\n\t\t\t\t\t\tgatewayId: this.config.gatewayId,\n\t\t\t\t\t\tflushFormat: this.config.flushFormat,\n\t\t\t\t\t\ttableSchema: this.config.tableSchema,\n\t\t\t\t\t\tcatalogue: this.config.catalogue,\n\t\t\t\t\t},\n\t\t\t\t\trestoreEntries: (e) => this.restoreEntries(e),\n\t\t\t\t\tschemas: this.config.schemas,\n\t\t\t\t});\n\t\t\t} finally {\n\t\t\t\tthis.flushing = false;\n\t\t\t}\n\t\t}\n\n\t\t// Lake adapter path\n\t\tconst byteSize = this.buffer.byteSize;\n\t\tconst entries = this.buffer.drain();\n\n\t\ttry {\n\t\t\treturn await flushEntries(entries, byteSize, {\n\t\t\t\tadapter: this.adapter,\n\t\t\t\tconfig: {\n\t\t\t\t\tgatewayId: this.config.gatewayId,\n\t\t\t\t\tflushFormat: this.config.flushFormat,\n\t\t\t\t\ttableSchema: this.config.tableSchema,\n\t\t\t\t\tcatalogue: this.config.catalogue,\n\t\t\t\t},\n\t\t\t\trestoreEntries: (e) => this.restoreEntries(e),\n\t\t\t\tschemas: this.config.schemas,\n\t\t\t});\n\t\t} finally {\n\t\t\tthis.flushing = false;\n\t\t}\n\t}\n\n\t/**\n\t * Flush a single table's deltas from the buffer.\n\t *\n\t * Drains only the specified table's deltas and flushes them,\n\t * leaving other tables in the buffer.\n\t */\n\tasync flushTable(table: string): Promise<Result<void, FlushError>> {\n\t\tif (this.flushing) {\n\t\t\treturn Err(new FlushError(\"Flush already in progress\"));\n\t\t}\n\t\tif 
(!this.adapter) {\n\t\t\treturn Err(new FlushError(\"No adapter configured\"));\n\t\t}\n\n\t\tconst entries = this.buffer.drainTable(table);\n\t\tif (entries.length === 0) {\n\t\t\treturn Ok(undefined);\n\t\t}\n\n\t\tthis.flushing = true;\n\n\t\ttry {\n\t\t\treturn await flushEntries(\n\t\t\t\tentries,\n\t\t\t\t0,\n\t\t\t\t{\n\t\t\t\t\tadapter: this.adapter,\n\t\t\t\t\tconfig: {\n\t\t\t\t\t\tgatewayId: this.config.gatewayId,\n\t\t\t\t\t\tflushFormat: this.config.flushFormat,\n\t\t\t\t\t\ttableSchema: this.config.tableSchema,\n\t\t\t\t\t\tcatalogue: this.config.catalogue,\n\t\t\t\t\t},\n\t\t\t\t\trestoreEntries: (e) => this.restoreEntries(e),\n\t\t\t\t\tschemas: this.config.schemas,\n\t\t\t\t},\n\t\t\t\ttable,\n\t\t\t);\n\t\t} finally {\n\t\t\tthis.flushing = false;\n\t\t}\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Actions — delegates to ActionDispatcher\n\t// -----------------------------------------------------------------------\n\n\t/** Handle an incoming action push from a client. */\n\tasync handleAction(\n\t\tmsg: ActionPush,\n\t\tcontext?: AuthContext,\n\t): Promise<Result<ActionResponse, ActionValidationError>> {\n\t\treturn this.actions.dispatch(msg, () => this.hlc.now(), context);\n\t}\n\n\t/** Register a named action handler. */\n\tregisterActionHandler(name: string, handler: ActionHandler): void {\n\t\tthis.actions.registerHandler(name, handler);\n\t}\n\n\t/** Unregister a named action handler. */\n\tunregisterActionHandler(name: string): void {\n\t\tthis.actions.unregisterHandler(name);\n\t}\n\n\t/** List all registered action handler names. */\n\tlistActionHandlers(): string[] {\n\t\treturn this.actions.listHandlers();\n\t}\n\n\t/** Describe all registered action handlers and their supported actions. */\n\tdescribeActions(): ActionDiscovery {\n\t\treturn this.actions.describe();\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Source adapters\n\t// -----------------------------------------------------------------------\n\n\t/**\n\t * Register a named source adapter for adapter-sourced pulls.\n\t *\n\t * @param name - Unique source name (used as the `source` parameter in pull requests).\n\t * @param adapter - The database adapter to register.\n\t */\n\tregisterSource(name: string, adapter: DatabaseAdapter): void {\n\t\tthis.config.sourceAdapters![name] = adapter;\n\t}\n\n\t/**\n\t * Unregister a named source adapter.\n\t *\n\t * @param name - The source name to remove.\n\t */\n\tunregisterSource(name: string): void {\n\t\tdelete this.config.sourceAdapters![name];\n\t}\n\n\t/**\n\t * List all registered source adapter names.\n\t *\n\t * @returns Array of registered source adapter names.\n\t */\n\tlistSources(): string[] {\n\t\treturn Object.keys(this.config.sourceAdapters!);\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Buffer queries\n\t// -----------------------------------------------------------------------\n\n\t/** Get per-table buffer statistics. 
*/\n\tget tableStats(): Array<{ table: string; byteSize: number; deltaCount: number }> {\n\t\treturn this.buffer.tableStats();\n\t}\n\n\t/**\n\t * Get tables that exceed the per-table budget.\n\t */\n\tgetTablesExceedingBudget(): string[] {\n\t\tconst budget = this.config.perTableBudgetBytes;\n\t\tif (!budget) return [];\n\t\treturn this.buffer\n\t\t\t.tableStats()\n\t\t\t.filter((s) => s.byteSize >= budget)\n\t\t\t.map((s) => s.table);\n\t}\n\n\t/** Check if the buffer should be flushed based on config thresholds. */\n\tshouldFlush(): boolean {\n\t\tlet effectiveMaxBytes = this.config.maxBufferBytes;\n\n\t\t// Reduce threshold for wide-column deltas\n\t\tconst adaptive = this.config.adaptiveBufferConfig;\n\t\tif (adaptive && this.buffer.averageDeltaBytes > adaptive.wideColumnThreshold) {\n\t\t\teffectiveMaxBytes = Math.floor(effectiveMaxBytes * adaptive.reductionFactor);\n\t\t}\n\n\t\treturn this.buffer.shouldFlush({\n\t\t\tmaxBytes: effectiveMaxBytes,\n\t\t\tmaxAgeMs: this.config.maxBufferAgeMs,\n\t\t});\n\t}\n\n\t/** Get buffer statistics for monitoring. */\n\tget bufferStats(): {\n\t\tlogSize: number;\n\t\tindexSize: number;\n\t\tbyteSize: number;\n\t} {\n\t\treturn {\n\t\t\tlogSize: this.buffer.logSize,\n\t\t\tindexSize: this.buffer.indexSize,\n\t\t\tbyteSize: this.buffer.byteSize,\n\t\t};\n\t}\n}\n","import type {\n\tActionPush,\n\tHLCTimestamp,\n\tResolvedClaims,\n\tSyncPull,\n\tSyncPush,\n\tSyncRulesConfig,\n\tSyncRulesContext,\n\tTableSchema,\n} from \"@lakesync/core\";\nimport { bigintReviver, Err, Ok, type Result } from \"@lakesync/core\";\nimport {\n\tDEFAULT_PULL_LIMIT,\n\tMAX_DELTAS_PER_PUSH,\n\tMAX_PULL_LIMIT,\n\tVALID_COLUMN_TYPES,\n} from \"./constants\";\n\n/** Validation error with HTTP status code. */\nexport interface RequestError {\n\tstatus: number;\n\tmessage: string;\n}\n\n/**\n * Validate and parse a push request body.\n * Handles JSON parsing with bigint revival.\n */\nexport function validatePushBody(\n\traw: string,\n\theaderClientId?: string | null,\n): Result<SyncPush, RequestError> {\n\tlet body: SyncPush;\n\ttry {\n\t\tbody = JSON.parse(raw, bigintReviver) as SyncPush;\n\t} catch {\n\t\treturn Err({ status: 400, message: \"Invalid JSON body\" });\n\t}\n\n\tif (!body.clientId || !Array.isArray(body.deltas)) {\n\t\treturn Err({ status: 400, message: \"Missing required fields: clientId, deltas\" });\n\t}\n\n\tif (headerClientId && body.clientId !== headerClientId) {\n\t\treturn Err({\n\t\t\tstatus: 403,\n\t\t\tmessage: \"Client ID mismatch: push clientId does not match authenticated identity\",\n\t\t});\n\t}\n\n\tif (body.deltas.length > MAX_DELTAS_PER_PUSH) {\n\t\treturn Err({ status: 400, message: \"Too many deltas in a single push (max 10,000)\" });\n\t}\n\n\treturn Ok(body);\n}\n\n/**\n * Parse and validate pull query parameters.\n */\nexport function parsePullParams(params: {\n\tsince: string | null;\n\tclientId: string | null;\n\tlimit: string | null;\n\tsource: string | null;\n}): Result<SyncPull, RequestError> {\n\tif (!params.since || !params.clientId) {\n\t\treturn Err({ status: 400, message: \"Missing required query params: since, clientId\" });\n\t}\n\n\tlet sinceHlc: HLCTimestamp;\n\ttry {\n\t\tsinceHlc = BigInt(params.since) as HLCTimestamp;\n\t} catch {\n\t\treturn Err({\n\t\t\tstatus: 400,\n\t\t\tmessage: \"Invalid 'since' parameter \\u2014 must be a decimal integer\",\n\t\t});\n\t}\n\n\tconst rawLimit = params.limit ? 
Number.parseInt(params.limit, 10) : DEFAULT_PULL_LIMIT;\n\tif (Number.isNaN(rawLimit) || rawLimit < 1) {\n\t\treturn Err({\n\t\t\tstatus: 400,\n\t\t\tmessage: \"Invalid 'limit' parameter \\u2014 must be a positive integer\",\n\t\t});\n\t}\n\tconst maxDeltas = Math.min(rawLimit, MAX_PULL_LIMIT);\n\n\tconst msg: SyncPull = {\n\t\tclientId: params.clientId,\n\t\tsinceHlc,\n\t\tmaxDeltas,\n\t\t...(params.source ? { source: params.source } : {}),\n\t};\n\n\treturn Ok(msg);\n}\n\n/**\n * Validate and parse an action request body.\n */\nexport function validateActionBody(\n\traw: string,\n\theaderClientId?: string | null,\n): Result<ActionPush, RequestError> {\n\tlet body: ActionPush;\n\ttry {\n\t\tbody = JSON.parse(raw, bigintReviver) as ActionPush;\n\t} catch {\n\t\treturn Err({ status: 400, message: \"Invalid JSON body\" });\n\t}\n\n\tif (!body.clientId || !Array.isArray(body.actions)) {\n\t\treturn Err({ status: 400, message: \"Missing required fields: clientId, actions\" });\n\t}\n\n\tif (headerClientId && body.clientId !== headerClientId) {\n\t\treturn Err({\n\t\t\tstatus: 403,\n\t\t\tmessage: \"Client ID mismatch: action clientId does not match authenticated identity\",\n\t\t});\n\t}\n\n\treturn Ok(body);\n}\n\n/**\n * Validate a table schema body.\n */\nexport function validateSchemaBody(raw: string): Result<TableSchema, RequestError> {\n\tlet schema: TableSchema;\n\ttry {\n\t\tschema = JSON.parse(raw) as TableSchema;\n\t} catch {\n\t\treturn Err({ status: 400, message: \"Invalid JSON body\" });\n\t}\n\n\tif (!schema.table || !Array.isArray(schema.columns)) {\n\t\treturn Err({ status: 400, message: \"Missing required fields: table, columns\" });\n\t}\n\n\tfor (const col of schema.columns) {\n\t\tif (typeof col.name !== \"string\" || col.name.length === 0) {\n\t\t\treturn Err({ status: 400, message: \"Each column must have a non-empty 'name' string\" });\n\t\t}\n\t\tif (!VALID_COLUMN_TYPES.has(col.type)) {\n\t\t\treturn Err({\n\t\t\t\tstatus: 400,\n\t\t\t\tmessage: `Invalid column type \"${col.type}\" for column \"${col.name}\". Allowed: string, number, boolean, json, null`,\n\t\t\t});\n\t\t}\n\t}\n\n\treturn Ok(schema);\n}\n\n/**\n * Map a gateway push error code to an HTTP status code.\n */\nexport function pushErrorToStatus(code: string): number {\n\tswitch (code) {\n\t\tcase \"CLOCK_DRIFT\":\n\t\t\treturn 409;\n\t\tcase \"SCHEMA_MISMATCH\":\n\t\t\treturn 422;\n\t\tcase \"BACKPRESSURE\":\n\t\t\treturn 503;\n\t\tdefault:\n\t\t\treturn 500;\n\t}\n}\n\n/**\n * Build a SyncRulesContext from rules and claims.\n * Returns undefined when no rules or empty buckets.\n */\nexport function buildSyncRulesContext(\n\trules: SyncRulesConfig | undefined,\n\tclaims: ResolvedClaims,\n): SyncRulesContext | undefined {\n\tif (!rules || rules.buckets.length === 0) {\n\t\treturn undefined;\n\t}\n\treturn { claims, rules };\n}\n","import type { HLCTimestamp, ResolvedClaims, RowDelta, SyncRulesConfig } from \"@lakesync/core\";\nimport { validateConnectorConfig, validateSyncRules } from \"@lakesync/core\";\nimport type { ConfigStore } from \"./config-store\";\nimport type { SyncGateway } from \"./gateway\";\nimport {\n\tbuildSyncRulesContext,\n\tparsePullParams,\n\tpushErrorToStatus,\n\tvalidateActionBody,\n\tvalidatePushBody,\n\tvalidateSchemaBody,\n} from \"./validation\";\n\n/** Result from a request handler, ready for platform-specific serialisation. 
*/\nexport interface HandlerResult {\n\tstatus: number;\n\tbody: unknown;\n}\n\n/**\n * Handle a push request.\n *\n * @param gateway - The SyncGateway instance.\n * @param raw - The raw request body string.\n * @param headerClientId - Client ID from auth header (for mismatch check).\n * @param opts - Optional callbacks for persistence and broadcast.\n */\nexport function handlePushRequest(\n\tgateway: SyncGateway,\n\traw: string,\n\theaderClientId?: string | null,\n\topts?: {\n\t\t/** Persist deltas before processing (WAL-style). */\n\t\tpersistBatch?: (deltas: RowDelta[]) => void;\n\t\t/** Clear persisted deltas after successful push. */\n\t\tclearPersistence?: () => void;\n\t\t/** Broadcast deltas to connected clients. */\n\t\tbroadcastFn?: (\n\t\t\tdeltas: RowDelta[],\n\t\t\tserverHlc: HLCTimestamp,\n\t\t\texcludeClientId: string,\n\t\t) => void | Promise<void>;\n\t},\n): HandlerResult {\n\tconst validation = validatePushBody(raw, headerClientId);\n\tif (!validation.ok) {\n\t\treturn { status: validation.error.status, body: { error: validation.error.message } };\n\t}\n\n\tconst body = validation.value;\n\n\t// Persist before processing (WAL-style)\n\topts?.persistBatch?.(body.deltas);\n\n\tconst result = gateway.handlePush(body);\n\tif (!result.ok) {\n\t\treturn {\n\t\t\tstatus: pushErrorToStatus(result.error.code),\n\t\t\tbody: { error: result.error.message },\n\t\t};\n\t}\n\n\t// Clear persisted deltas on success\n\topts?.clearPersistence?.();\n\n\t// Broadcast to connected clients (fire and forget)\n\tif (opts?.broadcastFn && result.value.deltas.length > 0) {\n\t\topts.broadcastFn(result.value.deltas, result.value.serverHlc, body.clientId);\n\t}\n\n\treturn { status: 200, body: result.value };\n}\n\n/**\n * Handle a pull request.\n */\nexport async function handlePullRequest(\n\tgateway: SyncGateway,\n\tparams: {\n\t\tsince: string | null;\n\t\tclientId: string | null;\n\t\tlimit: string | null;\n\t\tsource: string | null;\n\t},\n\tclaims?: ResolvedClaims,\n\tsyncRules?: SyncRulesConfig,\n): Promise<HandlerResult> {\n\tconst validation = parsePullParams(params);\n\tif (!validation.ok) {\n\t\treturn { status: validation.error.status, body: { error: validation.error.message } };\n\t}\n\n\tconst msg = validation.value;\n\tconst context = buildSyncRulesContext(syncRules, claims ?? {});\n\n\tconst result = msg.source\n\t\t? await gateway.handlePull(\n\t\t\t\tmsg as import(\"@lakesync/core\").SyncPull & { source: string },\n\t\t\t\tcontext,\n\t\t\t)\n\t\t: gateway.handlePull(msg, context);\n\n\tif (!result.ok) {\n\t\tconst err = result.error;\n\t\tif (err.code === \"ADAPTER_NOT_FOUND\") {\n\t\t\treturn { status: 404, body: { error: err.message } };\n\t\t}\n\t\treturn { status: 500, body: { error: err.message } };\n\t}\n\n\treturn { status: 200, body: result.value };\n}\n\n/**\n * Handle an action request.\n */\nexport async function handleActionRequest(\n\tgateway: SyncGateway,\n\traw: string,\n\theaderClientId?: string | null,\n\tclaims?: ResolvedClaims,\n): Promise<HandlerResult> {\n\tconst validation = validateActionBody(raw, headerClientId);\n\tif (!validation.ok) {\n\t\treturn { status: validation.error.status, body: { error: validation.error.message } };\n\t}\n\n\tconst context = claims ? 
{ claims } : undefined;\n\tconst result = await gateway.handleAction(validation.value, context);\n\n\tif (!result.ok) {\n\t\treturn { status: 400, body: { error: result.error.message } };\n\t}\n\n\treturn { status: 200, body: result.value };\n}\n\n/**\n * Handle a flush request.\n */\nexport async function handleFlushRequest(\n\tgateway: SyncGateway,\n\topts?: { clearPersistence?: () => void },\n): Promise<HandlerResult> {\n\tconst result = await gateway.flush();\n\tif (!result.ok) {\n\t\treturn { status: 500, body: { error: result.error.message } };\n\t}\n\n\topts?.clearPersistence?.();\n\treturn { status: 200, body: { flushed: true } };\n}\n\n/**\n * Handle saving a table schema.\n */\nexport async function handleSaveSchema(\n\traw: string,\n\tstore: ConfigStore,\n\tgatewayId: string,\n): Promise<HandlerResult> {\n\tconst validation = validateSchemaBody(raw);\n\tif (!validation.ok) {\n\t\treturn { status: validation.error.status, body: { error: validation.error.message } };\n\t}\n\n\tawait store.setSchema(gatewayId, validation.value);\n\treturn { status: 200, body: { saved: true } };\n}\n\n/**\n * Handle saving sync rules.\n */\nexport async function handleSaveSyncRules(\n\traw: string,\n\tstore: ConfigStore,\n\tgatewayId: string,\n): Promise<HandlerResult> {\n\tlet config: unknown;\n\ttry {\n\t\tconfig = JSON.parse(raw);\n\t} catch {\n\t\treturn { status: 400, body: { error: \"Invalid JSON body\" } };\n\t}\n\n\tconst validation = validateSyncRules(config);\n\tif (!validation.ok) {\n\t\treturn { status: 400, body: { error: validation.error.message } };\n\t}\n\n\tawait store.setSyncRules(gatewayId, config as SyncRulesConfig);\n\treturn { status: 200, body: { saved: true } };\n}\n\n/**\n * Handle registering a connector.\n */\nexport async function handleRegisterConnector(\n\traw: string,\n\tstore: ConfigStore,\n): Promise<HandlerResult> {\n\tlet body: unknown;\n\ttry {\n\t\tbody = JSON.parse(raw);\n\t} catch {\n\t\treturn { status: 400, body: { error: \"Invalid JSON body\" } };\n\t}\n\n\tconst validation = validateConnectorConfig(body);\n\tif (!validation.ok) {\n\t\treturn { status: 400, body: { error: validation.error.message } };\n\t}\n\n\tconst config = validation.value;\n\tconst connectors = await store.getConnectors();\n\n\tif (connectors[config.name]) {\n\t\treturn { status: 409, body: { error: `Connector \"${config.name}\" already exists` } };\n\t}\n\n\tconnectors[config.name] = config;\n\tawait store.setConnectors(connectors);\n\n\treturn { status: 200, body: { registered: true, name: config.name } };\n}\n\n/**\n * Handle unregistering a connector.\n */\nexport async function handleUnregisterConnector(\n\tname: string,\n\tstore: ConfigStore,\n): Promise<HandlerResult> {\n\tconst connectors = await store.getConnectors();\n\n\tif (!connectors[name]) {\n\t\treturn { status: 404, body: { error: `Connector \"${name}\" not found` } };\n\t}\n\n\tdelete connectors[name];\n\tawait store.setConnectors(connectors);\n\n\treturn { status: 200, body: { unregistered: true, name } };\n}\n\n/**\n * Handle listing connectors.\n */\nexport async function handleListConnectors(store: ConfigStore): Promise<HandlerResult> {\n\tconst connectors = await store.getConnectors();\n\tconst list = Object.values(connectors).map((c) => ({\n\t\tname: c.name,\n\t\ttype: c.type,\n\t\thasIngest: c.ingest !== undefined,\n\t}));\n\n\treturn { status: 200, body: list };\n}\n\n/**\n * Handle metrics request.\n */\nexport function handleMetrics(\n\tgateway: SyncGateway,\n\textra?: Record<string, unknown>,\n): 
HandlerResult {\n\tconst stats = gateway.bufferStats;\n\treturn { status: 200, body: { ...stats, ...extra } };\n}\n","import { Err, Ok, type Result, type RowDelta, SchemaError, type TableSchema } from \"@lakesync/core\";\n\n/**\n * Manages schema versioning and validation for the gateway.\n *\n * Validates incoming deltas against the current schema and supports\n * safe schema evolution (adding nullable columns only).\n */\nexport class SchemaManager {\n\tprivate currentSchema: TableSchema;\n\tprivate version: number;\n\tprivate allowedColumns: Set<string>;\n\n\tconstructor(schema: TableSchema, version?: number) {\n\t\tthis.currentSchema = schema;\n\t\tthis.version = version ?? 1;\n\t\tthis.allowedColumns = new Set(schema.columns.map((c) => c.name));\n\t}\n\n\t/** Get the current schema and version. */\n\tgetSchema(): { schema: TableSchema; version: number } {\n\t\treturn { schema: this.currentSchema, version: this.version };\n\t}\n\n\t/**\n\t * Validate that a delta's columns are compatible with the current schema.\n\t *\n\t * Unknown columns result in a SchemaError. Missing columns are fine (sparse deltas).\n\t * DELETE ops with empty columns are always valid.\n\t */\n\tvalidateDelta(delta: RowDelta): Result<void, SchemaError> {\n\t\tif (delta.op === \"DELETE\" && delta.columns.length === 0) {\n\t\t\treturn Ok(undefined);\n\t\t}\n\n\t\tfor (const col of delta.columns) {\n\t\t\tif (!this.allowedColumns.has(col.column)) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SchemaError(\n\t\t\t\t\t\t`Unknown column \"${col.column}\" in delta for table \"${delta.table}\". Schema version ${this.version} does not include this column.`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\t\treturn Ok(undefined);\n\t}\n\n\t/**\n\t * Evolve the schema by adding new nullable columns.\n\t *\n\t * Only adding columns is allowed. 
Removing columns or changing types\n\t * returns a SchemaError.\n\t */\n\tevolveSchema(newSchema: TableSchema): Result<{ version: number }, SchemaError> {\n\t\tif (newSchema.table !== this.currentSchema.table) {\n\t\t\treturn Err(new SchemaError(\"Cannot evolve schema: table name mismatch\"));\n\t\t}\n\n\t\tconst oldColumnMap = new Map(this.currentSchema.columns.map((c) => [c.name, c.type]));\n\t\tconst newColumnMap = new Map(newSchema.columns.map((c) => [c.name, c.type]));\n\n\t\t// Check for removed columns\n\t\tfor (const [name] of oldColumnMap) {\n\t\t\tif (!newColumnMap.has(name)) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SchemaError(\n\t\t\t\t\t\t`Cannot remove column \"${name}\" — only adding nullable columns is supported`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\t// Check for type changes\n\t\tfor (const [name, oldType] of oldColumnMap) {\n\t\t\tconst newType = newColumnMap.get(name);\n\t\t\tif (newType && newType !== oldType) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SchemaError(\n\t\t\t\t\t\t`Cannot change type of column \"${name}\" from \"${oldType}\" to \"${newType}\"`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\t// Apply evolution\n\t\tthis.currentSchema = newSchema;\n\t\tthis.version++;\n\t\tthis.allowedColumns = new Set(newSchema.columns.map((c) => c.name));\n\n\t\treturn Ok({ version: this.version });\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsBO,IAAM,mBAAN,MAAuB;AAAA,EACrB,iBAA6C,oBAAI,IAAI;AAAA,EACrD,kBAA+B,oBAAI,IAAI;AAAA,EACvC,iBAGJ,oBAAI,IAAI;AAAA,EAEZ,YAAY,UAA0C;AACrD,QAAI,UAAU;AACb,iBAAW,CAAC,MAAM,OAAO,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACvD,aAAK,eAAe,IAAI,MAAM,OAAO;AAAA,MACtC;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,SACL,KACA,QACA,SACyD;AACzD,UAAM,UAEF,CAAC;AAEL,eAAW,UAAU,IAAI,SAAS;AAEjC,YAAM,aAAa,eAAe,MAAM;AACxC,UAAI,CAAC,WAAW,IAAI;AACnB,eAAO,IAAI,WAAW,KAAK;AAAA,MAC5B;AAGA,UAAI,KAAK,gBAAgB,IAAI,OAAO,QAAQ,GAAG;AAC9C,cAAM,SAAS,KAAK,eAAe,IAAI,OAAO,QAAQ;AACtD,YAAI,QAAQ;AACX,kBAAQ,KAAK,MAAM;AACnB;AAAA,QACD;AAEA;AAAA,MACD;AAGA,UAAI,OAAO,gBAAgB;AAC1B,cAAM,SAAS,KAAK,eAAe,IAAI,QAAQ,OAAO,cAAc,EAAE;AACtE,YAAI,QAAQ;AACX,kBAAQ,KAAK,MAAM;AACnB;AAAA,QACD;AAAA,MACD;AAGA,YAAM,UAAU,KAAK,eAAe,IAAI,OAAO,SAAS;AACxD,UAAI,CAAC,SAAS;AACb,cAAM,cAAc;AAAA,UACnB,UAAU,OAAO;AAAA,UACjB,MAAM;AAAA,UACN,SAAS,+CAA+C,OAAO,SAAS;AAAA,UACxE,WAAW;AAAA,QACZ;AACA,gBAAQ,KAAK,WAAW;AACxB,aAAK,kBAAkB,QAAQ,WAAW;AAC1C;AAAA,MACD;AAGA,YAAM,YAAY,QAAQ,iBAAiB,KAAK,CAAC,MAAM,EAAE,eAAe,OAAO,UAAU;AACzF,UAAI,CAAC,WAAW;AACf,cAAM,cAAc;AAAA,UACnB,UAAU,OAAO;AAAA,UACjB,MAAM;AAAA,UACN,SAAS,gBAAgB,OAAO,UAAU,iCAAiC,OAAO,SAAS;AAAA,UAC3F,WAAW;AAAA,QACZ;AACA,gBAAQ,KAAK,WAAW;AACxB,aAAK,kBAAkB,QAAQ,WAAW;AAC1C;AAAA,MACD;AAGA,YAAM,aAAa,MAAM,QAAQ,cAAc,QAAQ,OAAO;AAC9D,UAAI,WAAW,IAAI;AAClB,gBAAQ,KAAK,WAAW,KAAK;AAC7B,aAAK,kBAAkB,QAAQ,WAAW,KAAK;AAAA,MAChD,OAAO;AACN,cAAM,MAAM,WAAW;AACvB,cAAM,cAAc;AAAA,UACnB,UAAU,OAAO;AAAA,UACjB,MAAM,IAAI;AAAA,UACV,SAAS,IAAI;AAAA,UACb,WAAW,eAAe,MAAO,IAA6B,YAAY;AAAA,QAC3E;AACA,gBAAQ,KAAK,WAAW;AAExB,YAAI,CAAC,YAAY,WAAW;AAC3B,eAAK,kBAAkB,QAAQ,WAAW;AAAA,QAC3C;AAAA,MACD;AAAA,IACD;AAEA,UAAM,YAAY,OAAO;AACzB,WAAO,GAAG,EAAE,SAAS,UAAU,CAAC;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,gBAAgB,MAAc,SAA8B;AAC3D,SAAK,eAAe,IAAI,MAAM,OAAO;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,kBAAkB,MAAoB;AACrC,SAAK,eAAe,OAAO,IAAI;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAyB;AACxB,WAAO,CAAC,GAAG,KAAK,eAAe,KAAK,CAAC;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,WAA4B;AAC3B,UAAM,aAAiD,CAAC;AACxD,eAAW,CAAC,MAAM,OAAO,KAAK,KAAK,gBAAgB;AAClD,iBAAW,I
AAI,IAAI,QAAQ;AAAA,IAC5B;AACA,WAAO,EAAE,WAAW;AAAA,EACrB;AAAA;AAAA,EAGQ,kBACP,QACA,QACO;AACP,SAAK,gBAAgB,IAAI,OAAO,QAAQ;AACxC,SAAK,eAAe,IAAI,OAAO,UAAU,MAAM;AAC/C,QAAI,OAAO,gBAAgB;AAC1B,WAAK,eAAe,IAAI,QAAQ,OAAO,cAAc,IAAI,MAAM;AAAA,IAChE;AAAA,EACD;AACD;;;AC9LA,IAAM,sBAAsB,IAAI,IAAI,IAAI,IAAI;AAM5C,SAAS,mBAAmB,OAAwB;AACnD,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,UAAQ,OAAO,OAAO;AAAA,IACrB,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAQ,MAAiB,SAAS;AAAA;AAAA,IACnC;AAEC,UAAI;AACH,eAAO,KAAK,UAAU,KAAK,EAAE;AAAA,MAC9B,QAAQ;AACP,eAAO;AAAA,MACR;AAAA,EACF;AACD;AAGA,SAAS,mBAAmB,OAAyB;AACpD,MAAI,QAAQ;AACZ,WAAS,MAAM,QAAQ;AACvB,WAAS,MAAM,MAAM,SAAS;AAC9B,WAAS,MAAM,MAAM,SAAS;AAC9B,WAAS,MAAM,SAAS,SAAS;AACjC,aAAW,OAAO,MAAM,SAAS;AAChC,aAAS,IAAI,OAAO,SAAS;AAC7B,aAAS,mBAAmB,IAAI,KAAK;AAAA,EACtC;AACA,SAAO;AACR;AAQO,IAAM,cAAN,MAAkB;AAAA,EAChB,MAAkB,CAAC;AAAA,EACnB,QAA+B,oBAAI,IAAI;AAAA,EACvC,WAAW,oBAAI,IAAY;AAAA,EAC3B,iBAAiB;AAAA,EACjB,YAAoB,KAAK,IAAI;AAAA,EAC7B,aAAa,oBAAI,IAAoB;AAAA,EACrC,WAAW,oBAAI,IAAwB;AAAA;AAAA,EAG/C,OAAO,OAAuB;AAC7B,SAAK,IAAI,KAAK,KAAK;AACnB,UAAM,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK;AAC3C,SAAK,MAAM,IAAI,KAAK,KAAK;AACzB,SAAK,SAAS,IAAI,MAAM,OAAO;AAC/B,UAAM,QAAQ,mBAAmB,KAAK;AACtC,SAAK,kBAAkB;AAEvB,SAAK,WAAW,IAAI,MAAM,QAAQ,KAAK,WAAW,IAAI,MAAM,KAAK,KAAK,KAAK,KAAK;AAChF,UAAM,eAAe,KAAK,SAAS,IAAI,MAAM,KAAK;AAClD,QAAI,cAAc;AACjB,mBAAa,KAAK,KAAK;AAAA,IACxB,OAAO;AACN,WAAK,SAAS,IAAI,MAAM,OAAO,CAAC,KAAK,CAAC;AAAA,IACvC;AAAA,EACD;AAAA;AAAA,EAGA,OAAO,KAAmC;AACzC,WAAO,KAAK,MAAM,IAAI,GAAG;AAAA,EAC1B;AAAA;AAAA,EAGA,SAAS,SAA0B;AAClC,WAAO,KAAK,SAAS,IAAI,OAAO;AAAA,EACjC;AAAA;AAAA,EAGA,eAAe,KAAmB,OAAyD;AAC1F,QAAI,KAAK;AACT,QAAI,KAAK,KAAK,IAAI;AAClB,WAAO,KAAK,IAAI;AACf,YAAM,MAAO,KAAK,OAAQ;AAC1B,UAAI,IAAI,QAAQ,KAAK,IAAI,GAAG,EAAG,KAAK,GAAG,KAAK,GAAG;AAC9C,aAAK,MAAM;AAAA,MACZ,OAAO;AACN,aAAK;AAAA,MACN;AAAA,IACD;AACA,UAAM,UAAU,KAAK,IAAI,SAAS,KAAK;AACvC,WAAO,EAAE,QAAQ,KAAK,IAAI,MAAM,IAAI,KAAK,KAAK,GAAG,QAAQ;AAAA,EAC1D;AAAA;AAAA,EAGA,YAAY,QAAyD;AACpE,QAAI,KAAK,IAAI,WAAW,EAAG,QAAO;AAClC,WAAO,KAAK,kBAAkB,OAAO,YAAY,KAAK,IAAI,IAAI,KAAK,aAAa,OAAO;AAAA,EACxF;AAAA;AAAA,EAGA,aAA6E;AAC5E,UAAM,QAAwE,CAAC;AAC/E,eAAW,CAAC,OAAO,KAAK,KAAK,KAAK,YAAY;AAC7C,YAAM,KAAK;AAAA,QACV;AAAA,QACA,UAAU;AAAA,QACV,YAAY,KAAK,SAAS,IAAI,KAAK,GAAG,UAAU;AAAA,MACjD,CAAC;AAAA,IACF;AACA,WAAO;AAAA,EACR;AAAA;AAAA,EAGA,WAAW,OAA2B;AACrC,UAAM,cAAc,KAAK,SAAS,IAAI,KAAK,KAAK,CAAC;AACjD,QAAI,YAAY,WAAW,EAAG,QAAO,CAAC;AAGtC,SAAK,MAAM,KAAK,IAAI,OAAO,CAAC,MAAM,EAAE,UAAU,KAAK;AAGnD,eAAW,SAAS,aAAa;AAChC,WAAK,MAAM,OAAO,OAAO,MAAM,OAAO,MAAM,KAAK,CAAC;AAClD,WAAK,SAAS,OAAO,MAAM,OAAO;AAAA,IACnC;AAGA,UAAM,gBAAgB,KAAK,WAAW,IAAI,KAAK,KAAK;AACpD,SAAK,kBAAkB;AACvB,SAAK,WAAW,OAAO,KAAK;AAC5B,SAAK,SAAS,OAAO,KAAK;AAE1B,WAAO;AAAA,EACR;AAAA;AAAA,EAGA,QAAoB;AACnB,UAAM,UAAU,CAAC,GAAG,KAAK,GAAG;AAC5B,SAAK,MAAM,CAAC;AACZ,SAAK,MAAM,MAAM;AACjB,SAAK,SAAS,MAAM;AACpB,SAAK,iBAAiB;AACtB,SAAK,YAAY,KAAK,IAAI;AAC1B,SAAK,WAAW,MAAM;AACtB,SAAK,SAAS,MAAM;AACpB,WAAO;AAAA,EACR;AAAA;AAAA,EAGA,IAAI,UAAkB;AACrB,WAAO,KAAK,IAAI;AAAA,EACjB;AAAA;AAAA,EAGA,IAAI,YAAoB;AACvB,WAAO,KAAK,MAAM;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,WAAmB;AACtB,WAAO,KAAK;AAAA,EACb;AAAA;AAAA,EAGA,IAAI,oBAA4B;AAC/B,WAAO,KAAK,IAAI,WAAW,IAAI,IAAI,KAAK,iBAAiB,KAAK,IAAI;AAAA,EACnE;AACD;;;AC7JO,IAAM,oBAAN,MAA+C;AAAA,EAC7C,UAAU,oBAAI,IAAyB;AAAA,EACvC,YAAY,oBAAI,IAA6B;AAAA,EAC7C,aAA8C,CAAC;AAAA,EAEvD,MAAM,UAAU,WAAqD;AACpE,WAAO,KAAK,QAAQ,IAAI,SAAS;AAAA,EAClC;AAAA,EAEA,MAAM,UAAU,WAAmB,QAAoC;AACtE,SAAK,QAAQ,IAAI,WAAW,MAAM;AAAA,EACnC;AAAA,EAEA,MAAM,aAAa,WAAyD;AAC3E,WAAO,KAAK,UAAU,IAAI,SAAS;AAAA,EACpC;AAAA,EAEA,MAAM,aAAa,WAAmB,OAAuC;A
AC5E,SAAK,UAAU,IAAI,WAAW,KAAK;AAAA,EACpC;AAAA,EAEA,MAAM,gBAA0D;AAC/D,WAAO,EAAE,GAAG,KAAK,WAAW;AAAA,EAC7B;AAAA,EAEA,MAAM,cAAc,YAA4D;AAC/E,SAAK,aAAa,EAAE,GAAG,WAAW;AAAA,EACnC;AACD;;;AChDO,IAAM,yBAAyB;AAG/B,IAAM,sBAAsB;AAG5B,IAAM,iBAAiB;AAGvB,IAAM,qBAAqB;AAG3B,IAAM,qBAAqB,oBAAI,IAAI,CAAC,UAAU,UAAU,WAAW,QAAQ,MAAM,CAAC;AAGlF,IAAM,2BAA2B,IAAI,OAAO;AAG5C,IAAM,4BAA4B;;;AC0BlC,SAAS,SAAS,SAA+D;AACvF,MAAI,MAAM,QAAQ,CAAC,EAAG;AACtB,MAAI,MAAM,QAAQ,CAAC,EAAG;AACtB,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACxC,UAAM,MAAM,QAAQ,CAAC,EAAG;AACxB,QAAI,IAAI,QAAQ,KAAK,GAAG,IAAI,EAAG,OAAM;AACrC,QAAI,IAAI,QAAQ,KAAK,GAAG,IAAI,EAAG,OAAM;AAAA,EACtC;AACA,SAAO,EAAE,KAAK,IAAI;AACnB;AASA,eAAsB,aACrB,SACA,UACA,MACA,WACoC;AAEpC,MAAI,kBAAkB,KAAK,OAAO,GAAG;AACpC,QAAI;AACH,YAAM,SAAS,MAAM,KAAK,QAAQ,aAAa,OAAO;AACtD,UAAI,CAAC,OAAO,IAAI;AACf,aAAK,eAAe,OAAO;AAC3B,eAAO,IAAI,IAAI,WAAW,0BAA0B,OAAO,MAAM,OAAO,EAAE,CAAC;AAAA,MAC5E;AAGA,UAAI,KAAK,WAAW,KAAK,QAAQ,SAAS,KAAK,iBAAiB,KAAK,OAAO,GAAG;AAC9E,YAAI;AACH,gBAAM,YAAY,MAAM,KAAK,QAAQ,YAAY,SAAS,KAAK,OAAO;AACtE,cAAI,CAAC,UAAU,IAAI;AAClB,oBAAQ;AAAA,cACP,sCAAsC,QAAQ,MAAM,aAAa,UAAU,MAAM,OAAO;AAAA,YACzF;AAAA,UACD;AAAA,QACD,SAAS,OAAgB;AACxB,kBAAQ;AAAA,YACP,qCAAqC,QAAQ,MAAM,aAAa,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UACvH;AAAA,QACD;AAAA,MACD;AAEA,aAAO,GAAG,MAAS;AAAA,IACpB,SAAS,OAAgB;AACxB,WAAK,eAAe,OAAO;AAC3B,aAAO,IAAI,IAAI,WAAW,sCAAsC,QAAQ,KAAK,EAAE,OAAO,EAAE,CAAC;AAAA,IAC1F;AAAA,EACD;AAGA,MAAI;AACH,UAAM,EAAE,KAAK,IAAI,IAAI,SAAS,OAAO;AACrC,UAAM,QAAO,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAClD,UAAM,SAAS,YAAY,GAAG,SAAS,MAAM;AAC7C,QAAI;AACJ,QAAI;AACJ,QAAI;AAEJ,QAAI,KAAK,OAAO,gBAAgB,QAAQ;AACvC,YAAM,WAA0B;AAAA,QAC/B,SAAS;AAAA,QACT,WAAW,KAAK,OAAO;AAAA,QACvB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,UAAU,EAAE,KAAK,IAAI;AAAA,QACrB,YAAY,QAAQ;AAAA,QACpB;AAAA,QACA,QAAQ;AAAA,MACT;AAEA,kBAAY,UAAU,IAAI,IAAI,KAAK,OAAO,SAAS,IAAI,MAAM,GAAG,IAAI,SAAS,CAAC,IAAI,IAAI,SAAS,CAAC;AAChG,aAAO,IAAI,YAAY,EAAE,OAAO,KAAK,UAAU,UAAU,cAAc,CAAC;AACxE,oBAAc;AAAA,IACf,OAAO;AAEN,UAAI,CAAC,KAAK,OAAO,aAAa;AAC7B,aAAK,eAAe,OAAO;AAC3B,eAAO,IAAI,IAAI,WAAW,wCAAwC,CAAC;AAAA,MACpE;AAEA,YAAM,gBAAgB,MAAM,qBAAqB,SAAS,KAAK,OAAO,WAAW;AACjF,UAAI,CAAC,cAAc,IAAI;AACtB,aAAK,eAAe,OAAO;AAC3B,eAAO,IAAI,cAAc,KAAK;AAAA,MAC/B;AAEA,kBAAY,UAAU,IAAI,IAAI,KAAK,OAAO,SAAS,IAAI,MAAM,GAAG,IAAI,SAAS,CAAC,IAAI,IAAI,SAAS,CAAC;AAChG,aAAO,cAAc;AACrB,oBAAc;AAAA,IACf;AAEA,UAAM,SAAS,MAAM,KAAK,QAAQ,UAAU,WAAW,MAAM,WAAW;AACxE,QAAI,CAAC,OAAO,IAAI;AACf,WAAK,eAAe,OAAO;AAC3B,aAAO,IAAI,IAAI,WAAW,mCAAmC,OAAO,MAAM,OAAO,EAAE,CAAC;AAAA,IACrF;AAEA,QAAI,KAAK,OAAO,aAAa,KAAK,OAAO,aAAa;AACrD,YAAM;AAAA,QACL;AAAA,QACA,KAAK;AAAA,QACL,QAAQ;AAAA,QACR,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,MACb;AAAA,IACD;AAEA,WAAO,GAAG,MAAS;AAAA,EACpB,SAAS,OAAgB;AACxB,SAAK,eAAe,OAAO;AAC3B,WAAO,IAAI,IAAI,WAAW,6BAA6B,QAAQ,KAAK,EAAE,OAAO,EAAE,CAAC;AAAA,EACjF;AACD;AAOA,eAAsB,kBACrB,WACA,iBACA,aACA,WACA,QACgB;AAChB,QAAM,EAAE,WAAW,KAAK,IAAI,kBAAkB,OAAO,KAAK;AAC1D,QAAM,gBAAgB,qBAAqB,MAAM;AACjD,QAAM,gBAAgB,mBAAmB,aAAa;AAGtD,QAAM,UAAU,gBAAgB,SAAS;AAGzC,QAAM,eAAe,MAAM,UAAU,YAAY,WAAW,MAAM,eAAe,aAAa;AAC9F,MAAI,CAAC,aAAa,MAAM,aAAa,MAAM,eAAe,KAAK;AAC9D;AAAA,EACD;AAGA,QAAM,WAAqB;AAAA,IAC1B,SAAS;AAAA,IACT,aAAa;AAAA,IACb,eAAe;AAAA,IACf,gBAAgB;AAAA,IAChB,sBAAsB;AAAA,EACvB;AAGA,QAAM,eAAe,MAAM,UAAU,YAAY,WAAW,MAAM,CAAC,QAAQ,CAAC;AAC5E,MAAI,CAAC,aAAa,MAAM,aAAa,MAAM,eAAe,KAAK;AAE9D,UAAM,UAAU,YAAY,WAAW,MAAM,CAAC,QAAQ,CAAC;AAAA,EACxD;AACD;;;ACrKO,IAAM,cAAN,MAA0C;AAAA,EACxC;AAAA,EACC;AAAA,EACA;AAAA,EACD;AAAA,EACA;AAAA,EACA,WAAW;AAAA,EAEnB,YAAY,QAAuB,SAAyC;AAC3E,SAAK,SAAS,EAAE,gBAAgB,CAAC,GAAG,GAAG,OAAO;AAC9C,SAAK,MAAM,IAAI,IAAI;AACnB,SA
AK,SAAS,IAAI,YAAY;AAC9B,SAAK,UAAU,KAAK,OAAO,WAAW,WAAW;AACjD,SAAK,UAAU,IAAI,iBAAiB,OAAO,cAAc;AAAA,EAC1D;AAAA;AAAA,EAGQ,eAAe,SAA2B;AACjD,eAAW,SAAS,SAAS;AAC5B,WAAK,OAAO,OAAO,KAAK;AAAA,IACzB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,WACC,KAC8E;AAE9E,UAAM,oBAAoB,KAAK,OAAO,wBAAwB,KAAK,OAAO,iBAAiB;AAC3F,QAAI,KAAK,OAAO,YAAY,mBAAmB;AAC9C,aAAO;AAAA,QACN,IAAI;AAAA,UACH,iCAAiC,KAAK,OAAO,QAAQ,OAAO,iBAAiB;AAAA,QAC9E;AAAA,MACD;AAAA,IACD;AAEA,QAAI,WAAW;AACf,UAAM,WAAuB,CAAC;AAE9B,eAAW,SAAS,IAAI,QAAQ;AAE/B,UAAI,KAAK,OAAO,SAAS,MAAM,OAAO,GAAG;AACxC;AACA;AAAA,MACD;AAGA,UAAI,KAAK,OAAO,eAAe;AAC9B,cAAM,eAAe,KAAK,OAAO,cAAc,cAAc,KAAK;AAClE,YAAI,CAAC,aAAa,IAAI;AACrB,iBAAO,IAAI,aAAa,KAAK;AAAA,QAC9B;AAAA,MACD;AAGA,YAAM,aAAa,KAAK,IAAI,KAAK,MAAM,GAAG;AAC1C,UAAI,CAAC,WAAW,IAAI;AACnB,eAAO,IAAI,WAAW,KAAK;AAAA,MAC5B;AAGA,YAAM,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK;AAC3C,YAAM,WAAW,KAAK,OAAO,OAAO,GAAG;AAEvC,UAAI,UAAU;AACb,cAAM,WAAW,WAAW,UAAU,KAAK;AAC3C,YAAI,SAAS,IAAI;AAChB,eAAK,OAAO,OAAO,SAAS,KAAK;AACjC,mBAAS,KAAK,SAAS,KAAK;AAAA,QAC7B;AAAA,MAED,OAAO;AACN,aAAK,OAAO,OAAO,KAAK;AACxB,iBAAS,KAAK,KAAK;AAAA,MACpB;AAEA;AAAA,IACD;AAEA,UAAM,YAAY,KAAK,IAAI,IAAI;AAC/B,WAAO,GAAG,EAAE,WAAW,UAAU,QAAQ,SAAS,CAAC;AAAA,EACpD;AAAA,EAqBA,WACC,KACA,SAG8B;AAC9B,QAAI,IAAI,QAAQ;AACf,aAAO,KAAK,kBAAkB,KAAK,OAAO;AAAA,IAC3C;AAEA,WAAO,KAAK,iBAAiB,KAAK,OAAO;AAAA,EAC1C;AAAA;AAAA,EAGQ,iBAAiB,KAAe,SAAyD;AAChG,QAAI,CAAC,SAAS;AACb,YAAM,EAAE,QAAQ,SAAAA,SAAQ,IAAI,KAAK,OAAO,eAAe,IAAI,UAAU,IAAI,SAAS;AAClF,YAAMC,aAAY,KAAK,IAAI,IAAI;AAC/B,aAAO,GAAG,EAAE,QAAQ,WAAAA,YAAW,SAAAD,SAAQ,CAAC;AAAA,IACzC;AAGA,UAAM,aAAa;AACnB,UAAM,sBAAsB;AAC5B,QAAI,SAAS,IAAI;AACjB,UAAM,YAAwB,CAAC;AAE/B,aAAS,UAAU,GAAG,UAAU,YAAY,WAAW;AACtD,YAAM,aAAa,IAAI,YAAY;AACnC,YAAM,EAAE,QAAQ,KAAK,SAAS,WAAW,IAAI,KAAK,OAAO,eAAe,QAAQ,UAAU;AAE1F,UAAI,IAAI,WAAW,GAAG;AAErB,cAAMC,aAAY,KAAK,IAAI,IAAI;AAC/B,eAAO,GAAG,EAAE,QAAQ,WAAW,WAAAA,YAAW,SAAS,MAAM,CAAC;AAAA,MAC3D;AAEA,YAAM,WAAW,aAAa,KAAK,OAAO;AAC1C,gBAAU,KAAK,GAAG,QAAQ;AAE1B,UAAI,UAAU,UAAU,IAAI,WAAW;AAEtC,cAAMC,WAAU,UAAU,MAAM,GAAG,IAAI,SAAS;AAChD,cAAMD,aAAY,KAAK,IAAI,IAAI;AAC/B,eAAO,GAAG,EAAE,QAAQC,UAAS,WAAAD,YAAW,SAAS,KAAK,CAAC;AAAA,MACxD;AAEA,UAAI,CAAC,YAAY;AAEhB,cAAMA,aAAY,KAAK,IAAI,IAAI;AAC/B,eAAO,GAAG,EAAE,QAAQ,WAAW,WAAAA,YAAW,SAAS,MAAM,CAAC;AAAA,MAC3D;AAGA,eAAS,IAAI,IAAI,SAAS,CAAC,EAAG;AAAA,IAC/B;AAGA,UAAM,YAAY,KAAK,IAAI,IAAI;AAC/B,UAAM,UAAU,UAAU,UAAU,IAAI;AACxC,UAAM,UAAU,UAAU,MAAM,GAAG,IAAI,SAAS;AAChD,WAAO,GAAG,EAAE,QAAQ,SAAS,WAAW,QAAQ,CAAC;AAAA,EAClD;AAAA;AAAA,EAGA,MAAc,kBACb,KACA,SACqE;AACrE,UAAM,UAAU,KAAK,OAAO,iBAAiB,IAAI,MAAO;AACxD,QAAI,CAAC,SAAS;AACb,aAAO,IAAI,IAAI,qBAAqB,mBAAmB,IAAI,MAAM,aAAa,CAAC;AAAA,IAChF;AAEA,UAAM,cAAc,MAAM,QAAQ,iBAAiB,IAAI,QAAQ;AAC/D,QAAI,CAAC,YAAY,IAAI;AACpB,aAAO,IAAI,YAAY,KAAK;AAAA,IAC7B;AAEA,QAAI,SAAS,YAAY;AAGzB,QAAI,SAAS;AACZ,eAAS,aAAa,QAAQ,OAAO;AAAA,IACtC;AAGA,UAAM,UAAU,OAAO,SAAS,IAAI;AACpC,UAAM,SAAS,OAAO,MAAM,GAAG,IAAI,SAAS;AAE5C,UAAM,YAAY,KAAK,IAAI,IAAI;AAC/B,WAAO,GAAG,EAAE,QAAQ,QAAQ,WAAW,QAAQ,CAAC;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,QAA2C;AAChD,QAAI,KAAK,UAAU;AAClB,aAAO,IAAI,IAAI,WAAW,2BAA2B,CAAC;AAAA,IACvD;AACA,QAAI,KAAK,OAAO,YAAY,GAAG;AAC9B,aAAO,GAAG,MAAS;AAAA,IACpB;AACA,QAAI,CAAC,KAAK,SAAS;AAClB,aAAO,IAAI,IAAI,WAAW,uBAAuB,CAAC;AAAA,IACnD;AAEA,SAAK,WAAW;AAGhB,QAAI,kBAAkB,KAAK,OAAO,GAAG;AACpC,YAAME,WAAU,KAAK,OAAO,MAAM;AAClC,UAAIA,SAAQ,WAAW,GAAG;AACzB,aAAK,WAAW;AAChB,eAAO,GAAG,MAAS;AAAA,MACpB;AAEA,UAAI;AACH,eAAO,MAAM,aAAaA,UAAS,GAAG;AAAA,UACrC,SAAS,KAAK;AAAA,UACd,QAAQ;AAAA,YACP,WAAW,KAAK,OAAO;AAAA,YACvB,aAAa,KAAK,OAAO;AAAA,YACzB,aAAa,KAAK,OAAO;AAAA,YA
CzB,WAAW,KAAK,OAAO;AAAA,UACxB;AAAA,UACA,gBAAgB,CAAC,MAAM,KAAK,eAAe,CAAC;AAAA,UAC5C,SAAS,KAAK,OAAO;AAAA,QACtB,CAAC;AAAA,MACF,UAAE;AACD,aAAK,WAAW;AAAA,MACjB;AAAA,IACD;AAGA,UAAM,WAAW,KAAK,OAAO;AAC7B,UAAM,UAAU,KAAK,OAAO,MAAM;AAElC,QAAI;AACH,aAAO,MAAM,aAAa,SAAS,UAAU;AAAA,QAC5C,SAAS,KAAK;AAAA,QACd,QAAQ;AAAA,UACP,WAAW,KAAK,OAAO;AAAA,UACvB,aAAa,KAAK,OAAO;AAAA,UACzB,aAAa,KAAK,OAAO;AAAA,UACzB,WAAW,KAAK,OAAO;AAAA,QACxB;AAAA,QACA,gBAAgB,CAAC,MAAM,KAAK,eAAe,CAAC;AAAA,QAC5C,SAAS,KAAK,OAAO;AAAA,MACtB,CAAC;AAAA,IACF,UAAE;AACD,WAAK,WAAW;AAAA,IACjB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,WAAW,OAAkD;AAClE,QAAI,KAAK,UAAU;AAClB,aAAO,IAAI,IAAI,WAAW,2BAA2B,CAAC;AAAA,IACvD;AACA,QAAI,CAAC,KAAK,SAAS;AAClB,aAAO,IAAI,IAAI,WAAW,uBAAuB,CAAC;AAAA,IACnD;AAEA,UAAM,UAAU,KAAK,OAAO,WAAW,KAAK;AAC5C,QAAI,QAAQ,WAAW,GAAG;AACzB,aAAO,GAAG,MAAS;AAAA,IACpB;AAEA,SAAK,WAAW;AAEhB,QAAI;AACH,aAAO,MAAM;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,UACC,SAAS,KAAK;AAAA,UACd,QAAQ;AAAA,YACP,WAAW,KAAK,OAAO;AAAA,YACvB,aAAa,KAAK,OAAO;AAAA,YACzB,aAAa,KAAK,OAAO;AAAA,YACzB,WAAW,KAAK,OAAO;AAAA,UACxB;AAAA,UACA,gBAAgB,CAAC,MAAM,KAAK,eAAe,CAAC;AAAA,UAC5C,SAAS,KAAK,OAAO;AAAA,QACtB;AAAA,QACA;AAAA,MACD;AAAA,IACD,UAAE;AACD,WAAK,WAAW;AAAA,IACjB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,aACL,KACA,SACyD;AACzD,WAAO,KAAK,QAAQ,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAG,OAAO;AAAA,EAChE;AAAA;AAAA,EAGA,sBAAsB,MAAc,SAA8B;AACjE,SAAK,QAAQ,gBAAgB,MAAM,OAAO;AAAA,EAC3C;AAAA;AAAA,EAGA,wBAAwB,MAAoB;AAC3C,SAAK,QAAQ,kBAAkB,IAAI;AAAA,EACpC;AAAA;AAAA,EAGA,qBAA+B;AAC9B,WAAO,KAAK,QAAQ,aAAa;AAAA,EAClC;AAAA;AAAA,EAGA,kBAAmC;AAClC,WAAO,KAAK,QAAQ,SAAS;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,eAAe,MAAc,SAAgC;AAC5D,SAAK,OAAO,eAAgB,IAAI,IAAI;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,MAAoB;AACpC,WAAO,KAAK,OAAO,eAAgB,IAAI;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAwB;AACvB,WAAO,OAAO,KAAK,KAAK,OAAO,cAAe;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAI,aAA6E;AAChF,WAAO,KAAK,OAAO,WAAW;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,2BAAqC;AACpC,UAAM,SAAS,KAAK,OAAO;AAC3B,QAAI,CAAC,OAAQ,QAAO,CAAC;AACrB,WAAO,KAAK,OACV,WAAW,EACX,OAAO,CAAC,MAAM,EAAE,YAAY,MAAM,EAClC,IAAI,CAAC,MAAM,EAAE,KAAK;AAAA,EACrB;AAAA;AAAA,EAGA,cAAuB;AACtB,QAAI,oBAAoB,KAAK,OAAO;AAGpC,UAAM,WAAW,KAAK,OAAO;AAC7B,QAAI,YAAY,KAAK,OAAO,oBAAoB,SAAS,qBAAqB;AAC7E,0BAAoB,KAAK,MAAM,oBAAoB,SAAS,eAAe;AAAA,IAC5E;AAEA,WAAO,KAAK,OAAO,YAAY;AAAA,MAC9B,UAAU;AAAA,MACV,UAAU,KAAK,OAAO;AAAA,IACvB,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,IAAI,cAIF;AACD,WAAO;AAAA,MACN,SAAS,KAAK,OAAO;AAAA,MACrB,WAAW,KAAK,OAAO;AAAA,MACvB,UAAU,KAAK,OAAO;AAAA,IACvB;AAAA,EACD;AACD;;;AC9bO,SAAS,iBACf,KACA,gBACiC;AACjC,MAAI;AACJ,MAAI;AACH,WAAO,KAAK,MAAM,KAAK,aAAa;AAAA,EACrC,QAAQ;AACP,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,oBAAoB,CAAC;AAAA,EACzD;AAEA,MAAI,CAAC,KAAK,YAAY,CAAC,MAAM,QAAQ,KAAK,MAAM,GAAG;AAClD,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,4CAA4C,CAAC;AAAA,EACjF;AAEA,MAAI,kBAAkB,KAAK,aAAa,gBAAgB;AACvD,WAAO,IAAI;AAAA,MACV,QAAQ;AAAA,MACR,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAEA,MAAI,KAAK,OAAO,SAAS,qBAAqB;AAC7C,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,gDAAgD,CAAC;AAAA,EACrF;AAEA,SAAO,GAAG,IAAI;AACf;AAKO,SAAS,gBAAgB,QAKG;AAClC,MAAI,CAAC,OAAO,SAAS,CAAC,OAAO,UAAU;AACtC,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,iDAAiD,CAAC;AAAA,EACtF;AAEA,MAAI;AACJ,MAAI;AACH,eAAW,OAAO,OAAO,KAAK;AAAA,EAC/B,QAAQ;AACP,WAAO,IAAI;AAAA,MACV,QAAQ;AAAA,MACR,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAEA,QAAM,WAAW,OAAO,QAAQ,OAAO,SAAS,OAAO,OAAO,EAAE,IAAI;AACpE,MAAI,OAAO,MAAM,QAAQ,KAAK,WAAW,GAAG;AAC3C,WAAO,IAAI;AAAA,MACV,QAAQ;AAAA,MACR,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AACA,QAAM,YAAY,KAAK,IAAI,UAAU,cAAc;AAEnD,QAAM,MAAgB;AAAA,IACrB,UAAU,OAAO;AAAA,IACjB;A
AAA,IACA;AAAA,IACA,GAAI,OAAO,SAAS,EAAE,QAAQ,OAAO,OAAO,IAAI,CAAC;AAAA,EAClD;AAEA,SAAO,GAAG,GAAG;AACd;AAKO,SAAS,mBACf,KACA,gBACmC;AACnC,MAAI;AACJ,MAAI;AACH,WAAO,KAAK,MAAM,KAAK,aAAa;AAAA,EACrC,QAAQ;AACP,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,oBAAoB,CAAC;AAAA,EACzD;AAEA,MAAI,CAAC,KAAK,YAAY,CAAC,MAAM,QAAQ,KAAK,OAAO,GAAG;AACnD,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,6CAA6C,CAAC;AAAA,EAClF;AAEA,MAAI,kBAAkB,KAAK,aAAa,gBAAgB;AACvD,WAAO,IAAI;AAAA,MACV,QAAQ;AAAA,MACR,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAEA,SAAO,GAAG,IAAI;AACf;AAKO,SAAS,mBAAmB,KAAgD;AAClF,MAAI;AACJ,MAAI;AACH,aAAS,KAAK,MAAM,GAAG;AAAA,EACxB,QAAQ;AACP,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,oBAAoB,CAAC;AAAA,EACzD;AAEA,MAAI,CAAC,OAAO,SAAS,CAAC,MAAM,QAAQ,OAAO,OAAO,GAAG;AACpD,WAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,0CAA0C,CAAC;AAAA,EAC/E;AAEA,aAAW,OAAO,OAAO,SAAS;AACjC,QAAI,OAAO,IAAI,SAAS,YAAY,IAAI,KAAK,WAAW,GAAG;AAC1D,aAAO,IAAI,EAAE,QAAQ,KAAK,SAAS,kDAAkD,CAAC;AAAA,IACvF;AACA,QAAI,CAAC,mBAAmB,IAAI,IAAI,IAAI,GAAG;AACtC,aAAO,IAAI;AAAA,QACV,QAAQ;AAAA,QACR,SAAS,wBAAwB,IAAI,IAAI,iBAAiB,IAAI,IAAI;AAAA,MACnE,CAAC;AAAA,IACF;AAAA,EACD;AAEA,SAAO,GAAG,MAAM;AACjB;AAKO,SAAS,kBAAkB,MAAsB;AACvD,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR;AACC,aAAO;AAAA,EACT;AACD;AAMO,SAAS,sBACf,OACA,QAC+B;AAC/B,MAAI,CAAC,SAAS,MAAM,QAAQ,WAAW,GAAG;AACzC,WAAO;AAAA,EACR;AACA,SAAO,EAAE,QAAQ,MAAM;AACxB;;;AC9JO,SAAS,kBACf,SACA,KACA,gBACA,MAYgB;AAChB,QAAM,aAAa,iBAAiB,KAAK,cAAc;AACvD,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,WAAW,MAAM,QAAQ,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACrF;AAEA,QAAM,OAAO,WAAW;AAGxB,QAAM,eAAe,KAAK,MAAM;AAEhC,QAAM,SAAS,QAAQ,WAAW,IAAI;AACtC,MAAI,CAAC,OAAO,IAAI;AACf,WAAO;AAAA,MACN,QAAQ,kBAAkB,OAAO,MAAM,IAAI;AAAA,MAC3C,MAAM,EAAE,OAAO,OAAO,MAAM,QAAQ;AAAA,IACrC;AAAA,EACD;AAGA,QAAM,mBAAmB;AAGzB,MAAI,MAAM,eAAe,OAAO,MAAM,OAAO,SAAS,GAAG;AACxD,SAAK,YAAY,OAAO,MAAM,QAAQ,OAAO,MAAM,WAAW,KAAK,QAAQ;AAAA,EAC5E;AAEA,SAAO,EAAE,QAAQ,KAAK,MAAM,OAAO,MAAM;AAC1C;AAKA,eAAsB,kBACrB,SACA,QAMA,QACA,WACyB;AACzB,QAAM,aAAa,gBAAgB,MAAM;AACzC,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,WAAW,MAAM,QAAQ,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACrF;AAEA,QAAM,MAAM,WAAW;AACvB,QAAM,UAAU,sBAAsB,WAAW,UAAU,CAAC,CAAC;AAE7D,QAAM,SAAS,IAAI,SAChB,MAAM,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,EACD,IACC,QAAQ,WAAW,KAAK,OAAO;AAElC,MAAI,CAAC,OAAO,IAAI;AACf,UAAM,MAAM,OAAO;AACnB,QAAI,IAAI,SAAS,qBAAqB;AACrC,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,IAAI,QAAQ,EAAE;AAAA,IACpD;AACA,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,IAAI,QAAQ,EAAE;AAAA,EACpD;AAEA,SAAO,EAAE,QAAQ,KAAK,MAAM,OAAO,MAAM;AAC1C;AAKA,eAAsB,oBACrB,SACA,KACA,gBACA,QACyB;AACzB,QAAM,aAAa,mBAAmB,KAAK,cAAc;AACzD,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,WAAW,MAAM,QAAQ,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACrF;AAEA,QAAM,UAAU,SAAS,EAAE,OAAO,IAAI;AACtC,QAAM,SAAS,MAAM,QAAQ,aAAa,WAAW,OAAO,OAAO;AAEnE,MAAI,CAAC,OAAO,IAAI;AACf,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,OAAO,MAAM,QAAQ,EAAE;AAAA,EAC7D;AAEA,SAAO,EAAE,QAAQ,KAAK,MAAM,OAAO,MAAM;AAC1C;AAKA,eAAsB,mBACrB,SACA,MACyB;AACzB,QAAM,SAAS,MAAM,QAAQ,MAAM;AACnC,MAAI,CAAC,OAAO,IAAI;AACf,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,OAAO,MAAM,QAAQ,EAAE;AAAA,EAC7D;AAEA,QAAM,mBAAmB;AACzB,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,SAAS,KAAK,EAAE;AAC/C;AAKA,eAAsB,iBACrB,KACA,OACA,WACyB;AACzB,QAAM,aAAa,mBAAmB,GAAG;AACzC,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,WAAW,MAAM,QAAQ,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACrF;AAEA,QAAM,MAAM,UAAU,WAAW,WAAW,KAAK;AACjD,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,KAAK,EAAE;AAC7C;AAKA,eAAsB,oBACrB,KACA,OACA,WACyB;AACzB,MAAI;AACJ,MAAI;AACH,aAAS,KAAK,MAAM,GAAG;AAAA,EACxB,QAAQ;AACP,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,oBAAoB,EAAE;AAAA,EAC5D;AAE
A,QAAM,aAAa,kBAAkB,MAAM;AAC3C,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACjE;AAEA,QAAM,MAAM,aAAa,WAAW,MAAyB;AAC7D,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,KAAK,EAAE;AAC7C;AAKA,eAAsB,wBACrB,KACA,OACyB;AACzB,MAAI;AACJ,MAAI;AACH,WAAO,KAAK,MAAM,GAAG;AAAA,EACtB,QAAQ;AACP,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,oBAAoB,EAAE;AAAA,EAC5D;AAEA,QAAM,aAAa,wBAAwB,IAAI;AAC/C,MAAI,CAAC,WAAW,IAAI;AACnB,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,WAAW,MAAM,QAAQ,EAAE;AAAA,EACjE;AAEA,QAAM,SAAS,WAAW;AAC1B,QAAM,aAAa,MAAM,MAAM,cAAc;AAE7C,MAAI,WAAW,OAAO,IAAI,GAAG;AAC5B,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,cAAc,OAAO,IAAI,mBAAmB,EAAE;AAAA,EACpF;AAEA,aAAW,OAAO,IAAI,IAAI;AAC1B,QAAM,MAAM,cAAc,UAAU;AAEpC,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,YAAY,MAAM,MAAM,OAAO,KAAK,EAAE;AACrE;AAKA,eAAsB,0BACrB,MACA,OACyB;AACzB,QAAM,aAAa,MAAM,MAAM,cAAc;AAE7C,MAAI,CAAC,WAAW,IAAI,GAAG;AACtB,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,cAAc,IAAI,cAAc,EAAE;AAAA,EACxE;AAEA,SAAO,WAAW,IAAI;AACtB,QAAM,MAAM,cAAc,UAAU;AAEpC,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,cAAc,MAAM,KAAK,EAAE;AAC1D;AAKA,eAAsB,qBAAqB,OAA4C;AACtF,QAAM,aAAa,MAAM,MAAM,cAAc;AAC7C,QAAM,OAAO,OAAO,OAAO,UAAU,EAAE,IAAI,CAAC,OAAO;AAAA,IAClD,MAAM,EAAE;AAAA,IACR,MAAM,EAAE;AAAA,IACR,WAAW,EAAE,WAAW;AAAA,EACzB,EAAE;AAEF,SAAO,EAAE,QAAQ,KAAK,MAAM,KAAK;AAClC;AAKO,SAAS,cACf,SACA,OACgB;AAChB,QAAM,QAAQ,QAAQ;AACtB,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,GAAG,OAAO,GAAG,MAAM,EAAE;AACpD;;;ACpQO,IAAM,gBAAN,MAAoB;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,QAAqB,SAAkB;AAClD,SAAK,gBAAgB;AACrB,SAAK,UAAU,WAAW;AAC1B,SAAK,iBAAiB,IAAI,IAAI,OAAO,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;AAAA,EAChE;AAAA;AAAA,EAGA,YAAsD;AACrD,WAAO,EAAE,QAAQ,KAAK,eAAe,SAAS,KAAK,QAAQ;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,cAAc,OAA4C;AACzD,QAAI,MAAM,OAAO,YAAY,MAAM,QAAQ,WAAW,GAAG;AACxD,aAAO,GAAG,MAAS;AAAA,IACpB;AAEA,eAAW,OAAO,MAAM,SAAS;AAChC,UAAI,CAAC,KAAK,eAAe,IAAI,IAAI,MAAM,GAAG;AACzC,eAAO;AAAA,UACN,IAAI;AAAA,YACH,mBAAmB,IAAI,MAAM,yBAAyB,MAAM,KAAK,qBAAqB,KAAK,OAAO;AAAA,UACnG;AAAA,QACD;AAAA,MACD;AAAA,IACD;AACA,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,WAAkE;AAC9E,QAAI,UAAU,UAAU,KAAK,cAAc,OAAO;AACjD,aAAO,IAAI,IAAI,YAAY,2CAA2C,CAAC;AAAA,IACxE;AAEA,UAAM,eAAe,IAAI,IAAI,KAAK,cAAc,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;AACpF,UAAM,eAAe,IAAI,IAAI,UAAU,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;AAG3E,eAAW,CAAC,IAAI,KAAK,cAAc;AAClC,UAAI,CAAC,aAAa,IAAI,IAAI,GAAG;AAC5B,eAAO;AAAA,UACN,IAAI;AAAA,YACH,yBAAyB,IAAI;AAAA,UAC9B;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAGA,eAAW,CAAC,MAAM,OAAO,KAAK,cAAc;AAC3C,YAAM,UAAU,aAAa,IAAI,IAAI;AACrC,UAAI,WAAW,YAAY,SAAS;AACnC,eAAO;AAAA,UACN,IAAI;AAAA,YACH,iCAAiC,IAAI,WAAW,OAAO,SAAS,OAAO;AAAA,UACxE;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAGA,SAAK,gBAAgB;AACrB,SAAK;AACL,SAAK,iBAAiB,IAAI,IAAI,UAAU,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;AAElE,WAAO,GAAG,EAAE,SAAS,KAAK,QAAQ,CAAC;AAAA,EACpC;AACD;","names":["hasMore","serverHlc","trimmed","entries"]}
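
Note: the SyncGateway source embedded in this added map implements named source adapters. registerSource binds a DatabaseAdapter under a name, and a pull request carrying that name as its `source` parameter is served through the adapter rather than the gateway buffer; unknown names yield ADAPTER_NOT_FOUND, surfaced as HTTP 404. A minimal sketch, typed structurally so no import paths have to be guessed; makeBigQueryAdapter is a hypothetical factory:

// Structural type matching the gateway methods shown in the embedded source;
// `unknown` stands in for the package's DatabaseAdapter interface.
type SourceRegistry = {
	registerSource(name: string, adapter: unknown): void;
	unregisterSource(name: string): void;
	listSources(): string[];
};

declare function makeBigQueryAdapter(datasetId: string): unknown; // hypothetical

function wireSources(gateway: SourceRegistry): void {
	// Register an upstream source; its name becomes the `source` pull parameter.
	gateway.registerSource("warehouse", makeBigQueryAdapter("analytics"));
	console.log(gateway.listSources()); // ["warehouse"]
	// GET /pull?...&source=warehouse now reads through this adapter;
	// an unknown name maps to ADAPTER_NOT_FOUND (HTTP 404).
	gateway.unregisterSource("warehouse");
}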
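
The same gateway's shouldFlush() lowers its byte budget when the buffer's average delta size exceeds the adaptive wideColumnThreshold, multiplying maxBufferBytes by reductionFactor so wide rows flush earlier. A self-contained sketch of just that arithmetic (the standalone function name is ours, not the package's):

interface AdaptiveBufferConfig {
	wideColumnThreshold: number; // average delta size (bytes) that counts as "wide"
	reductionFactor: number; // e.g. 0.5 halves the buffer budget
}

// Mirrors the threshold computation visible in the bundled shouldFlush().
function effectiveFlushThreshold(
	maxBufferBytes: number,
	averageDeltaBytes: number,
	adaptive?: AdaptiveBufferConfig,
): number {
	if (adaptive && averageDeltaBytes > adaptive.wideColumnThreshold) {
		return Math.floor(maxBufferBytes * adaptive.reductionFactor);
	}
	return maxBufferBytes;
}

// 4 MiB budget, 128 KiB average deltas, halve past 64 KiB: 2 MiB effective.
effectiveFlushThreshold(4 * 1024 * 1024, 128 * 1024, {
	wideColumnThreshold: 64 * 1024,
	reductionFactor: 0.5,
}); // 2097152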
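
parsePullParams, also visible above, turns raw query strings into a typed SyncPull: `since` must be a decimal integer (revived as a BigInt HLC timestamp) and `limit` is clamped to MAX_PULL_LIMIT. A usage sketch; the import path is an assumption, since the diff only shows the bundled chunk:

import { parsePullParams } from "lakesync/gateway-server"; // path is a guess

const url = new URL(
	"https://gateway.example.com/pull?since=1712345678000&clientId=client-1&limit=500",
);
const parsed = parsePullParams({
	since: url.searchParams.get("since"),
	clientId: url.searchParams.get("clientId"),
	limit: url.searchParams.get("limit"),
	source: url.searchParams.get("source"), // optional named source adapter
});

if (!parsed.ok) {
	// RequestError pairs an HTTP status with a message, e.g. 400 for a bad `since`.
	console.error(parsed.error.status, parsed.error.message);
} else {
	// sinceHlc is a BigInt HLC timestamp; maxDeltas is clamped server-side.
	console.log(parsed.value.sinceHlc, parsed.value.maxDeltas);
}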
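
handlePushRequest layers optional durability and fan-out hooks around the gateway's handlePush: persist incoming deltas first (WAL-style), clear them only on success, and broadcast accepted deltas to everyone but the sender. A wiring sketch; the import path and the `wal`/`broadcast` helpers are assumptions, while the option fields and the returned { status, body } shape come from the source above:

import { handlePushRequest } from "lakesync/gateway-server"; // path is a guess

function onPush(
	gateway: Parameters<typeof handlePushRequest>[0],
	rawBody: string,
	headerClientId: string | null,
	wal: { append(deltas: unknown[]): void; clear(): void }, // hypothetical WAL
	broadcast: (deltas: unknown[], serverHlc: bigint, exclude: string) => void,
): { status: number; body: unknown } {
	return handlePushRequest(gateway, rawBody, headerClientId, {
		// WAL-style: persist the incoming deltas before the gateway touches them...
		persistBatch: (deltas) => wal.append(deltas),
		// ...and clear the persisted batch only after a successful push.
		clearPersistence: () => wal.clear(),
		// Fire-and-forget fan-out to connected clients, excluding the sender.
		broadcastFn: (deltas, serverHlc, excludeClientId) =>
			broadcast(deltas, serverHlc, excludeClientId),
	});
}
// The returned status is already an HTTP code: 200 on success, otherwise
// pushErrorToStatus's mapping (CLOCK_DRIFT 409, SCHEMA_MISMATCH 422,
// BACKPRESSURE 503, anything else 500).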
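
handleSaveSchema persists a posted table schema only after validateSchemaBody accepts it: every column needs a non-empty name and a type from the validator's allow-list (string, number, boolean, json, null). Two illustrative payloads:

// Passes validateSchemaBody: non-empty names, allowed types only.
const goodSchema = JSON.stringify({
	table: "todos",
	columns: [
		{ name: "id", type: "string" },
		{ name: "done", type: "boolean" },
		{ name: "meta", type: "json" },
	],
});

// Rejected with 400: "date" is not in VALID_COLUMN_TYPES.
const badSchema = JSON.stringify({
	table: "todos",
	columns: [{ name: "created", type: "date" }],
});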
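
SchemaManager, whose source closes out the map above, versions a table schema and permits only additive evolution: adding columns bumps the version, while removing a column or changing a type returns a SchemaError. A usage sketch; the import path is a guess, since the class may not be part of the public surface:

import { SchemaManager } from "lakesync/gateway"; // path is a guess

const mgr = new SchemaManager({
	table: "todos",
	columns: [
		{ name: "id", type: "string" },
		{ name: "title", type: "string" },
	],
}); // version defaults to 1

// Adding a column is allowed and bumps the version.
const grew = mgr.evolveSchema({
	table: "todos",
	columns: [
		{ name: "id", type: "string" },
		{ name: "title", type: "string" },
		{ name: "done", type: "boolean" }, // new nullable column
	],
});
console.log(grew.ok && grew.value.version); // 2

// Dropping a column or changing a type returns a SchemaError instead.
const shrunk = mgr.evolveSchema({
	table: "todos",
	columns: [{ name: "id", type: "number" }],
});
console.log(shrunk.ok); // false
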
@@ -1,7 +1,7 @@
 import {
   Err,
   Ok
-} from "./chunk-
+} from "./chunk-B257DXIS.js";
 
 // ../proto/src/gen/lakesync_pb.ts
 import { enumDesc, fileDesc, messageDesc } from "@bufbuild/protobuf/codegenv2";
@@ -332,4 +332,4 @@ export {
   encodeActionResponse,
   decodeActionResponse
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-6OCFE42A.js.map