lakesync 0.1.0

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (69)
  1. package/README.md +74 -0
  2. package/dist/adapter.d.ts +369 -0
  3. package/dist/adapter.js +39 -0
  4. package/dist/adapter.js.map +1 -0
  5. package/dist/analyst.d.ts +268 -0
  6. package/dist/analyst.js +495 -0
  7. package/dist/analyst.js.map +1 -0
  8. package/dist/auth-CAVutXzx.d.ts +30 -0
  9. package/dist/base-poller-Qo_SmCZs.d.ts +82 -0
  10. package/dist/catalogue.d.ts +65 -0
  11. package/dist/catalogue.js +17 -0
  12. package/dist/catalogue.js.map +1 -0
  13. package/dist/chunk-4ARO6KTJ.js +257 -0
  14. package/dist/chunk-4ARO6KTJ.js.map +1 -0
  15. package/dist/chunk-5YOFCJQ7.js +1115 -0
  16. package/dist/chunk-5YOFCJQ7.js.map +1 -0
  17. package/dist/chunk-7D4SUZUM.js +38 -0
  18. package/dist/chunk-7D4SUZUM.js.map +1 -0
  19. package/dist/chunk-BNJOGBYK.js +335 -0
  20. package/dist/chunk-BNJOGBYK.js.map +1 -0
  21. package/dist/chunk-ICNT7I3K.js +1180 -0
  22. package/dist/chunk-ICNT7I3K.js.map +1 -0
  23. package/dist/chunk-P5DRFKIT.js +413 -0
  24. package/dist/chunk-P5DRFKIT.js.map +1 -0
  25. package/dist/chunk-X3RO5SYJ.js +880 -0
  26. package/dist/chunk-X3RO5SYJ.js.map +1 -0
  27. package/dist/client.d.ts +428 -0
  28. package/dist/client.js +2048 -0
  29. package/dist/client.js.map +1 -0
  30. package/dist/compactor.d.ts +342 -0
  31. package/dist/compactor.js +793 -0
  32. package/dist/compactor.js.map +1 -0
  33. package/dist/coordinator-CxckTzYW.d.ts +396 -0
  34. package/dist/db-types-BR6Kt4uf.d.ts +29 -0
  35. package/dist/gateway-D5SaaMvT.d.ts +337 -0
  36. package/dist/gateway-server.d.ts +306 -0
  37. package/dist/gateway-server.js +4663 -0
  38. package/dist/gateway-server.js.map +1 -0
  39. package/dist/gateway.d.ts +196 -0
  40. package/dist/gateway.js +79 -0
  41. package/dist/gateway.js.map +1 -0
  42. package/dist/hlc-DiD8QNG3.d.ts +70 -0
  43. package/dist/index.d.ts +245 -0
  44. package/dist/index.js +102 -0
  45. package/dist/index.js.map +1 -0
  46. package/dist/json-dYtqiL0F.d.ts +18 -0
  47. package/dist/nessie-client-DrNikVXy.d.ts +160 -0
  48. package/dist/parquet.d.ts +78 -0
  49. package/dist/parquet.js +15 -0
  50. package/dist/parquet.js.map +1 -0
  51. package/dist/proto.d.ts +434 -0
  52. package/dist/proto.js +67 -0
  53. package/dist/proto.js.map +1 -0
  54. package/dist/react.d.ts +147 -0
  55. package/dist/react.js +224 -0
  56. package/dist/react.js.map +1 -0
  57. package/dist/resolver-C3Wphi6O.d.ts +10 -0
  58. package/dist/result-CojzlFE2.d.ts +64 -0
  59. package/dist/src-QU2YLPZY.js +383 -0
  60. package/dist/src-QU2YLPZY.js.map +1 -0
  61. package/dist/src-WYBF5LOI.js +102 -0
  62. package/dist/src-WYBF5LOI.js.map +1 -0
  63. package/dist/src-WZNPHANQ.js +426 -0
  64. package/dist/src-WZNPHANQ.js.map +1 -0
  65. package/dist/types-Bs-QyOe-.d.ts +143 -0
  66. package/dist/types-DAQL_vU_.d.ts +118 -0
  67. package/dist/types-DSC_EiwR.d.ts +45 -0
  68. package/dist/types-V_jVu2sA.d.ts +73 -0
  69. package/package.json +119 -0
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../catalogue/src/types.ts","../../catalogue/src/nessie-client.ts","../../catalogue/src/schema-mapping.ts"],"sourcesContent":["import { LakeSyncError } from \"@lakesync/core\";\n\n/** Configuration for connecting to a Nessie Iceberg REST catalogue */\nexport interface CatalogueConfig {\n\t/** Nessie Iceberg REST API base URI, e.g. \"http://localhost:19120/iceberg\" */\n\tnessieUri: string;\n\t/** Object storage warehouse URI, e.g. \"s3://lakesync-warehouse\" */\n\twarehouseUri: string;\n\t/** Nessie branch name. Defaults to \"main\". */\n\tdefaultBranch?: string;\n}\n\n/** Iceberg schema definition following the Iceberg REST spec */\nexport interface IcebergSchema {\n\ttype: \"struct\";\n\t\"schema-id\": number;\n\tfields: IcebergField[];\n}\n\n/** A single field within an Iceberg schema */\nexport interface IcebergField {\n\tid: number;\n\tname: string;\n\trequired: boolean;\n\ttype: string; // \"string\", \"long\", \"double\", \"boolean\"\n}\n\n/** Iceberg partition specification */\nexport interface PartitionSpec {\n\t\"spec-id\": number;\n\tfields: Array<{\n\t\t\"source-id\": number;\n\t\t\"field-id\": number;\n\t\tname: string;\n\t\ttransform: string; // \"day\", \"identity\", etc.\n\t}>;\n}\n\n/** A data file reference for Iceberg table commits */\nexport interface DataFile {\n\tcontent: \"data\";\n\t\"file-path\": string;\n\t\"file-format\": \"PARQUET\";\n\t\"record-count\": number;\n\t\"file-size-in-bytes\": number;\n\tpartition?: Record<string, string>;\n}\n\n/** An Iceberg table snapshot */\nexport interface Snapshot {\n\t\"snapshot-id\": number;\n\t\"timestamp-ms\": number;\n\tsummary: Record<string, string>;\n\t\"manifest-list\"?: string;\n}\n\n/** Full table metadata as returned by the Iceberg REST catalogue */\nexport interface TableMetadata {\n\t\"metadata-location\"?: string;\n\tmetadata: {\n\t\t\"format-version\": number;\n\t\t\"table-uuid\": string;\n\t\tlocation: string;\n\t\t\"current-schema-id\": number;\n\t\tschemas: IcebergSchema[];\n\t\t\"current-snapshot-id\"?: number;\n\t\tsnapshots?: Snapshot[];\n\t\t\"partition-specs\"?: PartitionSpec[];\n\t};\n}\n\n/** Catalogue operation error */\nexport class CatalogueError extends LakeSyncError {\n\treadonly statusCode: number;\n\n\tconstructor(message: string, statusCode: number, cause?: Error) {\n\t\tsuper(message, \"CATALOGUE_ERROR\", cause);\n\t\tthis.statusCode = statusCode;\n\t}\n}\n","import type { Result } from \"@lakesync/core\";\nimport { Err, Ok } from \"@lakesync/core\";\nimport {\n\ttype CatalogueConfig,\n\tCatalogueError,\n\ttype DataFile,\n\ttype IcebergSchema,\n\ttype PartitionSpec,\n\ttype Snapshot,\n\ttype TableMetadata,\n} from \"./types\";\n\n/** Response shape returned by the Iceberg REST `/v1/config` endpoint. */\ninterface CatalogueConfigResponse {\n\tdefaults?: Record<string, string>;\n\toverrides?: Record<string, string>;\n}\n\n/**\n * Encode a namespace array into a URL path segment.\n * For multi-level namespaces, parts are joined with the ASCII unit separator (%1F).\n */\nfunction encodeNamespace(namespace: string[]): string {\n\treturn namespace.map(encodeURIComponent).join(\"%1F\");\n}\n\n/**\n * Typed client for the Nessie Iceberg REST Catalogue API v1.\n *\n * Wraps standard Iceberg REST endpoints exposed by the Nessie server,\n * returning `Result<T, CatalogueError>` from every public method.\n *\n * On first use, the client fetches `/v1/config` from the server to discover\n * the catalogue prefix (typically the Nessie branch name, e.g. 
`\"main\"`).\n * All subsequent requests include this prefix in the URL path as required\n * by the Iceberg REST specification: `/v1/{prefix}/namespaces/...`.\n */\nexport class NessieCatalogueClient {\n\tprivate readonly baseUri: string;\n\tprivate readonly warehouseUri: string;\n\tprivate prefixPromise: Promise<string> | null = null;\n\n\tconstructor(config: CatalogueConfig) {\n\t\tthis.baseUri = config.nessieUri.replace(/\\/$/, \"\");\n\t\tthis.warehouseUri = config.warehouseUri;\n\t}\n\n\t/**\n\t * Resolve the catalogue prefix by calling the `/v1/config` endpoint.\n\t *\n\t * The Iceberg REST specification requires a prefix segment in all\n\t * API paths (e.g. `/v1/{prefix}/namespaces`). Nessie returns this\n\t * value in the `defaults.prefix` field of the config response.\n\t *\n\t * The result is cached so the config endpoint is only called once\n\t * per client instance.\n\t *\n\t * @returns The resolved prefix string (e.g. `\"main\"`)\n\t */\n\tprivate resolvePrefix(): Promise<string> {\n\t\tif (this.prefixPromise) {\n\t\t\treturn this.prefixPromise;\n\t\t}\n\n\t\tthis.prefixPromise = (async () => {\n\t\t\ttry {\n\t\t\t\tconst url = `${this.baseUri}/v1/config`;\n\t\t\t\tconst response = await fetch(url, {\n\t\t\t\t\tmethod: \"GET\",\n\t\t\t\t\theaders: { Accept: \"application/json\" },\n\t\t\t\t});\n\n\t\t\t\tif (!response.ok) {\n\t\t\t\t\t// Fall back to empty prefix if config endpoint is unavailable\n\t\t\t\t\treturn \"\";\n\t\t\t\t}\n\n\t\t\t\tconst data = (await response.json()) as CatalogueConfigResponse;\n\t\t\t\treturn data.defaults?.prefix ?? \"\";\n\t\t\t} catch {\n\t\t\t\t// Fall back to empty prefix on network errors\n\t\t\t\treturn \"\";\n\t\t\t}\n\t\t})();\n\n\t\treturn this.prefixPromise;\n\t}\n\n\t/**\n\t * Build the base API path including the resolved prefix.\n\t *\n\t * @returns URL prefix such as `http://host/iceberg/v1/main` or\n\t * `http://host/iceberg/v1` when no prefix is configured\n\t */\n\tprivate async apiBase(): Promise<string> {\n\t\tconst prefix = await this.resolvePrefix();\n\t\tif (prefix) {\n\t\t\treturn `${this.baseUri}/v1/${encodeURIComponent(prefix)}`;\n\t\t}\n\t\treturn `${this.baseUri}/v1`;\n\t}\n\n\t/**\n\t * Create a namespace (idempotent -- ignores 409 Conflict).\n\t *\n\t * @param namespace - Namespace parts, e.g. `[\"lakesync\"]`\n\t * @returns `Ok(void)` on success or if namespace already exists\n\t */\n\tasync createNamespace(namespace: string[]): Promise<Result<void, CatalogueError>> {\n\t\tconst base = await this.apiBase();\n\t\tconst url = `${base}/namespaces`;\n\t\tconst body = {\n\t\t\tnamespace,\n\t\t\tproperties: {},\n\t\t};\n\n\t\ttry {\n\t\t\tconst response = await fetch(url, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders: { \"Content-Type\": \"application/json\" },\n\t\t\t\tbody: JSON.stringify(body),\n\t\t\t});\n\n\t\t\t// 409 Conflict means namespace already exists -- treat as success\n\t\t\tif (response.status === 409) {\n\t\t\t\treturn Ok(undefined);\n\t\t\t}\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst text = await response.text().catch(() => \"\");\n\t\t\t\treturn Err(\n\t\t\t\t\tnew CatalogueError(\n\t\t\t\t\t\t`Failed to create namespace: ${response.status} ${response.statusText}${text ? ` - ${text}` : \"\"}`,\n\t\t\t\t\t\tresponse.status,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\treturn Ok(undefined);\n\t\t} catch (error) {\n\t\t\treturn Err(\n\t\t\t\tnew CatalogueError(\n\t\t\t\t\t`Network error creating namespace: ${error instanceof Error ? 
error.message : String(error)}`,\n\t\t\t\t\t0,\n\t\t\t\t\terror instanceof Error ? error : undefined,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\t/**\n\t * List all namespaces in the catalogue.\n\t *\n\t * @returns Array of namespace arrays, e.g. `[[\"lakesync\"], [\"other\"]]`\n\t */\n\tasync listNamespaces(): Promise<Result<string[][], CatalogueError>> {\n\t\tconst base = await this.apiBase();\n\t\tconst url = `${base}/namespaces`;\n\n\t\ttry {\n\t\t\tconst response = await fetch(url, {\n\t\t\t\tmethod: \"GET\",\n\t\t\t\theaders: { Accept: \"application/json\" },\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst text = await response.text().catch(() => \"\");\n\t\t\t\treturn Err(\n\t\t\t\t\tnew CatalogueError(\n\t\t\t\t\t\t`Failed to list namespaces: ${response.status} ${response.statusText}${text ? ` - ${text}` : \"\"}`,\n\t\t\t\t\t\tresponse.status,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst data = (await response.json()) as { namespaces: string[][] };\n\t\t\treturn Ok(data.namespaces);\n\t\t} catch (error) {\n\t\t\treturn Err(\n\t\t\t\tnew CatalogueError(\n\t\t\t\t\t`Network error listing namespaces: ${error instanceof Error ? error.message : String(error)}`,\n\t\t\t\t\t0,\n\t\t\t\t\terror instanceof Error ? error : undefined,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\t/**\n\t * Create an Iceberg table within a namespace.\n\t *\n\t * @param namespace - Namespace parts, e.g. `[\"lakesync\"]`\n\t * @param name - Table name\n\t * @param schema - Iceberg schema definition\n\t * @param partitionSpec - Partition specification\n\t */\n\tasync createTable(\n\t\tnamespace: string[],\n\t\tname: string,\n\t\tschema: IcebergSchema,\n\t\tpartitionSpec: PartitionSpec,\n\t): Promise<Result<void, CatalogueError>> {\n\t\tconst ns = encodeNamespace(namespace);\n\t\tconst base = await this.apiBase();\n\t\tconst url = `${base}/namespaces/${ns}/tables`;\n\t\tconst location = `${this.warehouseUri}/${namespace.join(\"/\")}/${name}`;\n\t\tconst body = {\n\t\t\tname,\n\t\t\tschema,\n\t\t\t\"partition-spec\": partitionSpec,\n\t\t\t\"stage-create\": false,\n\t\t\tlocation,\n\t\t\tproperties: {},\n\t\t};\n\n\t\ttry {\n\t\t\tconst response = await fetch(url, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders: { \"Content-Type\": \"application/json\" },\n\t\t\t\tbody: JSON.stringify(body),\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst text = await response.text().catch(() => \"\");\n\t\t\t\treturn Err(\n\t\t\t\t\tnew CatalogueError(\n\t\t\t\t\t\t`Failed to create table ${namespace.join(\".\")}.${name}: ${response.status} ${response.statusText}${text ? ` - ${text}` : \"\"}`,\n\t\t\t\t\t\tresponse.status,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\treturn Ok(undefined);\n\t\t} catch (error) {\n\t\t\treturn Err(\n\t\t\t\tnew CatalogueError(\n\t\t\t\t\t`Network error creating table: ${error instanceof Error ? error.message : String(error)}`,\n\t\t\t\t\t0,\n\t\t\t\t\terror instanceof Error ? error : undefined,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\t/**\n\t * Load table metadata from the catalogue.\n\t *\n\t * @param namespace - Namespace parts, e.g. 
`[\"lakesync\"]`\n\t * @param name - Table name\n\t * @returns Full table metadata including schemas, snapshots, and partition specs\n\t */\n\tasync loadTable(\n\t\tnamespace: string[],\n\t\tname: string,\n\t): Promise<Result<TableMetadata, CatalogueError>> {\n\t\tconst ns = encodeNamespace(namespace);\n\t\tconst base = await this.apiBase();\n\t\tconst url = `${base}/namespaces/${ns}/tables/${encodeURIComponent(name)}`;\n\n\t\ttry {\n\t\t\tconst response = await fetch(url, {\n\t\t\t\tmethod: \"GET\",\n\t\t\t\theaders: { Accept: \"application/json\" },\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst text = await response.text().catch(() => \"\");\n\t\t\t\treturn Err(\n\t\t\t\t\tnew CatalogueError(\n\t\t\t\t\t\t`Failed to load table ${namespace.join(\".\")}.${name}: ${response.status} ${response.statusText}${text ? ` - ${text}` : \"\"}`,\n\t\t\t\t\t\tresponse.status,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst data = (await response.json()) as TableMetadata;\n\t\t\treturn Ok(data);\n\t\t} catch (error) {\n\t\t\treturn Err(\n\t\t\t\tnew CatalogueError(\n\t\t\t\t\t`Network error loading table: ${error instanceof Error ? error.message : String(error)}`,\n\t\t\t\t\t0,\n\t\t\t\t\terror instanceof Error ? error : undefined,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\t/**\n\t * Append data files to a table, creating a new snapshot.\n\t *\n\t * Uses the standard Iceberg REST v1 commit-table endpoint with\n\t * `add-snapshot` and `set-snapshot-ref` metadata updates.\n\t * First loads the current table metadata to determine the current state,\n\t * then commits a new snapshot referencing the provided data files.\n\t *\n\t * @param namespace - Namespace parts, e.g. `[\"lakesync\"]`\n\t * @param table - Table name\n\t * @param files - Data files to append\n\t */\n\tasync appendFiles(\n\t\tnamespace: string[],\n\t\ttable: string,\n\t\tfiles: DataFile[],\n\t): Promise<Result<void, CatalogueError>> {\n\t\t// Load the current table metadata to get schema and snapshot state\n\t\tconst metadataResult = await this.loadTable(namespace, table);\n\t\tif (!metadataResult.ok) {\n\t\t\treturn metadataResult;\n\t\t}\n\n\t\tconst metadata = metadataResult.value;\n\t\tconst currentSchemaId = metadata.metadata[\"current-schema-id\"];\n\n\t\tconst ns = encodeNamespace(namespace);\n\t\tconst base = await this.apiBase();\n\t\tconst url = `${base}/namespaces/${ns}/tables/${encodeURIComponent(table)}`;\n\n\t\t// Generate a unique snapshot ID\n\t\tconst snapshotId = Date.now() * 1000 + Math.floor(Math.random() * 1000);\n\t\tconst timestampMs = Date.now();\n\n\t\t// Compute summary from data files\n\t\tconst totalRecords = files.reduce((sum, f) => sum + f[\"record-count\"], 0);\n\t\tconst totalSize = files.reduce((sum, f) => sum + f[\"file-size-in-bytes\"], 0);\n\n\t\tconst snapshot: Record<string, unknown> = {\n\t\t\t\"snapshot-id\": snapshotId,\n\t\t\t\"timestamp-ms\": timestampMs,\n\t\t\tsummary: {\n\t\t\t\toperation: \"append\",\n\t\t\t\t\"added-data-files\": String(files.length),\n\t\t\t\t\"added-records\": String(totalRecords),\n\t\t\t\t\"added-files-size\": String(totalSize),\n\t\t\t},\n\t\t\t\"schema-id\": currentSchemaId,\n\t\t};\n\n\t\t// Include parent snapshot reference if one exists\n\t\tconst currentSnapshotId = metadata.metadata[\"current-snapshot-id\"];\n\t\tif (currentSnapshotId !== undefined) {\n\t\t\tsnapshot[\"parent-snapshot-id\"] = currentSnapshotId;\n\t\t}\n\n\t\tconst commitBody = {\n\t\t\trequirements: [\n\t\t\t\t{\n\t\t\t\t\ttype: \"assert-current-schema-id\",\n\t\t\t\t\t\"current-schema-id\": 
currentSchemaId,\n\t\t\t\t},\n\t\t\t],\n\t\t\tupdates: [\n\t\t\t\t{\n\t\t\t\t\taction: \"add-snapshot\",\n\t\t\t\t\tsnapshot,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\taction: \"set-snapshot-ref\",\n\t\t\t\t\t\"ref-name\": \"main\",\n\t\t\t\t\ttype: \"branch\",\n\t\t\t\t\t\"snapshot-id\": snapshotId,\n\t\t\t\t},\n\t\t\t],\n\t\t};\n\n\t\ttry {\n\t\t\tconst response = await fetch(url, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders: { \"Content-Type\": \"application/json\" },\n\t\t\t\tbody: JSON.stringify(commitBody),\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst text = await response.text().catch(() => \"\");\n\t\t\t\treturn Err(\n\t\t\t\t\tnew CatalogueError(\n\t\t\t\t\t\t`Failed to append files to ${namespace.join(\".\")}.${table}: ${response.status} ${response.statusText}${text ? ` - ${text}` : \"\"}`,\n\t\t\t\t\t\tresponse.status,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\treturn Ok(undefined);\n\t\t} catch (error) {\n\t\t\treturn Err(\n\t\t\t\tnew CatalogueError(\n\t\t\t\t\t`Network error appending files: ${error instanceof Error ? error.message : String(error)}`,\n\t\t\t\t\t0,\n\t\t\t\t\terror instanceof Error ? error : undefined,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\t/**\n\t * Get the current snapshot of a table, or null if no snapshots exist.\n\t *\n\t * @param namespace - Namespace parts, e.g. `[\"lakesync\"]`\n\t * @param table - Table name\n\t * @returns The current snapshot, or `null` if the table has no snapshots\n\t */\n\tasync currentSnapshot(\n\t\tnamespace: string[],\n\t\ttable: string,\n\t): Promise<Result<Snapshot | null, CatalogueError>> {\n\t\tconst metadataResult = await this.loadTable(namespace, table);\n\t\tif (!metadataResult.ok) {\n\t\t\treturn metadataResult;\n\t\t}\n\n\t\tconst metadata = metadataResult.value;\n\t\tconst currentSnapshotId = metadata.metadata[\"current-snapshot-id\"];\n\t\tconst snapshots = metadata.metadata.snapshots ?? [];\n\n\t\tif (currentSnapshotId === undefined || snapshots.length === 0) {\n\t\t\treturn Ok(null);\n\t\t}\n\n\t\tconst snapshot = snapshots.find((s) => s[\"snapshot-id\"] === currentSnapshotId);\n\t\treturn Ok(snapshot ?? null);\n\t}\n}\n","import type { TableSchema } from \"@lakesync/core\";\nimport type { IcebergField, IcebergSchema, PartitionSpec } from \"./types\";\n\n/**\n * LakeSync column type as defined in the TableSchema interface.\n */\ntype ColumnType = TableSchema[\"columns\"][number][\"type\"];\n\n/**\n * Maps a LakeSync column type to an Iceberg type string.\n *\n * The mapping mirrors the Arrow schema produced by `buildArrowSchema` in\n * `@lakesync/parquet`:\n * - `\"string\"` → `\"string\"` (Utf8 equivalent)\n * - `\"number\"` → `\"double\"` (Float64 equivalent)\n * - `\"boolean\"` → `\"boolean\"` (Bool equivalent)\n * - `\"json\"` → `\"string\"` (serialised as JSON text, same as Utf8)\n * - `\"null\"` → `\"string\"` (nullable Utf8)\n *\n * @param colType - The LakeSync column type to convert\n * @returns The corresponding Iceberg type string\n */\nfunction lakeSyncTypeToIceberg(colType: ColumnType): string {\n\tswitch (colType) {\n\t\tcase \"string\":\n\t\t\treturn \"string\";\n\t\tcase \"number\":\n\t\t\treturn \"double\";\n\t\tcase \"boolean\":\n\t\t\treturn \"boolean\";\n\t\tcase \"json\":\n\t\t\treturn \"string\";\n\t\tcase \"null\":\n\t\t\treturn \"string\";\n\t}\n}\n\n/**\n * Converts a LakeSync `TableSchema` to an Iceberg `IcebergSchema`.\n *\n * The resulting schema always includes six system columns (required) followed\n * by user-defined columns (not required). 
Column order and types are aligned\n * with the Apache Arrow schema produced by `buildArrowSchema` from\n * `@lakesync/parquet`.\n *\n * System columns (always present, in this order):\n * 1. `op` — `\"string\"` (the delta operation type)\n * 2. `table` — `\"string\"` (the table name)\n * 3. `rowId` — `\"string\"` (the row identifier)\n * 4. `clientId` — `\"string\"` (the client identifier)\n * 5. `hlc` — `\"long\"` (HLC timestamp as Int64)\n * 6. `deltaId` — `\"string\"` (the deterministic delta identifier)\n *\n * User columns are mapped according to their declared LakeSync type:\n * - `\"string\"` → `\"string\"`\n * - `\"number\"` → `\"double\"`\n * - `\"boolean\"` → `\"boolean\"`\n * - `\"json\"` → `\"string\"` (JSON-serialised text)\n * - `\"null\"` → `\"string\"`\n *\n * @param schema - The LakeSync `TableSchema` to convert\n * @returns An `IcebergSchema` with system and user columns, `schema-id` 0\n */\nexport function tableSchemaToIceberg(schema: TableSchema): IcebergSchema {\n\tlet fieldId = 1;\n\n\tconst systemFields: IcebergField[] = [\n\t\t{ id: fieldId++, name: \"op\", required: true, type: \"string\" },\n\t\t{ id: fieldId++, name: \"table\", required: true, type: \"string\" },\n\t\t{ id: fieldId++, name: \"rowId\", required: true, type: \"string\" },\n\t\t{ id: fieldId++, name: \"clientId\", required: true, type: \"string\" },\n\t\t{ id: fieldId++, name: \"hlc\", required: true, type: \"long\" },\n\t\t{ id: fieldId++, name: \"deltaId\", required: true, type: \"string\" },\n\t];\n\n\tconst userFields: IcebergField[] = schema.columns.map((col) => ({\n\t\tid: fieldId++,\n\t\tname: col.name,\n\t\trequired: false,\n\t\ttype: lakeSyncTypeToIceberg(col.type),\n\t}));\n\n\treturn {\n\t\ttype: \"struct\",\n\t\t\"schema-id\": 0,\n\t\tfields: [...systemFields, ...userFields],\n\t};\n}\n\n/**\n * Builds an Iceberg `PartitionSpec` from an `IcebergSchema`.\n *\n * The partition strategy extracts the day from the `hlc` column using the\n * Iceberg `day` transform, which partitions data by the wall-clock date\n * encoded in the HLC timestamp. This ensures efficient time-range queries.\n *\n * The resulting spec has a single partition field:\n * - `source-id`: the field ID of the `hlc` column\n * - `field-id`: 1000 (Iceberg convention — partition field IDs start at 1000)\n * - `name`: `\"hlc_day\"`\n * - `transform`: `\"day\"`\n *\n * @param schema - The Iceberg schema containing an `hlc` field\n * @returns A `PartitionSpec` with `spec-id` 0 and a single day-partitioned field\n * @throws If the schema does not contain an `hlc` field\n */\nexport function buildPartitionSpec(schema: IcebergSchema): PartitionSpec {\n\tconst hlcField = schema.fields.find((f) => f.name === \"hlc\");\n\tif (!hlcField) {\n\t\tthrow new Error(\"Schema must contain an 'hlc' field for partitioning\");\n\t}\n\n\treturn {\n\t\t\"spec-id\": 0,\n\t\tfields: [\n\t\t\t{\n\t\t\t\t\"source-id\": hlcField.id,\n\t\t\t\t\"field-id\": 1000,\n\t\t\t\tname: \"hlc_day\",\n\t\t\t\ttransform: \"day\",\n\t\t\t},\n\t\t],\n\t};\n}\n\n/**\n * Maps a LakeSync table name to an Iceberg namespace and table name.\n *\n * All LakeSync tables reside under the `[\"lakesync\"]` namespace. The table\n * name is passed through as-is, preserving the original casing and format.\n *\n * @param table - The LakeSync table name (e.g. 
`\"todos\"`)\n * @returns An object with `namespace` (`[\"lakesync\"]`) and `name` (the table name)\n */\nexport function lakeSyncTableName(table: string): {\n\tnamespace: string[];\n\tname: string;\n} {\n\treturn {\n\t\tnamespace: [\"lakesync\"],\n\t\tname: table,\n\t};\n}\n"],"mappings":";;;;;;;AAwEO,IAAM,iBAAN,cAA6B,cAAc;AAAA,EACxC;AAAA,EAET,YAAY,SAAiB,YAAoB,OAAe;AAC/D,UAAM,SAAS,mBAAmB,KAAK;AACvC,SAAK,aAAa;AAAA,EACnB;AACD;;;ACzDA,SAAS,gBAAgB,WAA6B;AACrD,SAAO,UAAU,IAAI,kBAAkB,EAAE,KAAK,KAAK;AACpD;AAaO,IAAM,wBAAN,MAA4B;AAAA,EACjB;AAAA,EACA;AAAA,EACT,gBAAwC;AAAA,EAEhD,YAAY,QAAyB;AACpC,SAAK,UAAU,OAAO,UAAU,QAAQ,OAAO,EAAE;AACjD,SAAK,eAAe,OAAO;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcQ,gBAAiC;AACxC,QAAI,KAAK,eAAe;AACvB,aAAO,KAAK;AAAA,IACb;AAEA,SAAK,iBAAiB,YAAY;AACjC,UAAI;AACH,cAAM,MAAM,GAAG,KAAK,OAAO;AAC3B,cAAM,WAAW,MAAM,MAAM,KAAK;AAAA,UACjC,QAAQ;AAAA,UACR,SAAS,EAAE,QAAQ,mBAAmB;AAAA,QACvC,CAAC;AAED,YAAI,CAAC,SAAS,IAAI;AAEjB,iBAAO;AAAA,QACR;AAEA,cAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,eAAO,KAAK,UAAU,UAAU;AAAA,MACjC,QAAQ;AAEP,eAAO;AAAA,MACR;AAAA,IACD,GAAG;AAEH,WAAO,KAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,UAA2B;AACxC,UAAM,SAAS,MAAM,KAAK,cAAc;AACxC,QAAI,QAAQ;AACX,aAAO,GAAG,KAAK,OAAO,OAAO,mBAAmB,MAAM,CAAC;AAAA,IACxD;AACA,WAAO,GAAG,KAAK,OAAO;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,gBAAgB,WAA4D;AACjF,UAAM,OAAO,MAAM,KAAK,QAAQ;AAChC,UAAM,MAAM,GAAG,IAAI;AACnB,UAAM,OAAO;AAAA,MACZ;AAAA,MACA,YAAY,CAAC;AAAA,IACd;AAEA,QAAI;AACH,YAAM,WAAW,MAAM,MAAM,KAAK;AAAA,QACjC,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAC9C,MAAM,KAAK,UAAU,IAAI;AAAA,MAC1B,CAAC;AAGD,UAAI,SAAS,WAAW,KAAK;AAC5B,eAAO,GAAG,MAAS;AAAA,MACpB;AAEA,UAAI,CAAC,SAAS,IAAI;AACjB,cAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,eAAO;AAAA,UACN,IAAI;AAAA,YACH,+BAA+B,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,YAChG,SAAS;AAAA,UACV;AAAA,QACD;AAAA,MACD;AAEA,aAAO,GAAG,MAAS;AAAA,IACpB,SAAS,OAAO;AACf,aAAO;AAAA,QACN,IAAI;AAAA,UACH,qCAAqC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UAC3F;AAAA,UACA,iBAAiB,QAAQ,QAAQ;AAAA,QAClC;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,iBAA8D;AACnE,UAAM,OAAO,MAAM,KAAK,QAAQ;AAChC,UAAM,MAAM,GAAG,IAAI;AAEnB,QAAI;AACH,YAAM,WAAW,MAAM,MAAM,KAAK;AAAA,QACjC,QAAQ;AAAA,QACR,SAAS,EAAE,QAAQ,mBAAmB;AAAA,MACvC,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AACjB,cAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,eAAO;AAAA,UACN,IAAI;AAAA,YACH,8BAA8B,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,YAC/F,SAAS;AAAA,UACV;AAAA,QACD;AAAA,MACD;AAEA,YAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,aAAO,GAAG,KAAK,UAAU;AAAA,IAC1B,SAAS,OAAO;AACf,aAAO;AAAA,QACN,IAAI;AAAA,UACH,qCAAqC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UAC3F;AAAA,UACA,iBAAiB,QAAQ,QAAQ;AAAA,QAClC;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,YACL,WACA,MACA,QACA,eACwC;AACxC,UAAM,KAAK,gBAAgB,SAAS;AACpC,UAAM,OAAO,MAAM,KAAK,QAAQ;AAChC,UAAM,MAAM,GAAG,IAAI,eAAe,EAAE;AACpC,UAAM,WAAW,GAAG,KAAK,YAAY,IAAI,UAAU,KAAK,GAAG,CAAC,IAAI,IAAI;AACpE,UAAM,OAAO;AAAA,MACZ;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB;AAAA,MACA,YAAY,CAAC;AAAA,IACd;AAEA,QAAI;AACH,YAAM,WAAW,MAAM,MAAM,KAAK;AAAA,QACjC,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAC9C,MAAM,KAAK,UAAU,IAAI;AAAA,MAC1B,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AACjB,cAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,eAAO;AAAA,UACN,IAAI;AAAA,YACH,0BAA0B,UAAU,KAAK,GAAG,CAAC,IAAI,IAAI,KAAK,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,YAC3H,SAAS;AAAA,UACV;AAAA,QACD;AAAA,MACD;AAEA,aAAO,GAAG,MAAS;AAAA,IACpB,SAAS,OAAO;AACf,aAA
O;AAAA,QACN,IAAI;AAAA,UACH,iCAAiC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UACvF;AAAA,UACA,iBAAiB,QAAQ,QAAQ;AAAA,QAClC;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,UACL,WACA,MACiD;AACjD,UAAM,KAAK,gBAAgB,SAAS;AACpC,UAAM,OAAO,MAAM,KAAK,QAAQ;AAChC,UAAM,MAAM,GAAG,IAAI,eAAe,EAAE,WAAW,mBAAmB,IAAI,CAAC;AAEvE,QAAI;AACH,YAAM,WAAW,MAAM,MAAM,KAAK;AAAA,QACjC,QAAQ;AAAA,QACR,SAAS,EAAE,QAAQ,mBAAmB;AAAA,MACvC,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AACjB,cAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,eAAO;AAAA,UACN,IAAI;AAAA,YACH,wBAAwB,UAAU,KAAK,GAAG,CAAC,IAAI,IAAI,KAAK,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,YACzH,SAAS;AAAA,UACV;AAAA,QACD;AAAA,MACD;AAEA,YAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,aAAO,GAAG,IAAI;AAAA,IACf,SAAS,OAAO;AACf,aAAO;AAAA,QACN,IAAI;AAAA,UACH,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UACtF;AAAA,UACA,iBAAiB,QAAQ,QAAQ;AAAA,QAClC;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,YACL,WACA,OACA,OACwC;AAExC,UAAM,iBAAiB,MAAM,KAAK,UAAU,WAAW,KAAK;AAC5D,QAAI,CAAC,eAAe,IAAI;AACvB,aAAO;AAAA,IACR;AAEA,UAAM,WAAW,eAAe;AAChC,UAAM,kBAAkB,SAAS,SAAS,mBAAmB;AAE7D,UAAM,KAAK,gBAAgB,SAAS;AACpC,UAAM,OAAO,MAAM,KAAK,QAAQ;AAChC,UAAM,MAAM,GAAG,IAAI,eAAe,EAAE,WAAW,mBAAmB,KAAK,CAAC;AAGxE,UAAM,aAAa,KAAK,IAAI,IAAI,MAAO,KAAK,MAAM,KAAK,OAAO,IAAI,GAAI;AACtE,UAAM,cAAc,KAAK,IAAI;AAG7B,UAAM,eAAe,MAAM,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,cAAc,GAAG,CAAC;AACxE,UAAM,YAAY,MAAM,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,oBAAoB,GAAG,CAAC;AAE3E,UAAM,WAAoC;AAAA,MACzC,eAAe;AAAA,MACf,gBAAgB;AAAA,MAChB,SAAS;AAAA,QACR,WAAW;AAAA,QACX,oBAAoB,OAAO,MAAM,MAAM;AAAA,QACvC,iBAAiB,OAAO,YAAY;AAAA,QACpC,oBAAoB,OAAO,SAAS;AAAA,MACrC;AAAA,MACA,aAAa;AAAA,IACd;AAGA,UAAM,oBAAoB,SAAS,SAAS,qBAAqB;AACjE,QAAI,sBAAsB,QAAW;AACpC,eAAS,oBAAoB,IAAI;AAAA,IAClC;AAEA,UAAM,aAAa;AAAA,MAClB,cAAc;AAAA,QACb;AAAA,UACC,MAAM;AAAA,UACN,qBAAqB;AAAA,QACtB;AAAA,MACD;AAAA,MACA,SAAS;AAAA,QACR;AAAA,UACC,QAAQ;AAAA,UACR;AAAA,QACD;AAAA,QACA;AAAA,UACC,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,MAAM;AAAA,UACN,eAAe;AAAA,QAChB;AAAA,MACD;AAAA,IACD;AAEA,QAAI;AACH,YAAM,WAAW,MAAM,MAAM,KAAK;AAAA,QACjC,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAC9C,MAAM,KAAK,UAAU,UAAU;AAAA,MAChC,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AACjB,cAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,eAAO;AAAA,UACN,IAAI;AAAA,YACH,6BAA6B,UAAU,KAAK,GAAG,CAAC,IAAI,KAAK,KAAK,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,YAC/H,SAAS;AAAA,UACV;AAAA,QACD;AAAA,MACD;AAEA,aAAO,GAAG,MAAS;AAAA,IACpB,SAAS,OAAO;AACf,aAAO;AAAA,QACN,IAAI;AAAA,UACH,kCAAkC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UACxF;AAAA,UACA,iBAAiB,QAAQ,QAAQ;AAAA,QAClC;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,gBACL,WACA,OACmD;AACnD,UAAM,iBAAiB,MAAM,KAAK,UAAU,WAAW,KAAK;AAC5D,QAAI,CAAC,eAAe,IAAI;AACvB,aAAO;AAAA,IACR;AAEA,UAAM,WAAW,eAAe;AAChC,UAAM,oBAAoB,SAAS,SAAS,qBAAqB;AACjE,UAAM,YAAY,SAAS,SAAS,aAAa,CAAC;AAElD,QAAI,sBAAsB,UAAa,UAAU,WAAW,GAAG;AAC9D,aAAO,GAAG,IAAI;AAAA,IACf;AAEA,UAAM,WAAW,UAAU,KAAK,CAAC,MAAM,EAAE,aAAa,MAAM,iBAAiB;AAC7E,WAAO,GAAG,YAAY,IAAI;AAAA,EAC3B;AACD;;;AC/YA,SAAS,sBAAsB,SAA6B;AAC3D,UAAQ,SAAS;AAAA,IAChB,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,IACR,KAAK;AACJ,aAAO;AAAA,EACT;AACD;AA4BO,SAAS,qBAAqB,QAAoC;AACxE,MAAI,UAAU;AAEd,QAAM,eAA+B;AAAA,IACpC,EAAE,IAAI,WAAW,MAAM,MAAM,UAAU,MAAM,MAAM,SAAS;AAAA,IAC5D,EAAE,IAAI,WAAW,MAAM,SAAS,UAAU,MAAM,MAAM,SAAS;AAAA,IAC/D,EAAE,IAAI,WAAW,MAAM,SAAS,UAAU,MAAM,MAAM,SAAS;AAAA,IAC/D,EAAE,IAAI,WAAW,MAAM,YAAY,UAAU,MAAM,MAAM,SAAS;AAAA,IAClE,EAAE,IAAI,WAAW,MA
AM,OAAO,UAAU,MAAM,MAAM,OAAO;AAAA,IAC3D,EAAE,IAAI,WAAW,MAAM,WAAW,UAAU,MAAM,MAAM,SAAS;AAAA,EAClE;AAEA,QAAM,aAA6B,OAAO,QAAQ,IAAI,CAAC,SAAS;AAAA,IAC/D,IAAI;AAAA,IACJ,MAAM,IAAI;AAAA,IACV,UAAU;AAAA,IACV,MAAM,sBAAsB,IAAI,IAAI;AAAA,EACrC,EAAE;AAEF,SAAO;AAAA,IACN,MAAM;AAAA,IACN,aAAa;AAAA,IACb,QAAQ,CAAC,GAAG,cAAc,GAAG,UAAU;AAAA,EACxC;AACD;AAmBO,SAAS,mBAAmB,QAAsC;AACxE,QAAM,WAAW,OAAO,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,KAAK;AAC3D,MAAI,CAAC,UAAU;AACd,UAAM,IAAI,MAAM,qDAAqD;AAAA,EACtE;AAEA,SAAO;AAAA,IACN,WAAW;AAAA,IACX,QAAQ;AAAA,MACP;AAAA,QACC,aAAa,SAAS;AAAA,QACtB,YAAY;AAAA,QACZ,MAAM;AAAA,QACN,WAAW;AAAA,MACZ;AAAA,IACD;AAAA,EACD;AACD;AAWO,SAAS,kBAAkB,OAGhC;AACD,SAAO;AAAA,IACN,WAAW,CAAC,UAAU;AAAA,IACtB,MAAM;AAAA,EACP;AACD;","names":[]}
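The sources embedded in the map above document the full catalogue flow: construct a `NessieCatalogueClient`, create the `lakesync` namespace idempotently, derive an Iceberg schema and day-partition spec from a LakeSync `TableSchema`, and create the table. A minimal sketch of that flow, assuming the catalogue module is importable as `lakesync/catalogue` (the actual entry point for `dist/catalogue.js` is not shown in this diff) and reusing the example URIs from the source's own JSDoc:

```ts
import type { TableSchema } from "@lakesync/core";
import {
	buildPartitionSpec,
	lakeSyncTableName,
	NessieCatalogueClient,
	tableSchemaToIceberg,
} from "lakesync/catalogue"; // assumed entry point for dist/catalogue.js

const client = new NessieCatalogueClient({
	nessieUri: "http://localhost:19120/iceberg", // example URI from the source JSDoc
	warehouseUri: "s3://lakesync-warehouse",
});

// Idempotent: the client maps 409 Conflict to Ok, so re-running is safe.
const ns = await client.createNamespace(["lakesync"]);
if (!ns.ok) throw new Error("createNamespace failed");

// Hypothetical TableSchema value: only the columns array (name + type) is
// taken from the sources; any further fields of TableSchema are omitted here.
const todoSchema = {
	columns: [
		{ name: "title", type: "string" },
		{ name: "done", type: "boolean" },
	],
} as unknown as TableSchema;

// Six required system columns (op, table, rowId, clientId, hlc, deltaId)
// come first; user columns follow as optional fields.
const schema = tableSchemaToIceberg(todoSchema);
const spec = buildPartitionSpec(schema); // "day" transform on the hlc field
const { namespace, name } = lakeSyncTableName("todos"); // ["lakesync"], "todos"

const created = await client.createTable(namespace, name, schema, spec);
if (!created.ok) throw new Error("createTable failed");
```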
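Appending data then follows `appendFiles` as documented above: the client first loads the current table metadata, then commits an `add-snapshot` plus `set-snapshot-ref` update against the `main` branch. A continuation of the sketch, with a hypothetical Parquet object key and illustrative file stats:

```ts
import type { DataFile } from "lakesync/catalogue"; // assumed entry point, as above

const files: DataFile[] = [
	{
		content: "data",
		// Hypothetical object key under the warehouse/table location:
		"file-path": "s3://lakesync-warehouse/lakesync/todos/part-0001.parquet",
		"file-format": "PARQUET",
		"record-count": 128,
		"file-size-in-bytes": 4096,
	},
];

// Loads current metadata, then commits the new snapshot in one POST.
const appended = await client.appendFiles(["lakesync"], "todos", files);
if (!appended.ok) throw new Error("appendFiles failed");

// Read back the snapshot the commit produced (null for an empty table).
const snap = await client.currentSnapshot(["lakesync"], "todos");
if (snap.ok && snap.value) {
	console.log(snap.value["snapshot-id"], snap.value.summary.operation); // → "append"
}
```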
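For reference, the commit body that `appendFiles` POSTs to the table endpoint pairs one optimistic-concurrency requirement with two metadata updates. Its shape, reconstructed from the source above with illustrative snapshot id, schema id, and counts:

```ts
// Shape of the commit body appendFiles sends (ids and counts illustrative).
const commitBody = {
	requirements: [
		// Optimistic concurrency: the commit fails if the schema changed
		// between loadTable and this POST.
		{ type: "assert-current-schema-id", "current-schema-id": 0 },
	],
	updates: [
		{
			action: "add-snapshot",
			snapshot: {
				"snapshot-id": 1735689600000123, // Date.now() * 1000 + random jitter
				"timestamp-ms": 1735689600000,
				summary: {
					operation: "append",
					"added-data-files": "1",
					"added-records": "128",
					"added-files-size": "4096",
				},
				"schema-id": 0,
				// "parent-snapshot-id" is included when the table already
				// has a current snapshot.
			},
		},
		// Point the "main" branch at the new snapshot.
		{
			action: "set-snapshot-ref",
			"ref-name": "main",
			type: "branch",
			"snapshot-id": 1735689600000123,
		},
	],
};
```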