lakesync 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. package/README.md +74 -0
  2. package/dist/adapter.d.ts +369 -0
  3. package/dist/adapter.js +39 -0
  4. package/dist/adapter.js.map +1 -0
  5. package/dist/analyst.d.ts +268 -0
  6. package/dist/analyst.js +495 -0
  7. package/dist/analyst.js.map +1 -0
  8. package/dist/auth-CAVutXzx.d.ts +30 -0
  9. package/dist/base-poller-Qo_SmCZs.d.ts +82 -0
  10. package/dist/catalogue.d.ts +65 -0
  11. package/dist/catalogue.js +17 -0
  12. package/dist/catalogue.js.map +1 -0
  13. package/dist/chunk-4ARO6KTJ.js +257 -0
  14. package/dist/chunk-4ARO6KTJ.js.map +1 -0
  15. package/dist/chunk-5YOFCJQ7.js +1115 -0
  16. package/dist/chunk-5YOFCJQ7.js.map +1 -0
  17. package/dist/chunk-7D4SUZUM.js +38 -0
  18. package/dist/chunk-7D4SUZUM.js.map +1 -0
  19. package/dist/chunk-BNJOGBYK.js +335 -0
  20. package/dist/chunk-BNJOGBYK.js.map +1 -0
  21. package/dist/chunk-ICNT7I3K.js +1180 -0
  22. package/dist/chunk-ICNT7I3K.js.map +1 -0
  23. package/dist/chunk-P5DRFKIT.js +413 -0
  24. package/dist/chunk-P5DRFKIT.js.map +1 -0
  25. package/dist/chunk-X3RO5SYJ.js +880 -0
  26. package/dist/chunk-X3RO5SYJ.js.map +1 -0
  27. package/dist/client.d.ts +428 -0
  28. package/dist/client.js +2048 -0
  29. package/dist/client.js.map +1 -0
  30. package/dist/compactor.d.ts +342 -0
  31. package/dist/compactor.js +793 -0
  32. package/dist/compactor.js.map +1 -0
  33. package/dist/coordinator-CxckTzYW.d.ts +396 -0
  34. package/dist/db-types-BR6Kt4uf.d.ts +29 -0
  35. package/dist/gateway-D5SaaMvT.d.ts +337 -0
  36. package/dist/gateway-server.d.ts +306 -0
  37. package/dist/gateway-server.js +4663 -0
  38. package/dist/gateway-server.js.map +1 -0
  39. package/dist/gateway.d.ts +196 -0
  40. package/dist/gateway.js +79 -0
  41. package/dist/gateway.js.map +1 -0
  42. package/dist/hlc-DiD8QNG3.d.ts +70 -0
  43. package/dist/index.d.ts +245 -0
  44. package/dist/index.js +102 -0
  45. package/dist/index.js.map +1 -0
  46. package/dist/json-dYtqiL0F.d.ts +18 -0
  47. package/dist/nessie-client-DrNikVXy.d.ts +160 -0
  48. package/dist/parquet.d.ts +78 -0
  49. package/dist/parquet.js +15 -0
  50. package/dist/parquet.js.map +1 -0
  51. package/dist/proto.d.ts +434 -0
  52. package/dist/proto.js +67 -0
  53. package/dist/proto.js.map +1 -0
  54. package/dist/react.d.ts +147 -0
  55. package/dist/react.js +224 -0
  56. package/dist/react.js.map +1 -0
  57. package/dist/resolver-C3Wphi6O.d.ts +10 -0
  58. package/dist/result-CojzlFE2.d.ts +64 -0
  59. package/dist/src-QU2YLPZY.js +383 -0
  60. package/dist/src-QU2YLPZY.js.map +1 -0
  61. package/dist/src-WYBF5LOI.js +102 -0
  62. package/dist/src-WYBF5LOI.js.map +1 -0
  63. package/dist/src-WZNPHANQ.js +426 -0
  64. package/dist/src-WZNPHANQ.js.map +1 -0
  65. package/dist/types-Bs-QyOe-.d.ts +143 -0
  66. package/dist/types-DAQL_vU_.d.ts +118 -0
  67. package/dist/types-DSC_EiwR.d.ts +45 -0
  68. package/dist/types-V_jVu2sA.d.ts +73 -0
  69. package/package.json +119 -0
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../core/src/result/errors.ts","../../core/src/action/errors.ts","../../core/src/action/generate-id.ts","../../core/src/action/types.ts","../../core/src/result/result.ts","../../core/src/action/validate.ts","../../core/src/auth.ts","../../core/src/hlc/hlc.ts","../../core/src/base-poller.ts","../../core/src/conflict/lww.ts","../../core/src/connector/action-handler.ts","../../core/src/connector/errors.ts","../../core/src/connector/types.ts","../../core/src/connector/validate.ts","../../core/src/delta/apply.ts","../../core/src/delta/extract.ts","../../core/src/delta/types.ts","../../core/src/json.ts","../../core/src/sync-rules/defaults.ts","../../core/src/sync-rules/errors.ts","../../core/src/sync-rules/evaluator.ts","../../core/src/validation/identifier.ts"],"sourcesContent":["/** Base error class for all LakeSync errors */\nexport class LakeSyncError extends Error {\n\treadonly code: string;\n\toverride readonly cause?: Error;\n\n\tconstructor(message: string, code: string, cause?: Error) {\n\t\tsuper(message);\n\t\tthis.name = this.constructor.name;\n\t\tthis.code = code;\n\t\tthis.cause = cause;\n\t}\n}\n\n/** Clock drift exceeds maximum allowed threshold */\nexport class ClockDriftError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"CLOCK_DRIFT\", cause);\n\t}\n}\n\n/** Conflict resolution failure */\nexport class ConflictError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"CONFLICT\", cause);\n\t}\n}\n\n/** Flush operation failure */\nexport class FlushError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"FLUSH_FAILED\", cause);\n\t}\n}\n\n/** Schema mismatch or validation failure */\nexport class SchemaError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"SCHEMA_MISMATCH\", cause);\n\t}\n}\n\n/** Lake adapter operation failure */\nexport class AdapterError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"ADAPTER_ERROR\", cause);\n\t}\n}\n\n/** Named source adapter not found in gateway configuration */\nexport class AdapterNotFoundError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"ADAPTER_NOT_FOUND\", cause);\n\t}\n}\n\n/** Buffer backpressure limit exceeded — push rejected to prevent OOM. */\nexport class BackpressureError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"BACKPRESSURE\", cause);\n\t}\n}\n\n/** Coerce an unknown thrown value into an Error instance. */\nexport function toError(err: unknown): Error {\n\treturn err instanceof Error ? err : new Error(String(err));\n}\n","import { LakeSyncError } from \"../result/errors\";\n\n/** Error during action execution (may be retryable). */\nexport class ActionExecutionError extends LakeSyncError {\n\treadonly retryable: boolean;\n\n\tconstructor(message: string, retryable: boolean, cause?: Error) {\n\t\tsuper(message, \"ACTION_EXECUTION_ERROR\", cause);\n\t\tthis.retryable = retryable;\n\t}\n}\n\n/** The requested action type is not supported by the connector. */\nexport class ActionNotSupportedError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"ACTION_NOT_SUPPORTED\", cause);\n\t}\n}\n\n/** Action payload failed structural validation. 
*/\nexport class ActionValidationError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"ACTION_VALIDATION_ERROR\", cause);\n\t}\n}\n","import stableStringify from \"fast-json-stable-stringify\";\nimport type { HLCTimestamp } from \"../hlc/types\";\n\n/**\n * Generate a deterministic action ID using SHA-256.\n *\n * Same pattern as `generateDeltaId` in `delta/extract.ts` — uses the\n * Web Crypto API for cross-runtime compatibility (Node, Bun, browsers).\n */\nexport async function generateActionId(params: {\n\tclientId: string;\n\thlc: HLCTimestamp;\n\tconnector: string;\n\tactionType: string;\n\tparams: Record<string, unknown>;\n}): Promise<string> {\n\tconst payload = stableStringify({\n\t\tclientId: params.clientId,\n\t\thlc: params.hlc.toString(),\n\t\tconnector: params.connector,\n\t\tactionType: params.actionType,\n\t\tparams: params.params,\n\t});\n\n\tconst data = new TextEncoder().encode(payload);\n\tconst hashBuffer = await crypto.subtle.digest(\"SHA-256\", data);\n\tconst bytes = new Uint8Array(hashBuffer);\n\n\tlet hex = \"\";\n\tfor (const b of bytes) {\n\t\thex += b.toString(16).padStart(2, \"0\");\n\t}\n\treturn hex;\n}\n","import type { ActionDescriptor } from \"../connector/action-handler\";\nimport type { HLCTimestamp } from \"../hlc/types\";\n\n/** Discovery response listing available connectors and their supported actions. */\nexport interface ActionDiscovery {\n\t/** Map of connector name to its supported action descriptors. */\n\tconnectors: Record<string, ActionDescriptor[]>;\n}\n\n/** An imperative action to execute against an external system. */\nexport interface Action {\n\t/** Unique action identifier (deterministic SHA-256 hash). */\n\tactionId: string;\n\t/** Client that initiated the action. */\n\tclientId: string;\n\t/** HLC timestamp when the action was created. */\n\thlc: HLCTimestamp;\n\t/** Target connector name (e.g. \"github\", \"slack\", \"linear\"). */\n\tconnector: string;\n\t/** Action type within the connector (e.g. \"create_pr\", \"send_message\"). */\n\tactionType: string;\n\t/** Action parameters — connector-specific payload. */\n\tparams: Record<string, unknown>;\n\t/** Optional idempotency key for at-most-once delivery. */\n\tidempotencyKey?: string;\n}\n\n/** Successful result of executing an action. */\nexport interface ActionResult {\n\t/** The action that was executed. */\n\tactionId: string;\n\t/** Result data returned by the connector. */\n\tdata: Record<string, unknown>;\n\t/** Server HLC after processing. */\n\tserverHlc: HLCTimestamp;\n}\n\n/** Error result of executing an action. */\nexport interface ActionErrorResult {\n\t/** The action that failed. */\n\tactionId: string;\n\t/** Error code. */\n\tcode: string;\n\t/** Human-readable error message. */\n\tmessage: string;\n\t/** Whether the client can retry this action. */\n\tretryable: boolean;\n}\n\n/** Batch of actions pushed by a client. */\nexport interface ActionPush {\n\t/** Client identifier. */\n\tclientId: string;\n\t/** Actions to execute. */\n\tactions: Action[];\n}\n\n/** Gateway response to an action push. */\nexport interface ActionResponse {\n\t/** Results for each action (success or error). */\n\tresults: Array<ActionResult | ActionErrorResult>;\n\t/** Server HLC after processing. */\n\tserverHlc: HLCTimestamp;\n}\n\n/** Type guard: check whether a result is an error. 
*/\nexport function isActionError(\n\tresult: ActionResult | ActionErrorResult,\n): result is ActionErrorResult {\n\treturn \"code\" in result && \"retryable\" in result;\n}\n","import type { LakeSyncError } from \"./errors\";\n\n/** Discriminated union representing either success or failure */\nexport type Result<T, E = LakeSyncError> = { ok: true; value: T } | { ok: false; error: E };\n\n/** Create a successful Result */\nexport function Ok<T>(value: T): Result<T, never> {\n\treturn { ok: true, value };\n}\n\n/** Create a failed Result */\nexport function Err<E>(error: E): Result<never, E> {\n\treturn { ok: false, error };\n}\n\n/** Transform the success value of a Result */\nexport function mapResult<T, U, E>(result: Result<T, E>, fn: (value: T) => U): Result<U, E> {\n\tif (result.ok) {\n\t\treturn Ok(fn(result.value));\n\t}\n\treturn result;\n}\n\n/** Chain Result-returning operations */\nexport function flatMapResult<T, U, E>(\n\tresult: Result<T, E>,\n\tfn: (value: T) => Result<U, E>,\n): Result<U, E> {\n\tif (result.ok) {\n\t\treturn fn(result.value);\n\t}\n\treturn result;\n}\n\n/** Extract the value from a Result or throw the error */\nexport function unwrapOrThrow<T, E>(result: Result<T, E>): T {\n\tif (result.ok) {\n\t\treturn result.value;\n\t}\n\tthrow result.error;\n}\n\n/** Wrap a Promise into a Result */\nexport async function fromPromise<T>(promise: Promise<T>): Promise<Result<T, Error>> {\n\ttry {\n\t\tconst value = await promise;\n\t\treturn Ok(value);\n\t} catch (error) {\n\t\treturn Err(error instanceof Error ? error : new Error(String(error)));\n\t}\n}\n","import { Err, Ok, type Result } from \"../result/result\";\nimport { ActionValidationError } from \"./errors\";\nimport type { Action } from \"./types\";\n\n/**\n * Validate the structural integrity of an Action.\n *\n * Checks that all required fields are present and of the correct type.\n * Returns a `Result` so callers never need to catch.\n */\nexport function validateAction(action: unknown): Result<Action, ActionValidationError> {\n\tif (action === null || typeof action !== \"object\") {\n\t\treturn Err(new ActionValidationError(\"Action must be a non-null object\"));\n\t}\n\n\tconst a = action as Record<string, unknown>;\n\n\tif (typeof a.actionId !== \"string\" || a.actionId.length === 0) {\n\t\treturn Err(new ActionValidationError(\"actionId must be a non-empty string\"));\n\t}\n\n\tif (typeof a.clientId !== \"string\" || a.clientId.length === 0) {\n\t\treturn Err(new ActionValidationError(\"clientId must be a non-empty string\"));\n\t}\n\n\tif (typeof a.hlc !== \"bigint\") {\n\t\treturn Err(new ActionValidationError(\"hlc must be a bigint\"));\n\t}\n\n\tif (typeof a.connector !== \"string\" || a.connector.length === 0) {\n\t\treturn Err(new ActionValidationError(\"connector must be a non-empty string\"));\n\t}\n\n\tif (typeof a.actionType !== \"string\" || a.actionType.length === 0) {\n\t\treturn Err(new ActionValidationError(\"actionType must be a non-empty string\"));\n\t}\n\n\tif (a.params === null || typeof a.params !== \"object\" || Array.isArray(a.params)) {\n\t\treturn Err(new ActionValidationError(\"params must be a non-null object\"));\n\t}\n\n\tif (a.idempotencyKey !== undefined && typeof a.idempotencyKey !== \"string\") {\n\t\treturn Err(new ActionValidationError(\"idempotencyKey must be a string if provided\"));\n\t}\n\n\treturn Ok(action as Action);\n}\n","import { Err, Ok, type Result } from \"./result/result\";\n\n/**\n * Minimal Web Crypto typing for HMAC operations.\n * The core package 
uses `lib: [\"ES2022\"]` which doesn't include DOM types.\n * These declarations cover the methods we need without pulling in the full DOM lib.\n */\ninterface HmacSubtle {\n\timportKey(\n\t\tformat: \"raw\",\n\t\tkeyData: Uint8Array,\n\t\talgorithm: { name: string; hash: string },\n\t\textractable: boolean,\n\t\tusages: string[],\n\t): Promise<unknown>;\n\tverify(\n\t\talgorithm: string,\n\t\tkey: unknown,\n\t\tsignature: Uint8Array,\n\t\tdata: Uint8Array,\n\t): Promise<boolean>;\n}\n\n/** Claims extracted from a verified JWT token */\nexport interface AuthClaims {\n\t/** Client identifier (from JWT `sub` claim) */\n\tclientId: string;\n\t/** Authorised gateway ID (from JWT `gw` claim) */\n\tgatewayId: string;\n\t/** Role for route-level access control (from JWT `role` claim, defaults to \"client\") */\n\trole: string;\n\t/** Non-standard JWT claims for sync rule evaluation */\n\tcustomClaims: Record<string, string | string[]>;\n}\n\n/** Authentication error returned when JWT verification fails */\nexport class AuthError extends Error {\n\tconstructor(message: string) {\n\t\tsuper(message);\n\t\tthis.name = \"AuthError\";\n\t}\n}\n\n/** Expected JWT header for HMAC-SHA256 tokens */\ninterface JwtHeader {\n\talg: string;\n\ttyp: string;\n}\n\n/** JWT payload with required claims */\ninterface JwtPayload {\n\tsub?: string;\n\tgw?: string;\n\texp?: number;\n\t[key: string]: unknown;\n}\n\n/**\n * Decode a base64url-encoded string to a Uint8Array.\n * Handles the URL-safe alphabet (+/- replaced with -/_) and missing padding.\n */\nfunction base64urlDecode(input: string): Uint8Array {\n\t// Restore standard base64 characters and padding\n\tconst base64 = input.replace(/-/g, \"+\").replace(/_/g, \"/\");\n\tconst padded = base64.padEnd(base64.length + ((4 - (base64.length % 4)) % 4), \"=\");\n\tconst binary = atob(padded);\n\tconst bytes = new Uint8Array(binary.length);\n\tfor (let i = 0; i < binary.length; i++) {\n\t\tbytes[i] = binary.charCodeAt(i);\n\t}\n\treturn bytes;\n}\n\n/**\n * Parse a JSON string safely, returning null on failure.\n */\nfunction parseJson(text: string): unknown {\n\ttry {\n\t\treturn JSON.parse(text);\n\t} catch {\n\t\treturn null;\n\t}\n}\n\n/**\n * Verify a JWT token signed with HMAC-SHA256 and extract authentication claims.\n *\n * Uses the Web Crypto API exclusively (no external dependencies), making it\n * suitable for Cloudflare Workers and other edge runtimes.\n *\n * @param token - The raw JWT string (header.payload.signature)\n * @param secret - The HMAC-SHA256 secret key\n * @returns A Result containing AuthClaims on success, or AuthError on failure\n */\nexport async function verifyToken(\n\ttoken: string,\n\tsecret: string,\n): Promise<Result<AuthClaims, AuthError>> {\n\t// Split into three parts\n\tconst parts = token.split(\".\");\n\tif (parts.length !== 3) {\n\t\treturn Err(new AuthError(\"Malformed JWT: expected three dot-separated segments\"));\n\t}\n\n\tconst [headerB64, payloadB64, signatureB64] = parts;\n\tif (!headerB64 || !payloadB64 || !signatureB64) {\n\t\treturn Err(new AuthError(\"Malformed JWT: empty segment\"));\n\t}\n\n\t// Decode and verify header\n\tlet headerBytes: Uint8Array;\n\ttry {\n\t\theaderBytes = base64urlDecode(headerB64);\n\t} catch {\n\t\treturn Err(new AuthError(\"Malformed JWT: invalid base64url in header\"));\n\t}\n\n\tconst header = parseJson(new TextDecoder().decode(headerBytes)) as JwtHeader | null;\n\tif (!header || header.alg !== \"HS256\" || header.typ !== \"JWT\") {\n\t\treturn Err(new AuthError('Unsupported JWT: 
header must be {\"alg\":\"HS256\",\"typ\":\"JWT\"}'));\n\t}\n\n\t// Import the HMAC key via Web Crypto\n\tconst encoder = new TextEncoder();\n\tconst keyData = encoder.encode(secret);\n\n\tlet cryptoKey: unknown;\n\ttry {\n\t\tcryptoKey = await (crypto.subtle as unknown as HmacSubtle).importKey(\n\t\t\t\"raw\",\n\t\t\tkeyData,\n\t\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\t\tfalse,\n\t\t\t[\"verify\"],\n\t\t);\n\t} catch {\n\t\treturn Err(new AuthError(\"Failed to import HMAC key\"));\n\t}\n\n\t// Verify signature\n\tlet signatureBytes: Uint8Array;\n\ttry {\n\t\tsignatureBytes = base64urlDecode(signatureB64);\n\t} catch {\n\t\treturn Err(new AuthError(\"Malformed JWT: invalid base64url in signature\"));\n\t}\n\n\tconst signingInput = encoder.encode(`${headerB64}.${payloadB64}`);\n\n\tlet valid: boolean;\n\ttry {\n\t\tvalid = await (crypto.subtle as unknown as HmacSubtle).verify(\n\t\t\t\"HMAC\",\n\t\t\tcryptoKey,\n\t\t\tsignatureBytes,\n\t\t\tsigningInput,\n\t\t);\n\t} catch {\n\t\treturn Err(new AuthError(\"Signature verification failed\"));\n\t}\n\n\tif (!valid) {\n\t\treturn Err(new AuthError(\"Invalid JWT signature\"));\n\t}\n\n\t// Decode payload\n\tlet payloadBytes: Uint8Array;\n\ttry {\n\t\tpayloadBytes = base64urlDecode(payloadB64);\n\t} catch {\n\t\treturn Err(new AuthError(\"Malformed JWT: invalid base64url in payload\"));\n\t}\n\n\tconst payload = parseJson(new TextDecoder().decode(payloadBytes)) as JwtPayload | null;\n\tif (!payload) {\n\t\treturn Err(new AuthError(\"Malformed JWT: payload is not valid JSON\"));\n\t}\n\n\t// Check expiry — exp claim is mandatory\n\tif (payload.exp === undefined || typeof payload.exp !== \"number\") {\n\t\treturn Err(new AuthError('Missing or invalid \"exp\" claim (expiry)'));\n\t}\n\tconst nowSeconds = Math.floor(Date.now() / 1000);\n\tif (payload.exp <= nowSeconds) {\n\t\treturn Err(new AuthError(\"JWT has expired\"));\n\t}\n\n\t// Extract required claims\n\tif (typeof payload.sub !== \"string\" || payload.sub.length === 0) {\n\t\treturn Err(new AuthError('Missing or invalid \"sub\" claim (clientId)'));\n\t}\n\n\tif (typeof payload.gw !== \"string\" || payload.gw.length === 0) {\n\t\treturn Err(new AuthError('Missing or invalid \"gw\" claim (gatewayId)'));\n\t}\n\n\t// Extract non-standard claims for sync rules evaluation\n\tconst standardClaims = new Set([\"sub\", \"gw\", \"exp\", \"iat\", \"iss\", \"aud\", \"role\"]);\n\tconst customClaims: Record<string, string | string[]> = {};\n\n\tfor (const [key, value] of Object.entries(payload)) {\n\t\tif (standardClaims.has(key)) continue;\n\t\tif (typeof value === \"string\") {\n\t\t\tcustomClaims[key] = value;\n\t\t} else if (Array.isArray(value) && value.every((v) => typeof v === \"string\")) {\n\t\t\tcustomClaims[key] = value as string[];\n\t\t}\n\t}\n\n\t// Always include `sub` in custom claims so sync rules can reference jwt:sub\n\tcustomClaims.sub = payload.sub;\n\n\t// Extract role claim (default to \"client\" if absent)\n\tconst role =\n\t\ttypeof payload.role === \"string\" && payload.role.length > 0 ? 
payload.role : \"client\";\n\n\treturn Ok({\n\t\tclientId: payload.sub,\n\t\tgatewayId: payload.gw,\n\t\trole,\n\t\tcustomClaims,\n\t});\n}\n","import { ClockDriftError, Err, Ok, type Result } from \"../result\";\nimport type { HLCTimestamp } from \"./types\";\n\n/**\n * Hybrid Logical Clock implementation.\n *\n * 64-bit layout: [48-bit wall clock ms][16-bit logical counter].\n * Maximum allowed clock drift: 5 seconds.\n *\n * The wall clock source is injectable for deterministic testing.\n */\nexport class HLC {\n\tprivate readonly wallClock: () => number;\n\tprivate counter = 0;\n\tprivate lastWall = 0;\n\n\t/** Maximum tolerated drift between local and remote physical clocks (ms). */\n\tstatic readonly MAX_DRIFT_MS = 5_000;\n\n\t/** Maximum value of the 16-bit logical counter. */\n\tstatic readonly MAX_COUNTER = 0xffff;\n\n\t/**\n\t * Create a new HLC instance.\n\t *\n\t * @param wallClock - Optional injectable clock source returning epoch ms.\n\t * Defaults to `Date.now`.\n\t */\n\tconstructor(wallClock?: () => number) {\n\t\tthis.wallClock = wallClock ?? (() => Date.now());\n\t}\n\n\t/**\n\t * Generate a new monotonically increasing HLC timestamp.\n\t *\n\t * The returned timestamp is guaranteed to be strictly greater than any\n\t * previously returned by this instance.\n\t */\n\tnow(): HLCTimestamp {\n\t\tconst physical = this.wallClock();\n\t\tconst wall = Math.max(physical, this.lastWall);\n\n\t\tif (wall === this.lastWall) {\n\t\t\tthis.counter++;\n\t\t\tif (this.counter > HLC.MAX_COUNTER) {\n\t\t\t\t// Counter overflow: advance wall by 1 ms and reset counter\n\t\t\t\tthis.lastWall = wall + 1;\n\t\t\t\tthis.counter = 0;\n\t\t\t}\n\t\t} else {\n\t\t\tthis.lastWall = wall;\n\t\t\tthis.counter = 0;\n\t\t}\n\n\t\treturn HLC.encode(this.lastWall, this.counter);\n\t}\n\n\t/**\n\t * Receive a remote HLC timestamp and advance the local clock.\n\t *\n\t * Returns `Err(ClockDriftError)` if the remote timestamp indicates\n\t * clock drift exceeding {@link MAX_DRIFT_MS}.\n\t *\n\t * @param remote - The HLC timestamp received from a remote node.\n\t * @returns A `Result` containing the new local HLC timestamp, or a\n\t * `ClockDriftError` if the remote clock is too far ahead.\n\t */\n\trecv(remote: HLCTimestamp): Result<HLCTimestamp, ClockDriftError> {\n\t\tconst { wall: remoteWall, counter: remoteCounter } = HLC.decode(remote);\n\t\tconst physical = this.wallClock();\n\t\tconst localWall = Math.max(physical, this.lastWall);\n\n\t\t// Check drift: compare remote wall against physical clock\n\t\tif (remoteWall - physical > HLC.MAX_DRIFT_MS) {\n\t\t\treturn Err(\n\t\t\t\tnew ClockDriftError(\n\t\t\t\t\t`Remote clock is ${remoteWall - physical}ms ahead (max drift: ${HLC.MAX_DRIFT_MS}ms)`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\n\t\tif (remoteWall > localWall) {\n\t\t\tthis.lastWall = remoteWall;\n\t\t\tthis.counter = remoteCounter + 1;\n\t\t} else if (remoteWall === localWall) {\n\t\t\tthis.lastWall = localWall;\n\t\t\tthis.counter = Math.max(this.counter, remoteCounter) + 1;\n\t\t} else {\n\t\t\tthis.lastWall = localWall;\n\t\t\tthis.counter++;\n\t\t}\n\n\t\tif (this.counter > HLC.MAX_COUNTER) {\n\t\t\t// Counter overflow: advance wall by 1 ms and reset counter\n\t\t\tthis.lastWall = this.lastWall + 1;\n\t\t\tthis.counter = 0;\n\t\t}\n\n\t\treturn Ok(HLC.encode(this.lastWall, this.counter));\n\t}\n\n\t/**\n\t * Encode a wall clock value (ms) and logical counter into a 64-bit HLC timestamp.\n\t *\n\t * @param wall - Wall clock component in epoch milliseconds (48-bit).\n\t * @param counter - Logical 
counter component (16-bit, 0..65535).\n\t * @returns The encoded {@link HLCTimestamp}.\n\t */\n\tstatic encode(wall: number, counter: number): HLCTimestamp {\n\t\treturn ((BigInt(wall) << 16n) | BigInt(counter & 0xffff)) as HLCTimestamp;\n\t}\n\n\t/**\n\t * Decode an HLC timestamp into its wall clock (ms) and logical counter components.\n\t *\n\t * @param ts - The {@link HLCTimestamp} to decode.\n\t * @returns An object with `wall` (epoch ms) and `counter` (logical) fields.\n\t */\n\tstatic decode(ts: HLCTimestamp): { wall: number; counter: number } {\n\t\treturn {\n\t\t\twall: Number(ts >> 16n),\n\t\t\tcounter: Number(ts & 0xffffn),\n\t\t};\n\t}\n\n\t/**\n\t * Compare two HLC timestamps.\n\t *\n\t * @returns `-1` if `a < b`, `0` if `a === b`, `1` if `a > b`.\n\t */\n\tstatic compare(a: HLCTimestamp, b: HLCTimestamp): -1 | 0 | 1 {\n\t\tif (a < b) return -1;\n\t\tif (a > b) return 1;\n\t\treturn 0;\n\t}\n}\n","// ---------------------------------------------------------------------------\n// BaseSourcePoller — shared lifecycle and push logic for source connectors\n// ---------------------------------------------------------------------------\n\nimport type { RowDelta, SyncPush } from \"./delta/types\";\nimport { HLC } from \"./hlc/hlc\";\nimport type { HLCTimestamp } from \"./hlc/types\";\nimport type { BackpressureError, FlushError } from \"./result/errors\";\nimport type { Result } from \"./result/result\";\n\n/** Minimal interface for a push target (avoids depending on @lakesync/gateway). */\nexport interface PushTarget {\n\thandlePush(push: SyncPush): unknown;\n}\n\n/**\n * Extended push target that supports flush and buffer inspection.\n * Implemented by SyncGateway so pollers can trigger flushes to relieve memory pressure.\n */\nexport interface IngestTarget extends PushTarget {\n\tflush(): Promise<Result<void, FlushError>>;\n\tshouldFlush(): boolean;\n\treadonly bufferStats: { logSize: number; indexSize: number; byteSize: number };\n}\n\n/** Type guard: returns true if the target supports flush/shouldFlush/bufferStats. */\nexport function isIngestTarget(target: PushTarget): target is IngestTarget {\n\treturn (\n\t\ttypeof (target as IngestTarget).flush === \"function\" &&\n\t\ttypeof (target as IngestTarget).shouldFlush === \"function\" &&\n\t\t\"bufferStats\" in target\n\t);\n}\n\n/** Memory configuration for the streaming accumulator. */\nexport interface PollerMemoryConfig {\n\t/** Number of deltas per push chunk (default 500). */\n\tchunkSize?: number;\n\t/** Approximate memory budget in bytes — triggers flush at 70% (default: no limit). */\n\tmemoryBudgetBytes?: number;\n\t/** Proportion of memoryBudgetBytes at which to trigger a flush (default 0.7). */\n\tflushThreshold?: number;\n}\n\nconst DEFAULT_CHUNK_SIZE = 500;\nconst DEFAULT_FLUSH_THRESHOLD = 0.7;\n\n/**\n * Base class for source pollers that poll an external API and push deltas\n * to a SyncGateway. 
Handles lifecycle (start/stop/schedule), and push.\n */\nexport abstract class BaseSourcePoller {\n\tprotected readonly gateway: PushTarget;\n\tprotected readonly hlc: HLC;\n\tprotected readonly clientId: string;\n\tprivate readonly intervalMs: number;\n\tprivate timer: ReturnType<typeof setTimeout> | null = null;\n\tprivate running = false;\n\n\tprivate readonly chunkSize: number;\n\tprivate readonly memoryBudgetBytes: number | undefined;\n\tprivate readonly flushThreshold: number;\n\tprivate pendingDeltas: RowDelta[] = [];\n\n\tconstructor(config: {\n\t\tname: string;\n\t\tintervalMs: number;\n\t\tgateway: PushTarget;\n\t\tmemory?: PollerMemoryConfig;\n\t}) {\n\t\tthis.gateway = config.gateway;\n\t\tthis.hlc = new HLC();\n\t\tthis.clientId = `ingest:${config.name}`;\n\t\tthis.intervalMs = config.intervalMs;\n\t\tthis.chunkSize = config.memory?.chunkSize ?? DEFAULT_CHUNK_SIZE;\n\t\tthis.memoryBudgetBytes = config.memory?.memoryBudgetBytes;\n\t\tthis.flushThreshold = config.memory?.flushThreshold ?? DEFAULT_FLUSH_THRESHOLD;\n\t}\n\n\t/** Start the polling loop. */\n\tstart(): void {\n\t\tif (this.running) return;\n\t\tthis.running = true;\n\t\tthis.schedulePoll();\n\t}\n\n\t/** Stop the polling loop. */\n\tstop(): void {\n\t\tthis.running = false;\n\t\tif (this.timer) {\n\t\t\tclearTimeout(this.timer);\n\t\t\tthis.timer = null;\n\t\t}\n\t}\n\n\t/** Whether the poller is currently running. */\n\tget isRunning(): boolean {\n\t\treturn this.running;\n\t}\n\n\t/** Execute a single poll cycle. Subclasses implement their specific polling logic. */\n\tabstract poll(): Promise<void>;\n\n\t/** Push collected deltas to the gateway (single-shot, backward compat). */\n\tprotected pushDeltas(deltas: RowDelta[]): void {\n\t\tif (deltas.length === 0) return;\n\t\tconst push: SyncPush = {\n\t\t\tclientId: this.clientId,\n\t\t\tdeltas,\n\t\t\tlastSeenHlc: 0n as HLCTimestamp,\n\t\t};\n\t\tthis.gateway.handlePush(push);\n\t}\n\n\t/**\n\t * Accumulate a single delta. When `chunkSize` is reached, the pending\n\t * deltas are automatically pushed (and flushed if needed).\n\t */\n\tprotected async accumulateDelta(delta: RowDelta): Promise<void> {\n\t\tthis.pendingDeltas.push(delta);\n\t\tif (this.pendingDeltas.length >= this.chunkSize) {\n\t\t\tawait this.pushPendingChunk();\n\t\t}\n\t}\n\n\t/** Flush any remaining accumulated deltas. Call at the end of `poll()`. */\n\tprotected async flushAccumulator(): Promise<void> {\n\t\tif (this.pendingDeltas.length > 0) {\n\t\t\tawait this.pushPendingChunk();\n\t\t}\n\t}\n\n\t/**\n\t * Push a chunk of pending deltas. 
If the gateway is an IngestTarget,\n\t * checks memory pressure and flushes before/after push when needed.\n\t * On backpressure, flushes once and retries.\n\t */\n\tprivate async pushPendingChunk(): Promise<void> {\n\t\tconst chunk = this.pendingDeltas;\n\t\tthis.pendingDeltas = [];\n\t\tawait this.pushChunkWithFlush(chunk);\n\t}\n\n\tprivate async pushChunkWithFlush(chunk: RowDelta[]): Promise<void> {\n\t\tif (chunk.length === 0) return;\n\n\t\tconst target = this.gateway;\n\n\t\t// Pre-push: flush if IngestTarget signals pressure\n\t\tif (isIngestTarget(target)) {\n\t\t\tif (this.shouldFlushTarget(target)) {\n\t\t\t\tawait target.flush();\n\t\t\t}\n\t\t}\n\n\t\tconst push: SyncPush = {\n\t\t\tclientId: this.clientId,\n\t\t\tdeltas: chunk,\n\t\t\tlastSeenHlc: 0n as HLCTimestamp,\n\t\t};\n\n\t\tconst result = target.handlePush(push) as Result<unknown, BackpressureError> | undefined;\n\n\t\t// If handlePush returned a Result with backpressure, flush and retry once\n\t\tif (result && typeof result === \"object\" && \"ok\" in result && !result.ok) {\n\t\t\tif (isIngestTarget(target)) {\n\t\t\t\tawait target.flush();\n\t\t\t\ttarget.handlePush(push);\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate shouldFlushTarget(target: IngestTarget): boolean {\n\t\tif (target.shouldFlush()) return true;\n\t\tif (this.memoryBudgetBytes != null) {\n\t\t\tconst threshold = Math.floor(this.memoryBudgetBytes * this.flushThreshold);\n\t\t\tif (target.bufferStats.byteSize >= threshold) return true;\n\t\t}\n\t\treturn false;\n\t}\n\n\tprivate schedulePoll(): void {\n\t\tif (!this.running) return;\n\t\tthis.timer = setTimeout(async () => {\n\t\t\ttry {\n\t\t\t\tawait this.poll();\n\t\t\t} catch {\n\t\t\t\t// Swallow errors — a failed poll must never crash the server\n\t\t\t}\n\t\t\tthis.schedulePoll();\n\t\t}, this.intervalMs);\n\t}\n}\n","import type { ColumnDelta, DeltaOp, RowDelta } from \"../delta/types\";\nimport { HLC } from \"../hlc/hlc\";\nimport { ConflictError } from \"../result/errors\";\nimport { Err, Ok, type Result } from \"../result/result\";\nimport type { ConflictResolver } from \"./resolver\";\n\n/**\n * Column-level Last-Write-Wins conflict resolver.\n *\n * For each column present in both deltas, the one with the higher HLC wins.\n * Equal HLC tiebreak: lexicographically higher clientId wins (deterministic).\n * Columns only present in one delta are always included in the result.\n */\nexport class LWWResolver implements ConflictResolver {\n\t/**\n\t * Resolve two conflicting deltas for the same row, returning the merged result.\n\t *\n\t * Rules:\n\t * - Both DELETE: the delta with the higher HLC (or clientId tiebreak) wins.\n\t * - One DELETE, one non-DELETE: the delta with the higher HLC wins.\n\t * If the DELETE wins, the row is tombstoned (empty columns).\n\t * If the non-DELETE wins, the row is resurrected.\n\t * - Both non-DELETE: columns are merged per-column using LWW semantics.\n\t *\n\t * @param local - The locally held delta for this row.\n\t * @param remote - The incoming remote delta for this row.\n\t * @returns A `Result` containing the resolved `RowDelta`, or a\n\t * `ConflictError` if the deltas refer to different tables/rows.\n\t */\n\tresolve(local: RowDelta, remote: RowDelta): Result<RowDelta, ConflictError> {\n\t\t// Validate same table + rowId\n\t\tif (local.table !== remote.table || local.rowId !== remote.rowId) {\n\t\t\treturn Err(\n\t\t\t\tnew ConflictError(\n\t\t\t\t\t`Cannot resolve conflict: mismatched table/rowId (${local.table}:${local.rowId} vs 
${remote.table}:${remote.rowId})`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\n\t\t// Determine which delta has higher HLC (for op-level decisions)\n\t\tconst winner = pickWinner(local, remote);\n\n\t\t// Both DELETE — winner takes all (no columns to merge)\n\t\tif (local.op === \"DELETE\" && remote.op === \"DELETE\") {\n\t\t\treturn Ok({ ...winner, columns: [] });\n\t\t}\n\n\t\t// One is DELETE\n\t\tif (local.op === \"DELETE\" || remote.op === \"DELETE\") {\n\t\t\tconst deleteDelta = local.op === \"DELETE\" ? local : remote;\n\t\t\tconst otherDelta = local.op === \"DELETE\" ? remote : local;\n\n\t\t\t// If the DELETE has higher/equal priority, tombstone wins\n\t\t\tif (deleteDelta === winner) {\n\t\t\t\treturn Ok({ ...deleteDelta, columns: [] });\n\t\t\t}\n\t\t\t// Otherwise the UPDATE/INSERT wins (resurrection)\n\t\t\treturn Ok({ ...otherDelta });\n\t\t}\n\n\t\t// Both are INSERT or UPDATE — merge columns\n\t\tconst mergedColumns = mergeColumns(local, remote);\n\n\t\t// Determine the resulting op: INSERT only if both are INSERT, otherwise UPDATE\n\t\tconst op: DeltaOp = local.op === \"INSERT\" && remote.op === \"INSERT\" ? \"INSERT\" : \"UPDATE\";\n\n\t\treturn Ok({\n\t\t\top,\n\t\t\ttable: local.table,\n\t\t\trowId: local.rowId,\n\t\t\tclientId: winner.clientId,\n\t\t\tcolumns: mergedColumns,\n\t\t\thlc: winner.hlc,\n\t\t\tdeltaId: winner.deltaId,\n\t\t});\n\t}\n}\n\n/**\n * Pick the winning delta based on HLC comparison with clientId tiebreak.\n *\n * @param local - The locally held delta.\n * @param remote - The incoming remote delta.\n * @returns The delta that wins the comparison.\n */\nfunction pickWinner(local: RowDelta, remote: RowDelta): RowDelta {\n\tconst cmp = HLC.compare(local.hlc, remote.hlc);\n\tif (cmp > 0) return local;\n\tif (cmp < 0) return remote;\n\t// Equal HLC — lexicographically higher clientId wins\n\treturn local.clientId > remote.clientId ? local : remote;\n}\n\n/**\n * Merge column-level changes from two non-DELETE deltas using LWW semantics.\n *\n * - Columns present in only one delta are included unconditionally.\n * - Columns present in both: the value from the delta with the higher HLC wins;\n * equal HLC uses lexicographic clientId tiebreak.\n *\n * @param local - The locally held delta.\n * @param remote - The incoming remote delta.\n * @returns The merged array of column deltas.\n */\nfunction mergeColumns(local: RowDelta, remote: RowDelta): ColumnDelta[] {\n\tconst localMap = new Map(local.columns.map((c) => [c.column, c]));\n\tconst remoteMap = new Map(remote.columns.map((c) => [c.column, c]));\n\tconst allColumns = new Set([...localMap.keys(), ...remoteMap.keys()]);\n\tconst winner = pickWinner(local, remote);\n\n\tconst merged: ColumnDelta[] = [];\n\n\tfor (const col of allColumns) {\n\t\tconst localCol = localMap.get(col);\n\t\tconst remoteCol = remoteMap.get(col);\n\n\t\tif (!remoteCol) {\n\t\t\tmerged.push(localCol!);\n\t\t} else if (!localCol) {\n\t\t\tmerged.push(remoteCol);\n\t\t} else {\n\t\t\t// Both have this column — winner takes it\n\t\t\tmerged.push(winner === local ? 
localCol : remoteCol);\n\t\t}\n\t}\n\n\treturn merged;\n}\n\nconst _singleton = new LWWResolver();\n\n/**\n * Convenience function — resolves two conflicting deltas using the\n * column-level Last-Write-Wins strategy.\n *\n * @param local - The locally held delta for this row.\n * @param remote - The incoming remote delta for this row.\n * @returns A `Result` containing the resolved `RowDelta`, or a\n * `ConflictError` if the deltas refer to different tables/rows.\n */\nexport function resolveLWW(local: RowDelta, remote: RowDelta): Result<RowDelta, ConflictError> {\n\treturn _singleton.resolve(local, remote);\n}\n","import type { ActionExecutionError, ActionNotSupportedError } from \"../action/errors\";\nimport type { Action, ActionResult } from \"../action/types\";\nimport type { Result } from \"../result/result\";\nimport type { ResolvedClaims } from \"../sync-rules/types\";\n\n/** Describes an action type supported by a connector. */\nexport interface ActionDescriptor {\n\t/** The action type identifier (e.g. \"create_pr\", \"send_message\"). */\n\tactionType: string;\n\t/** Human-readable description of what this action does. */\n\tdescription: string;\n\t/** Optional JSON Schema for the action's params. */\n\tparamsSchema?: Record<string, unknown>;\n}\n\n/** Authentication context passed to action handlers for permission checks. */\nexport interface AuthContext {\n\t/** Resolved JWT claims for resource-level permission checks. */\n\tclaims: ResolvedClaims;\n}\n\n/**\n * Interface for connectors that can execute imperative actions.\n *\n * Separate from `DatabaseAdapter` — not all connectors support actions\n * (e.g. S3 doesn't). A connector can implement `DatabaseAdapter` (read/write\n * data), `ActionHandler` (execute commands), or both.\n */\nexport interface ActionHandler {\n\t/** Descriptors for all action types this handler supports. */\n\treadonly supportedActions: ActionDescriptor[];\n\t/** Execute a single action against the external system. */\n\texecuteAction(\n\t\taction: Action,\n\t\tcontext?: AuthContext,\n\t): Promise<Result<ActionResult, ActionExecutionError | ActionNotSupportedError>>;\n}\n\n/** Type guard: check whether an object implements the ActionHandler interface. */\nexport function isActionHandler(obj: unknown): obj is ActionHandler {\n\tif (obj === null || typeof obj !== \"object\") return false;\n\tconst candidate = obj as Record<string, unknown>;\n\treturn Array.isArray(candidate.supportedActions) && typeof candidate.executeAction === \"function\";\n}\n","import { LakeSyncError } from \"../result/errors\";\n\n/** Connector configuration validation error. */\nexport class ConnectorValidationError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"CONNECTOR_VALIDATION\", cause);\n\t}\n}\n","/** Supported connector types. */\nexport const CONNECTOR_TYPES = [\"postgres\", \"mysql\", \"bigquery\", \"jira\", \"salesforce\"] as const;\n\n/** Union of supported connector type strings. */\nexport type ConnectorType = (typeof CONNECTOR_TYPES)[number];\n\n/** Connection configuration for a PostgreSQL source. */\nexport interface PostgresConnectorConfig {\n\t/** PostgreSQL connection string (e.g. \"postgres://user:pass@host/db\"). */\n\tconnectionString: string;\n}\n\n/** Connection configuration for a MySQL source. */\nexport interface MySQLConnectorConfig {\n\t/** MySQL connection string (e.g. \"mysql://user:pass@host/db\"). */\n\tconnectionString: string;\n}\n\n/** Connection configuration for a BigQuery source. 
*/\nexport interface BigQueryConnectorConfig {\n\t/** GCP project ID. */\n\tprojectId: string;\n\t/** BigQuery dataset name. */\n\tdataset: string;\n\t/** Path to service account JSON key file. Falls back to ADC when omitted. */\n\tkeyFilename?: string;\n\t/** Dataset location (default \"US\"). */\n\tlocation?: string;\n}\n\n/** Ingest table configuration — defines a single table to poll. */\nexport interface ConnectorIngestTable {\n\t/** Target table name in LakeSync. */\n\ttable: string;\n\t/** SQL query to poll (must return rowId + data columns). */\n\tquery: string;\n\t/** Primary key column name (default \"id\"). */\n\trowIdColumn?: string;\n\t/** Change detection strategy. */\n\tstrategy: { type: \"cursor\"; cursorColumn: string; lookbackMs?: number } | { type: \"diff\" };\n}\n\n/** Connection configuration for a Salesforce CRM source. */\nexport interface SalesforceConnectorConfig {\n\t/** Salesforce instance URL (e.g. \"https://mycompany.salesforce.com\"). */\n\tinstanceUrl: string;\n\t/** Connected App consumer key. */\n\tclientId: string;\n\t/** Connected App consumer secret. */\n\tclientSecret: string;\n\t/** Salesforce username. */\n\tusername: string;\n\t/** Salesforce password + security token concatenated. */\n\tpassword: string;\n\t/** REST API version (default \"v62.0\"). */\n\tapiVersion?: string;\n\t/** Use test.salesforce.com for auth (default false). */\n\tisSandbox?: boolean;\n\t/** Optional WHERE clause fragment appended to all SOQL queries. */\n\tsoqlFilter?: string;\n\t/** Whether to include Account objects (default true). */\n\tincludeAccounts?: boolean;\n\t/** Whether to include Contact objects (default true). */\n\tincludeContacts?: boolean;\n\t/** Whether to include Opportunity objects (default true). */\n\tincludeOpportunities?: boolean;\n\t/** Whether to include Lead objects (default true). */\n\tincludeLeads?: boolean;\n}\n\n/** Connection configuration for a Jira Cloud source. */\nexport interface JiraConnectorConfig {\n\t/** Jira Cloud domain (e.g. \"mycompany\" for mycompany.atlassian.net). */\n\tdomain: string;\n\t/** Email address for Basic auth. */\n\temail: string;\n\t/** API token paired with the email. */\n\tapiToken: string;\n\t/** Optional JQL filter to scope issue polling. */\n\tjql?: string;\n\t/** Whether to include comments (default true). */\n\tincludeComments?: boolean;\n\t/** Whether to include projects (default true). */\n\tincludeProjects?: boolean;\n}\n\n/** Optional ingest polling configuration attached to a connector. */\nexport interface ConnectorIngestConfig {\n\t/** Tables to poll for changes. */\n\ttables: ConnectorIngestTable[];\n\t/** Poll interval in milliseconds (default 10 000). */\n\tintervalMs?: number;\n}\n\n/**\n * Configuration for a dynamically registered connector (data source).\n *\n * Each connector maps to a named {@link DatabaseAdapter} in the gateway,\n * optionally with an ingest poller that pushes detected changes into\n * the sync buffer.\n */\nexport interface ConnectorConfig {\n\t/** Unique connector name (used as source adapter key). */\n\tname: string;\n\t/** Connector type — determines which adapter implementation to instantiate. */\n\ttype: ConnectorType;\n\t/** PostgreSQL connection configuration (required when type is \"postgres\"). */\n\tpostgres?: PostgresConnectorConfig;\n\t/** MySQL connection configuration (required when type is \"mysql\"). */\n\tmysql?: MySQLConnectorConfig;\n\t/** BigQuery connection configuration (required when type is \"bigquery\"). 
*/\n\tbigquery?: BigQueryConnectorConfig;\n\t/** Jira Cloud connection configuration (required when type is \"jira\"). */\n\tjira?: JiraConnectorConfig;\n\t/** Salesforce CRM connection configuration (required when type is \"salesforce\"). */\n\tsalesforce?: SalesforceConnectorConfig;\n\t/** Optional ingest polling configuration. */\n\tingest?: ConnectorIngestConfig;\n}\n","import { Err, Ok, type Result } from \"../result/result\";\nimport { ConnectorValidationError } from \"./errors\";\nimport { CONNECTOR_TYPES, type ConnectorConfig } from \"./types\";\n\nconst VALID_STRATEGIES = new Set([\"cursor\", \"diff\"]);\n\n/**\n * Validate a connector configuration for structural correctness.\n *\n * Checks:\n * - `name` is a non-empty string\n * - `type` is one of the supported connector types\n * - Type-specific config object is present and valid\n * - Optional ingest config has valid table definitions\n *\n * @param input - Raw input to validate.\n * @returns The validated {@link ConnectorConfig} or a validation error.\n */\nexport function validateConnectorConfig(\n\tinput: unknown,\n): Result<ConnectorConfig, ConnectorValidationError> {\n\tif (typeof input !== \"object\" || input === null) {\n\t\treturn Err(new ConnectorValidationError(\"Connector config must be an object\"));\n\t}\n\n\tconst obj = input as Record<string, unknown>;\n\n\t// --- name ---\n\tif (typeof obj.name !== \"string\" || obj.name.length === 0) {\n\t\treturn Err(new ConnectorValidationError(\"Connector name must be a non-empty string\"));\n\t}\n\n\t// --- type ---\n\tif (typeof obj.type !== \"string\" || !(CONNECTOR_TYPES as readonly string[]).includes(obj.type)) {\n\t\treturn Err(\n\t\t\tnew ConnectorValidationError(`Connector type must be one of: ${CONNECTOR_TYPES.join(\", \")}`),\n\t\t);\n\t}\n\n\tconst connectorType = obj.type as ConnectorConfig[\"type\"];\n\n\t// --- type-specific config ---\n\tswitch (connectorType) {\n\t\tcase \"postgres\": {\n\t\t\tconst pg = obj.postgres;\n\t\t\tif (typeof pg !== \"object\" || pg === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t'Connector type \"postgres\" requires a postgres config object',\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t\tconst pgObj = pg as Record<string, unknown>;\n\t\t\tif (typeof pgObj.connectionString !== \"string\" || pgObj.connectionString.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Postgres connector requires a non-empty connectionString\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t\tcase \"mysql\": {\n\t\t\tconst my = obj.mysql;\n\t\t\tif (typeof my !== \"object\" || my === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError('Connector type \"mysql\" requires a mysql config object'),\n\t\t\t\t);\n\t\t\t}\n\t\t\tconst myObj = my as Record<string, unknown>;\n\t\t\tif (typeof myObj.connectionString !== \"string\" || myObj.connectionString.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"MySQL connector requires a non-empty connectionString\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t\tcase \"bigquery\": {\n\t\t\tconst bq = obj.bigquery;\n\t\t\tif (typeof bq !== \"object\" || bq === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t'Connector type \"bigquery\" requires a bigquery config object',\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t\tconst bqObj = bq as Record<string, unknown>;\n\t\t\tif (typeof bqObj.projectId !== \"string\" || bqObj.projectId.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew 
ConnectorValidationError(\"BigQuery connector requires a non-empty projectId\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (typeof bqObj.dataset !== \"string\" || bqObj.dataset.length === 0) {\n\t\t\t\treturn Err(new ConnectorValidationError(\"BigQuery connector requires a non-empty dataset\"));\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t\tcase \"jira\": {\n\t\t\tconst jira = obj.jira;\n\t\t\tif (typeof jira !== \"object\" || jira === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError('Connector type \"jira\" requires a jira config object'),\n\t\t\t\t);\n\t\t\t}\n\t\t\tconst jiraObj = jira as Record<string, unknown>;\n\t\t\tif (typeof jiraObj.domain !== \"string\" || jiraObj.domain.length === 0) {\n\t\t\t\treturn Err(new ConnectorValidationError(\"Jira connector requires a non-empty domain\"));\n\t\t\t}\n\t\t\tif (typeof jiraObj.email !== \"string\" || jiraObj.email.length === 0) {\n\t\t\t\treturn Err(new ConnectorValidationError(\"Jira connector requires a non-empty email\"));\n\t\t\t}\n\t\t\tif (typeof jiraObj.apiToken !== \"string\" || jiraObj.apiToken.length === 0) {\n\t\t\t\treturn Err(new ConnectorValidationError(\"Jira connector requires a non-empty apiToken\"));\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t\tcase \"salesforce\": {\n\t\t\tconst sf = obj.salesforce;\n\t\t\tif (typeof sf !== \"object\" || sf === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t'Connector type \"salesforce\" requires a salesforce config object',\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t\tconst sfObj = sf as Record<string, unknown>;\n\t\t\tif (typeof sfObj.instanceUrl !== \"string\" || sfObj.instanceUrl.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Salesforce connector requires a non-empty instanceUrl\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (typeof sfObj.clientId !== \"string\" || sfObj.clientId.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Salesforce connector requires a non-empty clientId\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (typeof sfObj.clientSecret !== \"string\" || sfObj.clientSecret.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Salesforce connector requires a non-empty clientSecret\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (typeof sfObj.username !== \"string\" || sfObj.username.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Salesforce connector requires a non-empty username\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (typeof sfObj.password !== \"string\" || sfObj.password.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Salesforce connector requires a non-empty password\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t}\n\n\t// --- optional ingest config ---\n\tif (obj.ingest !== undefined) {\n\t\tif (typeof obj.ingest !== \"object\" || obj.ingest === null) {\n\t\t\treturn Err(new ConnectorValidationError(\"Ingest config must be an object\"));\n\t\t}\n\n\t\tconst ingest = obj.ingest as Record<string, unknown>;\n\n\t\t// API-based connectors define tables internally — only validate intervalMs\n\t\tif (connectorType === \"jira\" || connectorType === \"salesforce\") {\n\t\t\tif (ingest.intervalMs !== undefined) {\n\t\t\t\tif (typeof ingest.intervalMs !== \"number\" || ingest.intervalMs < 1) {\n\t\t\t\t\treturn Err(new ConnectorValidationError(\"Ingest intervalMs must be a positive number\"));\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Ok(input as ConnectorConfig);\n\t\t}\n\n\t\tif (!Array.isArray(ingest.tables) || ingest.tables.length === 0) 
{\n\t\t\treturn Err(new ConnectorValidationError(\"Ingest config must have a non-empty tables array\"));\n\t\t}\n\n\t\tfor (let i = 0; i < ingest.tables.length; i++) {\n\t\t\tconst table = ingest.tables[i] as Record<string, unknown>;\n\n\t\t\tif (typeof table !== \"object\" || table === null) {\n\t\t\t\treturn Err(new ConnectorValidationError(`Ingest table at index ${i} must be an object`));\n\t\t\t}\n\n\t\t\tif (typeof table.table !== \"string\" || (table.table as string).length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t`Ingest table at index ${i} must have a non-empty table name`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tif (typeof table.query !== \"string\" || (table.query as string).length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(`Ingest table at index ${i} must have a non-empty query`),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tif (typeof table.strategy !== \"object\" || table.strategy === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(`Ingest table at index ${i} must have a strategy object`),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst strategy = table.strategy as Record<string, unknown>;\n\t\t\tif (!VALID_STRATEGIES.has(strategy.type as string)) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t`Ingest table at index ${i} strategy type must be \"cursor\" or \"diff\"`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tif (strategy.type === \"cursor\") {\n\t\t\t\tif (\n\t\t\t\t\ttypeof strategy.cursorColumn !== \"string\" ||\n\t\t\t\t\t(strategy.cursorColumn as string).length === 0\n\t\t\t\t) {\n\t\t\t\t\treturn Err(\n\t\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t\t`Ingest table at index ${i} cursor strategy requires a non-empty cursorColumn`,\n\t\t\t\t\t\t),\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif (ingest.intervalMs !== undefined) {\n\t\t\tif (typeof ingest.intervalMs !== \"number\" || ingest.intervalMs < 1) {\n\t\t\t\treturn Err(new ConnectorValidationError(\"Ingest intervalMs must be a positive number\"));\n\t\t\t}\n\t\t}\n\t}\n\n\treturn Ok(input as ConnectorConfig);\n}\n","import type { RowDelta } from \"./types\";\n\n/**\n * Apply a delta to an existing row, returning the merged result.\n *\n * - DELETE → returns null\n * - INSERT → creates a new row from delta columns\n * - UPDATE → merges delta columns onto existing row (immutable — returns a new object)\n *\n * @param row - The current row state, or null if no row exists\n * @param delta - The delta to apply\n * @returns The merged row, or null for DELETE operations\n */\nexport function applyDelta(\n\trow: Record<string, unknown> | null,\n\tdelta: RowDelta,\n): Record<string, unknown> | null {\n\tif (delta.op === \"DELETE\") return null;\n\n\tconst base: Record<string, unknown> = row ? 
{ ...row } : {};\n\tfor (const col of delta.columns) {\n\t\tbase[col.column] = col.value;\n\t}\n\treturn base;\n}\n","import equal from \"fast-deep-equal\";\nimport stableStringify from \"fast-json-stable-stringify\";\nimport type { HLCTimestamp } from \"../hlc/types\";\nimport type { ColumnDelta, RowDelta, TableSchema } from \"./types\";\n\n/**\n * Extract a column-level delta between two row states.\n *\n * - `before` null/undefined + `after` present -> INSERT (all columns)\n * - `before` present + `after` null/undefined -> DELETE (empty columns)\n * - Both present -> compare each column, emit only changed columns as UPDATE\n * - No columns changed -> returns null (no-op)\n *\n * If `schema` is provided, only columns listed in the schema are considered.\n *\n * @param before - The previous row state, or null/undefined for a new row\n * @param after - The current row state, or null/undefined for a deleted row\n * @param opts - Table name, row ID, client ID, HLC timestamp, and optional schema\n * @returns The extracted RowDelta, or null if nothing changed\n */\nexport async function extractDelta(\n\tbefore: Record<string, unknown> | null | undefined,\n\tafter: Record<string, unknown> | null | undefined,\n\topts: {\n\t\ttable: string;\n\t\trowId: string;\n\t\tclientId: string;\n\t\thlc: HLCTimestamp;\n\t\tschema?: TableSchema;\n\t},\n): Promise<RowDelta | null> {\n\tconst { table, rowId, clientId, hlc, schema } = opts;\n\n\tconst beforeExists = before != null;\n\tconst afterExists = after != null;\n\n\tif (!beforeExists && !afterExists) {\n\t\treturn null;\n\t}\n\n\t// INSERT: no previous state, new state exists\n\tif (!beforeExists && afterExists) {\n\t\tconst columns = buildColumns(after, schema);\n\t\tconst deltaId = await generateDeltaId({ clientId, hlc, table, rowId, columns });\n\t\treturn { op: \"INSERT\", table, rowId, clientId, columns, hlc, deltaId };\n\t}\n\n\t// DELETE: previous state exists, no new state\n\tif (beforeExists && !afterExists) {\n\t\tconst columns: ColumnDelta[] = [];\n\t\tconst deltaId = await generateDeltaId({ clientId, hlc, table, rowId, columns });\n\t\treturn { op: \"DELETE\", table, rowId, clientId, columns, hlc, deltaId };\n\t}\n\n\t// UPDATE: both states exist — compare columns\n\tconst columns = diffColumns(before!, after!, schema);\n\tif (columns.length === 0) {\n\t\treturn null;\n\t}\n\n\tconst deltaId = await generateDeltaId({ clientId, hlc, table, rowId, columns });\n\treturn { op: \"UPDATE\", table, rowId, clientId, columns, hlc, deltaId };\n}\n\n/** Build an allow-set from a schema, or null if no schema is provided. */\nfunction allowedSet(schema?: TableSchema): Set<string> | null {\n\treturn schema ? 
new Set(schema.columns.map((c) => c.name)) : null;\n}\n\n/**\n * Build column deltas from a row, optionally filtered by schema.\n * Skips columns whose value is undefined (treated as absent).\n */\nfunction buildColumns(row: Record<string, unknown>, schema?: TableSchema): ColumnDelta[] {\n\tconst allowed = allowedSet(schema);\n\tconst columns: ColumnDelta[] = [];\n\n\tfor (const [key, value] of Object.entries(row)) {\n\t\tif (value === undefined) continue;\n\t\tif (allowed && !allowed.has(key)) continue;\n\t\tcolumns.push({ column: key, value });\n\t}\n\n\treturn columns;\n}\n\n/**\n * Diff two row objects and return only the changed columns.\n * Uses Object.is() for primitives and fast-deep-equal for objects/arrays.\n */\nfunction diffColumns(\n\tbefore: Record<string, unknown>,\n\tafter: Record<string, unknown>,\n\tschema?: TableSchema,\n): ColumnDelta[] {\n\tconst allowed = allowedSet(schema);\n\tconst allKeys = new Set([...Object.keys(before), ...Object.keys(after)]);\n\tconst columns: ColumnDelta[] = [];\n\n\tfor (const key of allKeys) {\n\t\tif (allowed && !allowed.has(key)) continue;\n\n\t\tconst beforeVal = before[key];\n\t\tconst afterVal = after[key];\n\n\t\t// Skip absent or removed columns\n\t\tif (afterVal === undefined) continue;\n\n\t\t// New column — before was undefined\n\t\tif (beforeVal === undefined) {\n\t\t\tcolumns.push({ column: key, value: afterVal });\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Exact primitive equality (handles NaN, +0/-0)\n\t\tif (Object.is(beforeVal, afterVal)) continue;\n\n\t\t// Deep equality for objects/arrays (key-order-agnostic)\n\t\tif (\n\t\t\ttypeof beforeVal === \"object\" &&\n\t\t\tbeforeVal !== null &&\n\t\t\ttypeof afterVal === \"object\" &&\n\t\t\tafterVal !== null &&\n\t\t\tequal(beforeVal, afterVal)\n\t\t) {\n\t\t\tcontinue;\n\t\t}\n\n\t\tcolumns.push({ column: key, value: afterVal });\n\t}\n\n\treturn columns;\n}\n\n/**\n * Generate a deterministic delta ID using SHA-256.\n * Uses the Web Crypto API (works in both Bun and browsers).\n */\nasync function generateDeltaId(params: {\n\tclientId: string;\n\thlc: HLCTimestamp;\n\ttable: string;\n\trowId: string;\n\tcolumns: ColumnDelta[];\n}): Promise<string> {\n\tconst payload = stableStringify({\n\t\tclientId: params.clientId,\n\t\thlc: params.hlc.toString(),\n\t\ttable: params.table,\n\t\trowId: params.rowId,\n\t\tcolumns: params.columns,\n\t});\n\n\tconst data = new TextEncoder().encode(payload);\n\tconst hashBuffer = await crypto.subtle.digest(\"SHA-256\", data);\n\tconst bytes = new Uint8Array(hashBuffer);\n\n\tlet hex = \"\";\n\tfor (const b of bytes) {\n\t\thex += b.toString(16).padStart(2, \"0\");\n\t}\n\treturn hex;\n}\n","import type { HLCTimestamp } from \"../hlc/types\";\n\n/** Delta operation type */\nexport type DeltaOp = \"INSERT\" | \"UPDATE\" | \"DELETE\";\n\n/** A single column-level change */\nexport interface ColumnDelta {\n\t/** Column name */\n\tcolumn: string;\n\t/** Serialisable JSON value — NEVER undefined, use null instead */\n\tvalue: unknown;\n}\n\n/** A row-level delta containing column-level changes */\nexport interface RowDelta {\n\t/** Operation type */\n\top: DeltaOp;\n\t/** Table name */\n\ttable: string;\n\t/** Row identifier */\n\trowId: string;\n\t/** Client identifier — used for LWW tiebreak and audit */\n\tclientId: string;\n\t/** Changed columns — empty for DELETE */\n\tcolumns: ColumnDelta[];\n\t/** HLC timestamp (branded bigint) */\n\thlc: HLCTimestamp;\n\t/** Deterministic identifier: hash(clientId + hlc + table + rowId + columns) */\n\tdeltaId: 
string;\n}\n\n/** Minimal schema for Phase 1. Column allow-list + type hints. */\nexport interface TableSchema {\n\ttable: string;\n\tcolumns: Array<{\n\t\tname: string;\n\t\ttype: \"string\" | \"number\" | \"boolean\" | \"json\" | \"null\";\n\t}>;\n}\n\n/** Composite key utility — avoids string concatenation bugs */\nexport type RowKey = string & { readonly __brand: \"RowKey\" };\n\n/** Create a composite row key from table and row ID */\nexport function rowKey(table: string, rowId: string): RowKey {\n\treturn `${table}:${rowId}` as RowKey;\n}\n\n/** SyncPush input message — sent by clients to push local deltas to the gateway */\nexport interface SyncPush {\n\t/** Client that sent the push */\n\tclientId: string;\n\t/** Deltas to push */\n\tdeltas: RowDelta[];\n\t/** Client's last-seen HLC */\n\tlastSeenHlc: HLCTimestamp;\n}\n\n/** SyncPull input message — sent by clients to pull remote deltas from the gateway */\nexport interface SyncPull {\n\t/** Client that sent the pull */\n\tclientId: string;\n\t/** Return deltas with HLC strictly after this value */\n\tsinceHlc: HLCTimestamp;\n\t/** Maximum number of deltas to return */\n\tmaxDeltas: number;\n\t/** Optional source adapter name — when set, pull from the named adapter instead of the buffer */\n\tsource?: string;\n}\n\n/** SyncResponse output — returned by the gateway after push or pull */\nexport interface SyncResponse {\n\t/** Deltas matching the pull criteria */\n\tdeltas: RowDelta[];\n\t/** Current server HLC */\n\tserverHlc: HLCTimestamp;\n\t/** Whether there are more deltas to fetch */\n\thasMore: boolean;\n}\n","/**\n * BigInt-safe JSON replacer.\n *\n * Converts BigInt values to strings so they survive `JSON.stringify`,\n * which otherwise throws on BigInt.\n */\nexport function bigintReplacer(_key: string, value: unknown): unknown {\n\treturn typeof value === \"bigint\" ? value.toString() : value;\n}\n\n/**\n * BigInt-aware JSON reviver.\n *\n * Restores string-encoded HLC timestamps (fields ending in `Hlc` or `hlc`)\n * back to BigInt so they match the branded `HLCTimestamp` type.\n *\n * Invalid numeric strings are left as-is to prevent runtime crashes.\n */\nexport function bigintReviver(key: string, value: unknown): unknown {\n\tif (typeof value === \"string\" && /hlc$/i.test(key)) {\n\t\ttry {\n\t\t\treturn BigInt(value);\n\t\t} catch {\n\t\t\treturn value;\n\t\t}\n\t}\n\treturn value;\n}\n","import type { SyncRulesConfig } from \"./types\";\n\n/**\n * Create a pass-all sync rules configuration.\n *\n * Every delta reaches every client — equivalent to having no rules at all.\n * Useful for apps without multi-tenancy or per-user data isolation.\n */\nexport function createPassAllRules(): SyncRulesConfig {\n\treturn {\n\t\tversion: 1,\n\t\tbuckets: [\n\t\t\t{\n\t\t\t\tname: \"all\",\n\t\t\t\ttables: [],\n\t\t\t\tfilters: [],\n\t\t\t},\n\t\t],\n\t};\n}\n\n/**\n * Create user-scoped sync rules configuration.\n *\n * Filters rows by matching a configurable column against the JWT `sub` claim,\n * so each client only receives deltas belonging to the authenticated user.\n *\n * @param tables - Which tables to scope. Empty array means all tables.\n * @param userColumn - Column to match against `jwt:sub`. 
Defaults to `\"user_id\"`.\n */\nexport function createUserScopedRules(tables: string[], userColumn = \"user_id\"): SyncRulesConfig {\n\treturn {\n\t\tversion: 1,\n\t\tbuckets: [\n\t\t\t{\n\t\t\t\tname: \"user\",\n\t\t\t\ttables,\n\t\t\t\tfilters: [\n\t\t\t\t\t{\n\t\t\t\t\t\tcolumn: userColumn,\n\t\t\t\t\t\top: \"eq\",\n\t\t\t\t\t\tvalue: \"jwt:sub\",\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n}\n","import { LakeSyncError } from \"../result/errors\";\n\n/** Sync rule configuration or evaluation error */\nexport class SyncRuleError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"SYNC_RULE_ERROR\", cause);\n\t}\n}\n","import type { RowDelta } from \"../delta/types\";\nimport { Err, Ok, type Result } from \"../result/result\";\nimport { SyncRuleError } from \"./errors\";\nimport type {\n\tBucketDefinition,\n\tResolvedClaims,\n\tSyncRuleFilter,\n\tSyncRulesConfig,\n\tSyncRulesContext,\n} from \"./types\";\n\n/**\n * Resolve a filter value, substituting JWT claim references.\n *\n * Values prefixed with `jwt:` are looked up in the claims record.\n * Literal values are returned as-is (wrapped in an array for uniform handling).\n *\n * @param value - The filter value string (e.g. \"jwt:sub\" or \"tenant-1\")\n * @param claims - Resolved JWT claims\n * @returns An array of resolved values, or an empty array if the claim is missing\n */\nexport function resolveFilterValue(value: string, claims: ResolvedClaims): string[] {\n\tif (!value.startsWith(\"jwt:\")) {\n\t\treturn [value];\n\t}\n\n\tconst claimKey = value.slice(4);\n\tconst claimValue = claims[claimKey];\n\n\tif (claimValue === undefined) {\n\t\treturn [];\n\t}\n\n\treturn Array.isArray(claimValue) ? claimValue : [claimValue];\n}\n\n/**\n * Check whether a delta matches a single bucket definition.\n *\n * A delta matches if:\n * 1. The bucket's `tables` list is empty (matches all tables) or includes the delta's table\n * 2. 
All filters match (conjunctive AND):\n * - `eq`: the delta column value equals one of the resolved filter values\n * - `in`: the delta column value is contained in the resolved filter values\n *\n * @param delta - The row delta to evaluate\n * @param bucket - The bucket definition\n * @param claims - Resolved JWT claims\n * @returns true if the delta matches this bucket\n */\nexport function deltaMatchesBucket(\n\tdelta: RowDelta,\n\tbucket: BucketDefinition,\n\tclaims: ResolvedClaims,\n): boolean {\n\t// Table filter: empty tables list = match all\n\tif (bucket.tables.length > 0 && !bucket.tables.includes(delta.table)) {\n\t\treturn false;\n\t}\n\n\t// All filters must match (conjunctive AND)\n\tfor (const filter of bucket.filters) {\n\t\tif (!filterMatchesDelta(delta, filter, claims)) {\n\t\t\treturn false;\n\t\t}\n\t}\n\n\treturn true;\n}\n\n/**\n * Compare two values using a comparison operator.\n * Attempts numeric comparison first; falls back to string localeCompare.\n */\nfunction compareValues(\n\tdeltaValue: string,\n\tfilterValue: string,\n\top: \"gt\" | \"lt\" | \"gte\" | \"lte\",\n): boolean {\n\tconst numDelta = parseFloat(deltaValue);\n\tconst numFilter = parseFloat(filterValue);\n\tconst useNumeric = !Number.isNaN(numDelta) && !Number.isNaN(numFilter);\n\n\tif (useNumeric) {\n\t\tswitch (op) {\n\t\t\tcase \"gt\":\n\t\t\t\treturn numDelta > numFilter;\n\t\t\tcase \"lt\":\n\t\t\t\treturn numDelta < numFilter;\n\t\t\tcase \"gte\":\n\t\t\t\treturn numDelta >= numFilter;\n\t\t\tcase \"lte\":\n\t\t\t\treturn numDelta <= numFilter;\n\t\t}\n\t}\n\n\tconst cmp = deltaValue.localeCompare(filterValue);\n\tswitch (op) {\n\t\tcase \"gt\":\n\t\t\treturn cmp > 0;\n\t\tcase \"lt\":\n\t\t\treturn cmp < 0;\n\t\tcase \"gte\":\n\t\t\treturn cmp >= 0;\n\t\tcase \"lte\":\n\t\t\treturn cmp <= 0;\n\t}\n}\n\nconst FILTER_OPS: Record<string, (dv: string, rv: string[]) => boolean> = {\n\teq: (dv, rv) => rv.includes(dv),\n\tin: (dv, rv) => rv.includes(dv),\n\tneq: (dv, rv) => !rv.includes(dv),\n\tgt: (dv, rv) => compareValues(dv, rv[0]!, \"gt\"),\n\tlt: (dv, rv) => compareValues(dv, rv[0]!, \"lt\"),\n\tgte: (dv, rv) => compareValues(dv, rv[0]!, \"gte\"),\n\tlte: (dv, rv) => compareValues(dv, rv[0]!, \"lte\"),\n};\n\n/**\n * Check whether a single filter matches a delta's column values.\n */\nfunction filterMatchesDelta(\n\tdelta: RowDelta,\n\tfilter: SyncRuleFilter,\n\tclaims: ResolvedClaims,\n): boolean {\n\tconst col = delta.columns.find((c) => c.column === filter.column);\n\tif (!col) {\n\t\t// Column not present in delta — filter does not match\n\t\treturn false;\n\t}\n\n\tconst deltaValue = String(col.value);\n\tconst resolvedValues = resolveFilterValue(filter.value, claims);\n\n\tif (resolvedValues.length === 0) {\n\t\t// JWT claim missing — filter cannot match\n\t\treturn false;\n\t}\n\n\treturn FILTER_OPS[filter.op]?.(deltaValue, resolvedValues) ?? 
false;\n}\n\n/**\n * Filter an array of deltas by sync rules.\n *\n * A delta is included if it matches **any** bucket (union across buckets).\n * If no sync rules are configured (empty buckets), all deltas pass through.\n *\n * @param deltas - The deltas to filter\n * @param context - Sync rules context (rules + resolved claims)\n * @returns Filtered array of deltas\n */\nexport function filterDeltas(deltas: RowDelta[], context: SyncRulesContext): RowDelta[] {\n\tif (context.rules.buckets.length === 0) {\n\t\treturn deltas;\n\t}\n\n\treturn deltas.filter((delta) =>\n\t\tcontext.rules.buckets.some((bucket) => deltaMatchesBucket(delta, bucket, context.claims)),\n\t);\n}\n\n/**\n * Determine which buckets a client matches based on their claims.\n *\n * A client matches a bucket if the bucket has no table-level restrictions\n * or if the client's claims satisfy all filter conditions for at least\n * one possible row. This is used for bucket-level access decisions, not\n * row-level filtering.\n *\n * @param rules - The sync rules configuration\n * @param claims - Resolved JWT claims\n * @returns Array of bucket names the client matches\n */\nexport function resolveClientBuckets(rules: SyncRulesConfig, claims: ResolvedClaims): string[] {\n\treturn rules.buckets\n\t\t.filter((bucket) => {\n\t\t\t// A client matches a bucket if all JWT-referenced filters\n\t\t\t// can be resolved (i.e. the required claims exist)\n\t\t\tfor (const filter of bucket.filters) {\n\t\t\t\tif (filter.value.startsWith(\"jwt:\")) {\n\t\t\t\t\tconst resolved = resolveFilterValue(filter.value, claims);\n\t\t\t\t\tif (resolved.length === 0) {\n\t\t\t\t\t\treturn false;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn true;\n\t\t})\n\t\t.map((b) => b.name);\n}\n\n/**\n * Validate a sync rules configuration for structural correctness.\n *\n * Checks:\n * - Version is a positive integer\n * - Buckets is an array\n * - Each bucket has a non-empty name, valid tables array, valid filters\n * - Filter operators are \"eq\" or \"in\"\n * - Filter values and columns are non-empty strings\n * - Bucket names are unique\n *\n * @param config - The sync rules configuration to validate\n * @returns Ok(void) if valid, Err(SyncRuleError) with details if invalid\n */\nexport function validateSyncRules(config: unknown): Result<void, SyncRuleError> {\n\tif (typeof config !== \"object\" || config === null) {\n\t\treturn Err(new SyncRuleError(\"Sync rules config must be an object\"));\n\t}\n\n\tconst obj = config as Record<string, unknown>;\n\n\tif (typeof obj.version !== \"number\" || !Number.isInteger(obj.version) || obj.version < 1) {\n\t\treturn Err(new SyncRuleError(\"Sync rules version must be a positive integer\"));\n\t}\n\n\tif (!Array.isArray(obj.buckets)) {\n\t\treturn Err(new SyncRuleError(\"Sync rules buckets must be an array\"));\n\t}\n\n\tconst seenNames = new Set<string>();\n\n\tfor (let i = 0; i < obj.buckets.length; i++) {\n\t\tconst bucket = obj.buckets[i] as Record<string, unknown>;\n\n\t\tif (typeof bucket !== \"object\" || bucket === null) {\n\t\t\treturn Err(new SyncRuleError(`Bucket at index ${i} must be an object`));\n\t\t}\n\n\t\tif (typeof bucket.name !== \"string\" || bucket.name.length === 0) {\n\t\t\treturn Err(new SyncRuleError(`Bucket at index ${i} must have a non-empty name`));\n\t\t}\n\n\t\tif (seenNames.has(bucket.name as string)) {\n\t\t\treturn Err(new SyncRuleError(`Duplicate bucket name: \"${bucket.name}\"`));\n\t\t}\n\t\tseenNames.add(bucket.name as string);\n\n\t\tif (!Array.isArray(bucket.tables)) 
{\n\t\t\treturn Err(new SyncRuleError(`Bucket \"${bucket.name}\" tables must be an array`));\n\t\t}\n\n\t\tfor (const table of bucket.tables as unknown[]) {\n\t\t\tif (typeof table !== \"string\" || table.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SyncRuleError(`Bucket \"${bucket.name}\" tables must contain non-empty strings`),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\tif (!Array.isArray(bucket.filters)) {\n\t\t\treturn Err(new SyncRuleError(`Bucket \"${bucket.name}\" filters must be an array`));\n\t\t}\n\n\t\tfor (let j = 0; j < (bucket.filters as unknown[]).length; j++) {\n\t\t\tconst filter = (bucket.filters as Record<string, unknown>[])[j]!;\n\n\t\t\tif (typeof filter !== \"object\" || filter === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SyncRuleError(`Bucket \"${bucket.name}\" filter at index ${j} must be an object`),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tif (typeof filter.column !== \"string\" || (filter.column as string).length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SyncRuleError(\n\t\t\t\t\t\t`Bucket \"${bucket.name}\" filter at index ${j} must have a non-empty column`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst validOps = [\"eq\", \"in\", \"neq\", \"gt\", \"lt\", \"gte\", \"lte\"];\n\t\t\tif (!validOps.includes(filter.op as string)) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SyncRuleError(\n\t\t\t\t\t\t`Bucket \"${bucket.name}\" filter at index ${j} op must be one of: ${validOps.join(\", \")}`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tif (typeof filter.value !== \"string\" || (filter.value as string).length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SyncRuleError(\n\t\t\t\t\t\t`Bucket \"${bucket.name}\" filter at index ${j} must have a non-empty value`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn Ok(undefined);\n}\n","import { SchemaError } from \"../result/errors\";\nimport { Err, Ok, type Result } from \"../result/result\";\n\n/** Valid SQL identifier: starts with letter or underscore, alphanumeric + underscore, max 64 chars. */\nconst IDENTIFIER_RE = /^[a-zA-Z_][a-zA-Z0-9_]{0,63}$/;\n\n/**\n * Check whether a string is a valid SQL identifier.\n *\n * Valid identifiers start with a letter or underscore, contain only\n * alphanumeric characters and underscores, and are at most 64 characters long.\n *\n * @param name - The identifier to validate\n * @returns `true` if valid, `false` otherwise\n */\nexport function isValidIdentifier(name: string): boolean {\n\treturn IDENTIFIER_RE.test(name);\n}\n\n/**\n * Assert that a string is a valid SQL identifier, returning a Result.\n *\n * @param name - The identifier to validate\n * @returns Ok(undefined) if valid, Err(SchemaError) if invalid\n */\nexport function assertValidIdentifier(name: string): Result<void, SchemaError> {\n\tif (isValidIdentifier(name)) {\n\t\treturn Ok(undefined);\n\t}\n\treturn Err(\n\t\tnew SchemaError(\n\t\t\t`Invalid SQL identifier: \"${name}\". 
Identifiers must start with a letter or underscore, contain only alphanumeric characters and underscores, and be at most 64 characters long.`,\n\t\t),\n\t);\n}\n\n/**\n * Quote a SQL identifier using double quotes as defence-in-depth.\n *\n * Any embedded double-quote characters are escaped by doubling them,\n * following the SQL standard for delimited identifiers.\n *\n * @param name - The identifier to quote\n * @returns The double-quoted identifier string\n */\nexport function quoteIdentifier(name: string): string {\n\treturn `\"${name.replace(/\"/g, '\"\"')}\"`;\n}\n"],"mappings":";AACO,IAAM,gBAAN,cAA4B,MAAM;AAAA,EAC/B;AAAA,EACS;AAAA,EAElB,YAAY,SAAiB,MAAc,OAAe;AACzD,UAAM,OAAO;AACb,SAAK,OAAO,KAAK,YAAY;AAC7B,SAAK,OAAO;AACZ,SAAK,QAAQ;AAAA,EACd;AACD;AAGO,IAAM,kBAAN,cAA8B,cAAc;AAAA,EAClD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,eAAe,KAAK;AAAA,EACpC;AACD;AAGO,IAAM,gBAAN,cAA4B,cAAc;AAAA,EAChD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,YAAY,KAAK;AAAA,EACjC;AACD;AAGO,IAAM,aAAN,cAAyB,cAAc;AAAA,EAC7C,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,gBAAgB,KAAK;AAAA,EACrC;AACD;AAGO,IAAM,cAAN,cAA0B,cAAc;AAAA,EAC9C,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,mBAAmB,KAAK;AAAA,EACxC;AACD;AAGO,IAAM,eAAN,cAA2B,cAAc;AAAA,EAC/C,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,iBAAiB,KAAK;AAAA,EACtC;AACD;AAGO,IAAM,uBAAN,cAAmC,cAAc;AAAA,EACvD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,qBAAqB,KAAK;AAAA,EAC1C;AACD;AAGO,IAAM,oBAAN,cAAgC,cAAc;AAAA,EACpD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,gBAAgB,KAAK;AAAA,EACrC;AACD;AAGO,SAAS,QAAQ,KAAqB;AAC5C,SAAO,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAC1D;;;AC9DO,IAAM,uBAAN,cAAmC,cAAc;AAAA,EAC9C;AAAA,EAET,YAAY,SAAiB,WAAoB,OAAe;AAC/D,UAAM,SAAS,0BAA0B,KAAK;AAC9C,SAAK,YAAY;AAAA,EAClB;AACD;AAGO,IAAM,0BAAN,cAAsC,cAAc;AAAA,EAC1D,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,wBAAwB,KAAK;AAAA,EAC7C;AACD;AAGO,IAAM,wBAAN,cAAoC,cAAc;AAAA,EACxD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,2BAA2B,KAAK;AAAA,EAChD;AACD;;;ACxBA,OAAO,qBAAqB;AAS5B,eAAsB,iBAAiB,QAMnB;AACnB,QAAM,UAAU,gBAAgB;AAAA,IAC/B,UAAU,OAAO;AAAA,IACjB,KAAK,OAAO,IAAI,SAAS;AAAA,IACzB,WAAW,OAAO;AAAA,IAClB,YAAY,OAAO;AAAA,IACnB,QAAQ,OAAO;AAAA,EAChB,CAAC;AAED,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,OAAO;AAC7C,QAAM,aAAa,MAAM,OAAO,OAAO,OAAO,WAAW,IAAI;AAC7D,QAAM,QAAQ,IAAI,WAAW,UAAU;AAEvC,MAAI,MAAM;AACV,aAAW,KAAK,OAAO;AACtB,WAAO,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAAA,EACtC;AACA,SAAO;AACR;;;ACiCO,SAAS,cACf,QAC8B;AAC9B,SAAO,UAAU,UAAU,eAAe;AAC3C;;;AChEO,SAAS,GAAM,OAA4B;AACjD,SAAO,EAAE,IAAI,MAAM,MAAM;AAC1B;AAGO,SAAS,IAAO,OAA4B;AAClD,SAAO,EAAE,IAAI,OAAO,MAAM;AAC3B;AAGO,SAAS,UAAmB,QAAsB,IAAmC;AAC3F,MAAI,OAAO,IAAI;AACd,WAAO,GAAG,GAAG,OAAO,KAAK,CAAC;AAAA,EAC3B;AACA,SAAO;AACR;AAGO,SAAS,cACf,QACA,IACe;AACf,MAAI,OAAO,IAAI;AACd,WAAO,GAAG,OAAO,KAAK;AAAA,EACvB;AACA,SAAO;AACR;AAGO,SAAS,cAAoB,QAAyB;AAC5D,MAAI,OAAO,IAAI;AACd,WAAO,OAAO;AAAA,EACf;AACA,QAAM,OAAO;AACd;AAGA,eAAsB,YAAe,SAAgD;AACpF,MAAI;AACH,UAAM,QAAQ,MAAM;AACpB,WAAO,GAAG,KAAK;AAAA,EAChB,SAAS,OAAO;AACf,WAAO,IAAI,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAAA,EACrE;AACD;;;ACxCO,SAAS,eAAe,QAAwD;AACtF,MAAI,WAAW,QAAQ,OAAO,WAAW,UAAU;AAClD,WAAO,IAAI,IAAI,sBAAsB,kCAAkC,CAAC;AAAA,EACzE;AAEA,QAAM,IAAI;AAEV,MAAI,OAAO,EAAE,aAAa,YAAY,EAAE,SAAS,WAAW,GAAG;AAC9D,WAAO,IAAI,IAAI,sBAAsB,qCAAqC,CAAC;AAAA,EAC5E;AAEA,MAAI,OAAO,EAAE,aAAa,YAAY,EAAE,SAAS,WAAW,GAAG;AAC9D,WAAO,IAAI,IAAI,sBAAsB,qCAAqC,CAAC;AAAA,EAC5E;AAEA,MAAI,OAAO,EAAE,QAAQ,UAAU;AAC9B,WAAO,IAAI,IAAI,sBAAsB,sBAAsB,CAAC;AAAA,EAC7D;AAEA,MAAI,OAAO,EAAE,cAAc,YAAY,EAAE,UAAU,WAAW,GAAG;AAChE,WAAO,IAAI,IAAI,sBAAsB,sCAAsC,CAAC;AAAA,EAC7E;AAEA,MAAI,OAAO,EAAE,eAAe,YAAY,EAAE,WAAW,WAAW,GAAG;AAClE,WAAO,IAAI,IAAI,sBAAsB,uCAAuC,CAAC;AAAA,EAC9E;AAEA,MAAI,EAAE,WAAW,QAAQ,OAAO,EA
AE,WAAW,YAAY,MAAM,QAAQ,EAAE,MAAM,GAAG;AACjF,WAAO,IAAI,IAAI,sBAAsB,kCAAkC,CAAC;AAAA,EACzE;AAEA,MAAI,EAAE,mBAAmB,UAAa,OAAO,EAAE,mBAAmB,UAAU;AAC3E,WAAO,IAAI,IAAI,sBAAsB,6CAA6C,CAAC;AAAA,EACpF;AAEA,SAAO,GAAG,MAAgB;AAC3B;;;ACVO,IAAM,YAAN,cAAwB,MAAM;AAAA,EACpC,YAAY,SAAiB;AAC5B,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACb;AACD;AAoBA,SAAS,gBAAgB,OAA2B;AAEnD,QAAM,SAAS,MAAM,QAAQ,MAAM,GAAG,EAAE,QAAQ,MAAM,GAAG;AACzD,QAAM,SAAS,OAAO,OAAO,OAAO,UAAW,IAAK,OAAO,SAAS,KAAM,GAAI,GAAG;AACjF,QAAM,SAAS,KAAK,MAAM;AAC1B,QAAM,QAAQ,IAAI,WAAW,OAAO,MAAM;AAC1C,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACvC,UAAM,CAAC,IAAI,OAAO,WAAW,CAAC;AAAA,EAC/B;AACA,SAAO;AACR;AAKA,SAAS,UAAU,MAAuB;AACzC,MAAI;AACH,WAAO,KAAK,MAAM,IAAI;AAAA,EACvB,QAAQ;AACP,WAAO;AAAA,EACR;AACD;AAYA,eAAsB,YACrB,OACA,QACyC;AAEzC,QAAM,QAAQ,MAAM,MAAM,GAAG;AAC7B,MAAI,MAAM,WAAW,GAAG;AACvB,WAAO,IAAI,IAAI,UAAU,sDAAsD,CAAC;AAAA,EACjF;AAEA,QAAM,CAAC,WAAW,YAAY,YAAY,IAAI;AAC9C,MAAI,CAAC,aAAa,CAAC,cAAc,CAAC,cAAc;AAC/C,WAAO,IAAI,IAAI,UAAU,8BAA8B,CAAC;AAAA,EACzD;AAGA,MAAI;AACJ,MAAI;AACH,kBAAc,gBAAgB,SAAS;AAAA,EACxC,QAAQ;AACP,WAAO,IAAI,IAAI,UAAU,4CAA4C,CAAC;AAAA,EACvE;AAEA,QAAM,SAAS,UAAU,IAAI,YAAY,EAAE,OAAO,WAAW,CAAC;AAC9D,MAAI,CAAC,UAAU,OAAO,QAAQ,WAAW,OAAO,QAAQ,OAAO;AAC9D,WAAO,IAAI,IAAI,UAAU,6DAA6D,CAAC;AAAA,EACxF;AAGA,QAAM,UAAU,IAAI,YAAY;AAChC,QAAM,UAAU,QAAQ,OAAO,MAAM;AAErC,MAAI;AACJ,MAAI;AACH,gBAAY,MAAO,OAAO,OAAiC;AAAA,MAC1D;AAAA,MACA;AAAA,MACA,EAAE,MAAM,QAAQ,MAAM,UAAU;AAAA,MAChC;AAAA,MACA,CAAC,QAAQ;AAAA,IACV;AAAA,EACD,QAAQ;AACP,WAAO,IAAI,IAAI,UAAU,2BAA2B,CAAC;AAAA,EACtD;AAGA,MAAI;AACJ,MAAI;AACH,qBAAiB,gBAAgB,YAAY;AAAA,EAC9C,QAAQ;AACP,WAAO,IAAI,IAAI,UAAU,+CAA+C,CAAC;AAAA,EAC1E;AAEA,QAAM,eAAe,QAAQ,OAAO,GAAG,SAAS,IAAI,UAAU,EAAE;AAEhE,MAAI;AACJ,MAAI;AACH,YAAQ,MAAO,OAAO,OAAiC;AAAA,MACtD;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAAA,EACD,QAAQ;AACP,WAAO,IAAI,IAAI,UAAU,+BAA+B,CAAC;AAAA,EAC1D;AAEA,MAAI,CAAC,OAAO;AACX,WAAO,IAAI,IAAI,UAAU,uBAAuB,CAAC;AAAA,EAClD;AAGA,MAAI;AACJ,MAAI;AACH,mBAAe,gBAAgB,UAAU;AAAA,EAC1C,QAAQ;AACP,WAAO,IAAI,IAAI,UAAU,6CAA6C,CAAC;AAAA,EACxE;AAEA,QAAM,UAAU,UAAU,IAAI,YAAY,EAAE,OAAO,YAAY,CAAC;AAChE,MAAI,CAAC,SAAS;AACb,WAAO,IAAI,IAAI,UAAU,0CAA0C,CAAC;AAAA,EACrE;AAGA,MAAI,QAAQ,QAAQ,UAAa,OAAO,QAAQ,QAAQ,UAAU;AACjE,WAAO,IAAI,IAAI,UAAU,yCAAyC,CAAC;AAAA,EACpE;AACA,QAAM,aAAa,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AAC/C,MAAI,QAAQ,OAAO,YAAY;AAC9B,WAAO,IAAI,IAAI,UAAU,iBAAiB,CAAC;AAAA,EAC5C;AAGA,MAAI,OAAO,QAAQ,QAAQ,YAAY,QAAQ,IAAI,WAAW,GAAG;AAChE,WAAO,IAAI,IAAI,UAAU,2CAA2C,CAAC;AAAA,EACtE;AAEA,MAAI,OAAO,QAAQ,OAAO,YAAY,QAAQ,GAAG,WAAW,GAAG;AAC9D,WAAO,IAAI,IAAI,UAAU,2CAA2C,CAAC;AAAA,EACtE;AAGA,QAAM,iBAAiB,oBAAI,IAAI,CAAC,OAAO,MAAM,OAAO,OAAO,OAAO,OAAO,MAAM,CAAC;AAChF,QAAM,eAAkD,CAAC;AAEzD,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AACnD,QAAI,eAAe,IAAI,GAAG,EAAG;AAC7B,QAAI,OAAO,UAAU,UAAU;AAC9B,mBAAa,GAAG,IAAI;AAAA,IACrB,WAAW,MAAM,QAAQ,KAAK,KAAK,MAAM,MAAM,CAAC,MAAM,OAAO,MAAM,QAAQ,GAAG;AAC7E,mBAAa,GAAG,IAAI;AAAA,IACrB;AAAA,EACD;AAGA,eAAa,MAAM,QAAQ;AAG3B,QAAM,OACL,OAAO,QAAQ,SAAS,YAAY,QAAQ,KAAK,SAAS,IAAI,QAAQ,OAAO;AAE9E,SAAO,GAAG;AAAA,IACT,UAAU,QAAQ;AAAA,IAClB,WAAW,QAAQ;AAAA,IACnB;AAAA,IACA;AAAA,EACD,CAAC;AACF;;;ACnNO,IAAM,MAAN,MAAM,KAAI;AAAA,EACC;AAAA,EACT,UAAU;AAAA,EACV,WAAW;AAAA;AAAA,EAGnB,OAAgB,eAAe;AAAA;AAAA,EAG/B,OAAgB,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ9B,YAAY,WAA0B;AACrC,SAAK,YAAY,cAAc,MAAM,KAAK,IAAI;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAoB;AACnB,UAAM,WAAW,KAAK,UAAU;AAChC,UAAM,OAAO,KAAK,IAAI,UAAU,KAAK,QAAQ;AAE7C,QAAI,SAAS,KAAK,UAAU;AAC3B,WAAK;AACL,UAAI,KAAK,UAAU,KAAI,aAAa;AAEnC,aAAK,WAAW,OAAO;AACvB,aAAK,UAAU;AAAA,MAChB;AAAA,IACD,OAAO;AACN,WAAK,WAAW;AAChB,WAAK,UAAU;AAAA,I
AChB;AAEA,WAAO,KAAI,OAAO,KAAK,UAAU,KAAK,OAAO;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAK,QAA6D;AACjE,UAAM,EAAE,MAAM,YAAY,SAAS,cAAc,IAAI,KAAI,OAAO,MAAM;AACtE,UAAM,WAAW,KAAK,UAAU;AAChC,UAAM,YAAY,KAAK,IAAI,UAAU,KAAK,QAAQ;AAGlD,QAAI,aAAa,WAAW,KAAI,cAAc;AAC7C,aAAO;AAAA,QACN,IAAI;AAAA,UACH,mBAAmB,aAAa,QAAQ,wBAAwB,KAAI,YAAY;AAAA,QACjF;AAAA,MACD;AAAA,IACD;AAEA,QAAI,aAAa,WAAW;AAC3B,WAAK,WAAW;AAChB,WAAK,UAAU,gBAAgB;AAAA,IAChC,WAAW,eAAe,WAAW;AACpC,WAAK,WAAW;AAChB,WAAK,UAAU,KAAK,IAAI,KAAK,SAAS,aAAa,IAAI;AAAA,IACxD,OAAO;AACN,WAAK,WAAW;AAChB,WAAK;AAAA,IACN;AAEA,QAAI,KAAK,UAAU,KAAI,aAAa;AAEnC,WAAK,WAAW,KAAK,WAAW;AAChC,WAAK,UAAU;AAAA,IAChB;AAEA,WAAO,GAAG,KAAI,OAAO,KAAK,UAAU,KAAK,OAAO,CAAC;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAO,OAAO,MAAc,SAA+B;AAC1D,WAAS,OAAO,IAAI,KAAK,MAAO,OAAO,UAAU,KAAM;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,OAAO,IAAqD;AAClE,WAAO;AAAA,MACN,MAAM,OAAO,MAAM,GAAG;AAAA,MACtB,SAAS,OAAO,KAAK,OAAO;AAAA,IAC7B;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,QAAQ,GAAiB,GAA6B;AAC5D,QAAI,IAAI,EAAG,QAAO;AAClB,QAAI,IAAI,EAAG,QAAO;AAClB,WAAO;AAAA,EACR;AACD;;;AC7GO,SAAS,eAAe,QAA4C;AAC1E,SACC,OAAQ,OAAwB,UAAU,cAC1C,OAAQ,OAAwB,gBAAgB,cAChD,iBAAiB;AAEnB;AAYA,IAAM,qBAAqB;AAC3B,IAAM,0BAA0B;AAMzB,IAAe,mBAAf,MAAgC;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACF;AAAA,EACT,QAA8C;AAAA,EAC9C,UAAU;AAAA,EAED;AAAA,EACA;AAAA,EACA;AAAA,EACT,gBAA4B,CAAC;AAAA,EAErC,YAAY,QAKT;AACF,SAAK,UAAU,OAAO;AACtB,SAAK,MAAM,IAAI,IAAI;AACnB,SAAK,WAAW,UAAU,OAAO,IAAI;AACrC,SAAK,aAAa,OAAO;AACzB,SAAK,YAAY,OAAO,QAAQ,aAAa;AAC7C,SAAK,oBAAoB,OAAO,QAAQ;AACxC,SAAK,iBAAiB,OAAO,QAAQ,kBAAkB;AAAA,EACxD;AAAA;AAAA,EAGA,QAAc;AACb,QAAI,KAAK,QAAS;AAClB,SAAK,UAAU;AACf,SAAK,aAAa;AAAA,EACnB;AAAA;AAAA,EAGA,OAAa;AACZ,SAAK,UAAU;AACf,QAAI,KAAK,OAAO;AACf,mBAAa,KAAK,KAAK;AACvB,WAAK,QAAQ;AAAA,IACd;AAAA,EACD;AAAA;AAAA,EAGA,IAAI,YAAqB;AACxB,WAAO,KAAK;AAAA,EACb;AAAA;AAAA,EAMU,WAAW,QAA0B;AAC9C,QAAI,OAAO,WAAW,EAAG;AACzB,UAAM,OAAiB;AAAA,MACtB,UAAU,KAAK;AAAA,MACf;AAAA,MACA,aAAa;AAAA,IACd;AACA,SAAK,QAAQ,WAAW,IAAI;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAgB,gBAAgB,OAAgC;AAC/D,SAAK,cAAc,KAAK,KAAK;AAC7B,QAAI,KAAK,cAAc,UAAU,KAAK,WAAW;AAChD,YAAM,KAAK,iBAAiB;AAAA,IAC7B;AAAA,EACD;AAAA;AAAA,EAGA,MAAgB,mBAAkC;AACjD,QAAI,KAAK,cAAc,SAAS,GAAG;AAClC,YAAM,KAAK,iBAAiB;AAAA,IAC7B;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,mBAAkC;AAC/C,UAAM,QAAQ,KAAK;AACnB,SAAK,gBAAgB,CAAC;AACtB,UAAM,KAAK,mBAAmB,KAAK;AAAA,EACpC;AAAA,EAEA,MAAc,mBAAmB,OAAkC;AAClE,QAAI,MAAM,WAAW,EAAG;AAExB,UAAM,SAAS,KAAK;AAGpB,QAAI,eAAe,MAAM,GAAG;AAC3B,UAAI,KAAK,kBAAkB,MAAM,GAAG;AACnC,cAAM,OAAO,MAAM;AAAA,MACpB;AAAA,IACD;AAEA,UAAM,OAAiB;AAAA,MACtB,UAAU,KAAK;AAAA,MACf,QAAQ;AAAA,MACR,aAAa;AAAA,IACd;AAEA,UAAM,SAAS,OAAO,WAAW,IAAI;AAGrC,QAAI,UAAU,OAAO,WAAW,YAAY,QAAQ,UAAU,CAAC,OAAO,IAAI;AACzE,UAAI,eAAe,MAAM,GAAG;AAC3B,cAAM,OAAO,MAAM;AACnB,eAAO,WAAW,IAAI;AAAA,MACvB;AAAA,IACD;AAAA,EACD;AAAA,EAEQ,kBAAkB,QAA+B;AACxD,QAAI,OAAO,YAAY,EAAG,QAAO;AACjC,QAAI,KAAK,qBAAqB,MAAM;AACnC,YAAM,YAAY,KAAK,MAAM,KAAK,oBAAoB,KAAK,cAAc;AACzE,UAAI,OAAO,YAAY,YAAY,UAAW,QAAO;AAAA,IACtD;AACA,WAAO;AAAA,EACR;AAAA,EAEQ,eAAqB;AAC5B,QAAI,CAAC,KAAK,QAAS;AACnB,SAAK,QAAQ,WAAW,YAAY;AACnC,UAAI;AACH,cAAM,KAAK,KAAK;AAAA,MACjB,QAAQ;AAAA,MAER;AACA,WAAK,aAAa;AAAA,IACnB,GAAG,KAAK,UAAU;AAAA,EACnB;AACD;;;ACnLO,IAAM,cAAN,MAA8C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBpD,QAAQ,OAAiB,QAAmD;AAE3E,QAAI,MAAM,UAAU,OAAO,SAAS,MAAM,UAAU,OAAO,OAAO;AACjE,aAAO;AAAA,QACN,IAAI;AAAA,UACH,oDAAoD,MAAM,KAAK,IAAI,MAAM,KAAK,OAAO,OAAO,KAAK,IAAI,OAAO,KAAK;AAAA,QAClH;AAAA,MACD;AAAA,IACD;AA
GA,UAAM,SAAS,WAAW,OAAO,MAAM;AAGvC,QAAI,MAAM,OAAO,YAAY,OAAO,OAAO,UAAU;AACpD,aAAO,GAAG,EAAE,GAAG,QAAQ,SAAS,CAAC,EAAE,CAAC;AAAA,IACrC;AAGA,QAAI,MAAM,OAAO,YAAY,OAAO,OAAO,UAAU;AACpD,YAAM,cAAc,MAAM,OAAO,WAAW,QAAQ;AACpD,YAAM,aAAa,MAAM,OAAO,WAAW,SAAS;AAGpD,UAAI,gBAAgB,QAAQ;AAC3B,eAAO,GAAG,EAAE,GAAG,aAAa,SAAS,CAAC,EAAE,CAAC;AAAA,MAC1C;AAEA,aAAO,GAAG,EAAE,GAAG,WAAW,CAAC;AAAA,IAC5B;AAGA,UAAM,gBAAgB,aAAa,OAAO,MAAM;AAGhD,UAAM,KAAc,MAAM,OAAO,YAAY,OAAO,OAAO,WAAW,WAAW;AAEjF,WAAO,GAAG;AAAA,MACT;AAAA,MACA,OAAO,MAAM;AAAA,MACb,OAAO,MAAM;AAAA,MACb,UAAU,OAAO;AAAA,MACjB,SAAS;AAAA,MACT,KAAK,OAAO;AAAA,MACZ,SAAS,OAAO;AAAA,IACjB,CAAC;AAAA,EACF;AACD;AASA,SAAS,WAAW,OAAiB,QAA4B;AAChE,QAAM,MAAM,IAAI,QAAQ,MAAM,KAAK,OAAO,GAAG;AAC7C,MAAI,MAAM,EAAG,QAAO;AACpB,MAAI,MAAM,EAAG,QAAO;AAEpB,SAAO,MAAM,WAAW,OAAO,WAAW,QAAQ;AACnD;AAaA,SAAS,aAAa,OAAiB,QAAiC;AACvE,QAAM,WAAW,IAAI,IAAI,MAAM,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AAChE,QAAM,YAAY,IAAI,IAAI,OAAO,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AAClE,QAAM,aAAa,oBAAI,IAAI,CAAC,GAAG,SAAS,KAAK,GAAG,GAAG,UAAU,KAAK,CAAC,CAAC;AACpE,QAAM,SAAS,WAAW,OAAO,MAAM;AAEvC,QAAM,SAAwB,CAAC;AAE/B,aAAW,OAAO,YAAY;AAC7B,UAAM,WAAW,SAAS,IAAI,GAAG;AACjC,UAAM,YAAY,UAAU,IAAI,GAAG;AAEnC,QAAI,CAAC,WAAW;AACf,aAAO,KAAK,QAAS;AAAA,IACtB,WAAW,CAAC,UAAU;AACrB,aAAO,KAAK,SAAS;AAAA,IACtB,OAAO;AAEN,aAAO,KAAK,WAAW,QAAQ,WAAW,SAAS;AAAA,IACpD;AAAA,EACD;AAEA,SAAO;AACR;AAEA,IAAM,aAAa,IAAI,YAAY;AAW5B,SAAS,WAAW,OAAiB,QAAmD;AAC9F,SAAO,WAAW,QAAQ,OAAO,MAAM;AACxC;;;ACvGO,SAAS,gBAAgB,KAAoC;AACnE,MAAI,QAAQ,QAAQ,OAAO,QAAQ,SAAU,QAAO;AACpD,QAAM,YAAY;AAClB,SAAO,MAAM,QAAQ,UAAU,gBAAgB,KAAK,OAAO,UAAU,kBAAkB;AACxF;;;ACxCO,IAAM,2BAAN,cAAuC,cAAc;AAAA,EAC3D,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,wBAAwB,KAAK;AAAA,EAC7C;AACD;;;ACNO,IAAM,kBAAkB,CAAC,YAAY,SAAS,YAAY,QAAQ,YAAY;;;ACGrF,IAAM,mBAAmB,oBAAI,IAAI,CAAC,UAAU,MAAM,CAAC;AAc5C,SAAS,wBACf,OACoD;AACpD,MAAI,OAAO,UAAU,YAAY,UAAU,MAAM;AAChD,WAAO,IAAI,IAAI,yBAAyB,oCAAoC,CAAC;AAAA,EAC9E;AAEA,QAAM,MAAM;AAGZ,MAAI,OAAO,IAAI,SAAS,YAAY,IAAI,KAAK,WAAW,GAAG;AAC1D,WAAO,IAAI,IAAI,yBAAyB,2CAA2C,CAAC;AAAA,EACrF;AAGA,MAAI,OAAO,IAAI,SAAS,YAAY,CAAE,gBAAsC,SAAS,IAAI,IAAI,GAAG;AAC/F,WAAO;AAAA,MACN,IAAI,yBAAyB,kCAAkC,gBAAgB,KAAK,IAAI,CAAC,EAAE;AAAA,IAC5F;AAAA,EACD;AAEA,QAAM,gBAAgB,IAAI;AAG1B,UAAQ,eAAe;AAAA,IACtB,KAAK,YAAY;AAChB,YAAM,KAAK,IAAI;AACf,UAAI,OAAO,OAAO,YAAY,OAAO,MAAM;AAC1C,eAAO;AAAA,UACN,IAAI;AAAA,YACH;AAAA,UACD;AAAA,QACD;AAAA,MACD;AACA,YAAM,QAAQ;AACd,UAAI,OAAO,MAAM,qBAAqB,YAAY,MAAM,iBAAiB,WAAW,GAAG;AACtF,eAAO;AAAA,UACN,IAAI,yBAAyB,0DAA0D;AAAA,QACxF;AAAA,MACD;AACA;AAAA,IACD;AAAA,IACA,KAAK,SAAS;AACb,YAAM,KAAK,IAAI;AACf,UAAI,OAAO,OAAO,YAAY,OAAO,MAAM;AAC1C,eAAO;AAAA,UACN,IAAI,yBAAyB,uDAAuD;AAAA,QACrF;AAAA,MACD;AACA,YAAM,QAAQ;AACd,UAAI,OAAO,MAAM,qBAAqB,YAAY,MAAM,iBAAiB,WAAW,GAAG;AACtF,eAAO;AAAA,UACN,IAAI,yBAAyB,uDAAuD;AAAA,QACrF;AAAA,MACD;AACA;AAAA,IACD;AAAA,IACA,KAAK,YAAY;AAChB,YAAM,KAAK,IAAI;AACf,UAAI,OAAO,OAAO,YAAY,OAAO,MAAM;AAC1C,eAAO;AAAA,UACN,IAAI;AAAA,YACH;AAAA,UACD;AAAA,QACD;AAAA,MACD;AACA,YAAM,QAAQ;AACd,UAAI,OAAO,MAAM,cAAc,YAAY,MAAM,UAAU,WAAW,GAAG;AACxE,eAAO;AAAA,UACN,IAAI,yBAAyB,mDAAmD;AAAA,QACjF;AAAA,MACD;AACA,UAAI,OAAO,MAAM,YAAY,YAAY,MAAM,QAAQ,WAAW,GAAG;AACpE,eAAO,IAAI,IAAI,yBAAyB,iDAAiD,CAAC;AAAA,MAC3F;AACA;AAAA,IACD;AAAA,IACA,KAAK,QAAQ;AACZ,YAAM,OAAO,IAAI;AACjB,UAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC9C,eAAO;AAAA,UACN,IAAI,yBAAyB,qDAAqD;AAAA,QACnF;AAAA,MACD;AACA,YAAM,UAAU;AAChB,UAAI,OAAO,QAAQ,WAAW,YAAY,QAAQ,OAAO,WAAW,GAAG;AACtE,eAAO,IAAI,IAAI,yBAAyB,4CAA4C,CAAC;AAAA,MACtF;AACA,UAAI,OAAO,QAAQ,UAAU,YAAY,QAAQ,MAAM,WAAW,GAAG;AACpE,eAAO,IAAI,IAAI,yBAAyB,2CAA2C,CAAC;AAAA,MACrF;AACA,UAAI,OAAO,QAAQ,aAAa
,YAAY,QAAQ,SAAS,WAAW,GAAG;AAC1E,eAAO,IAAI,IAAI,yBAAyB,8CAA8C,CAAC;AAAA,MACxF;AACA;AAAA,IACD;AAAA,IACA,KAAK,cAAc;AAClB,YAAM,KAAK,IAAI;AACf,UAAI,OAAO,OAAO,YAAY,OAAO,MAAM;AAC1C,eAAO;AAAA,UACN,IAAI;AAAA,YACH;AAAA,UACD;AAAA,QACD;AAAA,MACD;AACA,YAAM,QAAQ;AACd,UAAI,OAAO,MAAM,gBAAgB,YAAY,MAAM,YAAY,WAAW,GAAG;AAC5E,eAAO;AAAA,UACN,IAAI,yBAAyB,uDAAuD;AAAA,QACrF;AAAA,MACD;AACA,UAAI,OAAO,MAAM,aAAa,YAAY,MAAM,SAAS,WAAW,GAAG;AACtE,eAAO;AAAA,UACN,IAAI,yBAAyB,oDAAoD;AAAA,QAClF;AAAA,MACD;AACA,UAAI,OAAO,MAAM,iBAAiB,YAAY,MAAM,aAAa,WAAW,GAAG;AAC9E,eAAO;AAAA,UACN,IAAI,yBAAyB,wDAAwD;AAAA,QACtF;AAAA,MACD;AACA,UAAI,OAAO,MAAM,aAAa,YAAY,MAAM,SAAS,WAAW,GAAG;AACtE,eAAO;AAAA,UACN,IAAI,yBAAyB,oDAAoD;AAAA,QAClF;AAAA,MACD;AACA,UAAI,OAAO,MAAM,aAAa,YAAY,MAAM,SAAS,WAAW,GAAG;AACtE,eAAO;AAAA,UACN,IAAI,yBAAyB,oDAAoD;AAAA,QAClF;AAAA,MACD;AACA;AAAA,IACD;AAAA,EACD;AAGA,MAAI,IAAI,WAAW,QAAW;AAC7B,QAAI,OAAO,IAAI,WAAW,YAAY,IAAI,WAAW,MAAM;AAC1D,aAAO,IAAI,IAAI,yBAAyB,iCAAiC,CAAC;AAAA,IAC3E;AAEA,UAAM,SAAS,IAAI;AAGnB,QAAI,kBAAkB,UAAU,kBAAkB,cAAc;AAC/D,UAAI,OAAO,eAAe,QAAW;AACpC,YAAI,OAAO,OAAO,eAAe,YAAY,OAAO,aAAa,GAAG;AACnE,iBAAO,IAAI,IAAI,yBAAyB,6CAA6C,CAAC;AAAA,QACvF;AAAA,MACD;AACA,aAAO,GAAG,KAAwB;AAAA,IACnC;AAEA,QAAI,CAAC,MAAM,QAAQ,OAAO,MAAM,KAAK,OAAO,OAAO,WAAW,GAAG;AAChE,aAAO,IAAI,IAAI,yBAAyB,kDAAkD,CAAC;AAAA,IAC5F;AAEA,aAAS,IAAI,GAAG,IAAI,OAAO,OAAO,QAAQ,KAAK;AAC9C,YAAM,QAAQ,OAAO,OAAO,CAAC;AAE7B,UAAI,OAAO,UAAU,YAAY,UAAU,MAAM;AAChD,eAAO,IAAI,IAAI,yBAAyB,yBAAyB,CAAC,oBAAoB,CAAC;AAAA,MACxF;AAEA,UAAI,OAAO,MAAM,UAAU,YAAa,MAAM,MAAiB,WAAW,GAAG;AAC5E,eAAO;AAAA,UACN,IAAI;AAAA,YACH,yBAAyB,CAAC;AAAA,UAC3B;AAAA,QACD;AAAA,MACD;AAEA,UAAI,OAAO,MAAM,UAAU,YAAa,MAAM,MAAiB,WAAW,GAAG;AAC5E,eAAO;AAAA,UACN,IAAI,yBAAyB,yBAAyB,CAAC,8BAA8B;AAAA,QACtF;AAAA,MACD;AAEA,UAAI,OAAO,MAAM,aAAa,YAAY,MAAM,aAAa,MAAM;AAClE,eAAO;AAAA,UACN,IAAI,yBAAyB,yBAAyB,CAAC,8BAA8B;AAAA,QACtF;AAAA,MACD;AAEA,YAAM,WAAW,MAAM;AACvB,UAAI,CAAC,iBAAiB,IAAI,SAAS,IAAc,GAAG;AACnD,eAAO;AAAA,UACN,IAAI;AAAA,YACH,yBAAyB,CAAC;AAAA,UAC3B;AAAA,QACD;AAAA,MACD;AAEA,UAAI,SAAS,SAAS,UAAU;AAC/B,YACC,OAAO,SAAS,iBAAiB,YAChC,SAAS,aAAwB,WAAW,GAC5C;AACD,iBAAO;AAAA,YACN,IAAI;AAAA,cACH,yBAAyB,CAAC;AAAA,YAC3B;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAEA,QAAI,OAAO,eAAe,QAAW;AACpC,UAAI,OAAO,OAAO,eAAe,YAAY,OAAO,aAAa,GAAG;AACnE,eAAO,IAAI,IAAI,yBAAyB,6CAA6C,CAAC;AAAA,MACvF;AAAA,IACD;AAAA,EACD;AAEA,SAAO,GAAG,KAAwB;AACnC;;;AC5NO,SAAS,WACf,KACA,OACiC;AACjC,MAAI,MAAM,OAAO,SAAU,QAAO;AAElC,QAAM,OAAgC,MAAM,EAAE,GAAG,IAAI,IAAI,CAAC;AAC1D,aAAW,OAAO,MAAM,SAAS;AAChC,SAAK,IAAI,MAAM,IAAI,IAAI;AAAA,EACxB;AACA,SAAO;AACR;;;ACxBA,OAAO,WAAW;AAClB,OAAOA,sBAAqB;AAmB5B,eAAsB,aACrB,QACA,OACA,MAO2B;AAC3B,QAAM,EAAE,OAAO,OAAO,UAAU,KAAK,OAAO,IAAI;AAEhD,QAAM,eAAe,UAAU;AAC/B,QAAM,cAAc,SAAS;AAE7B,MAAI,CAAC,gBAAgB,CAAC,aAAa;AAClC,WAAO;AAAA,EACR;AAGA,MAAI,CAAC,gBAAgB,aAAa;AACjC,UAAMC,WAAU,aAAa,OAAO,MAAM;AAC1C,UAAMC,WAAU,MAAM,gBAAgB,EAAE,UAAU,KAAK,OAAO,OAAO,SAAAD,SAAQ,CAAC;AAC9E,WAAO,EAAE,IAAI,UAAU,OAAO,OAAO,UAAU,SAAAA,UAAS,KAAK,SAAAC,SAAQ;AAAA,EACtE;AAGA,MAAI,gBAAgB,CAAC,aAAa;AACjC,UAAMD,WAAyB,CAAC;AAChC,UAAMC,WAAU,MAAM,gBAAgB,EAAE,UAAU,KAAK,OAAO,OAAO,SAAAD,SAAQ,CAAC;AAC9E,WAAO,EAAE,IAAI,UAAU,OAAO,OAAO,UAAU,SAAAA,UAAS,KAAK,SAAAC,SAAQ;AAAA,EACtE;AAGA,QAAM,UAAU,YAAY,QAAS,OAAQ,MAAM;AACnD,MAAI,QAAQ,WAAW,GAAG;AACzB,WAAO;AAAA,EACR;AAEA,QAAM,UAAU,MAAM,gBAAgB,EAAE,UAAU,KAAK,OAAO,OAAO,QAAQ,CAAC;AAC9E,SAAO,EAAE,IAAI,UAAU,OAAO,OAAO,UAAU,SAAS,KAAK,QAAQ;AACtE;AAGA,SAAS,WAAW,QAA0C;AAC7D,SAAO,SAAS,IAAI,IAAI,OAAO,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,IAAI;AAC9D;AAMA,SAAS,aAAa,KAA8B,QAAqC;AACxF,QAAM,UAAU,WAAW,MAAM;AACjC,QAAM,UAAyB,CAAC;AAEhC,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ
,GAAG,GAAG;AAC/C,QAAI,UAAU,OAAW;AACzB,QAAI,WAAW,CAAC,QAAQ,IAAI,GAAG,EAAG;AAClC,YAAQ,KAAK,EAAE,QAAQ,KAAK,MAAM,CAAC;AAAA,EACpC;AAEA,SAAO;AACR;AAMA,SAAS,YACR,QACA,OACA,QACgB;AAChB,QAAM,UAAU,WAAW,MAAM;AACjC,QAAM,UAAU,oBAAI,IAAI,CAAC,GAAG,OAAO,KAAK,MAAM,GAAG,GAAG,OAAO,KAAK,KAAK,CAAC,CAAC;AACvE,QAAM,UAAyB,CAAC;AAEhC,aAAW,OAAO,SAAS;AAC1B,QAAI,WAAW,CAAC,QAAQ,IAAI,GAAG,EAAG;AAElC,UAAM,YAAY,OAAO,GAAG;AAC5B,UAAM,WAAW,MAAM,GAAG;AAG1B,QAAI,aAAa,OAAW;AAG5B,QAAI,cAAc,QAAW;AAC5B,cAAQ,KAAK,EAAE,QAAQ,KAAK,OAAO,SAAS,CAAC;AAC7C;AAAA,IACD;AAGA,QAAI,OAAO,GAAG,WAAW,QAAQ,EAAG;AAGpC,QACC,OAAO,cAAc,YACrB,cAAc,QACd,OAAO,aAAa,YACpB,aAAa,QACb,MAAM,WAAW,QAAQ,GACxB;AACD;AAAA,IACD;AAEA,YAAQ,KAAK,EAAE,QAAQ,KAAK,OAAO,SAAS,CAAC;AAAA,EAC9C;AAEA,SAAO;AACR;AAMA,eAAe,gBAAgB,QAMX;AACnB,QAAM,UAAUF,iBAAgB;AAAA,IAC/B,UAAU,OAAO;AAAA,IACjB,KAAK,OAAO,IAAI,SAAS;AAAA,IACzB,OAAO,OAAO;AAAA,IACd,OAAO,OAAO;AAAA,IACd,SAAS,OAAO;AAAA,EACjB,CAAC;AAED,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,OAAO;AAC7C,QAAM,aAAa,MAAM,OAAO,OAAO,OAAO,WAAW,IAAI;AAC7D,QAAM,QAAQ,IAAI,WAAW,UAAU;AAEvC,MAAI,MAAM;AACV,aAAW,KAAK,OAAO;AACtB,WAAO,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAAA,EACtC;AACA,SAAO;AACR;;;ACtHO,SAAS,OAAO,OAAe,OAAuB;AAC5D,SAAO,GAAG,KAAK,IAAI,KAAK;AACzB;;;ACxCO,SAAS,eAAe,MAAc,OAAyB;AACrE,SAAO,OAAO,UAAU,WAAW,MAAM,SAAS,IAAI;AACvD;AAUO,SAAS,cAAc,KAAa,OAAyB;AACnE,MAAI,OAAO,UAAU,YAAY,QAAQ,KAAK,GAAG,GAAG;AACnD,QAAI;AACH,aAAO,OAAO,KAAK;AAAA,IACpB,QAAQ;AACP,aAAO;AAAA,IACR;AAAA,EACD;AACA,SAAO;AACR;;;ACnBO,SAAS,qBAAsC;AACrD,SAAO;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,MACR;AAAA,QACC,MAAM;AAAA,QACN,QAAQ,CAAC;AAAA,QACT,SAAS,CAAC;AAAA,MACX;AAAA,IACD;AAAA,EACD;AACD;AAWO,SAAS,sBAAsB,QAAkB,aAAa,WAA4B;AAChG,SAAO;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,MACR;AAAA,QACC,MAAM;AAAA,QACN;AAAA,QACA,SAAS;AAAA,UACR;AAAA,YACC,QAAQ;AAAA,YACR,IAAI;AAAA,YACJ,OAAO;AAAA,UACR;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA,EACD;AACD;;;AC5CO,IAAM,gBAAN,cAA4B,cAAc;AAAA,EAChD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,mBAAmB,KAAK;AAAA,EACxC;AACD;;;ACcO,SAAS,mBAAmB,OAAe,QAAkC;AACnF,MAAI,CAAC,MAAM,WAAW,MAAM,GAAG;AAC9B,WAAO,CAAC,KAAK;AAAA,EACd;AAEA,QAAM,WAAW,MAAM,MAAM,CAAC;AAC9B,QAAM,aAAa,OAAO,QAAQ;AAElC,MAAI,eAAe,QAAW;AAC7B,WAAO,CAAC;AAAA,EACT;AAEA,SAAO,MAAM,QAAQ,UAAU,IAAI,aAAa,CAAC,UAAU;AAC5D;AAgBO,SAAS,mBACf,OACA,QACA,QACU;AAEV,MAAI,OAAO,OAAO,SAAS,KAAK,CAAC,OAAO,OAAO,SAAS,MAAM,KAAK,GAAG;AACrE,WAAO;AAAA,EACR;AAGA,aAAW,UAAU,OAAO,SAAS;AACpC,QAAI,CAAC,mBAAmB,OAAO,QAAQ,MAAM,GAAG;AAC/C,aAAO;AAAA,IACR;AAAA,EACD;AAEA,SAAO;AACR;AAMA,SAAS,cACR,YACA,aACA,IACU;AACV,QAAM,WAAW,WAAW,UAAU;AACtC,QAAM,YAAY,WAAW,WAAW;AACxC,QAAM,aAAa,CAAC,OAAO,MAAM,QAAQ,KAAK,CAAC,OAAO,MAAM,SAAS;AAErE,MAAI,YAAY;AACf,YAAQ,IAAI;AAAA,MACX,KAAK;AACJ,eAAO,WAAW;AAAA,MACnB,KAAK;AACJ,eAAO,WAAW;AAAA,MACnB,KAAK;AACJ,eAAO,YAAY;AAAA,MACpB,KAAK;AACJ,eAAO,YAAY;AAAA,IACrB;AAAA,EACD;AAEA,QAAM,MAAM,WAAW,cAAc,WAAW;AAChD,UAAQ,IAAI;AAAA,IACX,KAAK;AACJ,aAAO,MAAM;AAAA,IACd,KAAK;AACJ,aAAO,MAAM;AAAA,IACd,KAAK;AACJ,aAAO,OAAO;AAAA,IACf,KAAK;AACJ,aAAO,OAAO;AAAA,EAChB;AACD;AAEA,IAAM,aAAoE;AAAA,EACzE,IAAI,CAAC,IAAI,OAAO,GAAG,SAAS,EAAE;AAAA,EAC9B,IAAI,CAAC,IAAI,OAAO,GAAG,SAAS,EAAE;AAAA,EAC9B,KAAK,CAAC,IAAI,OAAO,CAAC,GAAG,SAAS,EAAE;AAAA,EAChC,IAAI,CAAC,IAAI,OAAO,cAAc,IAAI,GAAG,CAAC,GAAI,IAAI;AAAA,EAC9C,IAAI,CAAC,IAAI,OAAO,cAAc,IAAI,GAAG,CAAC,GAAI,IAAI;AAAA,EAC9C,KAAK,CAAC,IAAI,OAAO,cAAc,IAAI,GAAG,CAAC,GAAI,KAAK;AAAA,EAChD,KAAK,CAAC,IAAI,OAAO,cAAc,IAAI,GAAG,CAAC,GAAI,KAAK;AACjD;AAKA,SAAS,mBACR,OACA,QACA,QACU;AACV,QAAM,MAAM,MAAM,QAAQ,KAAK,CAAC,MAAM,EAAE,WAAW,OAAO,MAAM;AAChE,MAAI,CAAC,KAAK;AAET,WAAO;AAAA,EACR;AAEA,QAAM,aAAa,OAAO,IAAI,KAAK;AACnC,QAAM,iBAAiB,mBAAmB,OAAO,OAAO,MAAM;AAE9D,MAAI,eAAe,WAAW,GAAG;AAEh
C,WAAO;AAAA,EACR;AAEA,SAAO,WAAW,OAAO,EAAE,IAAI,YAAY,cAAc,KAAK;AAC/D;AAYO,SAAS,aAAa,QAAoB,SAAuC;AACvF,MAAI,QAAQ,MAAM,QAAQ,WAAW,GAAG;AACvC,WAAO;AAAA,EACR;AAEA,SAAO,OAAO;AAAA,IAAO,CAAC,UACrB,QAAQ,MAAM,QAAQ,KAAK,CAAC,WAAW,mBAAmB,OAAO,QAAQ,QAAQ,MAAM,CAAC;AAAA,EACzF;AACD;AAcO,SAAS,qBAAqB,OAAwB,QAAkC;AAC9F,SAAO,MAAM,QACX,OAAO,CAAC,WAAW;AAGnB,eAAW,UAAU,OAAO,SAAS;AACpC,UAAI,OAAO,MAAM,WAAW,MAAM,GAAG;AACpC,cAAM,WAAW,mBAAmB,OAAO,OAAO,MAAM;AACxD,YAAI,SAAS,WAAW,GAAG;AAC1B,iBAAO;AAAA,QACR;AAAA,MACD;AAAA,IACD;AACA,WAAO;AAAA,EACR,CAAC,EACA,IAAI,CAAC,MAAM,EAAE,IAAI;AACpB;AAgBO,SAAS,kBAAkB,QAA8C;AAC/E,MAAI,OAAO,WAAW,YAAY,WAAW,MAAM;AAClD,WAAO,IAAI,IAAI,cAAc,qCAAqC,CAAC;AAAA,EACpE;AAEA,QAAM,MAAM;AAEZ,MAAI,OAAO,IAAI,YAAY,YAAY,CAAC,OAAO,UAAU,IAAI,OAAO,KAAK,IAAI,UAAU,GAAG;AACzF,WAAO,IAAI,IAAI,cAAc,+CAA+C,CAAC;AAAA,EAC9E;AAEA,MAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,GAAG;AAChC,WAAO,IAAI,IAAI,cAAc,qCAAqC,CAAC;AAAA,EACpE;AAEA,QAAM,YAAY,oBAAI,IAAY;AAElC,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,QAAQ,KAAK;AAC5C,UAAM,SAAS,IAAI,QAAQ,CAAC;AAE5B,QAAI,OAAO,WAAW,YAAY,WAAW,MAAM;AAClD,aAAO,IAAI,IAAI,cAAc,mBAAmB,CAAC,oBAAoB,CAAC;AAAA,IACvE;AAEA,QAAI,OAAO,OAAO,SAAS,YAAY,OAAO,KAAK,WAAW,GAAG;AAChE,aAAO,IAAI,IAAI,cAAc,mBAAmB,CAAC,6BAA6B,CAAC;AAAA,IAChF;AAEA,QAAI,UAAU,IAAI,OAAO,IAAc,GAAG;AACzC,aAAO,IAAI,IAAI,cAAc,2BAA2B,OAAO,IAAI,GAAG,CAAC;AAAA,IACxE;AACA,cAAU,IAAI,OAAO,IAAc;AAEnC,QAAI,CAAC,MAAM,QAAQ,OAAO,MAAM,GAAG;AAClC,aAAO,IAAI,IAAI,cAAc,WAAW,OAAO,IAAI,2BAA2B,CAAC;AAAA,IAChF;AAEA,eAAW,SAAS,OAAO,QAAqB;AAC/C,UAAI,OAAO,UAAU,YAAY,MAAM,WAAW,GAAG;AACpD,eAAO;AAAA,UACN,IAAI,cAAc,WAAW,OAAO,IAAI,yCAAyC;AAAA,QAClF;AAAA,MACD;AAAA,IACD;AAEA,QAAI,CAAC,MAAM,QAAQ,OAAO,OAAO,GAAG;AACnC,aAAO,IAAI,IAAI,cAAc,WAAW,OAAO,IAAI,4BAA4B,CAAC;AAAA,IACjF;AAEA,aAAS,IAAI,GAAG,IAAK,OAAO,QAAsB,QAAQ,KAAK;AAC9D,YAAM,SAAU,OAAO,QAAsC,CAAC;AAE9D,UAAI,OAAO,WAAW,YAAY,WAAW,MAAM;AAClD,eAAO;AAAA,UACN,IAAI,cAAc,WAAW,OAAO,IAAI,qBAAqB,CAAC,oBAAoB;AAAA,QACnF;AAAA,MACD;AAEA,UAAI,OAAO,OAAO,WAAW,YAAa,OAAO,OAAkB,WAAW,GAAG;AAChF,eAAO;AAAA,UACN,IAAI;AAAA,YACH,WAAW,OAAO,IAAI,qBAAqB,CAAC;AAAA,UAC7C;AAAA,QACD;AAAA,MACD;AAEA,YAAM,WAAW,CAAC,MAAM,MAAM,OAAO,MAAM,MAAM,OAAO,KAAK;AAC7D,UAAI,CAAC,SAAS,SAAS,OAAO,EAAY,GAAG;AAC5C,eAAO;AAAA,UACN,IAAI;AAAA,YACH,WAAW,OAAO,IAAI,qBAAqB,CAAC,uBAAuB,SAAS,KAAK,IAAI,CAAC;AAAA,UACvF;AAAA,QACD;AAAA,MACD;AAEA,UAAI,OAAO,OAAO,UAAU,YAAa,OAAO,MAAiB,WAAW,GAAG;AAC9E,eAAO;AAAA,UACN,IAAI;AAAA,YACH,WAAW,OAAO,IAAI,qBAAqB,CAAC;AAAA,UAC7C;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAEA,SAAO,GAAG,MAAS;AACpB;;;AClSA,IAAM,gBAAgB;AAWf,SAAS,kBAAkB,MAAuB;AACxD,SAAO,cAAc,KAAK,IAAI;AAC/B;AAQO,SAAS,sBAAsB,MAAyC;AAC9E,MAAI,kBAAkB,IAAI,GAAG;AAC5B,WAAO,GAAG,MAAS;AAAA,EACpB;AACA,SAAO;AAAA,IACN,IAAI;AAAA,MACH,4BAA4B,IAAI;AAAA,IACjC;AAAA,EACD;AACD;AAWO,SAAS,gBAAgB,MAAsB;AACrD,SAAO,IAAI,KAAK,QAAQ,MAAM,IAAI,CAAC;AACpC;","names":["stableStringify","columns","deltaId"]}
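Editor's note: the source map above embeds the readable delta-extraction source; a minimal sketch of the documented extractDelta contract (opts — table, rowId, clientId, hlc — is hypothetical):

  // INSERT: no previous state -> all columns emitted
  await extractDelta(null, { title: "a" }, opts);           // op: "INSERT"
  // DELETE: state removed -> empty columns
  await extractDelta({ title: "a" }, null, opts);           // op: "DELETE", columns: []
  // UPDATE: only changed columns survive the diff
  await extractDelta({ title: "a" }, { title: "b" }, opts); // op: "UPDATE", columns: [{ column: "title", value: "b" }]
  // No change -> null (no-op)
  await extractDelta({ title: "a" }, { title: "a" }, opts); // null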
@@ -0,0 +1,413 @@
1
+ import {
2
+ Err,
3
+ LakeSyncError,
4
+ Ok
5
+ } from "./chunk-ICNT7I3K.js";
6
+
7
+ // ../catalogue/src/types.ts
8
+ var CatalogueError = class extends LakeSyncError {
9
+ statusCode;
10
+ constructor(message, statusCode, cause) {
11
+ super(message, "CATALOGUE_ERROR", cause);
12
+ this.statusCode = statusCode;
13
+ }
14
+ };
15
+
16
+ // ../catalogue/src/nessie-client.ts
17
+ function encodeNamespace(namespace) {
18
+ return namespace.map(encodeURIComponent).join("%1F");
19
+ }
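// Editor's note (not part of the published bundle): the Iceberg REST spec joins
// multi-part namespaces with the 0x1F unit separator, percent-encoded as %1F:
//   encodeNamespace(["lakesync"])        // "lakesync"
//   encodeNamespace(["org", "lakesync"]) // "org%1Flakesync"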
20
+ var NessieCatalogueClient = class {
21
+ baseUri;
22
+ warehouseUri;
23
+ prefixPromise = null;
24
+ constructor(config) {
25
+ this.baseUri = config.nessieUri.replace(/\/$/, "");
26
+ this.warehouseUri = config.warehouseUri;
27
+ }
28
+ /**
29
+ * Resolve the catalogue prefix by calling the `/v1/config` endpoint.
30
+ *
31
+ * The Iceberg REST specification requires a prefix segment in all
32
+ * API paths (e.g. `/v1/{prefix}/namespaces`). Nessie returns this
33
+ * value in the `defaults.prefix` field of the config response.
34
+ *
35
+ * The result is cached so the config endpoint is only called once
36
+ * per client instance.
37
+ *
38
+ * @returns The resolved prefix string (e.g. `"main"`)
39
+ */
40
+ resolvePrefix() {
41
+ if (this.prefixPromise) {
42
+ return this.prefixPromise;
43
+ }
44
+ this.prefixPromise = (async () => {
45
+ try {
46
+ const url = `${this.baseUri}/v1/config`;
47
+ const response = await fetch(url, {
48
+ method: "GET",
49
+ headers: { Accept: "application/json" }
50
+ });
51
+ if (!response.ok) {
52
+ return "";
53
+ }
54
+ const data = await response.json();
55
+ return data.defaults?.prefix ?? "";
56
+ } catch {
57
+ return "";
58
+ }
59
+ })();
60
+ return this.prefixPromise;
61
+ }
62
+ /**
63
+ * Build the base API path including the resolved prefix.
64
+ *
65
+ * @returns URL prefix such as `http://host/iceberg/v1/main` or
66
+ * `http://host/iceberg/v1` when no prefix is configured
67
+ */
68
+ async apiBase() {
69
+ const prefix = await this.resolvePrefix();
70
+ if (prefix) {
71
+ return `${this.baseUri}/v1/${encodeURIComponent(prefix)}`;
72
+ }
73
+ return `${this.baseUri}/v1`;
74
+ }
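// Editor's sketch (endpoint values are assumptions, not from the package):
// prefix resolution is lazy and cached, so apiBase() costs at most one
// /v1/config round-trip per client instance.
//   const client = new NessieCatalogueClient({
//     nessieUri: "http://localhost:19120/iceberg",
//     warehouseUri: "s3://warehouse",
//   });
//   await client.apiBase(); // "http://localhost:19120/iceberg/v1/main" when
//                           // /v1/config returns { defaults: { prefix: "main" } }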
75
+ /**
76
+ * Create a namespace (idempotent -- ignores 409 Conflict).
77
+ *
78
+ * @param namespace - Namespace parts, e.g. `["lakesync"]`
79
+ * @returns `Ok(void)` on success or if namespace already exists
80
+ */
81
+ async createNamespace(namespace) {
82
+ const base = await this.apiBase();
83
+ const url = `${base}/namespaces`;
84
+ const body = {
85
+ namespace,
86
+ properties: {}
87
+ };
88
+ try {
89
+ const response = await fetch(url, {
90
+ method: "POST",
91
+ headers: { "Content-Type": "application/json" },
92
+ body: JSON.stringify(body)
93
+ });
94
+ if (response.status === 409) {
95
+ return Ok(void 0);
96
+ }
97
+ if (!response.ok) {
98
+ const text = await response.text().catch(() => "");
99
+ return Err(
100
+ new CatalogueError(
101
+ `Failed to create namespace: ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`,
102
+ response.status
103
+ )
104
+ );
105
+ }
106
+ return Ok(void 0);
107
+ } catch (error) {
108
+ return Err(
109
+ new CatalogueError(
110
+ `Network error creating namespace: ${error instanceof Error ? error.message : String(error)}`,
111
+ 0,
112
+ error instanceof Error ? error : void 0
113
+ )
114
+ );
115
+ }
116
+ }
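// Editor's sketch: because 409 Conflict maps to Ok, namespace creation can run
// unconditionally at startup; failures carry a CatalogueError whose statusCode
// is the HTTP status (0 for network errors):
//   const res = await client.createNamespace(["lakesync"]);
//   if (!res.ok) console.error(res.error.code, res.error.statusCode);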
117
+ /**
118
+ * List all namespaces in the catalogue.
119
+ *
120
+ * @returns Array of namespace arrays, e.g. `[["lakesync"], ["other"]]`
121
+ */
122
+ async listNamespaces() {
123
+ const base = await this.apiBase();
124
+ const url = `${base}/namespaces`;
125
+ try {
126
+ const response = await fetch(url, {
127
+ method: "GET",
128
+ headers: { Accept: "application/json" }
129
+ });
130
+ if (!response.ok) {
131
+ const text = await response.text().catch(() => "");
132
+ return Err(
133
+ new CatalogueError(
134
+ `Failed to list namespaces: ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`,
135
+ response.status
136
+ )
137
+ );
138
+ }
139
+ const data = await response.json();
140
+ return Ok(data.namespaces);
141
+ } catch (error) {
142
+ return Err(
143
+ new CatalogueError(
144
+ `Network error listing namespaces: ${error instanceof Error ? error.message : String(error)}`,
145
+ 0,
146
+ error instanceof Error ? error : void 0
147
+ )
148
+ );
149
+ }
150
+ }
151
+ /**
152
+ * Create an Iceberg table within a namespace.
153
+ *
154
+ * @param namespace - Namespace parts, e.g. `["lakesync"]`
155
+ * @param name - Table name
156
+ * @param schema - Iceberg schema definition
157
+ * @param partitionSpec - Partition specification
158
+ */
159
+ async createTable(namespace, name, schema, partitionSpec) {
160
+ const ns = encodeNamespace(namespace);
161
+ const base = await this.apiBase();
162
+ const url = `${base}/namespaces/${ns}/tables`;
163
+ const location = `${this.warehouseUri}/${namespace.join("/")}/${name}`;
164
+ const body = {
165
+ name,
166
+ schema,
167
+ "partition-spec": partitionSpec,
168
+ "stage-create": false,
169
+ location,
170
+ properties: {}
171
+ };
172
+ try {
173
+ const response = await fetch(url, {
174
+ method: "POST",
175
+ headers: { "Content-Type": "application/json" },
176
+ body: JSON.stringify(body)
177
+ });
178
+ if (!response.ok) {
179
+ const text = await response.text().catch(() => "");
180
+ return Err(
181
+ new CatalogueError(
182
+ `Failed to create table ${namespace.join(".")}.${name}: ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`,
183
+ response.status
184
+ )
185
+ );
186
+ }
187
+ return Ok(void 0);
188
+ } catch (error) {
189
+ return Err(
190
+ new CatalogueError(
191
+ `Network error creating table: ${error instanceof Error ? error.message : String(error)}`,
192
+ 0,
193
+ error instanceof Error ? error : void 0
194
+ )
195
+ );
196
+ }
197
+ }
198
+ /**
199
+ * Load table metadata from the catalogue.
200
+ *
201
+ * @param namespace - Namespace parts, e.g. `["lakesync"]`
202
+ * @param name - Table name
203
+ * @returns Full table metadata including schemas, snapshots, and partition specs
204
+ */
205
+ async loadTable(namespace, name) {
206
+ const ns = encodeNamespace(namespace);
207
+ const base = await this.apiBase();
208
+ const url = `${base}/namespaces/${ns}/tables/${encodeURIComponent(name)}`;
209
+ try {
210
+ const response = await fetch(url, {
211
+ method: "GET",
212
+ headers: { Accept: "application/json" }
213
+ });
214
+ if (!response.ok) {
215
+ const text = await response.text().catch(() => "");
216
+ return Err(
217
+ new CatalogueError(
218
+ `Failed to load table ${namespace.join(".")}.${name}: ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`,
219
+ response.status
220
+ )
221
+ );
222
+ }
223
+ const data = await response.json();
224
+ return Ok(data);
225
+ } catch (error) {
226
+ return Err(
227
+ new CatalogueError(
228
+ `Network error loading table: ${error instanceof Error ? error.message : String(error)}`,
229
+ 0,
230
+ error instanceof Error ? error : void 0
231
+ )
232
+ );
233
+ }
234
+ }
235
+ /**
236
+ * Append data files to a table, creating a new snapshot.
237
+ *
238
+ * Uses the standard Iceberg REST v1 commit-table endpoint with
239
+ * `add-snapshot` and `set-snapshot-ref` metadata updates.
240
+ * First loads the current table metadata to determine the current state,
241
+ * then commits a new snapshot referencing the provided data files.
242
+ *
243
+ * @param namespace - Namespace parts, e.g. `["lakesync"]`
244
+ * @param table - Table name
245
+ * @param files - Data files to append
246
+ */
247
+ async appendFiles(namespace, table, files) {
248
+ const metadataResult = await this.loadTable(namespace, table);
249
+ if (!metadataResult.ok) {
250
+ return metadataResult;
251
+ }
252
+ const metadata = metadataResult.value;
253
+ const currentSchemaId = metadata.metadata["current-schema-id"];
254
+ const ns = encodeNamespace(namespace);
255
+ const base = await this.apiBase();
256
+ const url = `${base}/namespaces/${ns}/tables/${encodeURIComponent(table)}`;
257
+ const snapshotId = Date.now() * 1e3 + Math.floor(Math.random() * 1e3);
258
+ const timestampMs = Date.now();
259
+ const totalRecords = files.reduce((sum, f) => sum + f["record-count"], 0);
260
+ const totalSize = files.reduce((sum, f) => sum + f["file-size-in-bytes"], 0);
261
+ const snapshot = {
262
+ "snapshot-id": snapshotId,
263
+ "timestamp-ms": timestampMs,
264
+ summary: {
265
+ operation: "append",
266
+ "added-data-files": String(files.length),
267
+ "added-records": String(totalRecords),
268
+ "added-files-size": String(totalSize)
269
+ },
270
+ "schema-id": currentSchemaId
271
+ };
272
+ const currentSnapshotId = metadata.metadata["current-snapshot-id"];
273
+ if (currentSnapshotId !== void 0) {
274
+ snapshot["parent-snapshot-id"] = currentSnapshotId;
275
+ }
276
+ const commitBody = {
277
+ requirements: [
278
+ {
279
+ type: "assert-current-schema-id",
280
+ "current-schema-id": currentSchemaId
281
+ }
282
+ ],
283
+ updates: [
284
+ {
285
+ action: "add-snapshot",
286
+ snapshot
287
+ },
288
+ {
289
+ action: "set-snapshot-ref",
290
+ "ref-name": "main",
291
+ type: "branch",
292
+ "snapshot-id": snapshotId
293
+ }
294
+ ]
295
+ };
296
+ try {
297
+ const response = await fetch(url, {
298
+ method: "POST",
299
+ headers: { "Content-Type": "application/json" },
300
+ body: JSON.stringify(commitBody)
301
+ });
302
+ if (!response.ok) {
303
+ const text = await response.text().catch(() => "");
304
+ return Err(
305
+ new CatalogueError(
306
+ `Failed to append files to ${namespace.join(".")}.${table}: ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`,
307
+ response.status
308
+ )
309
+ );
310
+ }
311
+ return Ok(void 0);
312
+ } catch (error) {
313
+ return Err(
314
+ new CatalogueError(
315
+ `Network error appending files: ${error instanceof Error ? error.message : String(error)}`,
316
+ 0,
317
+ error instanceof Error ? error : void 0
318
+ )
319
+ );
320
+ }
321
+ }
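// Editor's note: as bundled here, `files` feeds only the snapshot summary
// counts ("added-records", "added-files-size"); the commit body itself carries
// no manifest list. A hypothetical call, shaped to the fields read above:
//   await client.appendFiles(["lakesync"], "events", [{
//     "record-count": 1000,
//     "file-size-in-bytes": 123456,
//   }]);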
322
+ /**
323
+ * Get the current snapshot of a table, or null if no snapshots exist.
324
+ *
325
+ * @param namespace - Namespace parts, e.g. `["lakesync"]`
326
+ * @param table - Table name
327
+ * @returns The current snapshot, or `null` if the table has no snapshots
328
+ */
329
+ async currentSnapshot(namespace, table) {
330
+ const metadataResult = await this.loadTable(namespace, table);
331
+ if (!metadataResult.ok) {
332
+ return metadataResult;
333
+ }
334
+ const metadata = metadataResult.value;
335
+ const currentSnapshotId = metadata.metadata["current-snapshot-id"];
336
+ const snapshots = metadata.metadata.snapshots ?? [];
337
+ if (currentSnapshotId === void 0 || snapshots.length === 0) {
338
+ return Ok(null);
339
+ }
340
+ const snapshot = snapshots.find((s) => s["snapshot-id"] === currentSnapshotId);
341
+ return Ok(snapshot ?? null);
342
+ }
343
+ };
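// Editor's sketch (assumed config values): a typical end-to-end flow. Every
// method returns a Result ({ ok: true, value } | { ok: false, error }) rather
// than throwing:
//   const client = new NessieCatalogueClient({
//     nessieUri: "http://localhost:19120/iceberg",
//     warehouseUri: "s3://warehouse",
//   });
//   await client.createNamespace(["lakesync"]);
//   const schema = tableSchemaToIceberg({
//     table: "events",
//     columns: [{ name: "payload", type: "json" }],
//   });
//   await client.createTable(["lakesync"], "events", schema, buildPartitionSpec(schema));
//   await client.currentSnapshot(["lakesync"], "events"); // Ok(null) until the first append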
344
+
345
+ // ../catalogue/src/schema-mapping.ts
346
+ function lakeSyncTypeToIceberg(colType) {
347
+ switch (colType) {
348
+ case "string":
349
+ return "string";
350
+ case "number":
351
+ return "double";
352
+ case "boolean":
353
+ return "boolean";
354
+ case "json":
355
+ return "string";
356
+ case "null":
357
+ return "string";
358
+ }
359
+ }
360
+ function tableSchemaToIceberg(schema) {
361
+ let fieldId = 1;
362
+ const systemFields = [
363
+ { id: fieldId++, name: "op", required: true, type: "string" },
364
+ { id: fieldId++, name: "table", required: true, type: "string" },
365
+ { id: fieldId++, name: "rowId", required: true, type: "string" },
366
+ { id: fieldId++, name: "clientId", required: true, type: "string" },
367
+ { id: fieldId++, name: "hlc", required: true, type: "long" },
368
+ { id: fieldId++, name: "deltaId", required: true, type: "string" }
369
+ ];
370
+ const userFields = schema.columns.map((col) => ({
371
+ id: fieldId++,
372
+ name: col.name,
373
+ required: false,
374
+ type: lakeSyncTypeToIceberg(col.type)
375
+ }));
376
+ return {
377
+ type: "struct",
378
+ "schema-id": 0,
379
+ fields: [...systemFields, ...userFields]
380
+ };
381
+ }
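// Editor's sketch: the six system fields always take ids 1-6; user columns
// follow as optional fields. For the one-column schema used above:
//   const s = tableSchemaToIceberg({ table: "events", columns: [{ name: "payload", type: "json" }] });
//   s.fields[0]; // { id: 1, name: "op", required: true, type: "string" }
//   s.fields[6]; // { id: 7, name: "payload", required: false, type: "string" }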
382
+ function buildPartitionSpec(schema) {
383
+ const hlcField = schema.fields.find((f) => f.name === "hlc");
384
+ if (!hlcField) {
385
+ throw new Error("Schema must contain an 'hlc' field for partitioning");
386
+ }
387
+ return {
388
+ "spec-id": 0,
389
+ fields: [
390
+ {
391
+ "source-id": hlcField.id,
392
+ "field-id": 1e3,
393
+ name: "hlc_day",
394
+ transform: "day"
395
+ }
396
+ ]
397
+ };
398
+ }
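// Editor's note: partition-field ids start at 1000 by Iceberg convention; with
// the schema `s` produced in the sketch above, the hlc field has id 5, so:
//   buildPartitionSpec(s);
//   // { "spec-id": 0, fields: [{ "source-id": 5, "field-id": 1000, name: "hlc_day", transform: "day" }] }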
399
+ function lakeSyncTableName(table) {
400
+ return {
401
+ namespace: ["lakesync"],
402
+ name: table
403
+ };
404
+ }
405
+
406
+ export {
407
+ CatalogueError,
408
+ NessieCatalogueClient,
409
+ tableSchemaToIceberg,
410
+ buildPartitionSpec,
411
+ lakeSyncTableName
412
+ };
413
+ //# sourceMappingURL=chunk-P5DRFKIT.js.map