lakesync 0.1.2 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapter.d.ts +1 -1
- package/dist/adapter.js +2 -2
- package/dist/analyst.js +1 -1
- package/dist/catalogue.js +2 -2
- package/dist/{chunk-RDUDHNFX.js → chunk-265CMYJH.js} +2 -2
- package/dist/{chunk-DKFUPHCU.js → chunk-C7ECMNQ4.js} +1 -1
- package/dist/chunk-C7ECMNQ4.js.map +1 -0
- package/dist/{chunk-X3UOVRV7.js → chunk-E7ZDOJCP.js} +2 -2
- package/dist/{chunk-35NGFKNN.js → chunk-HAR3YPCY.js} +5 -5
- package/dist/{chunk-KDRXVETO.js → chunk-L6LTCXJ4.js} +16 -4
- package/dist/chunk-L6LTCXJ4.js.map +1 -0
- package/dist/{chunk-OXF6RDTK.js → chunk-NCMXLWEW.js} +2 -2
- package/dist/{chunk-I5XWBDII.js → chunk-SXQB6JT6.js} +26 -4
- package/dist/chunk-SXQB6JT6.js.map +1 -0
- package/dist/{chunk-RBL3N5ND.js → chunk-TIPMVLIG.js} +2 -2
- package/dist/client.js +3 -3
- package/dist/compactor.js +3 -3
- package/dist/connector-jira.d.ts +18 -1
- package/dist/connector-jira.js +6 -4
- package/dist/connector-salesforce.d.ts +9 -1
- package/dist/connector-salesforce.js +6 -4
- package/dist/gateway-server.js +8 -8
- package/dist/gateway.d.ts +1 -1
- package/dist/gateway.js +5 -5
- package/dist/index.d.ts +2 -2
- package/dist/index.js +1 -1
- package/dist/parquet.js +2 -2
- package/dist/proto.js +2 -2
- package/dist/{src-PFJ5BJZY.js → src-PPKRY5GD.js} +7 -5
- package/dist/{src-3CWNXNX6.js → src-TLTET7JZ.js} +2 -2
- package/dist/{src-BAFKQNDC.js → src-VVCNNYND.js} +7 -5
- package/dist/{types-DAQL_vU_.d.ts → types-D-E0VrfS.d.ts} +4 -0
- package/package.json +1 -1
- package/dist/chunk-DKFUPHCU.js.map +0 -1
- package/dist/chunk-I5XWBDII.js.map +0 -1
- package/dist/chunk-KDRXVETO.js.map +0 -1
- /package/dist/{chunk-RDUDHNFX.js.map → chunk-265CMYJH.js.map} +0 -0
- /package/dist/{chunk-X3UOVRV7.js.map → chunk-E7ZDOJCP.js.map} +0 -0
- /package/dist/{chunk-35NGFKNN.js.map → chunk-HAR3YPCY.js.map} +0 -0
- /package/dist/{chunk-OXF6RDTK.js.map → chunk-NCMXLWEW.js.map} +0 -0
- /package/dist/{chunk-RBL3N5ND.js.map → chunk-TIPMVLIG.js.map} +0 -0
- /package/dist/{src-3CWNXNX6.js.map → src-PPKRY5GD.js.map} +0 -0
- /package/dist/{src-BAFKQNDC.js.map → src-TLTET7JZ.js.map} +0 -0
- /package/dist/{src-PFJ5BJZY.js.map → src-VVCNNYND.js.map} +0 -0
package/dist/adapter.d.ts
CHANGED
@@ -3,7 +3,7 @@ import { R as Result, A as AdapterError, H as HLCTimestamp } from './result-Cojz
 import { R as RowDelta, T as TableSchema, C as ColumnDelta } from './types-BUzzVRD6.js';
 import { D as DatabaseAdapter, a as DatabaseAdapterConfig } from './db-types-CPAPw8Ws.js';
 export { i as isDatabaseAdapter, l as lakeSyncTypeToBigQuery } from './db-types-CPAPw8Ws.js';
-import { C as ConnectorConfig } from './types-
+import { C as ConnectorConfig } from './types-D-E0VrfS.js';
 import { L as LakeAdapter, A as AdapterConfig, O as ObjectInfo } from './types-DSC_EiwR.js';
 import mysql from 'mysql2/promise';
 import { Pool } from 'pg';
package/dist/adapter.js
CHANGED
package/dist/analyst.js
CHANGED
package/dist/catalogue.js
CHANGED
@@ -4,8 +4,8 @@ import {
   buildPartitionSpec,
   lakeSyncTableName,
   tableSchemaToIceberg
-} from "./chunk-
-import "./chunk-
+} from "./chunk-TIPMVLIG.js";
+import "./chunk-C7ECMNQ4.js";
 import "./chunk-7D4SUZUM.js";
 export {
   CatalogueError,
package/dist/{chunk-RDUDHNFX.js → chunk-265CMYJH.js}
RENAMED
@@ -2,7 +2,7 @@ import {
   Err,
   FlushError,
   Ok
-} from "./chunk-
+} from "./chunk-C7ECMNQ4.js";
 
 // ../parquet/src/arrow-schema.ts
 import * as arrow from "apache-arrow";
@@ -254,4 +254,4 @@ export {
   readParquetToDeltas,
   writeDeltasToParquet
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-265CMYJH.js.map
package/dist/chunk-C7ECMNQ4.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../core/src/result/errors.ts","../../core/src/action/errors.ts","../../core/src/action/generate-id.ts","../../core/src/action/types.ts","../../core/src/result/result.ts","../../core/src/action/validate.ts","../../core/src/auth.ts","../../core/src/hlc/hlc.ts","../../core/src/base-poller.ts","../../core/src/callback-push-target.ts","../../core/src/conflict/lww.ts","../../core/src/connector/action-handler.ts","../../core/src/connector/errors.ts","../../core/src/connector/types.ts","../../core/src/connector/validate.ts","../../core/src/create-poller.ts","../../core/src/delta/apply.ts","../../core/src/delta/extract.ts","../../core/src/delta/types.ts","../../core/src/json.ts","../../core/src/sync-rules/defaults.ts","../../core/src/sync-rules/errors.ts","../../core/src/sync-rules/evaluator.ts","../../core/src/validation/identifier.ts"],"sourcesContent":["/** Base error class for all LakeSync errors */\nexport class LakeSyncError extends Error {\n\treadonly code: string;\n\toverride readonly cause?: Error;\n\n\tconstructor(message: string, code: string, cause?: Error) {\n\t\tsuper(message);\n\t\tthis.name = this.constructor.name;\n\t\tthis.code = code;\n\t\tthis.cause = cause;\n\t}\n}\n\n/** Clock drift exceeds maximum allowed threshold */\nexport class ClockDriftError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"CLOCK_DRIFT\", cause);\n\t}\n}\n\n/** Conflict resolution failure */\nexport class ConflictError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"CONFLICT\", cause);\n\t}\n}\n\n/** Flush operation failure */\nexport class FlushError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"FLUSH_FAILED\", cause);\n\t}\n}\n\n/** Schema mismatch or validation failure */\nexport class SchemaError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"SCHEMA_MISMATCH\", cause);\n\t}\n}\n\n/** Lake adapter operation failure */\nexport class AdapterError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"ADAPTER_ERROR\", cause);\n\t}\n}\n\n/** Named source adapter not found in gateway configuration */\nexport class AdapterNotFoundError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"ADAPTER_NOT_FOUND\", cause);\n\t}\n}\n\n/** Buffer backpressure limit exceeded — push rejected to prevent OOM. */\nexport class BackpressureError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"BACKPRESSURE\", cause);\n\t}\n}\n\n/** Coerce an unknown thrown value into an Error instance. */\nexport function toError(err: unknown): Error {\n\treturn err instanceof Error ? err : new Error(String(err));\n}\n","import { LakeSyncError } from \"../result/errors\";\n\n/** Error during action execution (may be retryable). */\nexport class ActionExecutionError extends LakeSyncError {\n\treadonly retryable: boolean;\n\n\tconstructor(message: string, retryable: boolean, cause?: Error) {\n\t\tsuper(message, \"ACTION_EXECUTION_ERROR\", cause);\n\t\tthis.retryable = retryable;\n\t}\n}\n\n/** The requested action type is not supported by the connector. */\nexport class ActionNotSupportedError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"ACTION_NOT_SUPPORTED\", cause);\n\t}\n}\n\n/** Action payload failed structural validation. 
*/\nexport class ActionValidationError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"ACTION_VALIDATION_ERROR\", cause);\n\t}\n}\n","import stableStringify from \"fast-json-stable-stringify\";\nimport type { HLCTimestamp } from \"../hlc/types\";\n\n/**\n * Generate a deterministic action ID using SHA-256.\n *\n * Same pattern as `generateDeltaId` in `delta/extract.ts` — uses the\n * Web Crypto API for cross-runtime compatibility (Node, Bun, browsers).\n */\nexport async function generateActionId(params: {\n\tclientId: string;\n\thlc: HLCTimestamp;\n\tconnector: string;\n\tactionType: string;\n\tparams: Record<string, unknown>;\n}): Promise<string> {\n\tconst payload = stableStringify({\n\t\tclientId: params.clientId,\n\t\thlc: params.hlc.toString(),\n\t\tconnector: params.connector,\n\t\tactionType: params.actionType,\n\t\tparams: params.params,\n\t});\n\n\tconst data = new TextEncoder().encode(payload);\n\tconst hashBuffer = await crypto.subtle.digest(\"SHA-256\", data);\n\tconst bytes = new Uint8Array(hashBuffer);\n\n\tlet hex = \"\";\n\tfor (const b of bytes) {\n\t\thex += b.toString(16).padStart(2, \"0\");\n\t}\n\treturn hex;\n}\n","import type { ActionDescriptor } from \"../connector/action-handler\";\nimport type { HLCTimestamp } from \"../hlc/types\";\n\n/** Discovery response listing available connectors and their supported actions. */\nexport interface ActionDiscovery {\n\t/** Map of connector name to its supported action descriptors. */\n\tconnectors: Record<string, ActionDescriptor[]>;\n}\n\n/** An imperative action to execute against an external system. */\nexport interface Action {\n\t/** Unique action identifier (deterministic SHA-256 hash). */\n\tactionId: string;\n\t/** Client that initiated the action. */\n\tclientId: string;\n\t/** HLC timestamp when the action was created. */\n\thlc: HLCTimestamp;\n\t/** Target connector name (e.g. \"github\", \"slack\", \"linear\"). */\n\tconnector: string;\n\t/** Action type within the connector (e.g. \"create_pr\", \"send_message\"). */\n\tactionType: string;\n\t/** Action parameters — connector-specific payload. */\n\tparams: Record<string, unknown>;\n\t/** Optional idempotency key for at-most-once delivery. */\n\tidempotencyKey?: string;\n}\n\n/** Successful result of executing an action. */\nexport interface ActionResult {\n\t/** The action that was executed. */\n\tactionId: string;\n\t/** Result data returned by the connector. */\n\tdata: Record<string, unknown>;\n\t/** Server HLC after processing. */\n\tserverHlc: HLCTimestamp;\n}\n\n/** Error result of executing an action. */\nexport interface ActionErrorResult {\n\t/** The action that failed. */\n\tactionId: string;\n\t/** Error code. */\n\tcode: string;\n\t/** Human-readable error message. */\n\tmessage: string;\n\t/** Whether the client can retry this action. */\n\tretryable: boolean;\n}\n\n/** Batch of actions pushed by a client. */\nexport interface ActionPush {\n\t/** Client identifier. */\n\tclientId: string;\n\t/** Actions to execute. */\n\tactions: Action[];\n}\n\n/** Gateway response to an action push. */\nexport interface ActionResponse {\n\t/** Results for each action (success or error). */\n\tresults: Array<ActionResult | ActionErrorResult>;\n\t/** Server HLC after processing. */\n\tserverHlc: HLCTimestamp;\n}\n\n/** Type guard: check whether a result is an error. 
*/\nexport function isActionError(\n\tresult: ActionResult | ActionErrorResult,\n): result is ActionErrorResult {\n\treturn \"code\" in result && \"retryable\" in result;\n}\n","import type { LakeSyncError } from \"./errors\";\n\n/** Discriminated union representing either success or failure */\nexport type Result<T, E = LakeSyncError> = { ok: true; value: T } | { ok: false; error: E };\n\n/** Create a successful Result */\nexport function Ok<T>(value: T): Result<T, never> {\n\treturn { ok: true, value };\n}\n\n/** Create a failed Result */\nexport function Err<E>(error: E): Result<never, E> {\n\treturn { ok: false, error };\n}\n\n/** Transform the success value of a Result */\nexport function mapResult<T, U, E>(result: Result<T, E>, fn: (value: T) => U): Result<U, E> {\n\tif (result.ok) {\n\t\treturn Ok(fn(result.value));\n\t}\n\treturn result;\n}\n\n/** Chain Result-returning operations */\nexport function flatMapResult<T, U, E>(\n\tresult: Result<T, E>,\n\tfn: (value: T) => Result<U, E>,\n): Result<U, E> {\n\tif (result.ok) {\n\t\treturn fn(result.value);\n\t}\n\treturn result;\n}\n\n/** Extract the value from a Result or throw the error */\nexport function unwrapOrThrow<T, E>(result: Result<T, E>): T {\n\tif (result.ok) {\n\t\treturn result.value;\n\t}\n\tthrow result.error;\n}\n\n/** Wrap a Promise into a Result */\nexport async function fromPromise<T>(promise: Promise<T>): Promise<Result<T, Error>> {\n\ttry {\n\t\tconst value = await promise;\n\t\treturn Ok(value);\n\t} catch (error) {\n\t\treturn Err(error instanceof Error ? error : new Error(String(error)));\n\t}\n}\n","import { Err, Ok, type Result } from \"../result/result\";\nimport { ActionValidationError } from \"./errors\";\nimport type { Action } from \"./types\";\n\n/**\n * Validate the structural integrity of an Action.\n *\n * Checks that all required fields are present and of the correct type.\n * Returns a `Result` so callers never need to catch.\n */\nexport function validateAction(action: unknown): Result<Action, ActionValidationError> {\n\tif (action === null || typeof action !== \"object\") {\n\t\treturn Err(new ActionValidationError(\"Action must be a non-null object\"));\n\t}\n\n\tconst a = action as Record<string, unknown>;\n\n\tif (typeof a.actionId !== \"string\" || a.actionId.length === 0) {\n\t\treturn Err(new ActionValidationError(\"actionId must be a non-empty string\"));\n\t}\n\n\tif (typeof a.clientId !== \"string\" || a.clientId.length === 0) {\n\t\treturn Err(new ActionValidationError(\"clientId must be a non-empty string\"));\n\t}\n\n\tif (typeof a.hlc !== \"bigint\") {\n\t\treturn Err(new ActionValidationError(\"hlc must be a bigint\"));\n\t}\n\n\tif (typeof a.connector !== \"string\" || a.connector.length === 0) {\n\t\treturn Err(new ActionValidationError(\"connector must be a non-empty string\"));\n\t}\n\n\tif (typeof a.actionType !== \"string\" || a.actionType.length === 0) {\n\t\treturn Err(new ActionValidationError(\"actionType must be a non-empty string\"));\n\t}\n\n\tif (a.params === null || typeof a.params !== \"object\" || Array.isArray(a.params)) {\n\t\treturn Err(new ActionValidationError(\"params must be a non-null object\"));\n\t}\n\n\tif (a.idempotencyKey !== undefined && typeof a.idempotencyKey !== \"string\") {\n\t\treturn Err(new ActionValidationError(\"idempotencyKey must be a string if provided\"));\n\t}\n\n\treturn Ok(action as Action);\n}\n","import { Err, Ok, type Result } from \"./result/result\";\n\n/**\n * Minimal Web Crypto typing for HMAC operations.\n * The core package 
uses `lib: [\"ES2022\"]` which doesn't include DOM types.\n * These declarations cover the methods we need without pulling in the full DOM lib.\n */\ninterface HmacSubtle {\n\timportKey(\n\t\tformat: \"raw\",\n\t\tkeyData: Uint8Array,\n\t\talgorithm: { name: string; hash: string },\n\t\textractable: boolean,\n\t\tusages: string[],\n\t): Promise<unknown>;\n\tverify(\n\t\talgorithm: string,\n\t\tkey: unknown,\n\t\tsignature: Uint8Array,\n\t\tdata: Uint8Array,\n\t): Promise<boolean>;\n}\n\n/** Claims extracted from a verified JWT token */\nexport interface AuthClaims {\n\t/** Client identifier (from JWT `sub` claim) */\n\tclientId: string;\n\t/** Authorised gateway ID (from JWT `gw` claim) */\n\tgatewayId: string;\n\t/** Role for route-level access control (from JWT `role` claim, defaults to \"client\") */\n\trole: string;\n\t/** Non-standard JWT claims for sync rule evaluation */\n\tcustomClaims: Record<string, string | string[]>;\n}\n\n/** Authentication error returned when JWT verification fails */\nexport class AuthError extends Error {\n\tconstructor(message: string) {\n\t\tsuper(message);\n\t\tthis.name = \"AuthError\";\n\t}\n}\n\n/** Expected JWT header for HMAC-SHA256 tokens */\ninterface JwtHeader {\n\talg: string;\n\ttyp: string;\n}\n\n/** JWT payload with required claims */\ninterface JwtPayload {\n\tsub?: string;\n\tgw?: string;\n\texp?: number;\n\t[key: string]: unknown;\n}\n\n/**\n * Decode a base64url-encoded string to a Uint8Array.\n * Handles the URL-safe alphabet (+/- replaced with -/_) and missing padding.\n */\nfunction base64urlDecode(input: string): Uint8Array {\n\t// Restore standard base64 characters and padding\n\tconst base64 = input.replace(/-/g, \"+\").replace(/_/g, \"/\");\n\tconst padded = base64.padEnd(base64.length + ((4 - (base64.length % 4)) % 4), \"=\");\n\tconst binary = atob(padded);\n\tconst bytes = new Uint8Array(binary.length);\n\tfor (let i = 0; i < binary.length; i++) {\n\t\tbytes[i] = binary.charCodeAt(i);\n\t}\n\treturn bytes;\n}\n\n/**\n * Parse a JSON string safely, returning null on failure.\n */\nfunction parseJson(text: string): unknown {\n\ttry {\n\t\treturn JSON.parse(text);\n\t} catch {\n\t\treturn null;\n\t}\n}\n\n/**\n * Verify a JWT token signed with HMAC-SHA256 and extract authentication claims.\n *\n * Uses the Web Crypto API exclusively (no external dependencies), making it\n * suitable for Cloudflare Workers and other edge runtimes.\n *\n * @param token - The raw JWT string (header.payload.signature)\n * @param secret - The HMAC-SHA256 secret key\n * @returns A Result containing AuthClaims on success, or AuthError on failure\n */\nexport async function verifyToken(\n\ttoken: string,\n\tsecret: string,\n): Promise<Result<AuthClaims, AuthError>> {\n\t// Split into three parts\n\tconst parts = token.split(\".\");\n\tif (parts.length !== 3) {\n\t\treturn Err(new AuthError(\"Malformed JWT: expected three dot-separated segments\"));\n\t}\n\n\tconst [headerB64, payloadB64, signatureB64] = parts;\n\tif (!headerB64 || !payloadB64 || !signatureB64) {\n\t\treturn Err(new AuthError(\"Malformed JWT: empty segment\"));\n\t}\n\n\t// Decode and verify header\n\tlet headerBytes: Uint8Array;\n\ttry {\n\t\theaderBytes = base64urlDecode(headerB64);\n\t} catch {\n\t\treturn Err(new AuthError(\"Malformed JWT: invalid base64url in header\"));\n\t}\n\n\tconst header = parseJson(new TextDecoder().decode(headerBytes)) as JwtHeader | null;\n\tif (!header || header.alg !== \"HS256\" || header.typ !== \"JWT\") {\n\t\treturn Err(new AuthError('Unsupported JWT: 
header must be {\"alg\":\"HS256\",\"typ\":\"JWT\"}'));\n\t}\n\n\t// Import the HMAC key via Web Crypto\n\tconst encoder = new TextEncoder();\n\tconst keyData = encoder.encode(secret);\n\n\tlet cryptoKey: unknown;\n\ttry {\n\t\tcryptoKey = await (crypto.subtle as unknown as HmacSubtle).importKey(\n\t\t\t\"raw\",\n\t\t\tkeyData,\n\t\t\t{ name: \"HMAC\", hash: \"SHA-256\" },\n\t\t\tfalse,\n\t\t\t[\"verify\"],\n\t\t);\n\t} catch {\n\t\treturn Err(new AuthError(\"Failed to import HMAC key\"));\n\t}\n\n\t// Verify signature\n\tlet signatureBytes: Uint8Array;\n\ttry {\n\t\tsignatureBytes = base64urlDecode(signatureB64);\n\t} catch {\n\t\treturn Err(new AuthError(\"Malformed JWT: invalid base64url in signature\"));\n\t}\n\n\tconst signingInput = encoder.encode(`${headerB64}.${payloadB64}`);\n\n\tlet valid: boolean;\n\ttry {\n\t\tvalid = await (crypto.subtle as unknown as HmacSubtle).verify(\n\t\t\t\"HMAC\",\n\t\t\tcryptoKey,\n\t\t\tsignatureBytes,\n\t\t\tsigningInput,\n\t\t);\n\t} catch {\n\t\treturn Err(new AuthError(\"Signature verification failed\"));\n\t}\n\n\tif (!valid) {\n\t\treturn Err(new AuthError(\"Invalid JWT signature\"));\n\t}\n\n\t// Decode payload\n\tlet payloadBytes: Uint8Array;\n\ttry {\n\t\tpayloadBytes = base64urlDecode(payloadB64);\n\t} catch {\n\t\treturn Err(new AuthError(\"Malformed JWT: invalid base64url in payload\"));\n\t}\n\n\tconst payload = parseJson(new TextDecoder().decode(payloadBytes)) as JwtPayload | null;\n\tif (!payload) {\n\t\treturn Err(new AuthError(\"Malformed JWT: payload is not valid JSON\"));\n\t}\n\n\t// Check expiry — exp claim is mandatory\n\tif (payload.exp === undefined || typeof payload.exp !== \"number\") {\n\t\treturn Err(new AuthError('Missing or invalid \"exp\" claim (expiry)'));\n\t}\n\tconst nowSeconds = Math.floor(Date.now() / 1000);\n\tif (payload.exp <= nowSeconds) {\n\t\treturn Err(new AuthError(\"JWT has expired\"));\n\t}\n\n\t// Extract required claims\n\tif (typeof payload.sub !== \"string\" || payload.sub.length === 0) {\n\t\treturn Err(new AuthError('Missing or invalid \"sub\" claim (clientId)'));\n\t}\n\n\tif (typeof payload.gw !== \"string\" || payload.gw.length === 0) {\n\t\treturn Err(new AuthError('Missing or invalid \"gw\" claim (gatewayId)'));\n\t}\n\n\t// Extract non-standard claims for sync rules evaluation\n\tconst standardClaims = new Set([\"sub\", \"gw\", \"exp\", \"iat\", \"iss\", \"aud\", \"role\"]);\n\tconst customClaims: Record<string, string | string[]> = {};\n\n\tfor (const [key, value] of Object.entries(payload)) {\n\t\tif (standardClaims.has(key)) continue;\n\t\tif (typeof value === \"string\") {\n\t\t\tcustomClaims[key] = value;\n\t\t} else if (Array.isArray(value) && value.every((v) => typeof v === \"string\")) {\n\t\t\tcustomClaims[key] = value as string[];\n\t\t}\n\t}\n\n\t// Always include `sub` in custom claims so sync rules can reference jwt:sub\n\tcustomClaims.sub = payload.sub;\n\n\t// Extract role claim (default to \"client\" if absent)\n\tconst role =\n\t\ttypeof payload.role === \"string\" && payload.role.length > 0 ? 
payload.role : \"client\";\n\n\treturn Ok({\n\t\tclientId: payload.sub,\n\t\tgatewayId: payload.gw,\n\t\trole,\n\t\tcustomClaims,\n\t});\n}\n","import { ClockDriftError, Err, Ok, type Result } from \"../result\";\nimport type { HLCTimestamp } from \"./types\";\n\n/**\n * Hybrid Logical Clock implementation.\n *\n * 64-bit layout: [48-bit wall clock ms][16-bit logical counter].\n * Maximum allowed clock drift: 5 seconds.\n *\n * The wall clock source is injectable for deterministic testing.\n */\nexport class HLC {\n\tprivate readonly wallClock: () => number;\n\tprivate counter = 0;\n\tprivate lastWall = 0;\n\n\t/** Maximum tolerated drift between local and remote physical clocks (ms). */\n\tstatic readonly MAX_DRIFT_MS = 5_000;\n\n\t/** Maximum value of the 16-bit logical counter. */\n\tstatic readonly MAX_COUNTER = 0xffff;\n\n\t/**\n\t * Create a new HLC instance.\n\t *\n\t * @param wallClock - Optional injectable clock source returning epoch ms.\n\t * Defaults to `Date.now`.\n\t */\n\tconstructor(wallClock?: () => number) {\n\t\tthis.wallClock = wallClock ?? (() => Date.now());\n\t}\n\n\t/**\n\t * Generate a new monotonically increasing HLC timestamp.\n\t *\n\t * The returned timestamp is guaranteed to be strictly greater than any\n\t * previously returned by this instance.\n\t */\n\tnow(): HLCTimestamp {\n\t\tconst physical = this.wallClock();\n\t\tconst wall = Math.max(physical, this.lastWall);\n\n\t\tif (wall === this.lastWall) {\n\t\t\tthis.counter++;\n\t\t\tif (this.counter > HLC.MAX_COUNTER) {\n\t\t\t\t// Counter overflow: advance wall by 1 ms and reset counter\n\t\t\t\tthis.lastWall = wall + 1;\n\t\t\t\tthis.counter = 0;\n\t\t\t}\n\t\t} else {\n\t\t\tthis.lastWall = wall;\n\t\t\tthis.counter = 0;\n\t\t}\n\n\t\treturn HLC.encode(this.lastWall, this.counter);\n\t}\n\n\t/**\n\t * Receive a remote HLC timestamp and advance the local clock.\n\t *\n\t * Returns `Err(ClockDriftError)` if the remote timestamp indicates\n\t * clock drift exceeding {@link MAX_DRIFT_MS}.\n\t *\n\t * @param remote - The HLC timestamp received from a remote node.\n\t * @returns A `Result` containing the new local HLC timestamp, or a\n\t * `ClockDriftError` if the remote clock is too far ahead.\n\t */\n\trecv(remote: HLCTimestamp): Result<HLCTimestamp, ClockDriftError> {\n\t\tconst { wall: remoteWall, counter: remoteCounter } = HLC.decode(remote);\n\t\tconst physical = this.wallClock();\n\t\tconst localWall = Math.max(physical, this.lastWall);\n\n\t\t// Check drift: compare remote wall against physical clock\n\t\tif (remoteWall - physical > HLC.MAX_DRIFT_MS) {\n\t\t\treturn Err(\n\t\t\t\tnew ClockDriftError(\n\t\t\t\t\t`Remote clock is ${remoteWall - physical}ms ahead (max drift: ${HLC.MAX_DRIFT_MS}ms)`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\n\t\tif (remoteWall > localWall) {\n\t\t\tthis.lastWall = remoteWall;\n\t\t\tthis.counter = remoteCounter + 1;\n\t\t} else if (remoteWall === localWall) {\n\t\t\tthis.lastWall = localWall;\n\t\t\tthis.counter = Math.max(this.counter, remoteCounter) + 1;\n\t\t} else {\n\t\t\tthis.lastWall = localWall;\n\t\t\tthis.counter++;\n\t\t}\n\n\t\tif (this.counter > HLC.MAX_COUNTER) {\n\t\t\t// Counter overflow: advance wall by 1 ms and reset counter\n\t\t\tthis.lastWall = this.lastWall + 1;\n\t\t\tthis.counter = 0;\n\t\t}\n\n\t\treturn Ok(HLC.encode(this.lastWall, this.counter));\n\t}\n\n\t/**\n\t * Encode a wall clock value (ms) and logical counter into a 64-bit HLC timestamp.\n\t *\n\t * @param wall - Wall clock component in epoch milliseconds (48-bit).\n\t * @param counter - Logical 
counter component (16-bit, 0..65535).\n\t * @returns The encoded {@link HLCTimestamp}.\n\t */\n\tstatic encode(wall: number, counter: number): HLCTimestamp {\n\t\treturn ((BigInt(wall) << 16n) | BigInt(counter & 0xffff)) as HLCTimestamp;\n\t}\n\n\t/**\n\t * Decode an HLC timestamp into its wall clock (ms) and logical counter components.\n\t *\n\t * @param ts - The {@link HLCTimestamp} to decode.\n\t * @returns An object with `wall` (epoch ms) and `counter` (logical) fields.\n\t */\n\tstatic decode(ts: HLCTimestamp): { wall: number; counter: number } {\n\t\treturn {\n\t\t\twall: Number(ts >> 16n),\n\t\t\tcounter: Number(ts & 0xffffn),\n\t\t};\n\t}\n\n\t/**\n\t * Compare two HLC timestamps.\n\t *\n\t * @returns `-1` if `a < b`, `0` if `a === b`, `1` if `a > b`.\n\t */\n\tstatic compare(a: HLCTimestamp, b: HLCTimestamp): -1 | 0 | 1 {\n\t\tif (a < b) return -1;\n\t\tif (a > b) return 1;\n\t\treturn 0;\n\t}\n}\n","// ---------------------------------------------------------------------------\n// BaseSourcePoller — shared lifecycle and push logic for source connectors\n// ---------------------------------------------------------------------------\n\nimport type { RowDelta, SyncPush } from \"./delta/types\";\nimport { HLC } from \"./hlc/hlc\";\nimport type { HLCTimestamp } from \"./hlc/types\";\nimport type { BackpressureError, FlushError } from \"./result/errors\";\nimport type { Result } from \"./result/result\";\n\n/** Minimal interface for a push target (avoids depending on @lakesync/gateway). */\nexport interface PushTarget {\n\thandlePush(push: SyncPush): unknown;\n}\n\n/**\n * Extended push target that supports flush and buffer inspection.\n * Implemented by SyncGateway so pollers can trigger flushes to relieve memory pressure.\n */\nexport interface IngestTarget extends PushTarget {\n\tflush(): Promise<Result<void, FlushError>>;\n\tshouldFlush(): boolean;\n\treadonly bufferStats: { logSize: number; indexSize: number; byteSize: number };\n}\n\n/** Type guard: returns true if the target supports flush/shouldFlush/bufferStats. */\nexport function isIngestTarget(target: PushTarget): target is IngestTarget {\n\treturn (\n\t\ttypeof (target as IngestTarget).flush === \"function\" &&\n\t\ttypeof (target as IngestTarget).shouldFlush === \"function\" &&\n\t\t\"bufferStats\" in target\n\t);\n}\n\n/** Memory configuration for the streaming accumulator. */\nexport interface PollerMemoryConfig {\n\t/** Number of deltas per push chunk (default 500). */\n\tchunkSize?: number;\n\t/** Approximate memory budget in bytes — triggers flush at 70% (default: no limit). */\n\tmemoryBudgetBytes?: number;\n\t/** Proportion of memoryBudgetBytes at which to trigger a flush (default 0.7). */\n\tflushThreshold?: number;\n}\n\nconst DEFAULT_CHUNK_SIZE = 500;\nconst DEFAULT_FLUSH_THRESHOLD = 0.7;\n\n/**\n * Base class for source pollers that poll an external API and push deltas\n * to a SyncGateway. 
Handles lifecycle (start/stop/schedule), and push.\n */\nexport abstract class BaseSourcePoller {\n\tprotected readonly gateway: PushTarget;\n\tprotected readonly hlc: HLC;\n\tprotected readonly clientId: string;\n\tprivate readonly intervalMs: number;\n\tprivate timer: ReturnType<typeof setTimeout> | null = null;\n\tprivate running = false;\n\n\tprivate readonly chunkSize: number;\n\tprivate readonly memoryBudgetBytes: number | undefined;\n\tprivate readonly flushThreshold: number;\n\tprivate pendingDeltas: RowDelta[] = [];\n\n\tconstructor(config: {\n\t\tname: string;\n\t\tintervalMs: number;\n\t\tgateway: PushTarget;\n\t\tmemory?: PollerMemoryConfig;\n\t}) {\n\t\tthis.gateway = config.gateway;\n\t\tthis.hlc = new HLC();\n\t\tthis.clientId = `ingest:${config.name}`;\n\t\tthis.intervalMs = config.intervalMs;\n\t\tthis.chunkSize = config.memory?.chunkSize ?? DEFAULT_CHUNK_SIZE;\n\t\tthis.memoryBudgetBytes = config.memory?.memoryBudgetBytes;\n\t\tthis.flushThreshold = config.memory?.flushThreshold ?? DEFAULT_FLUSH_THRESHOLD;\n\t}\n\n\t/** Start the polling loop. */\n\tstart(): void {\n\t\tif (this.running) return;\n\t\tthis.running = true;\n\t\tthis.schedulePoll();\n\t}\n\n\t/** Stop the polling loop. */\n\tstop(): void {\n\t\tthis.running = false;\n\t\tif (this.timer) {\n\t\t\tclearTimeout(this.timer);\n\t\t\tthis.timer = null;\n\t\t}\n\t}\n\n\t/** Whether the poller is currently running. */\n\tget isRunning(): boolean {\n\t\treturn this.running;\n\t}\n\n\t/** Execute a single poll cycle. Subclasses implement their specific polling logic. */\n\tabstract poll(): Promise<void>;\n\n\t/** Export cursor state as a JSON-serialisable object for external persistence. */\n\tabstract getCursorState(): Record<string, unknown>;\n\n\t/** Restore cursor state from a previously exported snapshot. */\n\tabstract setCursorState(state: Record<string, unknown>): void;\n\n\t/**\n\t * Execute a single poll cycle without the timer loop.\n\t * Convenience for serverless consumers who trigger polls manually.\n\t */\n\tasync pollOnce(): Promise<void> {\n\t\treturn this.poll();\n\t}\n\n\t/** Push collected deltas to the gateway (single-shot, backward compat). */\n\tprotected pushDeltas(deltas: RowDelta[]): void {\n\t\tif (deltas.length === 0) return;\n\t\tconst push: SyncPush = {\n\t\t\tclientId: this.clientId,\n\t\t\tdeltas,\n\t\t\tlastSeenHlc: 0n as HLCTimestamp,\n\t\t};\n\t\tthis.gateway.handlePush(push);\n\t}\n\n\t/**\n\t * Accumulate a single delta. When `chunkSize` is reached, the pending\n\t * deltas are automatically pushed (and flushed if needed).\n\t */\n\tprotected async accumulateDelta(delta: RowDelta): Promise<void> {\n\t\tthis.pendingDeltas.push(delta);\n\t\tif (this.pendingDeltas.length >= this.chunkSize) {\n\t\t\tawait this.pushPendingChunk();\n\t\t}\n\t}\n\n\t/** Flush any remaining accumulated deltas. Call at the end of `poll()`. */\n\tprotected async flushAccumulator(): Promise<void> {\n\t\tif (this.pendingDeltas.length > 0) {\n\t\t\tawait this.pushPendingChunk();\n\t\t}\n\t}\n\n\t/**\n\t * Push a chunk of pending deltas. 
If the gateway is an IngestTarget,\n\t * checks memory pressure and flushes before/after push when needed.\n\t * On backpressure, flushes once and retries.\n\t */\n\tprivate async pushPendingChunk(): Promise<void> {\n\t\tconst chunk = this.pendingDeltas;\n\t\tthis.pendingDeltas = [];\n\t\tawait this.pushChunkWithFlush(chunk);\n\t}\n\n\tprivate async pushChunkWithFlush(chunk: RowDelta[]): Promise<void> {\n\t\tif (chunk.length === 0) return;\n\n\t\tconst target = this.gateway;\n\n\t\t// Pre-push: flush if IngestTarget signals pressure\n\t\tif (isIngestTarget(target)) {\n\t\t\tif (this.shouldFlushTarget(target)) {\n\t\t\t\tawait target.flush();\n\t\t\t}\n\t\t}\n\n\t\tconst push: SyncPush = {\n\t\t\tclientId: this.clientId,\n\t\t\tdeltas: chunk,\n\t\t\tlastSeenHlc: 0n as HLCTimestamp,\n\t\t};\n\n\t\tconst result = target.handlePush(push) as Result<unknown, BackpressureError> | undefined;\n\n\t\t// If handlePush returned a Result with backpressure, flush and retry once\n\t\tif (result && typeof result === \"object\" && \"ok\" in result && !result.ok) {\n\t\t\tif (isIngestTarget(target)) {\n\t\t\t\tawait target.flush();\n\t\t\t\ttarget.handlePush(push);\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate shouldFlushTarget(target: IngestTarget): boolean {\n\t\tif (target.shouldFlush()) return true;\n\t\tif (this.memoryBudgetBytes != null) {\n\t\t\tconst threshold = Math.floor(this.memoryBudgetBytes * this.flushThreshold);\n\t\t\tif (target.bufferStats.byteSize >= threshold) return true;\n\t\t}\n\t\treturn false;\n\t}\n\n\tprivate schedulePoll(): void {\n\t\tif (!this.running) return;\n\t\tthis.timer = setTimeout(async () => {\n\t\t\ttry {\n\t\t\t\tawait this.poll();\n\t\t\t} catch {\n\t\t\t\t// Swallow errors — a failed poll must never crash the server\n\t\t\t}\n\t\t\tthis.schedulePoll();\n\t\t}, this.intervalMs);\n\t}\n}\n","// ---------------------------------------------------------------------------\n// CallbackPushTarget — a PushTarget that delegates to a user-provided callback\n// ---------------------------------------------------------------------------\n\nimport type { PushTarget } from \"./base-poller\";\nimport type { SyncPush } from \"./delta/types\";\n\n/**\n * A simple PushTarget implementation that forwards every push to a\n * user-supplied callback. 
Useful for testing, logging, or lightweight\n * integrations where a full gateway is not required.\n */\nexport class CallbackPushTarget implements PushTarget {\n\tprivate readonly onPush: (push: SyncPush) => void | Promise<void>;\n\n\tconstructor(onPush: (push: SyncPush) => void | Promise<void>) {\n\t\tthis.onPush = onPush;\n\t}\n\n\thandlePush(push: SyncPush): void {\n\t\tthis.onPush(push);\n\t}\n}\n","import type { ColumnDelta, DeltaOp, RowDelta } from \"../delta/types\";\nimport { HLC } from \"../hlc/hlc\";\nimport { ConflictError } from \"../result/errors\";\nimport { Err, Ok, type Result } from \"../result/result\";\nimport type { ConflictResolver } from \"./resolver\";\n\n/**\n * Column-level Last-Write-Wins conflict resolver.\n *\n * For each column present in both deltas, the one with the higher HLC wins.\n * Equal HLC tiebreak: lexicographically higher clientId wins (deterministic).\n * Columns only present in one delta are always included in the result.\n */\nexport class LWWResolver implements ConflictResolver {\n\t/**\n\t * Resolve two conflicting deltas for the same row, returning the merged result.\n\t *\n\t * Rules:\n\t * - Both DELETE: the delta with the higher HLC (or clientId tiebreak) wins.\n\t * - One DELETE, one non-DELETE: the delta with the higher HLC wins.\n\t * If the DELETE wins, the row is tombstoned (empty columns).\n\t * If the non-DELETE wins, the row is resurrected.\n\t * - Both non-DELETE: columns are merged per-column using LWW semantics.\n\t *\n\t * @param local - The locally held delta for this row.\n\t * @param remote - The incoming remote delta for this row.\n\t * @returns A `Result` containing the resolved `RowDelta`, or a\n\t * `ConflictError` if the deltas refer to different tables/rows.\n\t */\n\tresolve(local: RowDelta, remote: RowDelta): Result<RowDelta, ConflictError> {\n\t\t// Validate same table + rowId\n\t\tif (local.table !== remote.table || local.rowId !== remote.rowId) {\n\t\t\treturn Err(\n\t\t\t\tnew ConflictError(\n\t\t\t\t\t`Cannot resolve conflict: mismatched table/rowId (${local.table}:${local.rowId} vs ${remote.table}:${remote.rowId})`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\n\t\t// Determine which delta has higher HLC (for op-level decisions)\n\t\tconst winner = pickWinner(local, remote);\n\n\t\t// Both DELETE — winner takes all (no columns to merge)\n\t\tif (local.op === \"DELETE\" && remote.op === \"DELETE\") {\n\t\t\treturn Ok({ ...winner, columns: [] });\n\t\t}\n\n\t\t// One is DELETE\n\t\tif (local.op === \"DELETE\" || remote.op === \"DELETE\") {\n\t\t\tconst deleteDelta = local.op === \"DELETE\" ? local : remote;\n\t\t\tconst otherDelta = local.op === \"DELETE\" ? remote : local;\n\n\t\t\t// If the DELETE has higher/equal priority, tombstone wins\n\t\t\tif (deleteDelta === winner) {\n\t\t\t\treturn Ok({ ...deleteDelta, columns: [] });\n\t\t\t}\n\t\t\t// Otherwise the UPDATE/INSERT wins (resurrection)\n\t\t\treturn Ok({ ...otherDelta });\n\t\t}\n\n\t\t// Both are INSERT or UPDATE — merge columns\n\t\tconst mergedColumns = mergeColumns(local, remote);\n\n\t\t// Determine the resulting op: INSERT only if both are INSERT, otherwise UPDATE\n\t\tconst op: DeltaOp = local.op === \"INSERT\" && remote.op === \"INSERT\" ? 
\"INSERT\" : \"UPDATE\";\n\n\t\treturn Ok({\n\t\t\top,\n\t\t\ttable: local.table,\n\t\t\trowId: local.rowId,\n\t\t\tclientId: winner.clientId,\n\t\t\tcolumns: mergedColumns,\n\t\t\thlc: winner.hlc,\n\t\t\tdeltaId: winner.deltaId,\n\t\t});\n\t}\n}\n\n/**\n * Pick the winning delta based on HLC comparison with clientId tiebreak.\n *\n * @param local - The locally held delta.\n * @param remote - The incoming remote delta.\n * @returns The delta that wins the comparison.\n */\nfunction pickWinner(local: RowDelta, remote: RowDelta): RowDelta {\n\tconst cmp = HLC.compare(local.hlc, remote.hlc);\n\tif (cmp > 0) return local;\n\tif (cmp < 0) return remote;\n\t// Equal HLC — lexicographically higher clientId wins\n\treturn local.clientId > remote.clientId ? local : remote;\n}\n\n/**\n * Merge column-level changes from two non-DELETE deltas using LWW semantics.\n *\n * - Columns present in only one delta are included unconditionally.\n * - Columns present in both: the value from the delta with the higher HLC wins;\n * equal HLC uses lexicographic clientId tiebreak.\n *\n * @param local - The locally held delta.\n * @param remote - The incoming remote delta.\n * @returns The merged array of column deltas.\n */\nfunction mergeColumns(local: RowDelta, remote: RowDelta): ColumnDelta[] {\n\tconst localMap = new Map(local.columns.map((c) => [c.column, c]));\n\tconst remoteMap = new Map(remote.columns.map((c) => [c.column, c]));\n\tconst allColumns = new Set([...localMap.keys(), ...remoteMap.keys()]);\n\tconst winner = pickWinner(local, remote);\n\n\tconst merged: ColumnDelta[] = [];\n\n\tfor (const col of allColumns) {\n\t\tconst localCol = localMap.get(col);\n\t\tconst remoteCol = remoteMap.get(col);\n\n\t\tif (!remoteCol) {\n\t\t\tmerged.push(localCol!);\n\t\t} else if (!localCol) {\n\t\t\tmerged.push(remoteCol);\n\t\t} else {\n\t\t\t// Both have this column — winner takes it\n\t\t\tmerged.push(winner === local ? localCol : remoteCol);\n\t\t}\n\t}\n\n\treturn merged;\n}\n\nconst _singleton = new LWWResolver();\n\n/**\n * Convenience function — resolves two conflicting deltas using the\n * column-level Last-Write-Wins strategy.\n *\n * @param local - The locally held delta for this row.\n * @param remote - The incoming remote delta for this row.\n * @returns A `Result` containing the resolved `RowDelta`, or a\n * `ConflictError` if the deltas refer to different tables/rows.\n */\nexport function resolveLWW(local: RowDelta, remote: RowDelta): Result<RowDelta, ConflictError> {\n\treturn _singleton.resolve(local, remote);\n}\n","import type { ActionExecutionError, ActionNotSupportedError } from \"../action/errors\";\nimport type { Action, ActionResult } from \"../action/types\";\nimport type { Result } from \"../result/result\";\nimport type { ResolvedClaims } from \"../sync-rules/types\";\n\n/** Describes an action type supported by a connector. */\nexport interface ActionDescriptor {\n\t/** The action type identifier (e.g. \"create_pr\", \"send_message\"). */\n\tactionType: string;\n\t/** Human-readable description of what this action does. */\n\tdescription: string;\n\t/** Optional JSON Schema for the action's params. */\n\tparamsSchema?: Record<string, unknown>;\n}\n\n/** Authentication context passed to action handlers for permission checks. */\nexport interface AuthContext {\n\t/** Resolved JWT claims for resource-level permission checks. 
*/\n\tclaims: ResolvedClaims;\n}\n\n/**\n * Interface for connectors that can execute imperative actions.\n *\n * Separate from `DatabaseAdapter` — not all connectors support actions\n * (e.g. S3 doesn't). A connector can implement `DatabaseAdapter` (read/write\n * data), `ActionHandler` (execute commands), or both.\n */\nexport interface ActionHandler {\n\t/** Descriptors for all action types this handler supports. */\n\treadonly supportedActions: ActionDescriptor[];\n\t/** Execute a single action against the external system. */\n\texecuteAction(\n\t\taction: Action,\n\t\tcontext?: AuthContext,\n\t): Promise<Result<ActionResult, ActionExecutionError | ActionNotSupportedError>>;\n}\n\n/** Type guard: check whether an object implements the ActionHandler interface. */\nexport function isActionHandler(obj: unknown): obj is ActionHandler {\n\tif (obj === null || typeof obj !== \"object\") return false;\n\tconst candidate = obj as Record<string, unknown>;\n\treturn Array.isArray(candidate.supportedActions) && typeof candidate.executeAction === \"function\";\n}\n","import { LakeSyncError } from \"../result/errors\";\n\n/** Connector configuration validation error. */\nexport class ConnectorValidationError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"CONNECTOR_VALIDATION\", cause);\n\t}\n}\n","/** Supported connector types. */\nexport const CONNECTOR_TYPES = [\"postgres\", \"mysql\", \"bigquery\", \"jira\", \"salesforce\"] as const;\n\n/** Union of supported connector type strings. */\nexport type ConnectorType = (typeof CONNECTOR_TYPES)[number];\n\n/** Connection configuration for a PostgreSQL source. */\nexport interface PostgresConnectorConfig {\n\t/** PostgreSQL connection string (e.g. \"postgres://user:pass@host/db\"). */\n\tconnectionString: string;\n}\n\n/** Connection configuration for a MySQL source. */\nexport interface MySQLConnectorConfig {\n\t/** MySQL connection string (e.g. \"mysql://user:pass@host/db\"). */\n\tconnectionString: string;\n}\n\n/** Connection configuration for a BigQuery source. */\nexport interface BigQueryConnectorConfig {\n\t/** GCP project ID. */\n\tprojectId: string;\n\t/** BigQuery dataset name. */\n\tdataset: string;\n\t/** Path to service account JSON key file. Falls back to ADC when omitted. */\n\tkeyFilename?: string;\n\t/** Dataset location (default \"US\"). */\n\tlocation?: string;\n}\n\n/** Ingest table configuration — defines a single table to poll. */\nexport interface ConnectorIngestTable {\n\t/** Target table name in LakeSync. */\n\ttable: string;\n\t/** SQL query to poll (must return rowId + data columns). */\n\tquery: string;\n\t/** Primary key column name (default \"id\"). */\n\trowIdColumn?: string;\n\t/** Change detection strategy. */\n\tstrategy: { type: \"cursor\"; cursorColumn: string; lookbackMs?: number } | { type: \"diff\" };\n}\n\n/** Connection configuration for a Salesforce CRM source. */\nexport interface SalesforceConnectorConfig {\n\t/** Salesforce instance URL (e.g. \"https://mycompany.salesforce.com\"). */\n\tinstanceUrl: string;\n\t/** Connected App consumer key. */\n\tclientId: string;\n\t/** Connected App consumer secret. */\n\tclientSecret: string;\n\t/** Salesforce username. */\n\tusername: string;\n\t/** Salesforce password + security token concatenated. */\n\tpassword: string;\n\t/** REST API version (default \"v62.0\"). */\n\tapiVersion?: string;\n\t/** Use test.salesforce.com for auth (default false). 
*/\n\tisSandbox?: boolean;\n\t/** Optional WHERE clause fragment appended to all SOQL queries. */\n\tsoqlFilter?: string;\n\t/** Whether to include Account objects (default true). */\n\tincludeAccounts?: boolean;\n\t/** Whether to include Contact objects (default true). */\n\tincludeContacts?: boolean;\n\t/** Whether to include Opportunity objects (default true). */\n\tincludeOpportunities?: boolean;\n\t/** Whether to include Lead objects (default true). */\n\tincludeLeads?: boolean;\n}\n\n/** Connection configuration for a Jira Cloud source. */\nexport interface JiraConnectorConfig {\n\t/** Jira Cloud domain (e.g. \"mycompany\" for mycompany.atlassian.net). */\n\tdomain: string;\n\t/** Email address for Basic auth. */\n\temail: string;\n\t/** API token paired with the email. */\n\tapiToken: string;\n\t/** Optional JQL filter to scope issue polling. */\n\tjql?: string;\n\t/** Whether to include comments (default true). */\n\tincludeComments?: boolean;\n\t/** Whether to include projects (default true). */\n\tincludeProjects?: boolean;\n}\n\n/** Optional ingest polling configuration attached to a connector. */\nexport interface ConnectorIngestConfig {\n\t/** Tables to poll for changes. */\n\ttables: ConnectorIngestTable[];\n\t/** Poll interval in milliseconds (default 10 000). */\n\tintervalMs?: number;\n\t/** Deltas per push chunk (default 500). */\n\tchunkSize?: number;\n\t/** Approximate memory budget in bytes — triggers flush at 70%. */\n\tmemoryBudgetBytes?: number;\n}\n\n/**\n * Configuration for a dynamically registered connector (data source).\n *\n * Each connector maps to a named {@link DatabaseAdapter} in the gateway,\n * optionally with an ingest poller that pushes detected changes into\n * the sync buffer.\n */\nexport interface ConnectorConfig {\n\t/** Unique connector name (used as source adapter key). */\n\tname: string;\n\t/** Connector type — determines which adapter implementation to instantiate. */\n\ttype: ConnectorType;\n\t/** PostgreSQL connection configuration (required when type is \"postgres\"). */\n\tpostgres?: PostgresConnectorConfig;\n\t/** MySQL connection configuration (required when type is \"mysql\"). */\n\tmysql?: MySQLConnectorConfig;\n\t/** BigQuery connection configuration (required when type is \"bigquery\"). */\n\tbigquery?: BigQueryConnectorConfig;\n\t/** Jira Cloud connection configuration (required when type is \"jira\"). */\n\tjira?: JiraConnectorConfig;\n\t/** Salesforce CRM connection configuration (required when type is \"salesforce\"). */\n\tsalesforce?: SalesforceConnectorConfig;\n\t/** Optional ingest polling configuration. 
*/\n\tingest?: ConnectorIngestConfig;\n}\n","import { Err, Ok, type Result } from \"../result/result\";\nimport { ConnectorValidationError } from \"./errors\";\nimport { CONNECTOR_TYPES, type ConnectorConfig } from \"./types\";\n\nconst VALID_STRATEGIES = new Set([\"cursor\", \"diff\"]);\n\n/**\n * Validate a connector configuration for structural correctness.\n *\n * Checks:\n * - `name` is a non-empty string\n * - `type` is one of the supported connector types\n * - Type-specific config object is present and valid\n * - Optional ingest config has valid table definitions\n *\n * @param input - Raw input to validate.\n * @returns The validated {@link ConnectorConfig} or a validation error.\n */\nexport function validateConnectorConfig(\n\tinput: unknown,\n): Result<ConnectorConfig, ConnectorValidationError> {\n\tif (typeof input !== \"object\" || input === null) {\n\t\treturn Err(new ConnectorValidationError(\"Connector config must be an object\"));\n\t}\n\n\tconst obj = input as Record<string, unknown>;\n\n\t// --- name ---\n\tif (typeof obj.name !== \"string\" || obj.name.length === 0) {\n\t\treturn Err(new ConnectorValidationError(\"Connector name must be a non-empty string\"));\n\t}\n\n\t// --- type ---\n\tif (typeof obj.type !== \"string\" || !(CONNECTOR_TYPES as readonly string[]).includes(obj.type)) {\n\t\treturn Err(\n\t\t\tnew ConnectorValidationError(`Connector type must be one of: ${CONNECTOR_TYPES.join(\", \")}`),\n\t\t);\n\t}\n\n\tconst connectorType = obj.type as ConnectorConfig[\"type\"];\n\n\t// --- type-specific config ---\n\tswitch (connectorType) {\n\t\tcase \"postgres\": {\n\t\t\tconst pg = obj.postgres;\n\t\t\tif (typeof pg !== \"object\" || pg === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t'Connector type \"postgres\" requires a postgres config object',\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t\tconst pgObj = pg as Record<string, unknown>;\n\t\t\tif (typeof pgObj.connectionString !== \"string\" || pgObj.connectionString.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Postgres connector requires a non-empty connectionString\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t\tcase \"mysql\": {\n\t\t\tconst my = obj.mysql;\n\t\t\tif (typeof my !== \"object\" || my === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError('Connector type \"mysql\" requires a mysql config object'),\n\t\t\t\t);\n\t\t\t}\n\t\t\tconst myObj = my as Record<string, unknown>;\n\t\t\tif (typeof myObj.connectionString !== \"string\" || myObj.connectionString.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"MySQL connector requires a non-empty connectionString\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t\tcase \"bigquery\": {\n\t\t\tconst bq = obj.bigquery;\n\t\t\tif (typeof bq !== \"object\" || bq === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t'Connector type \"bigquery\" requires a bigquery config object',\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t\tconst bqObj = bq as Record<string, unknown>;\n\t\t\tif (typeof bqObj.projectId !== \"string\" || bqObj.projectId.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"BigQuery connector requires a non-empty projectId\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (typeof bqObj.dataset !== \"string\" || bqObj.dataset.length === 0) {\n\t\t\t\treturn Err(new ConnectorValidationError(\"BigQuery connector requires a non-empty dataset\"));\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t\tcase \"jira\": 
{\n\t\t\tconst jira = obj.jira;\n\t\t\tif (typeof jira !== \"object\" || jira === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError('Connector type \"jira\" requires a jira config object'),\n\t\t\t\t);\n\t\t\t}\n\t\t\tconst jiraObj = jira as Record<string, unknown>;\n\t\t\tif (typeof jiraObj.domain !== \"string\" || jiraObj.domain.length === 0) {\n\t\t\t\treturn Err(new ConnectorValidationError(\"Jira connector requires a non-empty domain\"));\n\t\t\t}\n\t\t\tif (typeof jiraObj.email !== \"string\" || jiraObj.email.length === 0) {\n\t\t\t\treturn Err(new ConnectorValidationError(\"Jira connector requires a non-empty email\"));\n\t\t\t}\n\t\t\tif (typeof jiraObj.apiToken !== \"string\" || jiraObj.apiToken.length === 0) {\n\t\t\t\treturn Err(new ConnectorValidationError(\"Jira connector requires a non-empty apiToken\"));\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t\tcase \"salesforce\": {\n\t\t\tconst sf = obj.salesforce;\n\t\t\tif (typeof sf !== \"object\" || sf === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t'Connector type \"salesforce\" requires a salesforce config object',\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t\tconst sfObj = sf as Record<string, unknown>;\n\t\t\tif (typeof sfObj.instanceUrl !== \"string\" || sfObj.instanceUrl.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Salesforce connector requires a non-empty instanceUrl\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (typeof sfObj.clientId !== \"string\" || sfObj.clientId.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Salesforce connector requires a non-empty clientId\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (typeof sfObj.clientSecret !== \"string\" || sfObj.clientSecret.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Salesforce connector requires a non-empty clientSecret\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (typeof sfObj.username !== \"string\" || sfObj.username.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Salesforce connector requires a non-empty username\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (typeof sfObj.password !== \"string\" || sfObj.password.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\"Salesforce connector requires a non-empty password\"),\n\t\t\t\t);\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t}\n\n\t// --- optional ingest config ---\n\tif (obj.ingest !== undefined) {\n\t\tif (typeof obj.ingest !== \"object\" || obj.ingest === null) {\n\t\t\treturn Err(new ConnectorValidationError(\"Ingest config must be an object\"));\n\t\t}\n\n\t\tconst ingest = obj.ingest as Record<string, unknown>;\n\n\t\t// API-based connectors define tables internally — only validate intervalMs\n\t\tif (connectorType === \"jira\" || connectorType === \"salesforce\") {\n\t\t\tif (ingest.intervalMs !== undefined) {\n\t\t\t\tif (typeof ingest.intervalMs !== \"number\" || ingest.intervalMs < 1) {\n\t\t\t\t\treturn Err(new ConnectorValidationError(\"Ingest intervalMs must be a positive number\"));\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Ok(input as ConnectorConfig);\n\t\t}\n\n\t\tif (!Array.isArray(ingest.tables) || ingest.tables.length === 0) {\n\t\t\treturn Err(new ConnectorValidationError(\"Ingest config must have a non-empty tables array\"));\n\t\t}\n\n\t\tfor (let i = 0; i < ingest.tables.length; i++) {\n\t\t\tconst table = ingest.tables[i] as Record<string, unknown>;\n\n\t\t\tif (typeof table !== \"object\" || table === null) {\n\t\t\t\treturn Err(new 
ConnectorValidationError(`Ingest table at index ${i} must be an object`));\n\t\t\t}\n\n\t\t\tif (typeof table.table !== \"string\" || (table.table as string).length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t`Ingest table at index ${i} must have a non-empty table name`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tif (typeof table.query !== \"string\" || (table.query as string).length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(`Ingest table at index ${i} must have a non-empty query`),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tif (typeof table.strategy !== \"object\" || table.strategy === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(`Ingest table at index ${i} must have a strategy object`),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst strategy = table.strategy as Record<string, unknown>;\n\t\t\tif (!VALID_STRATEGIES.has(strategy.type as string)) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t`Ingest table at index ${i} strategy type must be \"cursor\" or \"diff\"`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tif (strategy.type === \"cursor\") {\n\t\t\t\tif (\n\t\t\t\t\ttypeof strategy.cursorColumn !== \"string\" ||\n\t\t\t\t\t(strategy.cursorColumn as string).length === 0\n\t\t\t\t) {\n\t\t\t\t\treturn Err(\n\t\t\t\t\t\tnew ConnectorValidationError(\n\t\t\t\t\t\t\t`Ingest table at index ${i} cursor strategy requires a non-empty cursorColumn`,\n\t\t\t\t\t\t),\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif (ingest.intervalMs !== undefined) {\n\t\t\tif (typeof ingest.intervalMs !== \"number\" || ingest.intervalMs < 1) {\n\t\t\t\treturn Err(new ConnectorValidationError(\"Ingest intervalMs must be a positive number\"));\n\t\t\t}\n\t\t}\n\t}\n\n\treturn Ok(input as ConnectorConfig);\n}\n","// ---------------------------------------------------------------------------\n// createPoller — registry-based factory for creating pollers from ConnectorConfig\n// ---------------------------------------------------------------------------\n\nimport type { BaseSourcePoller, PushTarget } from \"./base-poller\";\nimport type { ConnectorConfig } from \"./connector/types\";\n\n/** Factory function that creates a poller from a ConnectorConfig. */\nexport type PollerFactory = (config: ConnectorConfig, gateway: PushTarget) => BaseSourcePoller;\n\n/** Registry of poller factory functions keyed by connector type. */\nconst pollerFactories = new Map<string, PollerFactory>();\n\n/**\n * Register a poller factory for a connector type.\n * Connector packages call this at module load time so that\n * `createPoller()` can instantiate the correct poller.\n */\nexport function registerPollerFactory(type: string, factory: PollerFactory): void {\n\tpollerFactories.set(type, factory);\n}\n\n/**\n * Create a poller from a {@link ConnectorConfig}.\n *\n * @throws If no factory has been registered for the config's `type`.\n */\nexport function createPoller(config: ConnectorConfig, gateway: PushTarget): BaseSourcePoller {\n\tconst factory = pollerFactories.get(config.type);\n\tif (!factory) {\n\t\tthrow new Error(\n\t\t\t`No poller factory registered for connector type \"${config.type}\". ` +\n\t\t\t\t`Did you import the connector package (e.g. 
\"@lakesync/connector-${config.type}\")?`,\n\t\t);\n\t}\n\treturn factory(config, gateway);\n}\n","import type { RowDelta } from \"./types\";\n\n/**\n * Apply a delta to an existing row, returning the merged result.\n *\n * - DELETE → returns null\n * - INSERT → creates a new row from delta columns\n * - UPDATE → merges delta columns onto existing row (immutable — returns a new object)\n *\n * @param row - The current row state, or null if no row exists\n * @param delta - The delta to apply\n * @returns The merged row, or null for DELETE operations\n */\nexport function applyDelta(\n\trow: Record<string, unknown> | null,\n\tdelta: RowDelta,\n): Record<string, unknown> | null {\n\tif (delta.op === \"DELETE\") return null;\n\n\tconst base: Record<string, unknown> = row ? { ...row } : {};\n\tfor (const col of delta.columns) {\n\t\tbase[col.column] = col.value;\n\t}\n\treturn base;\n}\n","import equal from \"fast-deep-equal\";\nimport stableStringify from \"fast-json-stable-stringify\";\nimport type { HLCTimestamp } from \"../hlc/types\";\nimport type { ColumnDelta, RowDelta, TableSchema } from \"./types\";\n\n/**\n * Extract a column-level delta between two row states.\n *\n * - `before` null/undefined + `after` present -> INSERT (all columns)\n * - `before` present + `after` null/undefined -> DELETE (empty columns)\n * - Both present -> compare each column, emit only changed columns as UPDATE\n * - No columns changed -> returns null (no-op)\n *\n * If `schema` is provided, only columns listed in the schema are considered.\n *\n * @param before - The previous row state, or null/undefined for a new row\n * @param after - The current row state, or null/undefined for a deleted row\n * @param opts - Table name, row ID, client ID, HLC timestamp, and optional schema\n * @returns The extracted RowDelta, or null if nothing changed\n */\nexport async function extractDelta(\n\tbefore: Record<string, unknown> | null | undefined,\n\tafter: Record<string, unknown> | null | undefined,\n\topts: {\n\t\ttable: string;\n\t\trowId: string;\n\t\tclientId: string;\n\t\thlc: HLCTimestamp;\n\t\tschema?: TableSchema;\n\t},\n): Promise<RowDelta | null> {\n\tconst { table, rowId, clientId, hlc, schema } = opts;\n\n\tconst beforeExists = before != null;\n\tconst afterExists = after != null;\n\n\tif (!beforeExists && !afterExists) {\n\t\treturn null;\n\t}\n\n\t// INSERT: no previous state, new state exists\n\tif (!beforeExists && afterExists) {\n\t\tconst columns = buildColumns(after, schema);\n\t\tconst deltaId = await generateDeltaId({ clientId, hlc, table, rowId, columns });\n\t\treturn { op: \"INSERT\", table, rowId, clientId, columns, hlc, deltaId };\n\t}\n\n\t// DELETE: previous state exists, no new state\n\tif (beforeExists && !afterExists) {\n\t\tconst columns: ColumnDelta[] = [];\n\t\tconst deltaId = await generateDeltaId({ clientId, hlc, table, rowId, columns });\n\t\treturn { op: \"DELETE\", table, rowId, clientId, columns, hlc, deltaId };\n\t}\n\n\t// UPDATE: both states exist — compare columns\n\tconst columns = diffColumns(before!, after!, schema);\n\tif (columns.length === 0) {\n\t\treturn null;\n\t}\n\n\tconst deltaId = await generateDeltaId({ clientId, hlc, table, rowId, columns });\n\treturn { op: \"UPDATE\", table, rowId, clientId, columns, hlc, deltaId };\n}\n\n/** Build an allow-set from a schema, or null if no schema is provided. */\nfunction allowedSet(schema?: TableSchema): Set<string> | null {\n\treturn schema ? 
new Set(schema.columns.map((c) => c.name)) : null;\n}\n\n/**\n * Build column deltas from a row, optionally filtered by schema.\n * Skips columns whose value is undefined (treated as absent).\n */\nfunction buildColumns(row: Record<string, unknown>, schema?: TableSchema): ColumnDelta[] {\n\tconst allowed = allowedSet(schema);\n\tconst columns: ColumnDelta[] = [];\n\n\tfor (const [key, value] of Object.entries(row)) {\n\t\tif (value === undefined) continue;\n\t\tif (allowed && !allowed.has(key)) continue;\n\t\tcolumns.push({ column: key, value });\n\t}\n\n\treturn columns;\n}\n\n/**\n * Diff two row objects and return only the changed columns.\n * Uses Object.is() for primitives and fast-deep-equal for objects/arrays.\n */\nfunction diffColumns(\n\tbefore: Record<string, unknown>,\n\tafter: Record<string, unknown>,\n\tschema?: TableSchema,\n): ColumnDelta[] {\n\tconst allowed = allowedSet(schema);\n\tconst allKeys = new Set([...Object.keys(before), ...Object.keys(after)]);\n\tconst columns: ColumnDelta[] = [];\n\n\tfor (const key of allKeys) {\n\t\tif (allowed && !allowed.has(key)) continue;\n\n\t\tconst beforeVal = before[key];\n\t\tconst afterVal = after[key];\n\n\t\t// Skip absent or removed columns\n\t\tif (afterVal === undefined) continue;\n\n\t\t// New column — before was undefined\n\t\tif (beforeVal === undefined) {\n\t\t\tcolumns.push({ column: key, value: afterVal });\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Exact primitive equality (handles NaN, +0/-0)\n\t\tif (Object.is(beforeVal, afterVal)) continue;\n\n\t\t// Deep equality for objects/arrays (key-order-agnostic)\n\t\tif (\n\t\t\ttypeof beforeVal === \"object\" &&\n\t\t\tbeforeVal !== null &&\n\t\t\ttypeof afterVal === \"object\" &&\n\t\t\tafterVal !== null &&\n\t\t\tequal(beforeVal, afterVal)\n\t\t) {\n\t\t\tcontinue;\n\t\t}\n\n\t\tcolumns.push({ column: key, value: afterVal });\n\t}\n\n\treturn columns;\n}\n\n/**\n * Generate a deterministic delta ID using SHA-256.\n * Uses the Web Crypto API (works in both Bun and browsers).\n */\nasync function generateDeltaId(params: {\n\tclientId: string;\n\thlc: HLCTimestamp;\n\ttable: string;\n\trowId: string;\n\tcolumns: ColumnDelta[];\n}): Promise<string> {\n\tconst payload = stableStringify({\n\t\tclientId: params.clientId,\n\t\thlc: params.hlc.toString(),\n\t\ttable: params.table,\n\t\trowId: params.rowId,\n\t\tcolumns: params.columns,\n\t});\n\n\tconst data = new TextEncoder().encode(payload);\n\tconst hashBuffer = await crypto.subtle.digest(\"SHA-256\", data);\n\tconst bytes = new Uint8Array(hashBuffer);\n\n\tlet hex = \"\";\n\tfor (const b of bytes) {\n\t\thex += b.toString(16).padStart(2, \"0\");\n\t}\n\treturn hex;\n}\n","import type { HLCTimestamp } from \"../hlc/types\";\n\n/** Delta operation type */\nexport type DeltaOp = \"INSERT\" | \"UPDATE\" | \"DELETE\";\n\n/** A single column-level change */\nexport interface ColumnDelta {\n\t/** Column name */\n\tcolumn: string;\n\t/** Serialisable JSON value — NEVER undefined, use null instead */\n\tvalue: unknown;\n}\n\n/** A row-level delta containing column-level changes */\nexport interface RowDelta {\n\t/** Operation type */\n\top: DeltaOp;\n\t/** Table name */\n\ttable: string;\n\t/** Row identifier */\n\trowId: string;\n\t/** Client identifier — used for LWW tiebreak and audit */\n\tclientId: string;\n\t/** Changed columns — empty for DELETE */\n\tcolumns: ColumnDelta[];\n\t/** HLC timestamp (branded bigint) */\n\thlc: HLCTimestamp;\n\t/** Deterministic identifier: hash(clientId + hlc + table + rowId + columns) */\n\tdeltaId: 
string;\n}\n\n/** Minimal schema for Phase 1. Column allow-list + type hints. */\nexport interface TableSchema {\n\ttable: string;\n\tcolumns: Array<{\n\t\tname: string;\n\t\ttype: \"string\" | \"number\" | \"boolean\" | \"json\" | \"null\";\n\t}>;\n}\n\n/** Composite key utility — avoids string concatenation bugs */\nexport type RowKey = string & { readonly __brand: \"RowKey\" };\n\n/** Create a composite row key from table and row ID */\nexport function rowKey(table: string, rowId: string): RowKey {\n\treturn `${table}:${rowId}` as RowKey;\n}\n\n/** SyncPush input message — sent by clients to push local deltas to the gateway */\nexport interface SyncPush {\n\t/** Client that sent the push */\n\tclientId: string;\n\t/** Deltas to push */\n\tdeltas: RowDelta[];\n\t/** Client's last-seen HLC */\n\tlastSeenHlc: HLCTimestamp;\n}\n\n/** SyncPull input message — sent by clients to pull remote deltas from the gateway */\nexport interface SyncPull {\n\t/** Client that sent the pull */\n\tclientId: string;\n\t/** Return deltas with HLC strictly after this value */\n\tsinceHlc: HLCTimestamp;\n\t/** Maximum number of deltas to return */\n\tmaxDeltas: number;\n\t/** Optional source adapter name — when set, pull from the named adapter instead of the buffer */\n\tsource?: string;\n}\n\n/** SyncResponse output — returned by the gateway after push or pull */\nexport interface SyncResponse {\n\t/** Deltas matching the pull criteria */\n\tdeltas: RowDelta[];\n\t/** Current server HLC */\n\tserverHlc: HLCTimestamp;\n\t/** Whether there are more deltas to fetch */\n\thasMore: boolean;\n}\n","/**\n * BigInt-safe JSON replacer.\n *\n * Converts BigInt values to strings so they survive `JSON.stringify`,\n * which otherwise throws on BigInt.\n */\nexport function bigintReplacer(_key: string, value: unknown): unknown {\n\treturn typeof value === \"bigint\" ? value.toString() : value;\n}\n\n/**\n * BigInt-aware JSON reviver.\n *\n * Restores string-encoded HLC timestamps (fields ending in `Hlc` or `hlc`)\n * back to BigInt so they match the branded `HLCTimestamp` type.\n *\n * Invalid numeric strings are left as-is to prevent runtime crashes.\n */\nexport function bigintReviver(key: string, value: unknown): unknown {\n\tif (typeof value === \"string\" && /hlc$/i.test(key)) {\n\t\ttry {\n\t\t\treturn BigInt(value);\n\t\t} catch {\n\t\t\treturn value;\n\t\t}\n\t}\n\treturn value;\n}\n","import type { SyncRulesConfig } from \"./types\";\n\n/**\n * Create a pass-all sync rules configuration.\n *\n * Every delta reaches every client — equivalent to having no rules at all.\n * Useful for apps without multi-tenancy or per-user data isolation.\n */\nexport function createPassAllRules(): SyncRulesConfig {\n\treturn {\n\t\tversion: 1,\n\t\tbuckets: [\n\t\t\t{\n\t\t\t\tname: \"all\",\n\t\t\t\ttables: [],\n\t\t\t\tfilters: [],\n\t\t\t},\n\t\t],\n\t};\n}\n\n/**\n * Create user-scoped sync rules configuration.\n *\n * Filters rows by matching a configurable column against the JWT `sub` claim,\n * so each client only receives deltas belonging to the authenticated user.\n *\n * @param tables - Which tables to scope. Empty array means all tables.\n * @param userColumn - Column to match against `jwt:sub`. 
Defaults to `\"user_id\"`.\n */\nexport function createUserScopedRules(tables: string[], userColumn = \"user_id\"): SyncRulesConfig {\n\treturn {\n\t\tversion: 1,\n\t\tbuckets: [\n\t\t\t{\n\t\t\t\tname: \"user\",\n\t\t\t\ttables,\n\t\t\t\tfilters: [\n\t\t\t\t\t{\n\t\t\t\t\t\tcolumn: userColumn,\n\t\t\t\t\t\top: \"eq\",\n\t\t\t\t\t\tvalue: \"jwt:sub\",\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n}\n","import { LakeSyncError } from \"../result/errors\";\n\n/** Sync rule configuration or evaluation error */\nexport class SyncRuleError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"SYNC_RULE_ERROR\", cause);\n\t}\n}\n","import type { RowDelta } from \"../delta/types\";\nimport { Err, Ok, type Result } from \"../result/result\";\nimport { SyncRuleError } from \"./errors\";\nimport type {\n\tBucketDefinition,\n\tResolvedClaims,\n\tSyncRuleFilter,\n\tSyncRulesConfig,\n\tSyncRulesContext,\n} from \"./types\";\n\n/**\n * Resolve a filter value, substituting JWT claim references.\n *\n * Values prefixed with `jwt:` are looked up in the claims record.\n * Literal values are returned as-is (wrapped in an array for uniform handling).\n *\n * @param value - The filter value string (e.g. \"jwt:sub\" or \"tenant-1\")\n * @param claims - Resolved JWT claims\n * @returns An array of resolved values, or an empty array if the claim is missing\n */\nexport function resolveFilterValue(value: string, claims: ResolvedClaims): string[] {\n\tif (!value.startsWith(\"jwt:\")) {\n\t\treturn [value];\n\t}\n\n\tconst claimKey = value.slice(4);\n\tconst claimValue = claims[claimKey];\n\n\tif (claimValue === undefined) {\n\t\treturn [];\n\t}\n\n\treturn Array.isArray(claimValue) ? claimValue : [claimValue];\n}\n\n/**\n * Check whether a delta matches a single bucket definition.\n *\n * A delta matches if:\n * 1. The bucket's `tables` list is empty (matches all tables) or includes the delta's table\n * 2. 
All filters match (conjunctive AND):\n * - `eq`: the delta column value equals one of the resolved filter values\n * - `in`: the delta column value is contained in the resolved filter values\n *\n * @param delta - The row delta to evaluate\n * @param bucket - The bucket definition\n * @param claims - Resolved JWT claims\n * @returns true if the delta matches this bucket\n */\nexport function deltaMatchesBucket(\n\tdelta: RowDelta,\n\tbucket: BucketDefinition,\n\tclaims: ResolvedClaims,\n): boolean {\n\t// Table filter: empty tables list = match all\n\tif (bucket.tables.length > 0 && !bucket.tables.includes(delta.table)) {\n\t\treturn false;\n\t}\n\n\t// All filters must match (conjunctive AND)\n\tfor (const filter of bucket.filters) {\n\t\tif (!filterMatchesDelta(delta, filter, claims)) {\n\t\t\treturn false;\n\t\t}\n\t}\n\n\treturn true;\n}\n\n/**\n * Compare two values using a comparison operator.\n * Attempts numeric comparison first; falls back to string localeCompare.\n */\nfunction compareValues(\n\tdeltaValue: string,\n\tfilterValue: string,\n\top: \"gt\" | \"lt\" | \"gte\" | \"lte\",\n): boolean {\n\tconst numDelta = parseFloat(deltaValue);\n\tconst numFilter = parseFloat(filterValue);\n\tconst useNumeric = !Number.isNaN(numDelta) && !Number.isNaN(numFilter);\n\n\tif (useNumeric) {\n\t\tswitch (op) {\n\t\t\tcase \"gt\":\n\t\t\t\treturn numDelta > numFilter;\n\t\t\tcase \"lt\":\n\t\t\t\treturn numDelta < numFilter;\n\t\t\tcase \"gte\":\n\t\t\t\treturn numDelta >= numFilter;\n\t\t\tcase \"lte\":\n\t\t\t\treturn numDelta <= numFilter;\n\t\t}\n\t}\n\n\tconst cmp = deltaValue.localeCompare(filterValue);\n\tswitch (op) {\n\t\tcase \"gt\":\n\t\t\treturn cmp > 0;\n\t\tcase \"lt\":\n\t\t\treturn cmp < 0;\n\t\tcase \"gte\":\n\t\t\treturn cmp >= 0;\n\t\tcase \"lte\":\n\t\t\treturn cmp <= 0;\n\t}\n}\n\nconst FILTER_OPS: Record<string, (dv: string, rv: string[]) => boolean> = {\n\teq: (dv, rv) => rv.includes(dv),\n\tin: (dv, rv) => rv.includes(dv),\n\tneq: (dv, rv) => !rv.includes(dv),\n\tgt: (dv, rv) => compareValues(dv, rv[0]!, \"gt\"),\n\tlt: (dv, rv) => compareValues(dv, rv[0]!, \"lt\"),\n\tgte: (dv, rv) => compareValues(dv, rv[0]!, \"gte\"),\n\tlte: (dv, rv) => compareValues(dv, rv[0]!, \"lte\"),\n};\n\n/**\n * Check whether a single filter matches a delta's column values.\n */\nfunction filterMatchesDelta(\n\tdelta: RowDelta,\n\tfilter: SyncRuleFilter,\n\tclaims: ResolvedClaims,\n): boolean {\n\tconst col = delta.columns.find((c) => c.column === filter.column);\n\tif (!col) {\n\t\t// Column not present in delta — filter does not match\n\t\treturn false;\n\t}\n\n\tconst deltaValue = String(col.value);\n\tconst resolvedValues = resolveFilterValue(filter.value, claims);\n\n\tif (resolvedValues.length === 0) {\n\t\t// JWT claim missing — filter cannot match\n\t\treturn false;\n\t}\n\n\treturn FILTER_OPS[filter.op]?.(deltaValue, resolvedValues) ?? 
false;\n}\n\n/**\n * Filter an array of deltas by sync rules.\n *\n * A delta is included if it matches **any** bucket (union across buckets).\n * If no sync rules are configured (empty buckets), all deltas pass through.\n *\n * @param deltas - The deltas to filter\n * @param context - Sync rules context (rules + resolved claims)\n * @returns Filtered array of deltas\n */\nexport function filterDeltas(deltas: RowDelta[], context: SyncRulesContext): RowDelta[] {\n\tif (context.rules.buckets.length === 0) {\n\t\treturn deltas;\n\t}\n\n\treturn deltas.filter((delta) =>\n\t\tcontext.rules.buckets.some((bucket) => deltaMatchesBucket(delta, bucket, context.claims)),\n\t);\n}\n\n/**\n * Determine which buckets a client matches based on their claims.\n *\n * A client matches a bucket if the bucket has no table-level restrictions\n * or if the client's claims satisfy all filter conditions for at least\n * one possible row. This is used for bucket-level access decisions, not\n * row-level filtering.\n *\n * @param rules - The sync rules configuration\n * @param claims - Resolved JWT claims\n * @returns Array of bucket names the client matches\n */\nexport function resolveClientBuckets(rules: SyncRulesConfig, claims: ResolvedClaims): string[] {\n\treturn rules.buckets\n\t\t.filter((bucket) => {\n\t\t\t// A client matches a bucket if all JWT-referenced filters\n\t\t\t// can be resolved (i.e. the required claims exist)\n\t\t\tfor (const filter of bucket.filters) {\n\t\t\t\tif (filter.value.startsWith(\"jwt:\")) {\n\t\t\t\t\tconst resolved = resolveFilterValue(filter.value, claims);\n\t\t\t\t\tif (resolved.length === 0) {\n\t\t\t\t\t\treturn false;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn true;\n\t\t})\n\t\t.map((b) => b.name);\n}\n\n/**\n * Validate a sync rules configuration for structural correctness.\n *\n * Checks:\n * - Version is a positive integer\n * - Buckets is an array\n * - Each bucket has a non-empty name, valid tables array, valid filters\n * - Filter operators are \"eq\" or \"in\"\n * - Filter values and columns are non-empty strings\n * - Bucket names are unique\n *\n * @param config - The sync rules configuration to validate\n * @returns Ok(void) if valid, Err(SyncRuleError) with details if invalid\n */\nexport function validateSyncRules(config: unknown): Result<void, SyncRuleError> {\n\tif (typeof config !== \"object\" || config === null) {\n\t\treturn Err(new SyncRuleError(\"Sync rules config must be an object\"));\n\t}\n\n\tconst obj = config as Record<string, unknown>;\n\n\tif (typeof obj.version !== \"number\" || !Number.isInteger(obj.version) || obj.version < 1) {\n\t\treturn Err(new SyncRuleError(\"Sync rules version must be a positive integer\"));\n\t}\n\n\tif (!Array.isArray(obj.buckets)) {\n\t\treturn Err(new SyncRuleError(\"Sync rules buckets must be an array\"));\n\t}\n\n\tconst seenNames = new Set<string>();\n\n\tfor (let i = 0; i < obj.buckets.length; i++) {\n\t\tconst bucket = obj.buckets[i] as Record<string, unknown>;\n\n\t\tif (typeof bucket !== \"object\" || bucket === null) {\n\t\t\treturn Err(new SyncRuleError(`Bucket at index ${i} must be an object`));\n\t\t}\n\n\t\tif (typeof bucket.name !== \"string\" || bucket.name.length === 0) {\n\t\t\treturn Err(new SyncRuleError(`Bucket at index ${i} must have a non-empty name`));\n\t\t}\n\n\t\tif (seenNames.has(bucket.name as string)) {\n\t\t\treturn Err(new SyncRuleError(`Duplicate bucket name: \"${bucket.name}\"`));\n\t\t}\n\t\tseenNames.add(bucket.name as string);\n\n\t\tif (!Array.isArray(bucket.tables)) 
{\n\t\t\treturn Err(new SyncRuleError(`Bucket \"${bucket.name}\" tables must be an array`));\n\t\t}\n\n\t\tfor (const table of bucket.tables as unknown[]) {\n\t\t\tif (typeof table !== \"string\" || table.length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SyncRuleError(`Bucket \"${bucket.name}\" tables must contain non-empty strings`),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\tif (!Array.isArray(bucket.filters)) {\n\t\t\treturn Err(new SyncRuleError(`Bucket \"${bucket.name}\" filters must be an array`));\n\t\t}\n\n\t\tfor (let j = 0; j < (bucket.filters as unknown[]).length; j++) {\n\t\t\tconst filter = (bucket.filters as Record<string, unknown>[])[j]!;\n\n\t\t\tif (typeof filter !== \"object\" || filter === null) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SyncRuleError(`Bucket \"${bucket.name}\" filter at index ${j} must be an object`),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tif (typeof filter.column !== \"string\" || (filter.column as string).length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SyncRuleError(\n\t\t\t\t\t\t`Bucket \"${bucket.name}\" filter at index ${j} must have a non-empty column`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst validOps = [\"eq\", \"in\", \"neq\", \"gt\", \"lt\", \"gte\", \"lte\"];\n\t\t\tif (!validOps.includes(filter.op as string)) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SyncRuleError(\n\t\t\t\t\t\t`Bucket \"${bucket.name}\" filter at index ${j} op must be one of: ${validOps.join(\", \")}`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tif (typeof filter.value !== \"string\" || (filter.value as string).length === 0) {\n\t\t\t\treturn Err(\n\t\t\t\t\tnew SyncRuleError(\n\t\t\t\t\t\t`Bucket \"${bucket.name}\" filter at index ${j} must have a non-empty value`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\t}\n\n\treturn Ok(undefined);\n}\n","import { SchemaError } from \"../result/errors\";\nimport { Err, Ok, type Result } from \"../result/result\";\n\n/** Valid SQL identifier: starts with letter or underscore, alphanumeric + underscore, max 64 chars. */\nconst IDENTIFIER_RE = /^[a-zA-Z_][a-zA-Z0-9_]{0,63}$/;\n\n/**\n * Check whether a string is a valid SQL identifier.\n *\n * Valid identifiers start with a letter or underscore, contain only\n * alphanumeric characters and underscores, and are at most 64 characters long.\n *\n * @param name - The identifier to validate\n * @returns `true` if valid, `false` otherwise\n */\nexport function isValidIdentifier(name: string): boolean {\n\treturn IDENTIFIER_RE.test(name);\n}\n\n/**\n * Assert that a string is a valid SQL identifier, returning a Result.\n *\n * @param name - The identifier to validate\n * @returns Ok(undefined) if valid, Err(SchemaError) if invalid\n */\nexport function assertValidIdentifier(name: string): Result<void, SchemaError> {\n\tif (isValidIdentifier(name)) {\n\t\treturn Ok(undefined);\n\t}\n\treturn Err(\n\t\tnew SchemaError(\n\t\t\t`Invalid SQL identifier: \"${name}\". 
Identifiers must start with a letter or underscore, contain only alphanumeric characters and underscores, and be at most 64 characters long.`,\n\t\t),\n\t);\n}\n\n/**\n * Quote a SQL identifier using double quotes as defence-in-depth.\n *\n * Any embedded double-quote characters are escaped by doubling them,\n * following the SQL standard for delimited identifiers.\n *\n * @param name - The identifier to quote\n * @returns The double-quoted identifier string\n */\nexport function quoteIdentifier(name: string): string {\n\treturn `\"${name.replace(/\"/g, '\"\"')}\"`;\n}\n"],"mappings":";AACO,IAAM,gBAAN,cAA4B,MAAM;AAAA,EAC/B;AAAA,EACS;AAAA,EAElB,YAAY,SAAiB,MAAc,OAAe;AACzD,UAAM,OAAO;AACb,SAAK,OAAO,KAAK,YAAY;AAC7B,SAAK,OAAO;AACZ,SAAK,QAAQ;AAAA,EACd;AACD;AAGO,IAAM,kBAAN,cAA8B,cAAc;AAAA,EAClD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,eAAe,KAAK;AAAA,EACpC;AACD;AAGO,IAAM,gBAAN,cAA4B,cAAc;AAAA,EAChD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,YAAY,KAAK;AAAA,EACjC;AACD;AAGO,IAAM,aAAN,cAAyB,cAAc;AAAA,EAC7C,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,gBAAgB,KAAK;AAAA,EACrC;AACD;AAGO,IAAM,cAAN,cAA0B,cAAc;AAAA,EAC9C,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,mBAAmB,KAAK;AAAA,EACxC;AACD;AAGO,IAAM,eAAN,cAA2B,cAAc;AAAA,EAC/C,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,iBAAiB,KAAK;AAAA,EACtC;AACD;AAGO,IAAM,uBAAN,cAAmC,cAAc;AAAA,EACvD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,qBAAqB,KAAK;AAAA,EAC1C;AACD;AAGO,IAAM,oBAAN,cAAgC,cAAc;AAAA,EACpD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,gBAAgB,KAAK;AAAA,EACrC;AACD;AAGO,SAAS,QAAQ,KAAqB;AAC5C,SAAO,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAC1D;;;AC9DO,IAAM,uBAAN,cAAmC,cAAc;AAAA,EAC9C;AAAA,EAET,YAAY,SAAiB,WAAoB,OAAe;AAC/D,UAAM,SAAS,0BAA0B,KAAK;AAC9C,SAAK,YAAY;AAAA,EAClB;AACD;AAGO,IAAM,0BAAN,cAAsC,cAAc;AAAA,EAC1D,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,wBAAwB,KAAK;AAAA,EAC7C;AACD;AAGO,IAAM,wBAAN,cAAoC,cAAc;AAAA,EACxD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,2BAA2B,KAAK;AAAA,EAChD;AACD;;;ACxBA,OAAO,qBAAqB;AAS5B,eAAsB,iBAAiB,QAMnB;AACnB,QAAM,UAAU,gBAAgB;AAAA,IAC/B,UAAU,OAAO;AAAA,IACjB,KAAK,OAAO,IAAI,SAAS;AAAA,IACzB,WAAW,OAAO;AAAA,IAClB,YAAY,OAAO;AAAA,IACnB,QAAQ,OAAO;AAAA,EAChB,CAAC;AAED,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,OAAO;AAC7C,QAAM,aAAa,MAAM,OAAO,OAAO,OAAO,WAAW,IAAI;AAC7D,QAAM,QAAQ,IAAI,WAAW,UAAU;AAEvC,MAAI,MAAM;AACV,aAAW,KAAK,OAAO;AACtB,WAAO,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAAA,EACtC;AACA,SAAO;AACR;;;ACiCO,SAAS,cACf,QAC8B;AAC9B,SAAO,UAAU,UAAU,eAAe;AAC3C;;;AChEO,SAAS,GAAM,OAA4B;AACjD,SAAO,EAAE,IAAI,MAAM,MAAM;AAC1B;AAGO,SAAS,IAAO,OAA4B;AAClD,SAAO,EAAE,IAAI,OAAO,MAAM;AAC3B;AAGO,SAAS,UAAmB,QAAsB,IAAmC;AAC3F,MAAI,OAAO,IAAI;AACd,WAAO,GAAG,GAAG,OAAO,KAAK,CAAC;AAAA,EAC3B;AACA,SAAO;AACR;AAGO,SAAS,cACf,QACA,IACe;AACf,MAAI,OAAO,IAAI;AACd,WAAO,GAAG,OAAO,KAAK;AAAA,EACvB;AACA,SAAO;AACR;AAGO,SAAS,cAAoB,QAAyB;AAC5D,MAAI,OAAO,IAAI;AACd,WAAO,OAAO;AAAA,EACf;AACA,QAAM,OAAO;AACd;AAGA,eAAsB,YAAe,SAAgD;AACpF,MAAI;AACH,UAAM,QAAQ,MAAM;AACpB,WAAO,GAAG,KAAK;AAAA,EAChB,SAAS,OAAO;AACf,WAAO,IAAI,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAAA,EACrE;AACD;;;ACxCO,SAAS,eAAe,QAAwD;AACtF,MAAI,WAAW,QAAQ,OAAO,WAAW,UAAU;AAClD,WAAO,IAAI,IAAI,sBAAsB,kCAAkC,CAAC;AAAA,EACzE;AAEA,QAAM,IAAI;AAEV,MAAI,OAAO,EAAE,aAAa,YAAY,EAAE,SAAS,WAAW,GAAG;AAC9D,WAAO,IAAI,IAAI,sBAAsB,qCAAqC,CAAC;AAAA,EAC5E;AAEA,MAAI,OAAO,EAAE,aAAa,YAAY,EAAE,SAAS,WAAW,GAAG;AAC9D,WAAO,IAAI,IAAI,sBAAsB,qCAAqC,CAAC;AAAA,EAC5E;AAEA,MAAI,OAAO,EAAE,QAAQ,UAAU;AAC9B,WAAO,IAAI,IAAI,sBAAsB,sBAAsB,CAAC;AAAA,EAC7D;AAEA,MAAI,OAAO,EAAE,cAAc,YAAY,EAAE,UAAU,WAAW,GAAG;AAChE,WAAO,IAAI,IAAI,sBAAsB,sCAAsC,CAAC;AAAA,EAC7E;AAEA,MAAI,OAAO,EAAE,eAAe,YAAY,EAAE,WAAW,WAAW,GAAG;AAClE,WAAO,IAAI,IAAI,sBAAsB,uCAAuC,CAAC;AAAA,EAC9E;AAEA,MAAI,EAAE,WAAW,QAAQ,OAAO,EA
AE,WAAW,YAAY,MAAM,QAAQ,EAAE,MAAM,GAAG;AACjF,WAAO,IAAI,IAAI,sBAAsB,kCAAkC,CAAC;AAAA,EACzE;AAEA,MAAI,EAAE,mBAAmB,UAAa,OAAO,EAAE,mBAAmB,UAAU;AAC3E,WAAO,IAAI,IAAI,sBAAsB,6CAA6C,CAAC;AAAA,EACpF;AAEA,SAAO,GAAG,MAAgB;AAC3B;;;ACVO,IAAM,YAAN,cAAwB,MAAM;AAAA,EACpC,YAAY,SAAiB;AAC5B,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACb;AACD;AAoBA,SAAS,gBAAgB,OAA2B;AAEnD,QAAM,SAAS,MAAM,QAAQ,MAAM,GAAG,EAAE,QAAQ,MAAM,GAAG;AACzD,QAAM,SAAS,OAAO,OAAO,OAAO,UAAW,IAAK,OAAO,SAAS,KAAM,GAAI,GAAG;AACjF,QAAM,SAAS,KAAK,MAAM;AAC1B,QAAM,QAAQ,IAAI,WAAW,OAAO,MAAM;AAC1C,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACvC,UAAM,CAAC,IAAI,OAAO,WAAW,CAAC;AAAA,EAC/B;AACA,SAAO;AACR;AAKA,SAAS,UAAU,MAAuB;AACzC,MAAI;AACH,WAAO,KAAK,MAAM,IAAI;AAAA,EACvB,QAAQ;AACP,WAAO;AAAA,EACR;AACD;AAYA,eAAsB,YACrB,OACA,QACyC;AAEzC,QAAM,QAAQ,MAAM,MAAM,GAAG;AAC7B,MAAI,MAAM,WAAW,GAAG;AACvB,WAAO,IAAI,IAAI,UAAU,sDAAsD,CAAC;AAAA,EACjF;AAEA,QAAM,CAAC,WAAW,YAAY,YAAY,IAAI;AAC9C,MAAI,CAAC,aAAa,CAAC,cAAc,CAAC,cAAc;AAC/C,WAAO,IAAI,IAAI,UAAU,8BAA8B,CAAC;AAAA,EACzD;AAGA,MAAI;AACJ,MAAI;AACH,kBAAc,gBAAgB,SAAS;AAAA,EACxC,QAAQ;AACP,WAAO,IAAI,IAAI,UAAU,4CAA4C,CAAC;AAAA,EACvE;AAEA,QAAM,SAAS,UAAU,IAAI,YAAY,EAAE,OAAO,WAAW,CAAC;AAC9D,MAAI,CAAC,UAAU,OAAO,QAAQ,WAAW,OAAO,QAAQ,OAAO;AAC9D,WAAO,IAAI,IAAI,UAAU,6DAA6D,CAAC;AAAA,EACxF;AAGA,QAAM,UAAU,IAAI,YAAY;AAChC,QAAM,UAAU,QAAQ,OAAO,MAAM;AAErC,MAAI;AACJ,MAAI;AACH,gBAAY,MAAO,OAAO,OAAiC;AAAA,MAC1D;AAAA,MACA;AAAA,MACA,EAAE,MAAM,QAAQ,MAAM,UAAU;AAAA,MAChC;AAAA,MACA,CAAC,QAAQ;AAAA,IACV;AAAA,EACD,QAAQ;AACP,WAAO,IAAI,IAAI,UAAU,2BAA2B,CAAC;AAAA,EACtD;AAGA,MAAI;AACJ,MAAI;AACH,qBAAiB,gBAAgB,YAAY;AAAA,EAC9C,QAAQ;AACP,WAAO,IAAI,IAAI,UAAU,+CAA+C,CAAC;AAAA,EAC1E;AAEA,QAAM,eAAe,QAAQ,OAAO,GAAG,SAAS,IAAI,UAAU,EAAE;AAEhE,MAAI;AACJ,MAAI;AACH,YAAQ,MAAO,OAAO,OAAiC;AAAA,MACtD;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAAA,EACD,QAAQ;AACP,WAAO,IAAI,IAAI,UAAU,+BAA+B,CAAC;AAAA,EAC1D;AAEA,MAAI,CAAC,OAAO;AACX,WAAO,IAAI,IAAI,UAAU,uBAAuB,CAAC;AAAA,EAClD;AAGA,MAAI;AACJ,MAAI;AACH,mBAAe,gBAAgB,UAAU;AAAA,EAC1C,QAAQ;AACP,WAAO,IAAI,IAAI,UAAU,6CAA6C,CAAC;AAAA,EACxE;AAEA,QAAM,UAAU,UAAU,IAAI,YAAY,EAAE,OAAO,YAAY,CAAC;AAChE,MAAI,CAAC,SAAS;AACb,WAAO,IAAI,IAAI,UAAU,0CAA0C,CAAC;AAAA,EACrE;AAGA,MAAI,QAAQ,QAAQ,UAAa,OAAO,QAAQ,QAAQ,UAAU;AACjE,WAAO,IAAI,IAAI,UAAU,yCAAyC,CAAC;AAAA,EACpE;AACA,QAAM,aAAa,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AAC/C,MAAI,QAAQ,OAAO,YAAY;AAC9B,WAAO,IAAI,IAAI,UAAU,iBAAiB,CAAC;AAAA,EAC5C;AAGA,MAAI,OAAO,QAAQ,QAAQ,YAAY,QAAQ,IAAI,WAAW,GAAG;AAChE,WAAO,IAAI,IAAI,UAAU,2CAA2C,CAAC;AAAA,EACtE;AAEA,MAAI,OAAO,QAAQ,OAAO,YAAY,QAAQ,GAAG,WAAW,GAAG;AAC9D,WAAO,IAAI,IAAI,UAAU,2CAA2C,CAAC;AAAA,EACtE;AAGA,QAAM,iBAAiB,oBAAI,IAAI,CAAC,OAAO,MAAM,OAAO,OAAO,OAAO,OAAO,MAAM,CAAC;AAChF,QAAM,eAAkD,CAAC;AAEzD,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AACnD,QAAI,eAAe,IAAI,GAAG,EAAG;AAC7B,QAAI,OAAO,UAAU,UAAU;AAC9B,mBAAa,GAAG,IAAI;AAAA,IACrB,WAAW,MAAM,QAAQ,KAAK,KAAK,MAAM,MAAM,CAAC,MAAM,OAAO,MAAM,QAAQ,GAAG;AAC7E,mBAAa,GAAG,IAAI;AAAA,IACrB;AAAA,EACD;AAGA,eAAa,MAAM,QAAQ;AAG3B,QAAM,OACL,OAAO,QAAQ,SAAS,YAAY,QAAQ,KAAK,SAAS,IAAI,QAAQ,OAAO;AAE9E,SAAO,GAAG;AAAA,IACT,UAAU,QAAQ;AAAA,IAClB,WAAW,QAAQ;AAAA,IACnB;AAAA,IACA;AAAA,EACD,CAAC;AACF;;;ACnNO,IAAM,MAAN,MAAM,KAAI;AAAA,EACC;AAAA,EACT,UAAU;AAAA,EACV,WAAW;AAAA;AAAA,EAGnB,OAAgB,eAAe;AAAA;AAAA,EAG/B,OAAgB,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ9B,YAAY,WAA0B;AACrC,SAAK,YAAY,cAAc,MAAM,KAAK,IAAI;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAoB;AACnB,UAAM,WAAW,KAAK,UAAU;AAChC,UAAM,OAAO,KAAK,IAAI,UAAU,KAAK,QAAQ;AAE7C,QAAI,SAAS,KAAK,UAAU;AAC3B,WAAK;AACL,UAAI,KAAK,UAAU,KAAI,aAAa;AAEnC,aAAK,WAAW,OAAO;AACvB,aAAK,UAAU;AAAA,MAChB;AAAA,IACD,OAAO;AACN,WAAK,WAAW;AAChB,WAAK,UAAU;AAAA,I
AChB;AAEA,WAAO,KAAI,OAAO,KAAK,UAAU,KAAK,OAAO;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAK,QAA6D;AACjE,UAAM,EAAE,MAAM,YAAY,SAAS,cAAc,IAAI,KAAI,OAAO,MAAM;AACtE,UAAM,WAAW,KAAK,UAAU;AAChC,UAAM,YAAY,KAAK,IAAI,UAAU,KAAK,QAAQ;AAGlD,QAAI,aAAa,WAAW,KAAI,cAAc;AAC7C,aAAO;AAAA,QACN,IAAI;AAAA,UACH,mBAAmB,aAAa,QAAQ,wBAAwB,KAAI,YAAY;AAAA,QACjF;AAAA,MACD;AAAA,IACD;AAEA,QAAI,aAAa,WAAW;AAC3B,WAAK,WAAW;AAChB,WAAK,UAAU,gBAAgB;AAAA,IAChC,WAAW,eAAe,WAAW;AACpC,WAAK,WAAW;AAChB,WAAK,UAAU,KAAK,IAAI,KAAK,SAAS,aAAa,IAAI;AAAA,IACxD,OAAO;AACN,WAAK,WAAW;AAChB,WAAK;AAAA,IACN;AAEA,QAAI,KAAK,UAAU,KAAI,aAAa;AAEnC,WAAK,WAAW,KAAK,WAAW;AAChC,WAAK,UAAU;AAAA,IAChB;AAEA,WAAO,GAAG,KAAI,OAAO,KAAK,UAAU,KAAK,OAAO,CAAC;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAO,OAAO,MAAc,SAA+B;AAC1D,WAAS,OAAO,IAAI,KAAK,MAAO,OAAO,UAAU,KAAM;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,OAAO,IAAqD;AAClE,WAAO;AAAA,MACN,MAAM,OAAO,MAAM,GAAG;AAAA,MACtB,SAAS,OAAO,KAAK,OAAO;AAAA,IAC7B;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,QAAQ,GAAiB,GAA6B;AAC5D,QAAI,IAAI,EAAG,QAAO;AAClB,QAAI,IAAI,EAAG,QAAO;AAClB,WAAO;AAAA,EACR;AACD;;;AC7GO,SAAS,eAAe,QAA4C;AAC1E,SACC,OAAQ,OAAwB,UAAU,cAC1C,OAAQ,OAAwB,gBAAgB,cAChD,iBAAiB;AAEnB;AAYA,IAAM,qBAAqB;AAC3B,IAAM,0BAA0B;AAMzB,IAAe,mBAAf,MAAgC;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACF;AAAA,EACT,QAA8C;AAAA,EAC9C,UAAU;AAAA,EAED;AAAA,EACA;AAAA,EACA;AAAA,EACT,gBAA4B,CAAC;AAAA,EAErC,YAAY,QAKT;AACF,SAAK,UAAU,OAAO;AACtB,SAAK,MAAM,IAAI,IAAI;AACnB,SAAK,WAAW,UAAU,OAAO,IAAI;AACrC,SAAK,aAAa,OAAO;AACzB,SAAK,YAAY,OAAO,QAAQ,aAAa;AAC7C,SAAK,oBAAoB,OAAO,QAAQ;AACxC,SAAK,iBAAiB,OAAO,QAAQ,kBAAkB;AAAA,EACxD;AAAA;AAAA,EAGA,QAAc;AACb,QAAI,KAAK,QAAS;AAClB,SAAK,UAAU;AACf,SAAK,aAAa;AAAA,EACnB;AAAA;AAAA,EAGA,OAAa;AACZ,SAAK,UAAU;AACf,QAAI,KAAK,OAAO;AACf,mBAAa,KAAK,KAAK;AACvB,WAAK,QAAQ;AAAA,IACd;AAAA,EACD;AAAA;AAAA,EAGA,IAAI,YAAqB;AACxB,WAAO,KAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,WAA0B;AAC/B,WAAO,KAAK,KAAK;AAAA,EAClB;AAAA;AAAA,EAGU,WAAW,QAA0B;AAC9C,QAAI,OAAO,WAAW,EAAG;AACzB,UAAM,OAAiB;AAAA,MACtB,UAAU,KAAK;AAAA,MACf;AAAA,MACA,aAAa;AAAA,IACd;AACA,SAAK,QAAQ,WAAW,IAAI;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAgB,gBAAgB,OAAgC;AAC/D,SAAK,cAAc,KAAK,KAAK;AAC7B,QAAI,KAAK,cAAc,UAAU,KAAK,WAAW;AAChD,YAAM,KAAK,iBAAiB;AAAA,IAC7B;AAAA,EACD;AAAA;AAAA,EAGA,MAAgB,mBAAkC;AACjD,QAAI,KAAK,cAAc,SAAS,GAAG;AAClC,YAAM,KAAK,iBAAiB;AAAA,IAC7B;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,mBAAkC;AAC/C,UAAM,QAAQ,KAAK;AACnB,SAAK,gBAAgB,CAAC;AACtB,UAAM,KAAK,mBAAmB,KAAK;AAAA,EACpC;AAAA,EAEA,MAAc,mBAAmB,OAAkC;AAClE,QAAI,MAAM,WAAW,EAAG;AAExB,UAAM,SAAS,KAAK;AAGpB,QAAI,eAAe,MAAM,GAAG;AAC3B,UAAI,KAAK,kBAAkB,MAAM,GAAG;AACnC,cAAM,OAAO,MAAM;AAAA,MACpB;AAAA,IACD;AAEA,UAAM,OAAiB;AAAA,MACtB,UAAU,KAAK;AAAA,MACf,QAAQ;AAAA,MACR,aAAa;AAAA,IACd;AAEA,UAAM,SAAS,OAAO,WAAW,IAAI;AAGrC,QAAI,UAAU,OAAO,WAAW,YAAY,QAAQ,UAAU,CAAC,OAAO,IAAI;AACzE,UAAI,eAAe,MAAM,GAAG;AAC3B,cAAM,OAAO,MAAM;AACnB,eAAO,WAAW,IAAI;AAAA,MACvB;AAAA,IACD;AAAA,EACD;AAAA,EAEQ,kBAAkB,QAA+B;AACxD,QAAI,OAAO,YAAY,EAAG,QAAO;AACjC,QAAI,KAAK,qBAAqB,MAAM;AACnC,YAAM,YAAY,KAAK,MAAM,KAAK,oBAAoB,KAAK,cAAc;AACzE,UAAI,OAAO,YAAY,YAAY,UAAW,QAAO;AAAA,IACtD;AACA,WAAO;AAAA,EACR;AAAA,EAEQ,eAAqB;AAC5B,QAAI,CAAC,KAAK,QAAS;AACnB,SAAK,QAAQ,WAAW,YAAY;AACnC,UAAI;AACH,cAAM,KAAK,KAAK;AAAA,MACjB,QAAQ;AAAA,MAER;AACA,WAAK,aAAa;AAAA,IACnB,GAAG,KAAK,UAAU;AAAA,EACnB;AACD;;;AClMO,IAAM,qBAAN,MAA+C;AAAA,EACpC;AAAA,EAEjB,YAAY,QAAkD;AAC7D,SAAK,SAAS;AAAA,EACf;AAAA,EAEA,WAAW,MAAsB;AAChC,SAAK,OAAO,IAAI;AAAA,EACjB;AACD;;;ACTO,IAAM,cAAN,MAA8C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA,EAgBpD,QAAQ,OAAiB,QAAmD;AAE3E,QAAI,MAAM,UAAU,OAAO,SAAS,MAAM,UAAU,OAAO,OAAO;AACjE,aAAO;AAAA,QACN,IAAI;AAAA,UACH,oDAAoD,MAAM,KAAK,IAAI,MAAM,KAAK,OAAO,OAAO,KAAK,IAAI,OAAO,KAAK;AAAA,QAClH;AAAA,MACD;AAAA,IACD;AAGA,UAAM,SAAS,WAAW,OAAO,MAAM;AAGvC,QAAI,MAAM,OAAO,YAAY,OAAO,OAAO,UAAU;AACpD,aAAO,GAAG,EAAE,GAAG,QAAQ,SAAS,CAAC,EAAE,CAAC;AAAA,IACrC;AAGA,QAAI,MAAM,OAAO,YAAY,OAAO,OAAO,UAAU;AACpD,YAAM,cAAc,MAAM,OAAO,WAAW,QAAQ;AACpD,YAAM,aAAa,MAAM,OAAO,WAAW,SAAS;AAGpD,UAAI,gBAAgB,QAAQ;AAC3B,eAAO,GAAG,EAAE,GAAG,aAAa,SAAS,CAAC,EAAE,CAAC;AAAA,MAC1C;AAEA,aAAO,GAAG,EAAE,GAAG,WAAW,CAAC;AAAA,IAC5B;AAGA,UAAM,gBAAgB,aAAa,OAAO,MAAM;AAGhD,UAAM,KAAc,MAAM,OAAO,YAAY,OAAO,OAAO,WAAW,WAAW;AAEjF,WAAO,GAAG;AAAA,MACT;AAAA,MACA,OAAO,MAAM;AAAA,MACb,OAAO,MAAM;AAAA,MACb,UAAU,OAAO;AAAA,MACjB,SAAS;AAAA,MACT,KAAK,OAAO;AAAA,MACZ,SAAS,OAAO;AAAA,IACjB,CAAC;AAAA,EACF;AACD;AASA,SAAS,WAAW,OAAiB,QAA4B;AAChE,QAAM,MAAM,IAAI,QAAQ,MAAM,KAAK,OAAO,GAAG;AAC7C,MAAI,MAAM,EAAG,QAAO;AACpB,MAAI,MAAM,EAAG,QAAO;AAEpB,SAAO,MAAM,WAAW,OAAO,WAAW,QAAQ;AACnD;AAaA,SAAS,aAAa,OAAiB,QAAiC;AACvE,QAAM,WAAW,IAAI,IAAI,MAAM,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AAChE,QAAM,YAAY,IAAI,IAAI,OAAO,QAAQ,IAAI,CAAC,MAAM,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AAClE,QAAM,aAAa,oBAAI,IAAI,CAAC,GAAG,SAAS,KAAK,GAAG,GAAG,UAAU,KAAK,CAAC,CAAC;AACpE,QAAM,SAAS,WAAW,OAAO,MAAM;AAEvC,QAAM,SAAwB,CAAC;AAE/B,aAAW,OAAO,YAAY;AAC7B,UAAM,WAAW,SAAS,IAAI,GAAG;AACjC,UAAM,YAAY,UAAU,IAAI,GAAG;AAEnC,QAAI,CAAC,WAAW;AACf,aAAO,KAAK,QAAS;AAAA,IACtB,WAAW,CAAC,UAAU;AACrB,aAAO,KAAK,SAAS;AAAA,IACtB,OAAO;AAEN,aAAO,KAAK,WAAW,QAAQ,WAAW,SAAS;AAAA,IACpD;AAAA,EACD;AAEA,SAAO;AACR;AAEA,IAAM,aAAa,IAAI,YAAY;AAW5B,SAAS,WAAW,OAAiB,QAAmD;AAC9F,SAAO,WAAW,QAAQ,OAAO,MAAM;AACxC;;;ACvGO,SAAS,gBAAgB,KAAoC;AACnE,MAAI,QAAQ,QAAQ,OAAO,QAAQ,SAAU,QAAO;AACpD,QAAM,YAAY;AAClB,SAAO,MAAM,QAAQ,UAAU,gBAAgB,KAAK,OAAO,UAAU,kBAAkB;AACxF;;;ACxCO,IAAM,2BAAN,cAAuC,cAAc;AAAA,EAC3D,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,wBAAwB,KAAK;AAAA,EAC7C;AACD;;;ACNO,IAAM,kBAAkB,CAAC,YAAY,SAAS,YAAY,QAAQ,YAAY;;;ACGrF,IAAM,mBAAmB,oBAAI,IAAI,CAAC,UAAU,MAAM,CAAC;AAc5C,SAAS,wBACf,OACoD;AACpD,MAAI,OAAO,UAAU,YAAY,UAAU,MAAM;AAChD,WAAO,IAAI,IAAI,yBAAyB,oCAAoC,CAAC;AAAA,EAC9E;AAEA,QAAM,MAAM;AAGZ,MAAI,OAAO,IAAI,SAAS,YAAY,IAAI,KAAK,WAAW,GAAG;AAC1D,WAAO,IAAI,IAAI,yBAAyB,2CAA2C,CAAC;AAAA,EACrF;AAGA,MAAI,OAAO,IAAI,SAAS,YAAY,CAAE,gBAAsC,SAAS,IAAI,IAAI,GAAG;AAC/F,WAAO;AAAA,MACN,IAAI,yBAAyB,kCAAkC,gBAAgB,KAAK,IAAI,CAAC,EAAE;AAAA,IAC5F;AAAA,EACD;AAEA,QAAM,gBAAgB,IAAI;AAG1B,UAAQ,eAAe;AAAA,IACtB,KAAK,YAAY;AAChB,YAAM,KAAK,IAAI;AACf,UAAI,OAAO,OAAO,YAAY,OAAO,MAAM;AAC1C,eAAO;AAAA,UACN,IAAI;AAAA,YACH;AAAA,UACD;AAAA,QACD;AAAA,MACD;AACA,YAAM,QAAQ;AACd,UAAI,OAAO,MAAM,qBAAqB,YAAY,MAAM,iBAAiB,WAAW,GAAG;AACtF,eAAO;AAAA,UACN,IAAI,yBAAyB,0DAA0D;AAAA,QACxF;AAAA,MACD;AACA;AAAA,IACD;AAAA,IACA,KAAK,SAAS;AACb,YAAM,KAAK,IAAI;AACf,UAAI,OAAO,OAAO,YAAY,OAAO,MAAM;AAC1C,eAAO;AAAA,UACN,IAAI,yBAAyB,uDAAuD;AAAA,QACrF;AAAA,MACD;AACA,YAAM,QAAQ;AACd,UAAI,OAAO,MAAM,qBAAqB,YAAY,MAAM,iBAAiB,WAAW,GAAG;AACtF,eAAO;AAAA,UACN,IAAI,yBAAyB,uDAAuD;AAAA,QACrF;AAAA,MACD;AACA;AAAA,IACD;AAAA,IACA,KAAK,YAAY;AAChB,YAAM,KAAK,IAAI;AACf,UAAI,OAAO,OAAO,YAAY,OAAO,MAAM;AAC1C,eAAO;AAAA,UACN,IAAI;AAAA,YACH;AAAA,UACD;AAAA,QACD;AAAA,MACD;AACA,YAAM,QAAQ;AACd,UAAI,OAAO,MAAM,cAAc,YAAY,MAAM,UAAU,WAAW,GAAG;AACxE,eAAO;AAAA,UACN,IAAI,yBAAyB,mDAAmD;AAAA,QACjF;AAAA,MACD;AACA,UAAI,OAAO,MAAM,YAAY,YAAY,MAAM,QAAQ,WAAW,GAAG;AACpE,eAAO,IAAI,IAAI,yBAAyB,iDAAiD,CAAC;AAAA,MAC3F;AACA;AAAA,IACD;AAAA,IACA,KAAK,QAAQ;AACZ,YAAM,OAAO,IAAI;AACjB,UAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC9C,eAAO;AAAA,UACN,IAAI,yBAAyB,qDAAqD;AAAA,QACnF;AAAA,MACD;AACA,YAAM,UAAU;AAChB,UAAI,OA
AO,QAAQ,WAAW,YAAY,QAAQ,OAAO,WAAW,GAAG;AACtE,eAAO,IAAI,IAAI,yBAAyB,4CAA4C,CAAC;AAAA,MACtF;AACA,UAAI,OAAO,QAAQ,UAAU,YAAY,QAAQ,MAAM,WAAW,GAAG;AACpE,eAAO,IAAI,IAAI,yBAAyB,2CAA2C,CAAC;AAAA,MACrF;AACA,UAAI,OAAO,QAAQ,aAAa,YAAY,QAAQ,SAAS,WAAW,GAAG;AAC1E,eAAO,IAAI,IAAI,yBAAyB,8CAA8C,CAAC;AAAA,MACxF;AACA;AAAA,IACD;AAAA,IACA,KAAK,cAAc;AAClB,YAAM,KAAK,IAAI;AACf,UAAI,OAAO,OAAO,YAAY,OAAO,MAAM;AAC1C,eAAO;AAAA,UACN,IAAI;AAAA,YACH;AAAA,UACD;AAAA,QACD;AAAA,MACD;AACA,YAAM,QAAQ;AACd,UAAI,OAAO,MAAM,gBAAgB,YAAY,MAAM,YAAY,WAAW,GAAG;AAC5E,eAAO;AAAA,UACN,IAAI,yBAAyB,uDAAuD;AAAA,QACrF;AAAA,MACD;AACA,UAAI,OAAO,MAAM,aAAa,YAAY,MAAM,SAAS,WAAW,GAAG;AACtE,eAAO;AAAA,UACN,IAAI,yBAAyB,oDAAoD;AAAA,QAClF;AAAA,MACD;AACA,UAAI,OAAO,MAAM,iBAAiB,YAAY,MAAM,aAAa,WAAW,GAAG;AAC9E,eAAO;AAAA,UACN,IAAI,yBAAyB,wDAAwD;AAAA,QACtF;AAAA,MACD;AACA,UAAI,OAAO,MAAM,aAAa,YAAY,MAAM,SAAS,WAAW,GAAG;AACtE,eAAO;AAAA,UACN,IAAI,yBAAyB,oDAAoD;AAAA,QAClF;AAAA,MACD;AACA,UAAI,OAAO,MAAM,aAAa,YAAY,MAAM,SAAS,WAAW,GAAG;AACtE,eAAO;AAAA,UACN,IAAI,yBAAyB,oDAAoD;AAAA,QAClF;AAAA,MACD;AACA;AAAA,IACD;AAAA,EACD;AAGA,MAAI,IAAI,WAAW,QAAW;AAC7B,QAAI,OAAO,IAAI,WAAW,YAAY,IAAI,WAAW,MAAM;AAC1D,aAAO,IAAI,IAAI,yBAAyB,iCAAiC,CAAC;AAAA,IAC3E;AAEA,UAAM,SAAS,IAAI;AAGnB,QAAI,kBAAkB,UAAU,kBAAkB,cAAc;AAC/D,UAAI,OAAO,eAAe,QAAW;AACpC,YAAI,OAAO,OAAO,eAAe,YAAY,OAAO,aAAa,GAAG;AACnE,iBAAO,IAAI,IAAI,yBAAyB,6CAA6C,CAAC;AAAA,QACvF;AAAA,MACD;AACA,aAAO,GAAG,KAAwB;AAAA,IACnC;AAEA,QAAI,CAAC,MAAM,QAAQ,OAAO,MAAM,KAAK,OAAO,OAAO,WAAW,GAAG;AAChE,aAAO,IAAI,IAAI,yBAAyB,kDAAkD,CAAC;AAAA,IAC5F;AAEA,aAAS,IAAI,GAAG,IAAI,OAAO,OAAO,QAAQ,KAAK;AAC9C,YAAM,QAAQ,OAAO,OAAO,CAAC;AAE7B,UAAI,OAAO,UAAU,YAAY,UAAU,MAAM;AAChD,eAAO,IAAI,IAAI,yBAAyB,yBAAyB,CAAC,oBAAoB,CAAC;AAAA,MACxF;AAEA,UAAI,OAAO,MAAM,UAAU,YAAa,MAAM,MAAiB,WAAW,GAAG;AAC5E,eAAO;AAAA,UACN,IAAI;AAAA,YACH,yBAAyB,CAAC;AAAA,UAC3B;AAAA,QACD;AAAA,MACD;AAEA,UAAI,OAAO,MAAM,UAAU,YAAa,MAAM,MAAiB,WAAW,GAAG;AAC5E,eAAO;AAAA,UACN,IAAI,yBAAyB,yBAAyB,CAAC,8BAA8B;AAAA,QACtF;AAAA,MACD;AAEA,UAAI,OAAO,MAAM,aAAa,YAAY,MAAM,aAAa,MAAM;AAClE,eAAO;AAAA,UACN,IAAI,yBAAyB,yBAAyB,CAAC,8BAA8B;AAAA,QACtF;AAAA,MACD;AAEA,YAAM,WAAW,MAAM;AACvB,UAAI,CAAC,iBAAiB,IAAI,SAAS,IAAc,GAAG;AACnD,eAAO;AAAA,UACN,IAAI;AAAA,YACH,yBAAyB,CAAC;AAAA,UAC3B;AAAA,QACD;AAAA,MACD;AAEA,UAAI,SAAS,SAAS,UAAU;AAC/B,YACC,OAAO,SAAS,iBAAiB,YAChC,SAAS,aAAwB,WAAW,GAC5C;AACD,iBAAO;AAAA,YACN,IAAI;AAAA,cACH,yBAAyB,CAAC;AAAA,YAC3B;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAEA,QAAI,OAAO,eAAe,QAAW;AACpC,UAAI,OAAO,OAAO,eAAe,YAAY,OAAO,aAAa,GAAG;AACnE,eAAO,IAAI,IAAI,yBAAyB,6CAA6C,CAAC;AAAA,MACvF;AAAA,IACD;AAAA,EACD;AAEA,SAAO,GAAG,KAAwB;AACnC;;;AC9NA,IAAM,kBAAkB,oBAAI,IAA2B;AAOhD,SAAS,sBAAsB,MAAc,SAA8B;AACjF,kBAAgB,IAAI,MAAM,OAAO;AAClC;AAOO,SAAS,aAAa,QAAyB,SAAuC;AAC5F,QAAM,UAAU,gBAAgB,IAAI,OAAO,IAAI;AAC/C,MAAI,CAAC,SAAS;AACb,UAAM,IAAI;AAAA,MACT,oDAAoD,OAAO,IAAI,sEACK,OAAO,IAAI;AAAA,IAChF;AAAA,EACD;AACA,SAAO,QAAQ,QAAQ,OAAO;AAC/B;;;ACvBO,SAAS,WACf,KACA,OACiC;AACjC,MAAI,MAAM,OAAO,SAAU,QAAO;AAElC,QAAM,OAAgC,MAAM,EAAE,GAAG,IAAI,IAAI,CAAC;AAC1D,aAAW,OAAO,MAAM,SAAS;AAChC,SAAK,IAAI,MAAM,IAAI,IAAI;AAAA,EACxB;AACA,SAAO;AACR;;;ACxBA,OAAO,WAAW;AAClB,OAAOA,sBAAqB;AAmB5B,eAAsB,aACrB,QACA,OACA,MAO2B;AAC3B,QAAM,EAAE,OAAO,OAAO,UAAU,KAAK,OAAO,IAAI;AAEhD,QAAM,eAAe,UAAU;AAC/B,QAAM,cAAc,SAAS;AAE7B,MAAI,CAAC,gBAAgB,CAAC,aAAa;AAClC,WAAO;AAAA,EACR;AAGA,MAAI,CAAC,gBAAgB,aAAa;AACjC,UAAMC,WAAU,aAAa,OAAO,MAAM;AAC1C,UAAMC,WAAU,MAAM,gBAAgB,EAAE,UAAU,KAAK,OAAO,OAAO,SAAAD,SAAQ,CAAC;AAC9E,WAAO,EAAE,IAAI,UAAU,OAAO,OAAO,UAAU,SAAAA,UAAS,KAAK,SAAAC,SAAQ;AAAA,EACtE;AAGA,MAAI,gBAAgB,CAAC,aAAa;AACjC,UAAMD,WAAyB,CAAC;AAChC,UAAMC,WAAU,MAAM,gBAAgB,EAAE,UAAU,KAAK,OAAO,OAAO,SAAAD,SAA
Q,CAAC;AAC9E,WAAO,EAAE,IAAI,UAAU,OAAO,OAAO,UAAU,SAAAA,UAAS,KAAK,SAAAC,SAAQ;AAAA,EACtE;AAGA,QAAM,UAAU,YAAY,QAAS,OAAQ,MAAM;AACnD,MAAI,QAAQ,WAAW,GAAG;AACzB,WAAO;AAAA,EACR;AAEA,QAAM,UAAU,MAAM,gBAAgB,EAAE,UAAU,KAAK,OAAO,OAAO,QAAQ,CAAC;AAC9E,SAAO,EAAE,IAAI,UAAU,OAAO,OAAO,UAAU,SAAS,KAAK,QAAQ;AACtE;AAGA,SAAS,WAAW,QAA0C;AAC7D,SAAO,SAAS,IAAI,IAAI,OAAO,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,IAAI;AAC9D;AAMA,SAAS,aAAa,KAA8B,QAAqC;AACxF,QAAM,UAAU,WAAW,MAAM;AACjC,QAAM,UAAyB,CAAC;AAEhC,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC/C,QAAI,UAAU,OAAW;AACzB,QAAI,WAAW,CAAC,QAAQ,IAAI,GAAG,EAAG;AAClC,YAAQ,KAAK,EAAE,QAAQ,KAAK,MAAM,CAAC;AAAA,EACpC;AAEA,SAAO;AACR;AAMA,SAAS,YACR,QACA,OACA,QACgB;AAChB,QAAM,UAAU,WAAW,MAAM;AACjC,QAAM,UAAU,oBAAI,IAAI,CAAC,GAAG,OAAO,KAAK,MAAM,GAAG,GAAG,OAAO,KAAK,KAAK,CAAC,CAAC;AACvE,QAAM,UAAyB,CAAC;AAEhC,aAAW,OAAO,SAAS;AAC1B,QAAI,WAAW,CAAC,QAAQ,IAAI,GAAG,EAAG;AAElC,UAAM,YAAY,OAAO,GAAG;AAC5B,UAAM,WAAW,MAAM,GAAG;AAG1B,QAAI,aAAa,OAAW;AAG5B,QAAI,cAAc,QAAW;AAC5B,cAAQ,KAAK,EAAE,QAAQ,KAAK,OAAO,SAAS,CAAC;AAC7C;AAAA,IACD;AAGA,QAAI,OAAO,GAAG,WAAW,QAAQ,EAAG;AAGpC,QACC,OAAO,cAAc,YACrB,cAAc,QACd,OAAO,aAAa,YACpB,aAAa,QACb,MAAM,WAAW,QAAQ,GACxB;AACD;AAAA,IACD;AAEA,YAAQ,KAAK,EAAE,QAAQ,KAAK,OAAO,SAAS,CAAC;AAAA,EAC9C;AAEA,SAAO;AACR;AAMA,eAAe,gBAAgB,QAMX;AACnB,QAAM,UAAUF,iBAAgB;AAAA,IAC/B,UAAU,OAAO;AAAA,IACjB,KAAK,OAAO,IAAI,SAAS;AAAA,IACzB,OAAO,OAAO;AAAA,IACd,OAAO,OAAO;AAAA,IACd,SAAS,OAAO;AAAA,EACjB,CAAC;AAED,QAAM,OAAO,IAAI,YAAY,EAAE,OAAO,OAAO;AAC7C,QAAM,aAAa,MAAM,OAAO,OAAO,OAAO,WAAW,IAAI;AAC7D,QAAM,QAAQ,IAAI,WAAW,UAAU;AAEvC,MAAI,MAAM;AACV,aAAW,KAAK,OAAO;AACtB,WAAO,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAAA,EACtC;AACA,SAAO;AACR;;;ACtHO,SAAS,OAAO,OAAe,OAAuB;AAC5D,SAAO,GAAG,KAAK,IAAI,KAAK;AACzB;;;ACxCO,SAAS,eAAe,MAAc,OAAyB;AACrE,SAAO,OAAO,UAAU,WAAW,MAAM,SAAS,IAAI;AACvD;AAUO,SAAS,cAAc,KAAa,OAAyB;AACnE,MAAI,OAAO,UAAU,YAAY,QAAQ,KAAK,GAAG,GAAG;AACnD,QAAI;AACH,aAAO,OAAO,KAAK;AAAA,IACpB,QAAQ;AACP,aAAO;AAAA,IACR;AAAA,EACD;AACA,SAAO;AACR;;;ACnBO,SAAS,qBAAsC;AACrD,SAAO;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,MACR;AAAA,QACC,MAAM;AAAA,QACN,QAAQ,CAAC;AAAA,QACT,SAAS,CAAC;AAAA,MACX;AAAA,IACD;AAAA,EACD;AACD;AAWO,SAAS,sBAAsB,QAAkB,aAAa,WAA4B;AAChG,SAAO;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,MACR;AAAA,QACC,MAAM;AAAA,QACN;AAAA,QACA,SAAS;AAAA,UACR;AAAA,YACC,QAAQ;AAAA,YACR,IAAI;AAAA,YACJ,OAAO;AAAA,UACR;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA,EACD;AACD;;;AC5CO,IAAM,gBAAN,cAA4B,cAAc;AAAA,EAChD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,mBAAmB,KAAK;AAAA,EACxC;AACD;;;ACcO,SAAS,mBAAmB,OAAe,QAAkC;AACnF,MAAI,CAAC,MAAM,WAAW,MAAM,GAAG;AAC9B,WAAO,CAAC,KAAK;AAAA,EACd;AAEA,QAAM,WAAW,MAAM,MAAM,CAAC;AAC9B,QAAM,aAAa,OAAO,QAAQ;AAElC,MAAI,eAAe,QAAW;AAC7B,WAAO,CAAC;AAAA,EACT;AAEA,SAAO,MAAM,QAAQ,UAAU,IAAI,aAAa,CAAC,UAAU;AAC5D;AAgBO,SAAS,mBACf,OACA,QACA,QACU;AAEV,MAAI,OAAO,OAAO,SAAS,KAAK,CAAC,OAAO,OAAO,SAAS,MAAM,KAAK,GAAG;AACrE,WAAO;AAAA,EACR;AAGA,aAAW,UAAU,OAAO,SAAS;AACpC,QAAI,CAAC,mBAAmB,OAAO,QAAQ,MAAM,GAAG;AAC/C,aAAO;AAAA,IACR;AAAA,EACD;AAEA,SAAO;AACR;AAMA,SAAS,cACR,YACA,aACA,IACU;AACV,QAAM,WAAW,WAAW,UAAU;AACtC,QAAM,YAAY,WAAW,WAAW;AACxC,QAAM,aAAa,CAAC,OAAO,MAAM,QAAQ,KAAK,CAAC,OAAO,MAAM,SAAS;AAErE,MAAI,YAAY;AACf,YAAQ,IAAI;AAAA,MACX,KAAK;AACJ,eAAO,WAAW;AAAA,MACnB,KAAK;AACJ,eAAO,WAAW;AAAA,MACnB,KAAK;AACJ,eAAO,YAAY;AAAA,MACpB,KAAK;AACJ,eAAO,YAAY;AAAA,IACrB;AAAA,EACD;AAEA,QAAM,MAAM,WAAW,cAAc,WAAW;AAChD,UAAQ,IAAI;AAAA,IACX,KAAK;AACJ,aAAO,MAAM;AAAA,IACd,KAAK;AACJ,aAAO,MAAM;AAAA,IACd,KAAK;AACJ,aAAO,OAAO;AAAA,IACf,KAAK;AACJ,aAAO,OAAO;AAAA,EAChB;AACD;AAEA,IAAM,aAAoE;AAAA,EACzE,IAAI,CAAC,IAAI,OAAO,GAAG,SAAS,EAAE;AAAA,EAC9B,IAAI,CAAC,IAAI,OAAO,GAAG,SAAS,EAAE;AAAA,EAC9B,KAAK,CAAC,IAAI,OA
AO,CAAC,GAAG,SAAS,EAAE;AAAA,EAChC,IAAI,CAAC,IAAI,OAAO,cAAc,IAAI,GAAG,CAAC,GAAI,IAAI;AAAA,EAC9C,IAAI,CAAC,IAAI,OAAO,cAAc,IAAI,GAAG,CAAC,GAAI,IAAI;AAAA,EAC9C,KAAK,CAAC,IAAI,OAAO,cAAc,IAAI,GAAG,CAAC,GAAI,KAAK;AAAA,EAChD,KAAK,CAAC,IAAI,OAAO,cAAc,IAAI,GAAG,CAAC,GAAI,KAAK;AACjD;AAKA,SAAS,mBACR,OACA,QACA,QACU;AACV,QAAM,MAAM,MAAM,QAAQ,KAAK,CAAC,MAAM,EAAE,WAAW,OAAO,MAAM;AAChE,MAAI,CAAC,KAAK;AAET,WAAO;AAAA,EACR;AAEA,QAAM,aAAa,OAAO,IAAI,KAAK;AACnC,QAAM,iBAAiB,mBAAmB,OAAO,OAAO,MAAM;AAE9D,MAAI,eAAe,WAAW,GAAG;AAEhC,WAAO;AAAA,EACR;AAEA,SAAO,WAAW,OAAO,EAAE,IAAI,YAAY,cAAc,KAAK;AAC/D;AAYO,SAAS,aAAa,QAAoB,SAAuC;AACvF,MAAI,QAAQ,MAAM,QAAQ,WAAW,GAAG;AACvC,WAAO;AAAA,EACR;AAEA,SAAO,OAAO;AAAA,IAAO,CAAC,UACrB,QAAQ,MAAM,QAAQ,KAAK,CAAC,WAAW,mBAAmB,OAAO,QAAQ,QAAQ,MAAM,CAAC;AAAA,EACzF;AACD;AAcO,SAAS,qBAAqB,OAAwB,QAAkC;AAC9F,SAAO,MAAM,QACX,OAAO,CAAC,WAAW;AAGnB,eAAW,UAAU,OAAO,SAAS;AACpC,UAAI,OAAO,MAAM,WAAW,MAAM,GAAG;AACpC,cAAM,WAAW,mBAAmB,OAAO,OAAO,MAAM;AACxD,YAAI,SAAS,WAAW,GAAG;AAC1B,iBAAO;AAAA,QACR;AAAA,MACD;AAAA,IACD;AACA,WAAO;AAAA,EACR,CAAC,EACA,IAAI,CAAC,MAAM,EAAE,IAAI;AACpB;AAgBO,SAAS,kBAAkB,QAA8C;AAC/E,MAAI,OAAO,WAAW,YAAY,WAAW,MAAM;AAClD,WAAO,IAAI,IAAI,cAAc,qCAAqC,CAAC;AAAA,EACpE;AAEA,QAAM,MAAM;AAEZ,MAAI,OAAO,IAAI,YAAY,YAAY,CAAC,OAAO,UAAU,IAAI,OAAO,KAAK,IAAI,UAAU,GAAG;AACzF,WAAO,IAAI,IAAI,cAAc,+CAA+C,CAAC;AAAA,EAC9E;AAEA,MAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,GAAG;AAChC,WAAO,IAAI,IAAI,cAAc,qCAAqC,CAAC;AAAA,EACpE;AAEA,QAAM,YAAY,oBAAI,IAAY;AAElC,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,QAAQ,KAAK;AAC5C,UAAM,SAAS,IAAI,QAAQ,CAAC;AAE5B,QAAI,OAAO,WAAW,YAAY,WAAW,MAAM;AAClD,aAAO,IAAI,IAAI,cAAc,mBAAmB,CAAC,oBAAoB,CAAC;AAAA,IACvE;AAEA,QAAI,OAAO,OAAO,SAAS,YAAY,OAAO,KAAK,WAAW,GAAG;AAChE,aAAO,IAAI,IAAI,cAAc,mBAAmB,CAAC,6BAA6B,CAAC;AAAA,IAChF;AAEA,QAAI,UAAU,IAAI,OAAO,IAAc,GAAG;AACzC,aAAO,IAAI,IAAI,cAAc,2BAA2B,OAAO,IAAI,GAAG,CAAC;AAAA,IACxE;AACA,cAAU,IAAI,OAAO,IAAc;AAEnC,QAAI,CAAC,MAAM,QAAQ,OAAO,MAAM,GAAG;AAClC,aAAO,IAAI,IAAI,cAAc,WAAW,OAAO,IAAI,2BAA2B,CAAC;AAAA,IAChF;AAEA,eAAW,SAAS,OAAO,QAAqB;AAC/C,UAAI,OAAO,UAAU,YAAY,MAAM,WAAW,GAAG;AACpD,eAAO;AAAA,UACN,IAAI,cAAc,WAAW,OAAO,IAAI,yCAAyC;AAAA,QAClF;AAAA,MACD;AAAA,IACD;AAEA,QAAI,CAAC,MAAM,QAAQ,OAAO,OAAO,GAAG;AACnC,aAAO,IAAI,IAAI,cAAc,WAAW,OAAO,IAAI,4BAA4B,CAAC;AAAA,IACjF;AAEA,aAAS,IAAI,GAAG,IAAK,OAAO,QAAsB,QAAQ,KAAK;AAC9D,YAAM,SAAU,OAAO,QAAsC,CAAC;AAE9D,UAAI,OAAO,WAAW,YAAY,WAAW,MAAM;AAClD,eAAO;AAAA,UACN,IAAI,cAAc,WAAW,OAAO,IAAI,qBAAqB,CAAC,oBAAoB;AAAA,QACnF;AAAA,MACD;AAEA,UAAI,OAAO,OAAO,WAAW,YAAa,OAAO,OAAkB,WAAW,GAAG;AAChF,eAAO;AAAA,UACN,IAAI;AAAA,YACH,WAAW,OAAO,IAAI,qBAAqB,CAAC;AAAA,UAC7C;AAAA,QACD;AAAA,MACD;AAEA,YAAM,WAAW,CAAC,MAAM,MAAM,OAAO,MAAM,MAAM,OAAO,KAAK;AAC7D,UAAI,CAAC,SAAS,SAAS,OAAO,EAAY,GAAG;AAC5C,eAAO;AAAA,UACN,IAAI;AAAA,YACH,WAAW,OAAO,IAAI,qBAAqB,CAAC,uBAAuB,SAAS,KAAK,IAAI,CAAC;AAAA,UACvF;AAAA,QACD;AAAA,MACD;AAEA,UAAI,OAAO,OAAO,UAAU,YAAa,OAAO,MAAiB,WAAW,GAAG;AAC9E,eAAO;AAAA,UACN,IAAI;AAAA,YACH,WAAW,OAAO,IAAI,qBAAqB,CAAC;AAAA,UAC7C;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAEA,SAAO,GAAG,MAAS;AACpB;;;AClSA,IAAM,gBAAgB;AAWf,SAAS,kBAAkB,MAAuB;AACxD,SAAO,cAAc,KAAK,IAAI;AAC/B;AAQO,SAAS,sBAAsB,MAAyC;AAC9E,MAAI,kBAAkB,IAAI,GAAG;AAC5B,WAAO,GAAG,MAAS;AAAA,EACpB;AACA,SAAO;AAAA,IACN,IAAI;AAAA,MACH,4BAA4B,IAAI;AAAA,IACjC;AAAA,EACD;AACD;AAWO,SAAS,gBAAgB,MAAsB;AACrD,SAAO,IAAI,KAAK,QAAQ,MAAM,IAAI,CAAC;AACpC;","names":["stableStringify","columns","deltaId"]}
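The sources embedded in the map above document the core delta helpers (`extractDelta`, `applyDelta`) and the JWT-scoped sync rules (`createUserScopedRules`, `filterDeltas`). A minimal sketch of how those pieces are meant to compose — assuming they are all re-exported from `@lakesync/core` (the connector sources later in this diff import `extractDelta` from that package; the other re-exports are an assumption), and using a placeholder cast for the branded `HLCTimestamp` plus made-up table and column names:

import {
  applyDelta,
  extractDelta,
  createUserScopedRules,
  filterDeltas,
  type HLCTimestamp,
} from "@lakesync/core"; // re-export paths assumed, not shown in this diff

async function demo() {
  // No previous row → INSERT delta containing every defined column.
  const delta = await extractDelta(
    null,
    { title: "buy milk", user_id: "user-123" },
    {
      table: "tasks",
      rowId: "task-1",
      clientId: "client-a",
      hlc: 0n as HLCTimestamp, // placeholder; real callers take this from an HLC clock
    },
  );

  // Re-applying the delta to a null row rebuilds the inserted row.
  const merged = delta ? applyDelta(null, delta) : null;

  // Only deltas whose user_id column matches the JWT `sub` claim reach this client.
  const rules = createUserScopedRules(["tasks"], "user_id");
  const visible = delta
    ? filterDeltas([delta], { rules, claims: { sub: "user-123" } })
    : [];

  return { merged, visible };
}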
package/dist/chunk-E7ZDOJCP.js
@@ -1,7 +1,7 @@
 import {
   Err,
   Ok
-} from "./chunk-
+} from "./chunk-C7ECMNQ4.js";
 
 // ../proto/src/gen/lakesync_pb.ts
 import { enumDesc, fileDesc, messageDesc } from "@bufbuild/protobuf/codegenv2";
@@ -332,4 +332,4 @@ export {
   encodeActionResponse,
   decodeActionResponse
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-E7ZDOJCP.js.map
package/dist/chunk-HAR3YPCY.js
@@ -1,14 +1,14 @@
 import {
   isDatabaseAdapter
-} from "./chunk-
+} from "./chunk-NCMXLWEW.js";
 import {
   buildPartitionSpec,
   lakeSyncTableName,
   tableSchemaToIceberg
-} from "./chunk-
+} from "./chunk-TIPMVLIG.js";
 import {
   writeDeltasToParquet
-} from "./chunk-
+} from "./chunk-265CMYJH.js";
 import {
   AdapterNotFoundError,
   BackpressureError,
@@ -26,7 +26,7 @@ import {
   validateAction,
   validateConnectorConfig,
   validateSyncRules
-} from "./chunk-
+} from "./chunk-C7ECMNQ4.js";
 
 // ../gateway/src/action-dispatcher.ts
 var ActionDispatcher = class {
@@ -1112,4 +1112,4 @@ export {
   handleMetrics,
   SchemaManager
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-HAR3YPCY.js.map
package/dist/chunk-L6LTCXJ4.js
@@ -5,7 +5,7 @@ import {
   Ok,
   extractDelta,
   registerPollerFactory
-} from "./chunk-
+} from "./chunk-C7ECMNQ4.js";
 
 // ../connector-salesforce/src/errors.ts
 var SalesforceApiError = class extends LakeSyncError {
@@ -514,9 +514,20 @@ var SALESFORCE_TABLE_SCHEMAS = [
   }
 ];
 
+// ../connector-salesforce/src/test-connection.ts
+async function testConnection(config) {
+  const client = new SalesforceClient(config);
+  return client.authenticate();
+}
+
 // ../connector-salesforce/src/index.ts
 registerPollerFactory("salesforce", (config, gateway) => {
-
+  const ingest = config.ingest ? {
+    intervalMs: config.ingest.intervalMs,
+    chunkSize: config.ingest.chunkSize,
+    memoryBudgetBytes: config.ingest.memoryBudgetBytes
+  } : void 0;
+  return new SalesforceSourcePoller(config.salesforce, ingest, config.name, gateway);
 });
 
 export {
@@ -528,6 +539,7 @@ export {
   mapOpportunity,
   mapLead,
   SalesforceSourcePoller,
-  SALESFORCE_TABLE_SCHEMAS
+  SALESFORCE_TABLE_SCHEMAS,
+  testConnection
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-L6LTCXJ4.js.map
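The new `testConnection` export wraps `SalesforceClient.authenticate()`, so callers can verify credentials without starting a poller. A rough usage sketch — the `@lakesync/connector-salesforce` import path follows the connector-package naming hinted at by the `createPoller` error message but is an assumption here, and the credential values are placeholders:

import { testConnection } from "@lakesync/connector-salesforce"; // path assumed

async function checkCredentials(): Promise<boolean> {
  // Runs the OAuth 2.0 username-password flow; resolves to a Result
  // rather than throwing, with result.ok === false on auth failure.
  const result = await testConnection({
    instanceUrl: "https://example.my.salesforce.com",
    clientId: "<connected-app-client-id>",
    clientSecret: "<connected-app-client-secret>",
    username: "integration@example.com",
    password: "<password>",
    isSandbox: false,
  });

  if (!result.ok) {
    console.error(`Salesforce auth failed: ${result.error.message}`);
  }
  return result.ok;
}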
package/dist/chunk-L6LTCXJ4.js.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../connector-salesforce/src/errors.ts","../../connector-salesforce/src/client.ts","../../connector-salesforce/src/mapping.ts","../../connector-salesforce/src/poller.ts","../../connector-salesforce/src/schemas.ts","../../connector-salesforce/src/test-connection.ts","../../connector-salesforce/src/index.ts"],"sourcesContent":["import { LakeSyncError } from \"@lakesync/core\";\n\n/** HTTP error from the Salesforce REST API. */\nexport class SalesforceApiError extends LakeSyncError {\n\t/** HTTP status code returned by Salesforce. */\n\treadonly statusCode: number;\n\t/** Raw response body from Salesforce. */\n\treadonly responseBody: string;\n\n\tconstructor(statusCode: number, responseBody: string, cause?: Error) {\n\t\tsuper(`Salesforce API error (${statusCode}): ${responseBody}`, \"SALESFORCE_API_ERROR\", cause);\n\t\tthis.statusCode = statusCode;\n\t\tthis.responseBody = responseBody;\n\t}\n}\n\n/** Authentication failure from the Salesforce OAuth token endpoint. */\nexport class SalesforceAuthError extends LakeSyncError {\n\tconstructor(message: string, cause?: Error) {\n\t\tsuper(message, \"SALESFORCE_AUTH_ERROR\", cause);\n\t}\n}\n","// ---------------------------------------------------------------------------\n// SalesforceClient — HTTP wrapper for Salesforce REST API\n// ---------------------------------------------------------------------------\n\nimport { Err, Ok, type Result } from \"@lakesync/core\";\nimport { SalesforceApiError, SalesforceAuthError } from \"./errors\";\nimport type {\n\tSalesforceAuthResponse,\n\tSalesforceConnectorConfig,\n\tSalesforceQueryResponse,\n} from \"./types\";\n\nconst DEFAULT_API_VERSION = \"v62.0\";\nconst MAX_RETRY_ATTEMPTS = 3;\nconst DEFAULT_RETRY_AFTER_MS = 10_000;\n\n/**\n * HTTP client for the Salesforce REST API.\n *\n * Uses OAuth 2.0 Username-Password flow for authentication and global `fetch`.\n * All public methods return `Result<T, SalesforceApiError | SalesforceAuthError>`.\n */\nexport class SalesforceClient {\n\tprivate readonly config: SalesforceConnectorConfig;\n\tprivate readonly apiVersion: string;\n\tprivate readonly loginUrl: string;\n\n\tprivate accessToken: string | null = null;\n\tprivate instanceUrl: string;\n\n\tconstructor(config: SalesforceConnectorConfig) {\n\t\tthis.config = config;\n\t\tthis.apiVersion = config.apiVersion ?? DEFAULT_API_VERSION;\n\t\tthis.loginUrl = config.isSandbox\n\t\t\t? \"https://test.salesforce.com\"\n\t\t\t: \"https://login.salesforce.com\";\n\t\tthis.instanceUrl = config.instanceUrl;\n\t}\n\n\t/**\n\t * Authenticate via OAuth 2.0 Username-Password flow.\n\t *\n\t * Stores access token and updates instance URL from the response.\n\t */\n\tasync authenticate(): Promise<Result<void, SalesforceAuthError>> {\n\t\tconst body = new URLSearchParams({\n\t\t\tgrant_type: \"password\",\n\t\t\tclient_id: this.config.clientId,\n\t\t\tclient_secret: this.config.clientSecret,\n\t\t\tusername: this.config.username,\n\t\t\tpassword: this.config.password,\n\t\t});\n\n\t\tlet response: Response;\n\t\ttry {\n\t\t\tresponse = await fetch(`${this.loginUrl}/services/oauth2/token`, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders: { \"Content-Type\": \"application/x-www-form-urlencoded\" },\n\t\t\t\tbody: body.toString(),\n\t\t\t});\n\t\t} catch (err) {\n\t\t\treturn Err(\n\t\t\t\tnew SalesforceAuthError(\n\t\t\t\t\t`Failed to connect to Salesforce auth endpoint: ${err instanceof Error ? err.message : String(err)}`,\n\t\t\t\t\terr instanceof Error ? 
err : undefined,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\n\t\tif (!response.ok) {\n\t\t\tconst text = await response.text();\n\t\t\treturn Err(\n\t\t\t\tnew SalesforceAuthError(`Salesforce authentication failed (${response.status}): ${text}`),\n\t\t\t);\n\t\t}\n\n\t\tconst data = (await response.json()) as SalesforceAuthResponse;\n\t\tthis.accessToken = data.access_token;\n\t\tthis.instanceUrl = data.instance_url;\n\n\t\treturn Ok(undefined);\n\t}\n\n\t/**\n\t * Execute a SOQL query with auto-pagination.\n\t *\n\t * Automatically authenticates on first call and re-authenticates on 401.\n\t */\n\tasync query<T>(soql: string): Promise<Result<T[], SalesforceApiError | SalesforceAuthError>> {\n\t\t// Ensure we have a token\n\t\tif (!this.accessToken) {\n\t\t\tconst authResult = await this.authenticate();\n\t\t\tif (!authResult.ok) return authResult;\n\t\t}\n\n\t\tconst allRecords: T[] = [];\n\t\tlet url = `${this.instanceUrl}/services/data/${this.apiVersion}/query?q=${encodeURIComponent(soql)}`;\n\n\t\twhile (true) {\n\t\t\tconst result = await this.request<SalesforceQueryResponse<T>>(url);\n\n\t\t\t// Re-auth on 401 and retry once\n\t\t\tif (\n\t\t\t\t!result.ok &&\n\t\t\t\tresult.error instanceof SalesforceApiError &&\n\t\t\t\tresult.error.statusCode === 401\n\t\t\t) {\n\t\t\t\tconst authResult = await this.authenticate();\n\t\t\t\tif (!authResult.ok) return authResult;\n\n\t\t\t\tconst retryResult = await this.request<SalesforceQueryResponse<T>>(url);\n\t\t\t\tif (!retryResult.ok) return retryResult;\n\n\t\t\t\tfor (const record of retryResult.value.records) {\n\t\t\t\t\tallRecords.push(record);\n\t\t\t\t}\n\n\t\t\t\tif (retryResult.value.done || !retryResult.value.nextRecordsUrl) break;\n\t\t\t\turl = `${this.instanceUrl}${retryResult.value.nextRecordsUrl}`;\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tif (!result.ok) return result;\n\n\t\t\tfor (const record of result.value.records) {\n\t\t\t\tallRecords.push(record);\n\t\t\t}\n\n\t\t\tif (result.value.done || !result.value.nextRecordsUrl) break;\n\t\t\turl = `${this.instanceUrl}${result.value.nextRecordsUrl}`;\n\t\t}\n\n\t\treturn Ok(allRecords);\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Internal HTTP helpers\n\t// -----------------------------------------------------------------------\n\n\t/** Make an HTTP request with rate-limit retry logic. */\n\tprivate async request<T>(\n\t\turl: string,\n\t): Promise<Result<T, SalesforceApiError | SalesforceAuthError>> {\n\t\tfor (let attempt = 0; attempt <= MAX_RETRY_ATTEMPTS; attempt++) {\n\t\t\tconst headers: Record<string, string> = {\n\t\t\t\tAuthorization: `Bearer ${this.accessToken}`,\n\t\t\t\tAccept: \"application/json\",\n\t\t\t};\n\n\t\t\tconst response = await fetch(url, { method: \"GET\", headers });\n\n\t\t\tif (response.ok) {\n\t\t\t\tconst data = (await response.json()) as T;\n\t\t\t\treturn Ok(data);\n\t\t\t}\n\n\t\t\t// Rate limit: 503 with Retry-After\n\t\t\tif (response.status === 503) {\n\t\t\t\tconst retryAfter = response.headers.get(\"Retry-After\");\n\t\t\t\tconst waitMs = retryAfter ? 
Number.parseInt(retryAfter, 10) * 1000 : DEFAULT_RETRY_AFTER_MS;\n\n\t\t\t\tif (attempt < MAX_RETRY_ATTEMPTS) {\n\t\t\t\t\tawait sleep(waitMs);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\tconst responseBody = await response.text();\n\t\t\t\treturn Err(new SalesforceApiError(503, responseBody));\n\t\t\t}\n\n\t\t\tconst responseBody = await response.text();\n\t\t\treturn Err(new SalesforceApiError(response.status, responseBody));\n\t\t}\n\n\t\treturn Err(new SalesforceApiError(0, \"Unknown error after retries\"));\n\t}\n}\n\n/** Sleep for the given number of milliseconds. */\nfunction sleep(ms: number): Promise<void> {\n\treturn new Promise((resolve) => setTimeout(resolve, ms));\n}\n","// ---------------------------------------------------------------------------\n// Salesforce Entity → Flat LakeSync Row Mapping\n// ---------------------------------------------------------------------------\n\nimport type { SfAccount, SfContact, SfLead, SfOpportunity } from \"./types\";\n\n/**\n * Map a Salesforce Account to a flat row for the `sf_accounts` table.\n *\n * The row ID is the Salesforce record Id.\n */\nexport function mapAccount(account: SfAccount): { rowId: string; row: Record<string, unknown> } {\n\treturn {\n\t\trowId: account.Id,\n\t\trow: {\n\t\t\tsf_id: account.Id,\n\t\t\tname: account.Name ?? null,\n\t\t\ttype: account.Type ?? null,\n\t\t\tindustry: account.Industry ?? null,\n\t\t\twebsite: account.Website ?? null,\n\t\t\tphone: account.Phone ?? null,\n\t\t\tbilling_city: account.BillingCity ?? null,\n\t\t\tbilling_state: account.BillingState ?? null,\n\t\t\tbilling_country: account.BillingCountry ?? null,\n\t\t\tannual_revenue: account.AnnualRevenue ?? null,\n\t\t\tnumber_of_employees: account.NumberOfEmployees ?? null,\n\t\t\towner_name: account.Owner?.Name ?? null,\n\t\t\tcreated_date: account.CreatedDate ?? null,\n\t\t\tlast_modified_date: account.LastModifiedDate ?? null,\n\t\t},\n\t};\n}\n\n/**\n * Map a Salesforce Contact to a flat row for the `sf_contacts` table.\n *\n * The row ID is the Salesforce record Id.\n */\nexport function mapContact(contact: SfContact): { rowId: string; row: Record<string, unknown> } {\n\treturn {\n\t\trowId: contact.Id,\n\t\trow: {\n\t\t\tsf_id: contact.Id,\n\t\t\tfirst_name: contact.FirstName ?? null,\n\t\t\tlast_name: contact.LastName ?? null,\n\t\t\temail: contact.Email ?? null,\n\t\t\tphone: contact.Phone ?? null,\n\t\t\ttitle: contact.Title ?? null,\n\t\t\taccount_id: contact.AccountId ?? null,\n\t\t\taccount_name: contact.Account?.Name ?? null,\n\t\t\tmailing_city: contact.MailingCity ?? null,\n\t\t\tmailing_state: contact.MailingState ?? null,\n\t\t\tmailing_country: contact.MailingCountry ?? null,\n\t\t\towner_name: contact.Owner?.Name ?? null,\n\t\t\tcreated_date: contact.CreatedDate ?? null,\n\t\t\tlast_modified_date: contact.LastModifiedDate ?? null,\n\t\t},\n\t};\n}\n\n/**\n * Map a Salesforce Opportunity to a flat row for the `sf_opportunities` table.\n *\n * The row ID is the Salesforce record Id.\n */\nexport function mapOpportunity(opportunity: SfOpportunity): {\n\trowId: string;\n\trow: Record<string, unknown>;\n} {\n\treturn {\n\t\trowId: opportunity.Id,\n\t\trow: {\n\t\t\tsf_id: opportunity.Id,\n\t\t\tname: opportunity.Name ?? null,\n\t\t\tstage_name: opportunity.StageName ?? null,\n\t\t\tamount: opportunity.Amount ?? null,\n\t\t\tclose_date: opportunity.CloseDate ?? null,\n\t\t\tprobability: opportunity.Probability ?? null,\n\t\t\taccount_id: opportunity.AccountId ?? null,\n\t\t\taccount_name: opportunity.Account?.Name ?? 
null,\n\t\t\ttype: opportunity.Type ?? null,\n\t\t\tlead_source: opportunity.LeadSource ?? null,\n\t\t\tis_closed: opportunity.IsClosed ?? null,\n\t\t\tis_won: opportunity.IsWon ?? null,\n\t\t\towner_name: opportunity.Owner?.Name ?? null,\n\t\t\tcreated_date: opportunity.CreatedDate ?? null,\n\t\t\tlast_modified_date: opportunity.LastModifiedDate ?? null,\n\t\t},\n\t};\n}\n\n/**\n * Map a Salesforce Lead to a flat row for the `sf_leads` table.\n *\n * The row ID is the Salesforce record Id.\n */\nexport function mapLead(lead: SfLead): { rowId: string; row: Record<string, unknown> } {\n\treturn {\n\t\trowId: lead.Id,\n\t\trow: {\n\t\t\tsf_id: lead.Id,\n\t\t\tfirst_name: lead.FirstName ?? null,\n\t\t\tlast_name: lead.LastName ?? null,\n\t\t\tcompany: lead.Company ?? null,\n\t\t\temail: lead.Email ?? null,\n\t\t\tphone: lead.Phone ?? null,\n\t\t\ttitle: lead.Title ?? null,\n\t\t\tstatus: lead.Status ?? null,\n\t\t\tlead_source: lead.LeadSource ?? null,\n\t\t\tis_converted: lead.IsConverted ?? null,\n\t\t\tconverted_account_id: lead.ConvertedAccountId ?? null,\n\t\t\tconverted_contact_id: lead.ConvertedContactId ?? null,\n\t\t\tconverted_opportunity_id: lead.ConvertedOpportunityId ?? null,\n\t\t\towner_name: lead.Owner?.Name ?? null,\n\t\t\tcreated_date: lead.CreatedDate ?? null,\n\t\t\tlast_modified_date: lead.LastModifiedDate ?? null,\n\t\t},\n\t};\n}\n","// ---------------------------------------------------------------------------\n// SalesforceSourcePoller — polls Salesforce CRM and pushes deltas to SyncGateway\n// ---------------------------------------------------------------------------\n\nimport { BaseSourcePoller, extractDelta, type PushTarget } from \"@lakesync/core\";\nimport { SalesforceClient } from \"./client\";\nimport { mapAccount, mapContact, mapLead, mapOpportunity } from \"./mapping\";\nimport type {\n\tSalesforceConnectorConfig,\n\tSalesforceIngestConfig,\n\tSfAccount,\n\tSfContact,\n\tSfLead,\n\tSfOpportunity,\n} from \"./types\";\n\nconst DEFAULT_INTERVAL_MS = 30_000;\n\n// ---------------------------------------------------------------------------\n// SOQL field lists\n// ---------------------------------------------------------------------------\n\nconst ACCOUNT_FIELDS = [\n\t\"Id\",\n\t\"Name\",\n\t\"Type\",\n\t\"Industry\",\n\t\"Website\",\n\t\"Phone\",\n\t\"BillingCity\",\n\t\"BillingState\",\n\t\"BillingCountry\",\n\t\"AnnualRevenue\",\n\t\"NumberOfEmployees\",\n\t\"Owner.Name\",\n\t\"CreatedDate\",\n\t\"LastModifiedDate\",\n].join(\", \");\n\nconst CONTACT_FIELDS = [\n\t\"Id\",\n\t\"FirstName\",\n\t\"LastName\",\n\t\"Email\",\n\t\"Phone\",\n\t\"Title\",\n\t\"AccountId\",\n\t\"Account.Name\",\n\t\"MailingCity\",\n\t\"MailingState\",\n\t\"MailingCountry\",\n\t\"Owner.Name\",\n\t\"CreatedDate\",\n\t\"LastModifiedDate\",\n].join(\", \");\n\nconst OPPORTUNITY_FIELDS = [\n\t\"Id\",\n\t\"Name\",\n\t\"StageName\",\n\t\"Amount\",\n\t\"CloseDate\",\n\t\"Probability\",\n\t\"AccountId\",\n\t\"Account.Name\",\n\t\"Type\",\n\t\"LeadSource\",\n\t\"IsClosed\",\n\t\"IsWon\",\n\t\"Owner.Name\",\n\t\"CreatedDate\",\n\t\"LastModifiedDate\",\n].join(\", \");\n\nconst LEAD_FIELDS = [\n\t\"Id\",\n\t\"FirstName\",\n\t\"LastName\",\n\t\"Company\",\n\t\"Email\",\n\t\"Phone\",\n\t\"Title\",\n\t\"Status\",\n\t\"LeadSource\",\n\t\"IsConverted\",\n\t\"ConvertedAccountId\",\n\t\"ConvertedContactId\",\n\t\"ConvertedOpportunityId\",\n\t\"Owner.Name\",\n\t\"CreatedDate\",\n\t\"LastModifiedDate\",\n].join(\", \");\n\n/**\n * Polls Salesforce CRM for accounts, contacts, opportunities, and leads\n * 
and pushes detected changes into a gateway via streaming accumulation.\n *\n * Uses {@link BaseSourcePoller.accumulateDelta} to push deltas in\n * memory-bounded chunks instead of collecting all deltas in a single array.\n */\nexport class SalesforceSourcePoller extends BaseSourcePoller {\n\tprivate readonly connectionConfig: SalesforceConnectorConfig;\n\tprivate readonly client: SalesforceClient;\n\n\t/** Per-entity cursors: max LastModifiedDate from the last poll. */\n\tprivate cursors: Record<string, string | undefined> = {\n\t\taccounts: undefined,\n\t\tcontacts: undefined,\n\t\topportunities: undefined,\n\t\tleads: undefined,\n\t};\n\n\t/** Export cursor state as a JSON-serialisable object for external persistence. */\n\toverride getCursorState(): Record<string, unknown> {\n\t\treturn { ...this.cursors };\n\t}\n\n\t/** Restore cursor state from a previously exported snapshot. */\n\toverride setCursorState(state: Record<string, unknown>): void {\n\t\tconst incoming = state as Record<string, string | undefined>;\n\t\tthis.cursors = {\n\t\t\taccounts: incoming.accounts,\n\t\t\tcontacts: incoming.contacts,\n\t\t\topportunities: incoming.opportunities,\n\t\t\tleads: incoming.leads,\n\t\t};\n\t}\n\n\tconstructor(\n\t\tconnectionConfig: SalesforceConnectorConfig,\n\t\tingestConfig: SalesforceIngestConfig | undefined,\n\t\tname: string,\n\t\tgateway: PushTarget,\n\t\tclient?: SalesforceClient,\n\t) {\n\t\tsuper({\n\t\t\tname,\n\t\t\tintervalMs: ingestConfig?.intervalMs ?? DEFAULT_INTERVAL_MS,\n\t\t\tgateway,\n\t\t\tmemory: {\n\t\t\t\tchunkSize: ingestConfig?.chunkSize,\n\t\t\t\tmemoryBudgetBytes: ingestConfig?.memoryBudgetBytes,\n\t\t\t},\n\t\t});\n\t\tthis.connectionConfig = connectionConfig;\n\t\tthis.client = client ?? new SalesforceClient(connectionConfig);\n\t}\n\n\t/** Execute a single poll cycle across all enabled entity types. */\n\tasync poll(): Promise<void> {\n\t\tconst includeAccounts = this.connectionConfig.includeAccounts ?? true;\n\t\tif (includeAccounts) {\n\t\t\tawait this.pollEntity<SfAccount>(\n\t\t\t\t\"Account\",\n\t\t\t\tACCOUNT_FIELDS,\n\t\t\t\t\"accounts\",\n\t\t\t\t\"sf_accounts\",\n\t\t\t\tmapAccount,\n\t\t\t);\n\t\t}\n\n\t\tconst includeContacts = this.connectionConfig.includeContacts ?? true;\n\t\tif (includeContacts) {\n\t\t\tawait this.pollEntity<SfContact>(\n\t\t\t\t\"Contact\",\n\t\t\t\tCONTACT_FIELDS,\n\t\t\t\t\"contacts\",\n\t\t\t\t\"sf_contacts\",\n\t\t\t\tmapContact,\n\t\t\t);\n\t\t}\n\n\t\tconst includeOpportunities = this.connectionConfig.includeOpportunities ?? true;\n\t\tif (includeOpportunities) {\n\t\t\tawait this.pollEntity<SfOpportunity>(\n\t\t\t\t\"Opportunity\",\n\t\t\t\tOPPORTUNITY_FIELDS,\n\t\t\t\t\"opportunities\",\n\t\t\t\t\"sf_opportunities\",\n\t\t\t\tmapOpportunity,\n\t\t\t);\n\t\t}\n\n\t\tconst includeLeads = this.connectionConfig.includeLeads ?? 
true;\n\t\tif (includeLeads) {\n\t\t\tawait this.pollEntity<SfLead>(\"Lead\", LEAD_FIELDS, \"leads\", \"sf_leads\", mapLead);\n\t\t}\n\n\t\tawait this.flushAccumulator();\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// Generic entity polling via LastModifiedDate cursor\n\t// -----------------------------------------------------------------------\n\n\tprivate async pollEntity<T extends { Id: string; LastModifiedDate: string | null }>(\n\t\tsObjectType: string,\n\t\tfields: string,\n\t\tcursorKey: string,\n\t\ttable: string,\n\t\tmapFn: (record: T) => { rowId: string; row: Record<string, unknown> },\n\t): Promise<void> {\n\t\tconst cursor = this.cursors[cursorKey];\n\t\tconst soql = this.buildSoql(sObjectType, fields, cursor);\n\n\t\tconst result = await this.client.query<T>(soql);\n\t\tif (!result.ok) return;\n\n\t\tconst records = result.value;\n\t\tif (records.length === 0) return;\n\n\t\tlet maxLastModified = cursor;\n\n\t\tfor (const record of records) {\n\t\t\tconst { rowId, row } = mapFn(record);\n\n\t\t\tconst delta = await extractDelta(null, row, {\n\t\t\t\ttable,\n\t\t\t\trowId,\n\t\t\t\tclientId: this.clientId,\n\t\t\t\thlc: this.hlc.now(),\n\t\t\t});\n\n\t\t\tif (delta) {\n\t\t\t\tawait this.accumulateDelta(delta);\n\t\t\t}\n\n\t\t\tconst lastModified = record.LastModifiedDate;\n\t\t\tif (lastModified && (!maxLastModified || lastModified > maxLastModified)) {\n\t\t\t\tmaxLastModified = lastModified;\n\t\t\t}\n\t\t}\n\n\t\tthis.cursors[cursorKey] = maxLastModified;\n\t}\n\n\t// -----------------------------------------------------------------------\n\t// SOQL query builder\n\t// -----------------------------------------------------------------------\n\n\tprivate buildSoql(sObjectType: string, fields: string, cursor: string | undefined): string {\n\t\tconst clauses: string[] = [];\n\n\t\tif (cursor) {\n\t\t\tclauses.push(`LastModifiedDate > ${cursor}`);\n\t\t}\n\n\t\tif (this.connectionConfig.soqlFilter) {\n\t\t\tclauses.push(this.connectionConfig.soqlFilter);\n\t\t}\n\n\t\tconst where = clauses.length > 0 ? ` WHERE ${clauses.join(\" AND \")}` : \"\";\n\t\treturn `SELECT ${fields} FROM ${sObjectType}${where} ORDER BY LastModifiedDate ASC`;\n\t}\n}\n","// ---------------------------------------------------------------------------\n// Salesforce Table Schemas — column definitions for each synced entity\n// ---------------------------------------------------------------------------\n\nimport type { TableSchema } from \"@lakesync/core\";\n\n/** Column helper — all Salesforce columns are mapped to string. */\nfunction textCol(name: string): { name: string; type: \"string\" } {\n\treturn { name, type: \"string\" };\n}\n\n/** Table schemas for all Salesforce entity types synced by the connector. 
*/\nexport const SALESFORCE_TABLE_SCHEMAS: ReadonlyArray<TableSchema> = [\n\t{\n\t\ttable: \"sf_accounts\",\n\t\tcolumns: [\n\t\t\ttextCol(\"sf_id\"),\n\t\t\ttextCol(\"name\"),\n\t\t\ttextCol(\"type\"),\n\t\t\ttextCol(\"industry\"),\n\t\t\ttextCol(\"website\"),\n\t\t\ttextCol(\"phone\"),\n\t\t\ttextCol(\"billing_city\"),\n\t\t\ttextCol(\"billing_state\"),\n\t\t\ttextCol(\"billing_country\"),\n\t\t\ttextCol(\"annual_revenue\"),\n\t\t\ttextCol(\"number_of_employees\"),\n\t\t\ttextCol(\"owner_name\"),\n\t\t\ttextCol(\"created_date\"),\n\t\t\ttextCol(\"last_modified_date\"),\n\t\t],\n\t},\n\t{\n\t\ttable: \"sf_contacts\",\n\t\tcolumns: [\n\t\t\ttextCol(\"sf_id\"),\n\t\t\ttextCol(\"first_name\"),\n\t\t\ttextCol(\"last_name\"),\n\t\t\ttextCol(\"email\"),\n\t\t\ttextCol(\"phone\"),\n\t\t\ttextCol(\"title\"),\n\t\t\ttextCol(\"account_id\"),\n\t\t\ttextCol(\"account_name\"),\n\t\t\ttextCol(\"mailing_city\"),\n\t\t\ttextCol(\"mailing_state\"),\n\t\t\ttextCol(\"mailing_country\"),\n\t\t\ttextCol(\"owner_name\"),\n\t\t\ttextCol(\"created_date\"),\n\t\t\ttextCol(\"last_modified_date\"),\n\t\t],\n\t},\n\t{\n\t\ttable: \"sf_opportunities\",\n\t\tcolumns: [\n\t\t\ttextCol(\"sf_id\"),\n\t\t\ttextCol(\"name\"),\n\t\t\ttextCol(\"stage_name\"),\n\t\t\ttextCol(\"amount\"),\n\t\t\ttextCol(\"close_date\"),\n\t\t\ttextCol(\"probability\"),\n\t\t\ttextCol(\"account_id\"),\n\t\t\ttextCol(\"account_name\"),\n\t\t\ttextCol(\"type\"),\n\t\t\ttextCol(\"lead_source\"),\n\t\t\ttextCol(\"is_closed\"),\n\t\t\ttextCol(\"is_won\"),\n\t\t\ttextCol(\"owner_name\"),\n\t\t\ttextCol(\"created_date\"),\n\t\t\ttextCol(\"last_modified_date\"),\n\t\t],\n\t},\n\t{\n\t\ttable: \"sf_leads\",\n\t\tcolumns: [\n\t\t\ttextCol(\"sf_id\"),\n\t\t\ttextCol(\"first_name\"),\n\t\t\ttextCol(\"last_name\"),\n\t\t\ttextCol(\"company\"),\n\t\t\ttextCol(\"email\"),\n\t\t\ttextCol(\"phone\"),\n\t\t\ttextCol(\"title\"),\n\t\t\ttextCol(\"status\"),\n\t\t\ttextCol(\"lead_source\"),\n\t\t\ttextCol(\"is_converted\"),\n\t\t\ttextCol(\"converted_account_id\"),\n\t\t\ttextCol(\"converted_contact_id\"),\n\t\t\ttextCol(\"converted_opportunity_id\"),\n\t\t\ttextCol(\"owner_name\"),\n\t\t\ttextCol(\"created_date\"),\n\t\t\ttextCol(\"last_modified_date\"),\n\t\t],\n\t},\n];\n","import type { Result } from \"@lakesync/core\";\nimport { SalesforceClient } from \"./client\";\nimport type { SalesforceAuthError } from \"./errors\";\nimport type { SalesforceConnectorConfig } from \"./types\";\n\n/**\n * Test a Salesforce connection by attempting OAuth authentication.\n *\n * Creates a `SalesforceClient` internally and calls `authenticate()` —\n * if the OAuth flow succeeds, the connection is valid.\n */\nexport async function testConnection(\n\tconfig: SalesforceConnectorConfig,\n): Promise<Result<void, SalesforceAuthError>> {\n\tconst client = new SalesforceClient(config);\n\treturn client.authenticate();\n}\n","import { registerPollerFactory } from \"@lakesync/core\";\nimport { SalesforceSourcePoller } from \"./poller\";\nimport type { SalesforceIngestConfig } from \"./types\";\n\nexport { SalesforceClient } from \"./client\";\nexport { SalesforceApiError, SalesforceAuthError } from \"./errors\";\nexport { mapAccount, mapContact, mapLead, mapOpportunity } from \"./mapping\";\nexport { SalesforceSourcePoller } from \"./poller\";\nexport { SALESFORCE_TABLE_SCHEMAS } from \"./schemas\";\nexport { testConnection } from \"./test-connection\";\nexport type 
{\n\tSalesforceAuthResponse,\n\tSalesforceConnectorConfig,\n\tSalesforceIngestConfig,\n\tSalesforceQueryResponse,\n\tSfAccount,\n\tSfContact,\n\tSfLead,\n\tSfOpportunity,\n} from \"./types\";\n\n// Auto-register poller factory so createPoller(\"salesforce\", ...) works.\nregisterPollerFactory(\"salesforce\", (config, gateway) => {\n\tconst ingest: SalesforceIngestConfig | undefined = config.ingest\n\t\t? {\n\t\t\t\tintervalMs: config.ingest.intervalMs,\n\t\t\t\tchunkSize: config.ingest.chunkSize,\n\t\t\t\tmemoryBudgetBytes: config.ingest.memoryBudgetBytes,\n\t\t\t}\n\t\t: undefined;\n\treturn new SalesforceSourcePoller(config.salesforce!, ingest, config.name, gateway);\n});\n"],"mappings":";;;;;;;;;;AAGO,IAAM,qBAAN,cAAiC,cAAc;AAAA;AAAA,EAE5C;AAAA;AAAA,EAEA;AAAA,EAET,YAAY,YAAoB,cAAsB,OAAe;AACpE,UAAM,yBAAyB,UAAU,MAAM,YAAY,IAAI,wBAAwB,KAAK;AAC5F,SAAK,aAAa;AAClB,SAAK,eAAe;AAAA,EACrB;AACD;AAGO,IAAM,sBAAN,cAAkC,cAAc;AAAA,EACtD,YAAY,SAAiB,OAAe;AAC3C,UAAM,SAAS,yBAAyB,KAAK;AAAA,EAC9C;AACD;;;ACTA,IAAM,sBAAsB;AAC5B,IAAM,qBAAqB;AAC3B,IAAM,yBAAyB;AAQxB,IAAM,mBAAN,MAAuB;AAAA,EACZ;AAAA,EACA;AAAA,EACA;AAAA,EAET,cAA6B;AAAA,EAC7B;AAAA,EAER,YAAY,QAAmC;AAC9C,SAAK,SAAS;AACd,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,WAAW,OAAO,YACpB,gCACA;AACH,SAAK,cAAc,OAAO;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,eAA2D;AAChE,UAAM,OAAO,IAAI,gBAAgB;AAAA,MAChC,YAAY;AAAA,MACZ,WAAW,KAAK,OAAO;AAAA,MACvB,eAAe,KAAK,OAAO;AAAA,MAC3B,UAAU,KAAK,OAAO;AAAA,MACtB,UAAU,KAAK,OAAO;AAAA,IACvB,CAAC;AAED,QAAI;AACJ,QAAI;AACH,iBAAW,MAAM,MAAM,GAAG,KAAK,QAAQ,0BAA0B;AAAA,QAChE,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,oCAAoC;AAAA,QAC/D,MAAM,KAAK,SAAS;AAAA,MACrB,CAAC;AAAA,IACF,SAAS,KAAK;AACb,aAAO;AAAA,QACN,IAAI;AAAA,UACH,kDAAkD,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,UAClG,eAAe,QAAQ,MAAM;AAAA,QAC9B;AAAA,MACD;AAAA,IACD;AAEA,QAAI,CAAC,SAAS,IAAI;AACjB,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,aAAO;AAAA,QACN,IAAI,oBAAoB,qCAAqC,SAAS,MAAM,MAAM,IAAI,EAAE;AAAA,MACzF;AAAA,IACD;AAEA,UAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,SAAK,cAAc,KAAK;AACxB,SAAK,cAAc,KAAK;AAExB,WAAO,GAAG,MAAS;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,MAAS,MAA8E;AAE5F,QAAI,CAAC,KAAK,aAAa;AACtB,YAAM,aAAa,MAAM,KAAK,aAAa;AAC3C,UAAI,CAAC,WAAW,GAAI,QAAO;AAAA,IAC5B;AAEA,UAAM,aAAkB,CAAC;AACzB,QAAI,MAAM,GAAG,KAAK,WAAW,kBAAkB,KAAK,UAAU,YAAY,mBAAmB,IAAI,CAAC;AAElG,WAAO,MAAM;AACZ,YAAM,SAAS,MAAM,KAAK,QAAoC,GAAG;AAGjE,UACC,CAAC,OAAO,MACR,OAAO,iBAAiB,sBACxB,OAAO,MAAM,eAAe,KAC3B;AACD,cAAM,aAAa,MAAM,KAAK,aAAa;AAC3C,YAAI,CAAC,WAAW,GAAI,QAAO;AAE3B,cAAM,cAAc,MAAM,KAAK,QAAoC,GAAG;AACtE,YAAI,CAAC,YAAY,GAAI,QAAO;AAE5B,mBAAW,UAAU,YAAY,MAAM,SAAS;AAC/C,qBAAW,KAAK,MAAM;AAAA,QACvB;AAEA,YAAI,YAAY,MAAM,QAAQ,CAAC,YAAY,MAAM,eAAgB;AACjE,cAAM,GAAG,KAAK,WAAW,GAAG,YAAY,MAAM,cAAc;AAC5D;AAAA,MACD;AAEA,UAAI,CAAC,OAAO,GAAI,QAAO;AAEvB,iBAAW,UAAU,OAAO,MAAM,SAAS;AAC1C,mBAAW,KAAK,MAAM;AAAA,MACvB;AAEA,UAAI,OAAO,MAAM,QAAQ,CAAC,OAAO,MAAM,eAAgB;AACvD,YAAM,GAAG,KAAK,WAAW,GAAG,OAAO,MAAM,cAAc;AAAA,IACxD;AAEA,WAAO,GAAG,UAAU;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,QACb,KAC+D;AAC/D,aAAS,UAAU,GAAG,WAAW,oBAAoB,WAAW;AAC/D,YAAM,UAAkC;AAAA,QACvC,eAAe,UAAU,KAAK,WAAW;AAAA,QACzC,QAAQ;AAAA,MACT;AAEA,YAAM,WAAW,MAAM,MAAM,KAAK,EAAE,QAAQ,OAAO,QAAQ,CAAC;AAE5D,UAAI,SAAS,IAAI;AAChB,cAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,eAAO,GAAG,IAAI;AAAA,MACf;AAGA,UAAI,SAAS,WAAW,KAAK;AAC5B,cAAM,aAAa,SAAS,QAAQ,IAAI,aAAa;AACrD,cAAM,SAAS,aAAa,OAAO,SAAS,YAAY,EAAE,IAAI,MAAO;AAErE,YAAI,UAAU,oBAAoB;AACjC,gBAAM,MAAM,MAAM;AAClB;AAAA,QACD;AAEA,cAAMA,gBAAe,MAAM,SAAS,KAAK;AACzC,eAAO,IAAI,IAAI,mBAAmB,KAAKA,aAAY,CAAC;AAAA,MACrD;AAEA,YAAM,eAAe,MAAM,SAAS,KAAK;AACzC,aAAO,IAAI,IAAI,mBAAmB,SAAS,QAAQ,YAAY,CAAC;AAAA,IACjE;
AAEA,WAAO,IAAI,IAAI,mBAAmB,GAAG,6BAA6B,CAAC;AAAA,EACpE;AACD;AAGA,SAAS,MAAM,IAA2B;AACzC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACxD;;;AC1KO,SAAS,WAAW,SAAqE;AAC/F,SAAO;AAAA,IACN,OAAO,QAAQ;AAAA,IACf,KAAK;AAAA,MACJ,OAAO,QAAQ;AAAA,MACf,MAAM,QAAQ,QAAQ;AAAA,MACtB,MAAM,QAAQ,QAAQ;AAAA,MACtB,UAAU,QAAQ,YAAY;AAAA,MAC9B,SAAS,QAAQ,WAAW;AAAA,MAC5B,OAAO,QAAQ,SAAS;AAAA,MACxB,cAAc,QAAQ,eAAe;AAAA,MACrC,eAAe,QAAQ,gBAAgB;AAAA,MACvC,iBAAiB,QAAQ,kBAAkB;AAAA,MAC3C,gBAAgB,QAAQ,iBAAiB;AAAA,MACzC,qBAAqB,QAAQ,qBAAqB;AAAA,MAClD,YAAY,QAAQ,OAAO,QAAQ;AAAA,MACnC,cAAc,QAAQ,eAAe;AAAA,MACrC,oBAAoB,QAAQ,oBAAoB;AAAA,IACjD;AAAA,EACD;AACD;AAOO,SAAS,WAAW,SAAqE;AAC/F,SAAO;AAAA,IACN,OAAO,QAAQ;AAAA,IACf,KAAK;AAAA,MACJ,OAAO,QAAQ;AAAA,MACf,YAAY,QAAQ,aAAa;AAAA,MACjC,WAAW,QAAQ,YAAY;AAAA,MAC/B,OAAO,QAAQ,SAAS;AAAA,MACxB,OAAO,QAAQ,SAAS;AAAA,MACxB,OAAO,QAAQ,SAAS;AAAA,MACxB,YAAY,QAAQ,aAAa;AAAA,MACjC,cAAc,QAAQ,SAAS,QAAQ;AAAA,MACvC,cAAc,QAAQ,eAAe;AAAA,MACrC,eAAe,QAAQ,gBAAgB;AAAA,MACvC,iBAAiB,QAAQ,kBAAkB;AAAA,MAC3C,YAAY,QAAQ,OAAO,QAAQ;AAAA,MACnC,cAAc,QAAQ,eAAe;AAAA,MACrC,oBAAoB,QAAQ,oBAAoB;AAAA,IACjD;AAAA,EACD;AACD;AAOO,SAAS,eAAe,aAG7B;AACD,SAAO;AAAA,IACN,OAAO,YAAY;AAAA,IACnB,KAAK;AAAA,MACJ,OAAO,YAAY;AAAA,MACnB,MAAM,YAAY,QAAQ;AAAA,MAC1B,YAAY,YAAY,aAAa;AAAA,MACrC,QAAQ,YAAY,UAAU;AAAA,MAC9B,YAAY,YAAY,aAAa;AAAA,MACrC,aAAa,YAAY,eAAe;AAAA,MACxC,YAAY,YAAY,aAAa;AAAA,MACrC,cAAc,YAAY,SAAS,QAAQ;AAAA,MAC3C,MAAM,YAAY,QAAQ;AAAA,MAC1B,aAAa,YAAY,cAAc;AAAA,MACvC,WAAW,YAAY,YAAY;AAAA,MACnC,QAAQ,YAAY,SAAS;AAAA,MAC7B,YAAY,YAAY,OAAO,QAAQ;AAAA,MACvC,cAAc,YAAY,eAAe;AAAA,MACzC,oBAAoB,YAAY,oBAAoB;AAAA,IACrD;AAAA,EACD;AACD;AAOO,SAAS,QAAQ,MAA+D;AACtF,SAAO;AAAA,IACN,OAAO,KAAK;AAAA,IACZ,KAAK;AAAA,MACJ,OAAO,KAAK;AAAA,MACZ,YAAY,KAAK,aAAa;AAAA,MAC9B,WAAW,KAAK,YAAY;AAAA,MAC5B,SAAS,KAAK,WAAW;AAAA,MACzB,OAAO,KAAK,SAAS;AAAA,MACrB,OAAO,KAAK,SAAS;AAAA,MACrB,OAAO,KAAK,SAAS;AAAA,MACrB,QAAQ,KAAK,UAAU;AAAA,MACvB,aAAa,KAAK,cAAc;AAAA,MAChC,cAAc,KAAK,eAAe;AAAA,MAClC,sBAAsB,KAAK,sBAAsB;AAAA,MACjD,sBAAsB,KAAK,sBAAsB;AAAA,MACjD,0BAA0B,KAAK,0BAA0B;AAAA,MACzD,YAAY,KAAK,OAAO,QAAQ;AAAA,MAChC,cAAc,KAAK,eAAe;AAAA,MAClC,oBAAoB,KAAK,oBAAoB;AAAA,IAC9C;AAAA,EACD;AACD;;;ACtGA,IAAM,sBAAsB;AAM5B,IAAM,iBAAiB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACD,EAAE,KAAK,IAAI;AAEX,IAAM,iBAAiB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACD,EAAE,KAAK,IAAI;AAEX,IAAM,qBAAqB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACD,EAAE,KAAK,IAAI;AAEX,IAAM,cAAc;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACD,EAAE,KAAK,IAAI;AASJ,IAAM,yBAAN,cAAqC,iBAAiB;AAAA,EAC3C;AAAA,EACA;AAAA;AAAA,EAGT,UAA8C;AAAA,IACrD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,eAAe;AAAA,IACf,OAAO;AAAA,EACR;AAAA;AAAA,EAGS,iBAA0C;AAClD,WAAO,EAAE,GAAG,KAAK,QAAQ;AAAA,EAC1B;AAAA;AAAA,EAGS,eAAe,OAAsC;AAC7D,UAAM,WAAW;AACjB,SAAK,UAAU;AAAA,MACd,UAAU,SAAS;AAAA,MACnB,UAAU,SAAS;AAAA,MACnB,eAAe,SAAS;AAAA,MACxB,OAAO,SAAS;AAAA,IACjB;AAAA,EACD;AAAA,EAEA,YACC,kBACA,cACA,MACA,SACA,QACC;AACD,UAAM;AAAA,MACL;AAAA,MACA,YAAY,cAAc,cAAc;AAAA,MACxC;AAAA,MACA,QAAQ;AAAA,QACP,WAAW,cAAc;AAAA,QACzB,mBAAmB,cAAc;AAAA,MAClC;AAAA,IACD,CAAC;AACD,SAAK,mBAAmB;AACxB,SAAK,SAAS,UAAU,IAAI,iBAAiB,gBAAgB;AAAA,EAC9D;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC3B,UAAM,kBAAkB,KAAK,iBAAiB,mBAAmB;AACjE,QAAI,iBAAi
B;AACpB,YAAM,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAEA,UAAM,kBAAkB,KAAK,iBAAiB,mBAAmB;AACjE,QAAI,iBAAiB;AACpB,YAAM,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAEA,UAAM,uBAAuB,KAAK,iBAAiB,wBAAwB;AAC3E,QAAI,sBAAsB;AACzB,YAAM,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAEA,UAAM,eAAe,KAAK,iBAAiB,gBAAgB;AAC3D,QAAI,cAAc;AACjB,YAAM,KAAK,WAAmB,QAAQ,aAAa,SAAS,YAAY,OAAO;AAAA,IAChF;AAEA,UAAM,KAAK,iBAAiB;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,WACb,aACA,QACA,WACA,OACA,OACgB;AAChB,UAAM,SAAS,KAAK,QAAQ,SAAS;AACrC,UAAM,OAAO,KAAK,UAAU,aAAa,QAAQ,MAAM;AAEvD,UAAM,SAAS,MAAM,KAAK,OAAO,MAAS,IAAI;AAC9C,QAAI,CAAC,OAAO,GAAI;AAEhB,UAAM,UAAU,OAAO;AACvB,QAAI,QAAQ,WAAW,EAAG;AAE1B,QAAI,kBAAkB;AAEtB,eAAW,UAAU,SAAS;AAC7B,YAAM,EAAE,OAAO,IAAI,IAAI,MAAM,MAAM;AAEnC,YAAM,QAAQ,MAAM,aAAa,MAAM,KAAK;AAAA,QAC3C;AAAA,QACA;AAAA,QACA,UAAU,KAAK;AAAA,QACf,KAAK,KAAK,IAAI,IAAI;AAAA,MACnB,CAAC;AAED,UAAI,OAAO;AACV,cAAM,KAAK,gBAAgB,KAAK;AAAA,MACjC;AAEA,YAAM,eAAe,OAAO;AAC5B,UAAI,iBAAiB,CAAC,mBAAmB,eAAe,kBAAkB;AACzE,0BAAkB;AAAA,MACnB;AAAA,IACD;AAEA,SAAK,QAAQ,SAAS,IAAI;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAMQ,UAAU,aAAqB,QAAgB,QAAoC;AAC1F,UAAM,UAAoB,CAAC;AAE3B,QAAI,QAAQ;AACX,cAAQ,KAAK,sBAAsB,MAAM,EAAE;AAAA,IAC5C;AAEA,QAAI,KAAK,iBAAiB,YAAY;AACrC,cAAQ,KAAK,KAAK,iBAAiB,UAAU;AAAA,IAC9C;AAEA,UAAM,QAAQ,QAAQ,SAAS,IAAI,UAAU,QAAQ,KAAK,OAAO,CAAC,KAAK;AACvE,WAAO,UAAU,MAAM,SAAS,WAAW,GAAG,KAAK;AAAA,EACpD;AACD;;;ACvPA,SAAS,QAAQ,MAAgD;AAChE,SAAO,EAAE,MAAM,MAAM,SAAS;AAC/B;AAGO,IAAM,2BAAuD;AAAA,EACnE;AAAA,IACC,OAAO;AAAA,IACP,SAAS;AAAA,MACR,QAAQ,OAAO;AAAA,MACf,QAAQ,MAAM;AAAA,MACd,QAAQ,MAAM;AAAA,MACd,QAAQ,UAAU;AAAA,MAClB,QAAQ,SAAS;AAAA,MACjB,QAAQ,OAAO;AAAA,MACf,QAAQ,cAAc;AAAA,MACtB,QAAQ,eAAe;AAAA,MACvB,QAAQ,iBAAiB;AAAA,MACzB,QAAQ,gBAAgB;AAAA,MACxB,QAAQ,qBAAqB;AAAA,MAC7B,QAAQ,YAAY;AAAA,MACpB,QAAQ,cAAc;AAAA,MACtB,QAAQ,oBAAoB;AAAA,IAC7B;AAAA,EACD;AAAA,EACA;AAAA,IACC,OAAO;AAAA,IACP,SAAS;AAAA,MACR,QAAQ,OAAO;AAAA,MACf,QAAQ,YAAY;AAAA,MACpB,QAAQ,WAAW;AAAA,MACnB,QAAQ,OAAO;AAAA,MACf,QAAQ,OAAO;AAAA,MACf,QAAQ,OAAO;AAAA,MACf,QAAQ,YAAY;AAAA,MACpB,QAAQ,cAAc;AAAA,MACtB,QAAQ,cAAc;AAAA,MACtB,QAAQ,eAAe;AAAA,MACvB,QAAQ,iBAAiB;AAAA,MACzB,QAAQ,YAAY;AAAA,MACpB,QAAQ,cAAc;AAAA,MACtB,QAAQ,oBAAoB;AAAA,IAC7B;AAAA,EACD;AAAA,EACA;AAAA,IACC,OAAO;AAAA,IACP,SAAS;AAAA,MACR,QAAQ,OAAO;AAAA,MACf,QAAQ,MAAM;AAAA,MACd,QAAQ,YAAY;AAAA,MACpB,QAAQ,QAAQ;AAAA,MAChB,QAAQ,YAAY;AAAA,MACpB,QAAQ,aAAa;AAAA,MACrB,QAAQ,YAAY;AAAA,MACpB,QAAQ,cAAc;AAAA,MACtB,QAAQ,MAAM;AAAA,MACd,QAAQ,aAAa;AAAA,MACrB,QAAQ,WAAW;AAAA,MACnB,QAAQ,QAAQ;AAAA,MAChB,QAAQ,YAAY;AAAA,MACpB,QAAQ,cAAc;AAAA,MACtB,QAAQ,oBAAoB;AAAA,IAC7B;AAAA,EACD;AAAA,EACA;AAAA,IACC,OAAO;AAAA,IACP,SAAS;AAAA,MACR,QAAQ,OAAO;AAAA,MACf,QAAQ,YAAY;AAAA,MACpB,QAAQ,WAAW;AAAA,MACnB,QAAQ,SAAS;AAAA,MACjB,QAAQ,OAAO;AAAA,MACf,QAAQ,OAAO;AAAA,MACf,QAAQ,OAAO;AAAA,MACf,QAAQ,QAAQ;AAAA,MAChB,QAAQ,aAAa;AAAA,MACrB,QAAQ,cAAc;AAAA,MACtB,QAAQ,sBAAsB;AAAA,MAC9B,QAAQ,sBAAsB;AAAA,MAC9B,QAAQ,0BAA0B;AAAA,MAClC,QAAQ,YAAY;AAAA,MACpB,QAAQ,cAAc;AAAA,MACtB,QAAQ,oBAAoB;AAAA,IAC7B;AAAA,EACD;AACD;;;ACjFA,eAAsB,eACrB,QAC6C;AAC7C,QAAM,SAAS,IAAI,iBAAiB,MAAM;AAC1C,SAAO,OAAO,aAAa;AAC5B;;;ACMA,sBAAsB,cAAc,CAAC,QAAQ,YAAY;AACxD,QAAM,SAA6C,OAAO,SACvD;AAAA,IACA,YAAY,OAAO,OAAO;AAAA,IAC1B,WAAW,OAAO,OAAO;AAAA,IACzB,mBAAmB,OAAO,OAAO;AAAA,EAClC,IACC;AACH,SAAO,IAAI,uBAAuB,OAAO,YAAa,QAAQ,OAAO,MAAM,OAAO;AACnF,CAAC;","names":["responseBody"]}
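The Salesforce connector source embedded in the source map above polls each entity type with a per-entity LastModifiedDate cursor plus an optional soqlFilter, and streams row deltas through accumulateDelta. The sketch below restates the query construction from the private buildSoql method as a standalone TypeScript function for illustration only; the free function and its explicit soqlFilter parameter are not part of the package (in the source the filter comes from this.connectionConfig.soqlFilter).

// Illustrative restatement of the poller's private buildSoql method.
function buildSoql(
	sObjectType: string,
	fields: string,
	cursor: string | undefined,
	soqlFilter?: string,
): string {
	const clauses: string[] = [];
	// Only records modified since the last observed LastModifiedDate.
	if (cursor) clauses.push(`LastModifiedDate > ${cursor}`);
	// Optional user-supplied SOQL filter.
	if (soqlFilter) clauses.push(soqlFilter);
	const where = clauses.length > 0 ? ` WHERE ${clauses.join(" AND ")}` : "";
	return `SELECT ${fields} FROM ${sObjectType}${where} ORDER BY LastModifiedDate ASC`;
}

// Example: a second poll of Account after an initial backfill (cursor value is hypothetical).
buildSoql("Account", "Id, Name, LastModifiedDate", "2024-01-01T00:00:00.000+0000");
// => "SELECT Id, Name, LastModifiedDate FROM Account WHERE LastModifiedDate > 2024-01-01T00:00:00.000+0000 ORDER BY LastModifiedDate ASC"

Note that the cursor is interpolated unquoted, which matches SOQL syntax for datetime literals, and that pollEntity advances the cursor to the maximum LastModifiedDate it sees in the result set.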
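Because the cursors live in memory, the poller exposes getCursorState() / setCursorState() so a host process can persist them between restarts. The following is a minimal sketch of that persistence loop; the import specifier and the cursor file path are assumptions, and only the class name, poll(), and the two cursor-state methods come from the source above.

// Hedged sketch: persisting Salesforce cursors across poll cycles.
import { readFile, writeFile } from "node:fs/promises";
import type { SalesforceSourcePoller } from "lakesync/connector-salesforce"; // assumed subpath export

const CURSOR_FILE = "./sf-cursors.json"; // hypothetical location

async function runOnce(poller: SalesforceSourcePoller): Promise<void> {
	// Restore cursors from a previous run, if any.
	try {
		poller.setCursorState(JSON.parse(await readFile(CURSOR_FILE, "utf8")));
	} catch {
		// First run: no cursor file yet, so every entity does a full backfill.
	}

	// One cycle over accounts, contacts, opportunities, and leads.
	await poller.poll();

	// Persist the per-entity max LastModifiedDate for the next run.
	await writeFile(CURSOR_FILE, JSON.stringify(poller.getCursorState()), "utf8");
}

Persisting after poll() keeps the snapshot consistent with a completed flushAccumulator() call, at the cost of re-reading one cycle's records if the process dies mid-cycle.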
|
|
@@ -3,7 +3,7 @@ import {
|
|
|
3
3
|
Err,
|
|
4
4
|
Ok,
|
|
5
5
|
toError
|
|
6
|
-
} from "./chunk-
|
|
6
|
+
} from "./chunk-C7ECMNQ4.js";
|
|
7
7
|
|
|
8
8
|
// ../adapter/src/shared.ts
|
|
9
9
|
function toCause(error) {
|
|
@@ -877,4 +877,4 @@ export {
|
|
|
877
877
|
MinIOAdapter,
|
|
878
878
|
createQueryFn
|
|
879
879
|
};
|
|
880
|
-
//# sourceMappingURL=chunk-
|
|
880
|
+
//# sourceMappingURL=chunk-NCMXLWEW.js.map
|
|
@@ -5,7 +5,7 @@ import {
|
|
|
5
5
|
Ok,
|
|
6
6
|
extractDelta,
|
|
7
7
|
registerPollerFactory
|
|
8
|
-
} from "./chunk-
|
|
8
|
+
} from "./chunk-C7ECMNQ4.js";
|
|
9
9
|
|
|
10
10
|
// ../connector-jira/src/errors.ts
|
|
11
11
|
var JiraApiError = class extends LakeSyncError {
|
|
@@ -137,6 +137,14 @@ var JiraClient = class {
|
|
|
137
137
|
}
|
|
138
138
|
return Ok(allProjects);
|
|
139
139
|
}
|
|
140
|
+
/**
|
|
141
|
+
* Fetch the currently authenticated user.
|
|
142
|
+
*
|
|
143
|
+
* Calls `GET /rest/api/3/myself` — the cheapest auth-validating endpoint.
|
|
144
|
+
*/
|
|
145
|
+
async getCurrentUser() {
|
|
146
|
+
return this.request("/rest/api/3/myself", "GET");
|
|
147
|
+
}
|
|
140
148
|
// -----------------------------------------------------------------------
|
|
141
149
|
// Internal HTTP helpers
|
|
142
150
|
// -----------------------------------------------------------------------
|
|
@@ -440,9 +448,22 @@ var JIRA_TABLE_SCHEMAS = [
|
|
|
440
448
|
JIRA_PROJECTS_SCHEMA
|
|
441
449
|
];
|
|
442
450
|
|
|
451
|
+
// ../connector-jira/src/test-connection.ts
|
|
452
|
+
async function testConnection(config) {
|
|
453
|
+
const client = new JiraClient(config);
|
|
454
|
+
const result = await client.getCurrentUser();
|
|
455
|
+
if (!result.ok) return result;
|
|
456
|
+
return Ok(void 0);
|
|
457
|
+
}
|
|
458
|
+
|
|
443
459
|
// ../connector-jira/src/index.ts
|
|
444
460
|
registerPollerFactory("jira", (config, gateway) => {
|
|
445
|
-
|
|
461
|
+
const ingest = config.ingest ? {
|
|
462
|
+
intervalMs: config.ingest.intervalMs,
|
|
463
|
+
chunkSize: config.ingest.chunkSize,
|
|
464
|
+
memoryBudgetBytes: config.ingest.memoryBudgetBytes
|
|
465
|
+
} : void 0;
|
|
466
|
+
return new JiraSourcePoller(config.jira, ingest, config.name, gateway);
|
|
446
467
|
});
|
|
447
468
|
|
|
448
469
|
export {
|
|
@@ -453,6 +474,7 @@ export {
|
|
|
453
474
|
mapComment,
|
|
454
475
|
mapProject,
|
|
455
476
|
JiraSourcePoller,
|
|
456
|
-
JIRA_TABLE_SCHEMAS
|
|
477
|
+
JIRA_TABLE_SCHEMAS,
|
|
478
|
+
testConnection
|
|
457
479
|
};
|
|
458
|
-
//# sourceMappingURL=chunk-
|
|
480
|
+
//# sourceMappingURL=chunk-SXQB6JT6.js.map
|