@databricks/appkit 0.1.5 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +57 -2
- package/CLAUDE.md +57 -2
- package/NOTICE.md +2 -0
- package/README.md +21 -15
- package/bin/appkit-lint.js +129 -0
- package/dist/analytics/analytics.d.ts.map +1 -1
- package/dist/analytics/analytics.js +33 -33
- package/dist/analytics/analytics.js.map +1 -1
- package/dist/analytics/query.js +8 -2
- package/dist/analytics/query.js.map +1 -1
- package/dist/app/index.d.ts +5 -1
- package/dist/app/index.d.ts.map +1 -1
- package/dist/app/index.js +41 -10
- package/dist/app/index.js.map +1 -1
- package/dist/appkit/package.js +1 -1
- package/dist/cache/index.d.ts.map +1 -1
- package/dist/cache/index.js +24 -3
- package/dist/cache/index.js.map +1 -1
- package/dist/cache/storage/persistent.js +12 -6
- package/dist/cache/storage/persistent.js.map +1 -1
- package/dist/connectors/lakebase/client.js +25 -14
- package/dist/connectors/lakebase/client.js.map +1 -1
- package/dist/connectors/sql-warehouse/client.js +68 -28
- package/dist/connectors/sql-warehouse/client.js.map +1 -1
- package/dist/context/service-context.js +13 -8
- package/dist/context/service-context.js.map +1 -1
- package/dist/errors/authentication.d.ts +38 -0
- package/dist/errors/authentication.d.ts.map +1 -0
- package/dist/errors/authentication.js +48 -0
- package/dist/errors/authentication.js.map +1 -0
- package/dist/errors/base.d.ts +58 -0
- package/dist/errors/base.d.ts.map +1 -0
- package/dist/errors/base.js +70 -0
- package/dist/errors/base.js.map +1 -0
- package/dist/errors/configuration.d.ts +38 -0
- package/dist/errors/configuration.d.ts.map +1 -0
- package/dist/errors/configuration.js +45 -0
- package/dist/errors/configuration.js.map +1 -0
- package/dist/errors/connection.d.ts +42 -0
- package/dist/errors/connection.d.ts.map +1 -0
- package/dist/errors/connection.js +54 -0
- package/dist/errors/connection.js.map +1 -0
- package/dist/errors/execution.d.ts +42 -0
- package/dist/errors/execution.d.ts.map +1 -0
- package/dist/errors/execution.js +51 -0
- package/dist/errors/execution.js.map +1 -0
- package/dist/errors/index.js +28 -0
- package/dist/errors/index.js.map +1 -0
- package/dist/errors/initialization.d.ts +34 -0
- package/dist/errors/initialization.d.ts.map +1 -0
- package/dist/errors/initialization.js +42 -0
- package/dist/errors/initialization.js.map +1 -0
- package/dist/errors/server.d.ts +38 -0
- package/dist/errors/server.d.ts.map +1 -0
- package/dist/errors/server.js +45 -0
- package/dist/errors/server.js.map +1 -0
- package/dist/errors/tunnel.d.ts +38 -0
- package/dist/errors/tunnel.d.ts.map +1 -0
- package/dist/errors/tunnel.js +51 -0
- package/dist/errors/tunnel.js.map +1 -0
- package/dist/errors/validation.d.ts +36 -0
- package/dist/errors/validation.d.ts.map +1 -0
- package/dist/errors/validation.js +45 -0
- package/dist/errors/validation.js.map +1 -0
- package/dist/index.d.ts +12 -3
- package/dist/index.js +18 -3
- package/dist/index.js.map +1 -0
- package/dist/logging/logger.js +179 -0
- package/dist/logging/logger.js.map +1 -0
- package/dist/logging/sampling.js +56 -0
- package/dist/logging/sampling.js.map +1 -0
- package/dist/logging/wide-event-emitter.js +108 -0
- package/dist/logging/wide-event-emitter.js.map +1 -0
- package/dist/logging/wide-event.js +167 -0
- package/dist/logging/wide-event.js.map +1 -0
- package/dist/plugin/dev-reader.d.ts.map +1 -1
- package/dist/plugin/dev-reader.js +8 -3
- package/dist/plugin/dev-reader.js.map +1 -1
- package/dist/plugin/interceptors/cache.js.map +1 -1
- package/dist/plugin/interceptors/retry.js +10 -2
- package/dist/plugin/interceptors/retry.js.map +1 -1
- package/dist/plugin/interceptors/telemetry.js +24 -9
- package/dist/plugin/interceptors/telemetry.js.map +1 -1
- package/dist/plugin/interceptors/timeout.js +4 -0
- package/dist/plugin/interceptors/timeout.js.map +1 -1
- package/dist/plugin/plugin.d.ts +1 -1
- package/dist/plugin/plugin.d.ts.map +1 -1
- package/dist/plugin/plugin.js +9 -4
- package/dist/plugin/plugin.js.map +1 -1
- package/dist/server/index.d.ts.map +1 -1
- package/dist/server/index.js +22 -17
- package/dist/server/index.js.map +1 -1
- package/dist/server/remote-tunnel/remote-tunnel-controller.js +4 -2
- package/dist/server/remote-tunnel/remote-tunnel-controller.js.map +1 -1
- package/dist/server/remote-tunnel/remote-tunnel-manager.js +10 -8
- package/dist/server/remote-tunnel/remote-tunnel-manager.js.map +1 -1
- package/dist/server/vite-dev-server.js +8 -3
- package/dist/server/vite-dev-server.js.map +1 -1
- package/dist/stream/arrow-stream-processor.js +13 -6
- package/dist/stream/arrow-stream-processor.js.map +1 -1
- package/dist/stream/buffers.js +5 -1
- package/dist/stream/buffers.js.map +1 -1
- package/dist/stream/stream-manager.d.ts.map +1 -1
- package/dist/stream/stream-manager.js +47 -36
- package/dist/stream/stream-manager.js.map +1 -1
- package/dist/stream/types.js.map +1 -1
- package/dist/telemetry/index.d.ts +2 -2
- package/dist/telemetry/index.js +2 -2
- package/dist/telemetry/instrumentations.js +14 -10
- package/dist/telemetry/instrumentations.js.map +1 -1
- package/dist/telemetry/telemetry-manager.js +8 -6
- package/dist/telemetry/telemetry-manager.js.map +1 -1
- package/dist/telemetry/trace-sampler.js +33 -0
- package/dist/telemetry/trace-sampler.js.map +1 -0
- package/dist/type-generator/index.js +4 -2
- package/dist/type-generator/index.js.map +1 -1
- package/dist/type-generator/query-registry.js +13 -3
- package/dist/type-generator/query-registry.js.map +1 -1
- package/dist/type-generator/vite-plugin.d.ts.map +1 -1
- package/dist/type-generator/vite-plugin.js +5 -3
- package/dist/type-generator/vite-plugin.js.map +1 -1
- package/dist/utils/env-validator.js +5 -1
- package/dist/utils/env-validator.js.map +1 -1
- package/dist/utils/path-exclusions.js +66 -0
- package/dist/utils/path-exclusions.js.map +1 -0
- package/llms.txt +57 -2
- package/package.json +4 -1
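Version 0.3.0 introduces a typed error hierarchy under dist/errors (an AppKitError base plus AuthenticationError, ConfigurationError, ConnectionError, ExecutionError, InitializationError, ServerError, TunnelError, and ValidationError) and a structured logging module under dist/logging. A minimal consumer-side sketch of catching these errors follows; it assumes the classes are re-exported from the package root, which this section does not show.

```typescript
// Sketch, not a verbatim API: assumes the error classes added under dist/errors/*
// are re-exported from the package root ("@databricks/appkit").
import { AppKitError, ConnectionError, ValidationError } from "@databricks/appkit";

async function safeQuery(run: () => Promise<unknown>): Promise<unknown> {
  try {
    return await run();
  } catch (error) {
    if (error instanceof ValidationError) {
      // Bad input (e.g. a missing statement or warehouse_id): not retryable.
      throw error;
    }
    if (error instanceof ConnectionError) {
      // Infrastructure failure: report it and let the caller decide whether to retry.
      console.error("connection failed:", error.message);
      return null;
    }
    if (error instanceof AppKitError) {
      // Any other typed appkit failure (execution, auth, configuration, ...).
      console.error("appkit error:", error.message);
      return null;
    }
    throw error; // non-appkit error
  }
}
```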
@@ -1 +1 @@
-
{"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/lakebase/client.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { ApiClient, Config } from \"@databricks/sdk-experimental\";\nimport pg from \"pg\";\nimport {\n type Counter,\n type Histogram,\n SpanStatusCode,\n TelemetryManager,\n type TelemetryProvider,\n} from \"@/telemetry\";\nimport { deepMerge } from \"../../utils\";\nimport { lakebaseDefaults } from \"./defaults\";\nimport type {\n LakebaseConfig,\n LakebaseConnectionConfig,\n LakebaseCredentials,\n} from \"./types\";\n\n/**\n * Enterprise-grade connector for Databricks Lakebase\n * @example Simplest - everything from env/context\n * ```typescript\n * const connector = new LakebaseConnector();\n * await connector.query('SELECT * FROM users');\n * ```\n *\n * @example With explicit connection string\n * ```typescript\n * const connector = new LakebaseConnector({\n * connectionString: 'postgresql://...'\n * });\n * ```\n */\nexport class LakebaseConnector {\n private readonly name: string = \"lakebase\";\n private readonly CACHE_BUFFER_MS = 2 * 60 * 1000;\n private readonly config: LakebaseConfig;\n private readonly connectionConfig: LakebaseConnectionConfig;\n private pool: pg.Pool | null = null;\n private credentials: LakebaseCredentials | null = null;\n\n // telemetry\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n queryCount: Counter;\n queryDuration: Histogram;\n };\n\n constructor(userConfig?: Partial<LakebaseConfig>) {\n this.config = deepMerge(lakebaseDefaults, userConfig);\n this.connectionConfig = this.parseConnectionConfig();\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n queryCount: this.telemetry\n .getMeter()\n .createCounter(\"lakebase.query.count\", {\n description: \"Total number of queries executed\",\n unit: \"1\",\n }),\n queryDuration: this.telemetry\n .getMeter()\n .createHistogram(\"lakebase.query.duration\", {\n description: \"Duration of queries executed\",\n unit: \"ms\",\n }),\n };\n\n // validate configuration\n if (this.config.maxPoolSize < 1) {\n throw new Error(\"maxPoolSize must be at least 1\");\n }\n }\n\n /**\n * Execute a SQL query\n *\n * @example\n * ```typescript\n * const users = await connector.query('SELECT * FROM users');\n * const user = await connector.query('SELECT * FROM users WHERE id = $1', [123]);\n * ```\n */\n async query<T extends pg.QueryResultRow>(\n sql: string,\n params?: any[],\n retryCount: number = 0,\n ): Promise<pg.QueryResult<T>> {\n const startTime = Date.now();\n\n return this.telemetry.startActiveSpan(\n \"lakebase.query\",\n {\n attributes: {\n \"db.system\": \"lakebase\",\n \"db.statement\": sql.substring(0, 500),\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n try {\n const pool = await this.getPool();\n const result = await pool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const result = await newPool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 
0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transient_error_retry\");\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.query<T>(sql, params, retryCount + 1);\n }\n\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n throw error;\n } finally {\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /**\n * Execute a transaction\n *\n * COMMIT and ROLLBACK are automatically managed by the transaction function.\n *\n * @param callback - Callback function to execute within the transaction context\n * @example\n * ```typescript\n * await connector.transaction(async (client) => {\n * await client.query('INSERT INTO accounts (name) VALUES ($1)', ['Alice']);\n * await client.query('INSERT INTO logs (action) VALUES ($1)', ['Created Alice']);\n * });\n * ```\n */\n async transaction<T>(\n callback: (client: pg.PoolClient) => Promise<T>,\n retryCount: number = 0,\n ): Promise<T> {\n const startTime = Date.now();\n return this.telemetry.startActiveSpan(\n \"lakebase.transaction\",\n {\n attributes: {\n \"db.system\": \"lakebase\",\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n const pool = await this.getPool();\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(client);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n try {\n await client.query(\"ROLLBACK\");\n } catch {}\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n client.release();\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const retryClient = await newPool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(retryClient);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (retryError) {\n try {\n await retryClient.query(\"ROLLBACK\");\n } catch {}\n throw retryError;\n } finally {\n retryClient.release();\n }\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transaction_error_retry\");\n client.release();\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.transaction<T>(callback, retryCount + 1);\n }\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n throw error;\n } finally {\n client.release();\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /** Check if database connection is healthy */\n async healthCheck(): Promise<boolean> {\n return this.telemetry.startActiveSpan(\n \"lakebase.healthCheck\",\n {},\n async (span) => {\n try {\n const result = await this.query<{ result: number }>(\n \"SELECT 1 as result\",\n );\n const healthy = result.rows[0]?.result === 1;\n span.setAttribute(\"db.healthy\", healthy);\n span.setStatus({ code: SpanStatusCode.OK });\n return healthy;\n } catch {\n span.setAttribute(\"db.healthy\", false);\n span.setStatus({ code: SpanStatusCode.ERROR });\n return false;\n } finally {\n 
span.end();\n }\n },\n );\n }\n\n /** Close connection pool (call on shutdown) */\n async close(): Promise<void> {\n if (this.pool) {\n await this.pool.end().catch((error: unknown) => {\n console.error(\"Error closing connection pool:\", error);\n });\n this.pool = null;\n }\n this.credentials = null;\n }\n\n /** Setup graceful shutdown to close connection pools */\n shutdown(): void {\n process.on(\"SIGTERM\", () => this.close());\n process.on(\"SIGINT\", () => this.close());\n this.close();\n }\n\n /** Get Databricks workspace client - from config or execution context */\n private getWorkspaceClient(): WorkspaceClient {\n if (this.config.workspaceClient) {\n return this.config.workspaceClient;\n }\n\n try {\n const { getWorkspaceClient: getClient } = require(\"../../context\");\n const client = getClient();\n\n // cache it for subsequent calls\n this.config.workspaceClient = client;\n return client;\n } catch (_error) {\n throw new Error(\n \"Databricks workspace client not available. Either pass it in config or ensure ServiceContext is initialized.\",\n );\n }\n }\n\n /** Get or create connection pool */\n private async getPool(): Promise<pg.Pool> {\n if (!this.connectionConfig) {\n throw new Error(\n \"Lakebase connection not configured. \" +\n \"Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config.\",\n );\n }\n\n if (!this.pool) {\n const creds = await this.getCredentials();\n this.pool = this.createPool(creds);\n }\n return this.pool;\n }\n\n /** Create PostgreSQL pool */\n private createPool(credentials: {\n username: string;\n password: string;\n }): pg.Pool {\n const { host, database, port, sslMode } = this.connectionConfig;\n\n const pool = new pg.Pool({\n host,\n port,\n database,\n user: credentials.username,\n password: credentials.password,\n max: this.config.maxPoolSize,\n idleTimeoutMillis: this.config.idleTimeoutMs,\n connectionTimeoutMillis: this.config.connectionTimeoutMs,\n ssl: sslMode === \"require\" ? 
{ rejectUnauthorized: true } : false,\n });\n\n pool.on(\"error\", (error: Error & { code?: string }) => {\n console.error(\"Connection pool error:\", error.message, {\n code: error.code,\n });\n });\n\n return pool;\n }\n\n /** Get or fetch credentials with caching */\n private async getCredentials(): Promise<{\n username: string;\n password: string;\n }> {\n const now = Date.now();\n\n // return cached if still valid\n if (\n this.credentials &&\n now < this.credentials.expiresAt - this.CACHE_BUFFER_MS\n ) {\n return this.credentials;\n }\n\n // fetch new credentials\n const username = await this.fetchUsername();\n const { token, expiresAt } = await this.fetchPassword();\n\n this.credentials = {\n username,\n password: token,\n expiresAt,\n };\n\n return { username, password: token };\n }\n\n /** Rotate credentials and recreate pool */\n private async rotateCredentials(): Promise<void> {\n // clear cached credentials\n this.credentials = null;\n\n if (this.pool) {\n const oldPool = this.pool;\n this.pool = null;\n oldPool.end().catch((error: unknown) => {\n console.error(\n \"Error closing old connection pool during rotation:\",\n error,\n );\n });\n }\n }\n\n /** Fetch username from Databricks */\n private async fetchUsername(): Promise<string> {\n const workspaceClient = this.getWorkspaceClient();\n const user = await workspaceClient.currentUser.me();\n if (!user.userName) {\n throw new Error(\"Failed to get current user from Databricks workspace\");\n }\n return user.userName;\n }\n\n /** Fetch password (OAuth token) from Databricks */\n private async fetchPassword(): Promise<{ token: string; expiresAt: number }> {\n const workspaceClient = this.getWorkspaceClient();\n const config = new Config({ host: workspaceClient.config.host });\n const apiClient = new ApiClient(config);\n\n if (!this.connectionConfig.appName) {\n throw new Error(`Database app name not found in connection config`);\n }\n\n const credentials = await apiClient.request({\n path: `/api/2.0/database/credentials`,\n method: \"POST\",\n headers: new Headers(),\n raw: false,\n payload: {\n instance_names: [this.connectionConfig.appName],\n request_id: randomUUID(),\n },\n });\n\n if (!this.validateCredentials(credentials)) {\n throw new Error(\n `Failed to generate credentials for instance: ${this.connectionConfig.appName}`,\n );\n }\n\n const expiresAt = new Date(credentials.expiration_time).getTime();\n\n return { token: credentials.token, expiresAt };\n }\n\n /** Check if error is auth failure */\n private isAuthError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n (error as any).code === \"28P01\"\n );\n }\n\n /** Check if error is transient */\n private isTransientError(error: unknown): boolean {\n if (typeof error !== \"object\" || error === null || !(\"code\" in error)) {\n return false;\n }\n\n const code = (error as any).code;\n return (\n code === \"ECONNRESET\" ||\n code === \"ECONNREFUSED\" ||\n code === \"ETIMEDOUT\" ||\n code === \"57P01\" || // admin_shutdown\n code === \"57P03\" || // cannot_connect_now\n code === \"08006\" || // connection_failure\n code === \"08003\" || // connection_does_not_exist\n code === \"08000\" // connection_exception\n );\n }\n\n /** Type guard for credentials */\n private validateCredentials(\n value: unknown,\n ): value is { token: string; expiration_time: string } {\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n const credentials = value as { token: string; expiration_time: 
string };\n return (\n \"token\" in credentials &&\n typeof credentials.token === \"string\" &&\n \"expiration_time\" in credentials &&\n typeof credentials.expiration_time === \"string\" &&\n new Date(credentials.expiration_time).getTime() > Date.now()\n );\n }\n\n /** Parse connection configuration from config or environment */\n private parseConnectionConfig(): LakebaseConnectionConfig {\n if (this.config.connectionString) {\n return this.parseConnectionString(this.config.connectionString);\n }\n\n // get connection from config\n if (this.config.host && this.config.database && this.config.appName) {\n return {\n host: this.config.host,\n database: this.config.database,\n port: this.config.port ?? 5432,\n sslMode: this.config.sslMode ?? \"require\",\n appName: this.config.appName,\n };\n }\n\n // get connection from environment variables\n const pgHost = process.env.PGHOST;\n const pgDatabase = process.env.PGDATABASE;\n const pgAppName = process.env.PGAPPNAME;\n if (!pgHost || !pgDatabase || !pgAppName) {\n throw new Error(\n \"Lakebase connection not configured. Required env vars: PGHOST, PGDATABASE, PGAPPNAME. \" +\n \"Optional: PGPORT (default: 5432), PGSSLMODE (default: require).\",\n );\n }\n const pgPort = process.env.PGPORT;\n const port = pgPort ? parseInt(pgPort, 10) : 5432;\n\n if (Number.isNaN(port)) {\n throw new Error(`Invalid port: ${pgPort}. Must be a number.`);\n }\n\n const pgSSLMode = process.env.PGSSLMODE;\n const sslMode =\n (pgSSLMode as \"require\" | \"disable\" | \"prefer\") || \"require\";\n\n return {\n host: pgHost,\n database: pgDatabase,\n port,\n sslMode,\n appName: pgAppName,\n };\n }\n\n private parseConnectionString(\n connectionString: string,\n ): LakebaseConnectionConfig {\n const url = new URL(connectionString);\n const appName = url.searchParams.get(\"appName\");\n if (!appName) {\n throw new Error(\"Connection string must include appName parameter\");\n }\n\n return {\n host: url.hostname,\n database: url.pathname.slice(1), // remove leading slash\n port: url.port ? 
parseInt(url.port, 10) : 5432,\n sslMode:\n (url.searchParams.get(\"sslmode\") as \"require\" | \"disable\" | \"prefer\") ??\n \"require\",\n appName: appName,\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,IAAa,oBAAb,MAA+B;CAe7B,YAAY,YAAsC;cAdlB;yBACG,MAAS;cAGb;qBACmB;AAUhD,OAAK,SAAS,UAAU,kBAAkB,WAAW;AACrD,OAAK,mBAAmB,KAAK,uBAAuB;AAEpD,OAAK,YAAY,iBAAiB,YAChC,KAAK,MACL,KAAK,OAAO,UACb;AACD,OAAK,mBAAmB;GACtB,YAAY,KAAK,UACd,UAAU,CACV,cAAc,wBAAwB;IACrC,aAAa;IACb,MAAM;IACP,CAAC;GACJ,eAAe,KAAK,UACjB,UAAU,CACV,gBAAgB,2BAA2B;IAC1C,aAAa;IACb,MAAM;IACP,CAAC;GACL;AAGD,MAAI,KAAK,OAAO,cAAc,EAC5B,OAAM,IAAI,MAAM,iCAAiC;;;;;;;;;;;CAarD,MAAM,MACJ,KACA,QACA,aAAqB,GACO;EAC5B,MAAM,YAAY,KAAK,KAAK;AAE5B,SAAO,KAAK,UAAU,gBACpB,kBACA,EACE,YAAY;GACV,aAAa;GACb,gBAAgB,IAAI,UAAU,GAAG,IAAI;GACrC,kBAAkB;GACnB,EACF,EACD,OAAO,SAAS;AACd,OAAI;IAEF,MAAM,SAAS,OADF,MAAM,KAAK,SAAS,EACP,MAAS,KAAK,OAAO;AAC/C,SAAK,aAAa,oBAAoB,OAAO,YAAY,EAAE;AAC3D,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AAEd,QAAI,KAAK,YAAY,MAAM,EAAE;AAC3B,UAAK,SAAS,mBAAmB;AACjC,WAAM,KAAK,mBAAmB;KAE9B,MAAM,SAAS,OADC,MAAM,KAAK,SAAS,EACP,MAAS,KAAK,OAAO;AAClD,UAAK,aAAa,oBAAoB,OAAO,YAAY,EAAE;AAC3D,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,YAAO;;AAIT,QAAI,KAAK,iBAAiB,MAAM,IAAI,aAAa,GAAG;AAClD,UAAK,SAAS,wBAAwB;AACtC,WAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;AACxD,YAAO,MAAM,KAAK,MAAS,KAAK,QAAQ,aAAa,EAAE;;AAGzD,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAE9C,UAAM;aACE;IACR,MAAM,WAAW,KAAK,KAAK,GAAG;AAC9B,SAAK,iBAAiB,WAAW,IAAI,EAAE;AACvC,SAAK,iBAAiB,cAAc,OAAO,SAAS;AACpD,SAAK,KAAK;;IAGf;;;;;;;;;;;;;;;;CAiBH,MAAM,YACJ,UACA,aAAqB,GACT;EACZ,MAAM,YAAY,KAAK,KAAK;AAC5B,SAAO,KAAK,UAAU,gBACpB,wBACA,EACE,YAAY;GACV,aAAa;GACb,kBAAkB;GACnB,EACF,EACD,OAAO,SAAS;GAEd,MAAM,SAAS,OADF,MAAM,KAAK,SAAS,EACP,SAAS;AACnC,OAAI;AACF,UAAM,OAAO,MAAM,QAAQ;IAC3B,MAAM,SAAS,MAAM,SAAS,OAAO;AACrC,UAAM,OAAO,MAAM,SAAS;AAC5B,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,QAAI;AACF,WAAM,OAAO,MAAM,WAAW;YACxB;AAER,QAAI,KAAK,YAAY,MAAM,EAAE;AAC3B,UAAK,SAAS,mBAAmB;AACjC,YAAO,SAAS;AAChB,WAAM,KAAK,mBAAmB;KAE9B,MAAM,cAAc,OADJ,MAAM,KAAK,SAAS,EACF,SAAS;AAC3C,SAAI;AACF,YAAM,OAAO,MAAM,QAAQ;MAC3B,MAAM,SAAS,MAAM,SAAS,YAAY;AAC1C,YAAM,OAAO,MAAM,SAAS;AAC5B,WAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,aAAO;cACA,YAAY;AACnB,UAAI;AACF,aAAM,YAAY,MAAM,WAAW;cAC7B;AACR,YAAM;eACE;AACR,kBAAY,SAAS;;;AAKzB,QAAI,KAAK,iBAAiB,MAAM,IAAI,aAAa,GAAG;AAClD,UAAK,SAAS,0BAA0B;AACxC,YAAO,SAAS;AAChB,WAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;AACxD,YAAO,MAAM,KAAK,YAAe,UAAU,aAAa,EAAE;;AAE5D,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,UAAM;aACE;AACR,WAAO,SAAS;IAChB,MAAM,WAAW,KAAK,KAAK,GAAG;AAC9B,SAAK,iBAAiB,WAAW,IAAI,EAAE;AACvC,SAAK,iBAAiB,cAAc,OAAO,SAAS;AACpD,SAAK,KAAK;;IAGf;;;CAIH,MAAM,cAAgC;AACpC,SAAO,KAAK,UAAU,gBACpB,wBACA,EAAE,EACF,OAAO,SAAS;AACd,OAAI;IAIF,MAAM,WAHS,MAAM,KAAK,MACxB,qBACD,EACsB,KAAK,IAAI,WAAW;AAC3C,SAAK,aAAa,cAAc,QAAQ;AACxC,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;WACD;AACN,SAAK,aAAa,cAAc,MAAM;AACtC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,WAAO;aACC;AACR,SAAK,KAAK;;IAGf;;;CAIH,MAAM,QAAuB;AAC3B,MAAI,KAAK,MAAM;AACb,SAAM,KAAK,KAAK,KAAK,CAAC,OAAO,UAAmB;AAC9C,YAAQ,MAAM,kCAAkC,MAAM;KACtD;AACF,QAAK,OAAO;;AAEd,OAAK,cAAc;;;CAIrB,WAAiB;AACf,UAAQ,GAAG,iBAAiB,KAAK,OAAO,CAAC;AACzC,UAAQ,GAAG,gBAAgB,KAAK,OAAO,CAAC;AACxC,OAAK,OAAO;;;CAId,AAAQ,qBAAsC;AAC5C,MAAI,KAAK,OAAO,gBACd,QAAO,KAAK,OAAO;AAGrB,MAAI;GACF,MAAM,EAAE,oBAAoB;GAC5B,MAAM,SAAS,WAAW;AAG1B,QAAK,OAAO,kBAAkB;AAC9B,UAAO;WACA,QAAQ;AACf,SAAM,IAAI,MACR,+GACD;;;;CAKL,MAAc,UAA4B;AACxC,MAAI,CAAC,KAAK,iBACR,OAAM,IAAI,MACR,uIAED;AAG
H,MAAI,CAAC,KAAK,MAAM;GACd,MAAM,QAAQ,MAAM,KAAK,gBAAgB;AACzC,QAAK,OAAO,KAAK,WAAW,MAAM;;AAEpC,SAAO,KAAK;;;CAId,AAAQ,WAAW,aAGP;EACV,MAAM,EAAE,MAAM,UAAU,MAAM,YAAY,KAAK;EAE/C,MAAM,OAAO,IAAI,GAAG,KAAK;GACvB;GACA;GACA;GACA,MAAM,YAAY;GAClB,UAAU,YAAY;GACtB,KAAK,KAAK,OAAO;GACjB,mBAAmB,KAAK,OAAO;GAC/B,yBAAyB,KAAK,OAAO;GACrC,KAAK,YAAY,YAAY,EAAE,oBAAoB,MAAM,GAAG;GAC7D,CAAC;AAEF,OAAK,GAAG,UAAU,UAAqC;AACrD,WAAQ,MAAM,0BAA0B,MAAM,SAAS,EACrD,MAAM,MAAM,MACb,CAAC;IACF;AAEF,SAAO;;;CAIT,MAAc,iBAGX;EACD,MAAM,MAAM,KAAK,KAAK;AAGtB,MACE,KAAK,eACL,MAAM,KAAK,YAAY,YAAY,KAAK,gBAExC,QAAO,KAAK;EAId,MAAM,WAAW,MAAM,KAAK,eAAe;EAC3C,MAAM,EAAE,OAAO,cAAc,MAAM,KAAK,eAAe;AAEvD,OAAK,cAAc;GACjB;GACA,UAAU;GACV;GACD;AAED,SAAO;GAAE;GAAU,UAAU;GAAO;;;CAItC,MAAc,oBAAmC;AAE/C,OAAK,cAAc;AAEnB,MAAI,KAAK,MAAM;GACb,MAAM,UAAU,KAAK;AACrB,QAAK,OAAO;AACZ,WAAQ,KAAK,CAAC,OAAO,UAAmB;AACtC,YAAQ,MACN,sDACA,MACD;KACD;;;;CAKN,MAAc,gBAAiC;EAE7C,MAAM,OAAO,MADW,KAAK,oBAAoB,CACd,YAAY,IAAI;AACnD,MAAI,CAAC,KAAK,SACR,OAAM,IAAI,MAAM,uDAAuD;AAEzE,SAAO,KAAK;;;CAId,MAAc,gBAA+D;EAG3E,MAAM,YAAY,IAAI,UADP,IAAI,OAAO,EAAE,MADJ,KAAK,oBAAoB,CACC,OAAO,MAAM,CAAC,CACzB;AAEvC,MAAI,CAAC,KAAK,iBAAiB,QACzB,OAAM,IAAI,MAAM,mDAAmD;EAGrE,MAAM,cAAc,MAAM,UAAU,QAAQ;GAC1C,MAAM;GACN,QAAQ;GACR,SAAS,IAAI,SAAS;GACtB,KAAK;GACL,SAAS;IACP,gBAAgB,CAAC,KAAK,iBAAiB,QAAQ;IAC/C,YAAY,YAAY;IACzB;GACF,CAAC;AAEF,MAAI,CAAC,KAAK,oBAAoB,YAAY,CACxC,OAAM,IAAI,MACR,gDAAgD,KAAK,iBAAiB,UACvE;EAGH,MAAM,YAAY,IAAI,KAAK,YAAY,gBAAgB,CAAC,SAAS;AAEjE,SAAO;GAAE,OAAO,YAAY;GAAO;GAAW;;;CAIhD,AAAQ,YAAY,OAAyB;AAC3C,SACE,OAAO,UAAU,YACjB,UAAU,QACV,UAAU,SACT,MAAc,SAAS;;;CAK5B,AAAQ,iBAAiB,OAAyB;AAChD,MAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,EAAE,UAAU,OAC7D,QAAO;EAGT,MAAM,OAAQ,MAAc;AAC5B,SACE,SAAS,gBACT,SAAS,kBACT,SAAS,eACT,SAAS,WACT,SAAS,WACT,SAAS,WACT,SAAS,WACT,SAAS;;;CAKb,AAAQ,oBACN,OACqD;AACrD,MAAI,OAAO,UAAU,YAAY,UAAU,KACzC,QAAO;EAGT,MAAM,cAAc;AACpB,SACE,WAAW,eACX,OAAO,YAAY,UAAU,YAC7B,qBAAqB,eACrB,OAAO,YAAY,oBAAoB,YACvC,IAAI,KAAK,YAAY,gBAAgB,CAAC,SAAS,GAAG,KAAK,KAAK;;;CAKhE,AAAQ,wBAAkD;AACxD,MAAI,KAAK,OAAO,iBACd,QAAO,KAAK,sBAAsB,KAAK,OAAO,iBAAiB;AAIjE,MAAI,KAAK,OAAO,QAAQ,KAAK,OAAO,YAAY,KAAK,OAAO,QAC1D,QAAO;GACL,MAAM,KAAK,OAAO;GAClB,UAAU,KAAK,OAAO;GACtB,MAAM,KAAK,OAAO,QAAQ;GAC1B,SAAS,KAAK,OAAO,WAAW;GAChC,SAAS,KAAK,OAAO;GACtB;EAIH,MAAM,SAAS,QAAQ,IAAI;EAC3B,MAAM,aAAa,QAAQ,IAAI;EAC/B,MAAM,YAAY,QAAQ,IAAI;AAC9B,MAAI,CAAC,UAAU,CAAC,cAAc,CAAC,UAC7B,OAAM,IAAI,MACR,wJAED;EAEH,MAAM,SAAS,QAAQ,IAAI;EAC3B,MAAM,OAAO,SAAS,SAAS,QAAQ,GAAG,GAAG;AAE7C,MAAI,OAAO,MAAM,KAAK,CACpB,OAAM,IAAI,MAAM,iBAAiB,OAAO,qBAAqB;AAO/D,SAAO;GACL,MAAM;GACN,UAAU;GACV;GACA,SARgB,QAAQ,IAAI,aAEuB;GAOnD,SAAS;GACV;;CAGH,AAAQ,sBACN,kBAC0B;EAC1B,MAAM,MAAM,IAAI,IAAI,iBAAiB;EACrC,MAAM,UAAU,IAAI,aAAa,IAAI,UAAU;AAC/C,MAAI,CAAC,QACH,OAAM,IAAI,MAAM,mDAAmD;AAGrE,SAAO;GACL,MAAM,IAAI;GACV,UAAU,IAAI,SAAS,MAAM,EAAE;GAC/B,MAAM,IAAI,OAAO,SAAS,IAAI,MAAM,GAAG,GAAG;GAC1C,SACG,IAAI,aAAa,IAAI,UAAU,IAChC;GACO;GACV"}
+
{"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/lakebase/client.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { ApiClient, Config } from \"@databricks/sdk-experimental\";\nimport pg from \"pg\";\nimport {\n type Counter,\n type Histogram,\n SpanStatusCode,\n TelemetryManager,\n type TelemetryProvider,\n} from \"@/telemetry\";\nimport {\n AppKitError,\n AuthenticationError,\n ConfigurationError,\n ConnectionError,\n ValidationError,\n} from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport { deepMerge } from \"../../utils\";\nimport { lakebaseDefaults } from \"./defaults\";\nimport type {\n LakebaseConfig,\n LakebaseConnectionConfig,\n LakebaseCredentials,\n} from \"./types\";\n\nconst logger = createLogger(\"connectors:lakebase\");\n\n/**\n * Enterprise-grade connector for Databricks Lakebase\n * @example Simplest - everything from env/context\n * ```typescript\n * const connector = new LakebaseConnector();\n * await connector.query('SELECT * FROM users');\n * ```\n *\n * @example With explicit connection string\n * ```typescript\n * const connector = new LakebaseConnector({\n * connectionString: 'postgresql://...'\n * });\n * ```\n */\nexport class LakebaseConnector {\n private readonly name: string = \"lakebase\";\n private readonly CACHE_BUFFER_MS = 2 * 60 * 1000;\n private readonly config: LakebaseConfig;\n private readonly connectionConfig: LakebaseConnectionConfig;\n private pool: pg.Pool | null = null;\n private credentials: LakebaseCredentials | null = null;\n\n // telemetry\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n queryCount: Counter;\n queryDuration: Histogram;\n };\n\n constructor(userConfig?: Partial<LakebaseConfig>) {\n this.config = deepMerge(lakebaseDefaults, userConfig);\n this.connectionConfig = this.parseConnectionConfig();\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n queryCount: this.telemetry\n .getMeter()\n .createCounter(\"lakebase.query.count\", {\n description: \"Total number of queries executed\",\n unit: \"1\",\n }),\n queryDuration: this.telemetry\n .getMeter()\n .createHistogram(\"lakebase.query.duration\", {\n description: \"Duration of queries executed\",\n unit: \"ms\",\n }),\n };\n\n // validate configuration\n if (this.config.maxPoolSize < 1) {\n throw ValidationError.invalidValue(\n \"maxPoolSize\",\n this.config.maxPoolSize,\n \"at least 1\",\n );\n }\n }\n\n /**\n * Execute a SQL query\n *\n * @example\n * ```typescript\n * const users = await connector.query('SELECT * FROM users');\n * const user = await connector.query('SELECT * FROM users WHERE id = $1', [123]);\n * ```\n */\n async query<T extends pg.QueryResultRow>(\n sql: string,\n params?: any[],\n retryCount: number = 0,\n ): Promise<pg.QueryResult<T>> {\n const startTime = Date.now();\n\n return this.telemetry.startActiveSpan(\n \"lakebase.query\",\n {\n attributes: {\n \"db.system\": \"lakebase\",\n \"db.statement\": sql.substring(0, 500),\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n try {\n const pool = await this.getPool();\n const result = await pool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 
0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const result = await newPool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transient_error_retry\");\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.query<T>(sql, params, retryCount + 1);\n }\n\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.queryFailed(error as Error);\n } finally {\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /**\n * Execute a transaction\n *\n * COMMIT and ROLLBACK are automatically managed by the transaction function.\n *\n * @param callback - Callback function to execute within the transaction context\n * @example\n * ```typescript\n * await connector.transaction(async (client) => {\n * await client.query('INSERT INTO accounts (name) VALUES ($1)', ['Alice']);\n * await client.query('INSERT INTO logs (action) VALUES ($1)', ['Created Alice']);\n * });\n * ```\n */\n async transaction<T>(\n callback: (client: pg.PoolClient) => Promise<T>,\n retryCount: number = 0,\n ): Promise<T> {\n const startTime = Date.now();\n return this.telemetry.startActiveSpan(\n \"lakebase.transaction\",\n {\n attributes: {\n \"db.system\": \"lakebase\",\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n const pool = await this.getPool();\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(client);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n try {\n await client.query(\"ROLLBACK\");\n } catch {}\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n client.release();\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const retryClient = await newPool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(retryClient);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (retryError) {\n try {\n await retryClient.query(\"ROLLBACK\");\n } catch {}\n throw retryError;\n } finally {\n retryClient.release();\n }\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transaction_error_retry\");\n client.release();\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.transaction<T>(callback, retryCount + 1);\n }\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.transactionFailed(error as Error);\n } finally {\n client.release();\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /** Check 
if database connection is healthy */\n async healthCheck(): Promise<boolean> {\n return this.telemetry.startActiveSpan(\n \"lakebase.healthCheck\",\n {},\n async (span) => {\n try {\n const result = await this.query<{ result: number }>(\n \"SELECT 1 as result\",\n );\n const healthy = result.rows[0]?.result === 1;\n span.setAttribute(\"db.healthy\", healthy);\n span.setStatus({ code: SpanStatusCode.OK });\n return healthy;\n } catch {\n span.setAttribute(\"db.healthy\", false);\n span.setStatus({ code: SpanStatusCode.ERROR });\n return false;\n } finally {\n span.end();\n }\n },\n );\n }\n\n /** Close connection pool (call on shutdown) */\n async close(): Promise<void> {\n if (this.pool) {\n await this.pool.end().catch((error: unknown) => {\n logger.error(\"Error closing connection pool: %O\", error);\n });\n this.pool = null;\n }\n this.credentials = null;\n }\n\n /** Setup graceful shutdown to close connection pools */\n shutdown(): void {\n process.on(\"SIGTERM\", () => this.close());\n process.on(\"SIGINT\", () => this.close());\n this.close();\n }\n\n /** Get Databricks workspace client - from config or execution context */\n private getWorkspaceClient(): WorkspaceClient {\n if (this.config.workspaceClient) {\n return this.config.workspaceClient;\n }\n\n try {\n const { getWorkspaceClient: getClient } = require(\"../../context\");\n const client = getClient();\n\n // cache it for subsequent calls\n this.config.workspaceClient = client;\n return client;\n } catch (_error) {\n throw ConnectionError.clientUnavailable(\n \"Databricks workspace client\",\n \"Either pass it in config or ensure ServiceContext is initialized\",\n );\n }\n }\n\n /** Get or create connection pool */\n private async getPool(): Promise<pg.Pool> {\n if (!this.connectionConfig) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config\",\n );\n }\n\n if (!this.pool) {\n const creds = await this.getCredentials();\n this.pool = this.createPool(creds);\n }\n return this.pool;\n }\n\n /** Create PostgreSQL pool */\n private createPool(credentials: {\n username: string;\n password: string;\n }): pg.Pool {\n const { host, database, port, sslMode } = this.connectionConfig;\n\n const pool = new pg.Pool({\n host,\n port,\n database,\n user: credentials.username,\n password: credentials.password,\n max: this.config.maxPoolSize,\n idleTimeoutMillis: this.config.idleTimeoutMs,\n connectionTimeoutMillis: this.config.connectionTimeoutMs,\n ssl: sslMode === \"require\" ? 
{ rejectUnauthorized: true } : false,\n });\n\n pool.on(\"error\", (error: Error & { code?: string }) => {\n logger.error(\n \"Connection pool error: %s (code: %s)\",\n error.message,\n error.code,\n );\n });\n\n return pool;\n }\n\n /** Get or fetch credentials with caching */\n private async getCredentials(): Promise<{\n username: string;\n password: string;\n }> {\n const now = Date.now();\n\n // return cached if still valid\n if (\n this.credentials &&\n now < this.credentials.expiresAt - this.CACHE_BUFFER_MS\n ) {\n return this.credentials;\n }\n\n // fetch new credentials\n const username = await this.fetchUsername();\n const { token, expiresAt } = await this.fetchPassword();\n\n this.credentials = {\n username,\n password: token,\n expiresAt,\n };\n\n return { username, password: token };\n }\n\n /** Rotate credentials and recreate pool */\n private async rotateCredentials(): Promise<void> {\n // clear cached credentials\n this.credentials = null;\n\n if (this.pool) {\n const oldPool = this.pool;\n this.pool = null;\n oldPool.end().catch((error: unknown) => {\n logger.error(\n \"Error closing old connection pool during rotation: %O\",\n error,\n );\n });\n }\n }\n\n /** Fetch username from Databricks */\n private async fetchUsername(): Promise<string> {\n const workspaceClient = this.getWorkspaceClient();\n const user = await workspaceClient.currentUser.me();\n if (!user.userName) {\n throw AuthenticationError.userLookupFailed();\n }\n return user.userName;\n }\n\n /** Fetch password (OAuth token) from Databricks */\n private async fetchPassword(): Promise<{ token: string; expiresAt: number }> {\n const workspaceClient = this.getWorkspaceClient();\n const config = new Config({ host: workspaceClient.config.host });\n const apiClient = new ApiClient(config);\n\n if (!this.connectionConfig.appName) {\n throw ConfigurationError.resourceNotFound(\"Database app name\");\n }\n\n const credentials = await apiClient.request({\n path: `/api/2.0/database/credentials`,\n method: \"POST\",\n headers: new Headers(),\n raw: false,\n payload: {\n instance_names: [this.connectionConfig.appName],\n request_id: randomUUID(),\n },\n });\n\n if (!this.validateCredentials(credentials)) {\n throw AuthenticationError.credentialsFailed(\n this.connectionConfig.appName,\n );\n }\n\n const expiresAt = new Date(credentials.expiration_time).getTime();\n\n return { token: credentials.token, expiresAt };\n }\n\n /** Check if error is auth failure */\n private isAuthError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n (error as any).code === \"28P01\"\n );\n }\n\n /** Check if error is transient */\n private isTransientError(error: unknown): boolean {\n if (typeof error !== \"object\" || error === null || !(\"code\" in error)) {\n return false;\n }\n\n const code = (error as any).code;\n return (\n code === \"ECONNRESET\" ||\n code === \"ECONNREFUSED\" ||\n code === \"ETIMEDOUT\" ||\n code === \"57P01\" || // admin_shutdown\n code === \"57P03\" || // cannot_connect_now\n code === \"08006\" || // connection_failure\n code === \"08003\" || // connection_does_not_exist\n code === \"08000\" // connection_exception\n );\n }\n\n /** Type guard for credentials */\n private validateCredentials(\n value: unknown,\n ): value is { token: string; expiration_time: string } {\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n const credentials = value as { token: string; expiration_time: string };\n return (\n \"token\" in 
credentials &&\n typeof credentials.token === \"string\" &&\n \"expiration_time\" in credentials &&\n typeof credentials.expiration_time === \"string\" &&\n new Date(credentials.expiration_time).getTime() > Date.now()\n );\n }\n\n /** Parse connection configuration from config or environment */\n private parseConnectionConfig(): LakebaseConnectionConfig {\n if (this.config.connectionString) {\n return this.parseConnectionString(this.config.connectionString);\n }\n\n // get connection from config\n if (this.config.host && this.config.database && this.config.appName) {\n return {\n host: this.config.host,\n database: this.config.database,\n port: this.config.port ?? 5432,\n sslMode: this.config.sslMode ?? \"require\",\n appName: this.config.appName,\n };\n }\n\n // get connection from environment variables\n const pgHost = process.env.PGHOST;\n const pgDatabase = process.env.PGDATABASE;\n const pgAppName = process.env.PGAPPNAME;\n if (!pgHost || !pgDatabase || !pgAppName) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Required env vars: PGHOST, PGDATABASE, PGAPPNAME. Optional: PGPORT (default: 5432), PGSSLMODE (default: require)\",\n );\n }\n const pgPort = process.env.PGPORT;\n const port = pgPort ? parseInt(pgPort, 10) : 5432;\n\n if (Number.isNaN(port)) {\n throw ValidationError.invalidValue(\"port\", pgPort, \"a number\");\n }\n\n const pgSSLMode = process.env.PGSSLMODE;\n const sslMode =\n (pgSSLMode as \"require\" | \"disable\" | \"prefer\") || \"require\";\n\n return {\n host: pgHost,\n database: pgDatabase,\n port,\n sslMode,\n appName: pgAppName,\n };\n }\n\n private parseConnectionString(\n connectionString: string,\n ): LakebaseConnectionConfig {\n const url = new URL(connectionString);\n const appName = url.searchParams.get(\"appName\");\n if (!appName) {\n throw ConfigurationError.missingConnectionParam(\"appName\");\n }\n\n return {\n host: url.hostname,\n database: url.pathname.slice(1), // remove leading slash\n port: url.port ? 
parseInt(url.port, 10) : 5432,\n sslMode:\n (url.searchParams.get(\"sslmode\") as \"require\" | \"disable\" | \"prefer\") ??\n \"require\",\n appName: appName,\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;aAiBsB;AAUtB,MAAM,SAAS,aAAa,sBAAsB;;;;;;;;;;;;;;;;AAiBlD,IAAa,oBAAb,MAA+B;CAe7B,YAAY,YAAsC;cAdlB;yBACG,MAAS;cAGb;qBACmB;AAUhD,OAAK,SAAS,UAAU,kBAAkB,WAAW;AACrD,OAAK,mBAAmB,KAAK,uBAAuB;AAEpD,OAAK,YAAY,iBAAiB,YAChC,KAAK,MACL,KAAK,OAAO,UACb;AACD,OAAK,mBAAmB;GACtB,YAAY,KAAK,UACd,UAAU,CACV,cAAc,wBAAwB;IACrC,aAAa;IACb,MAAM;IACP,CAAC;GACJ,eAAe,KAAK,UACjB,UAAU,CACV,gBAAgB,2BAA2B;IAC1C,aAAa;IACb,MAAM;IACP,CAAC;GACL;AAGD,MAAI,KAAK,OAAO,cAAc,EAC5B,OAAM,gBAAgB,aACpB,eACA,KAAK,OAAO,aACZ,aACD;;;;;;;;;;;CAaL,MAAM,MACJ,KACA,QACA,aAAqB,GACO;EAC5B,MAAM,YAAY,KAAK,KAAK;AAE5B,SAAO,KAAK,UAAU,gBACpB,kBACA,EACE,YAAY;GACV,aAAa;GACb,gBAAgB,IAAI,UAAU,GAAG,IAAI;GACrC,kBAAkB;GACnB,EACF,EACD,OAAO,SAAS;AACd,OAAI;IAEF,MAAM,SAAS,OADF,MAAM,KAAK,SAAS,EACP,MAAS,KAAK,OAAO;AAC/C,SAAK,aAAa,oBAAoB,OAAO,YAAY,EAAE;AAC3D,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AAEd,QAAI,KAAK,YAAY,MAAM,EAAE;AAC3B,UAAK,SAAS,mBAAmB;AACjC,WAAM,KAAK,mBAAmB;KAE9B,MAAM,SAAS,OADC,MAAM,KAAK,SAAS,EACP,MAAS,KAAK,OAAO;AAClD,UAAK,aAAa,oBAAoB,OAAO,YAAY,EAAE;AAC3D,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,YAAO;;AAIT,QAAI,KAAK,iBAAiB,MAAM,IAAI,aAAa,GAAG;AAClD,UAAK,SAAS,wBAAwB;AACtC,WAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;AACxD,YAAO,MAAM,KAAK,MAAS,KAAK,QAAQ,aAAa,EAAE;;AAGzD,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAE9C,QAAI,iBAAiB,YACnB,OAAM;AAER,UAAM,gBAAgB,YAAY,MAAe;aACzC;IACR,MAAM,WAAW,KAAK,KAAK,GAAG;AAC9B,SAAK,iBAAiB,WAAW,IAAI,EAAE;AACvC,SAAK,iBAAiB,cAAc,OAAO,SAAS;AACpD,SAAK,KAAK;;IAGf;;;;;;;;;;;;;;;;CAiBH,MAAM,YACJ,UACA,aAAqB,GACT;EACZ,MAAM,YAAY,KAAK,KAAK;AAC5B,SAAO,KAAK,UAAU,gBACpB,wBACA,EACE,YAAY;GACV,aAAa;GACb,kBAAkB;GACnB,EACF,EACD,OAAO,SAAS;GAEd,MAAM,SAAS,OADF,MAAM,KAAK,SAAS,EACP,SAAS;AACnC,OAAI;AACF,UAAM,OAAO,MAAM,QAAQ;IAC3B,MAAM,SAAS,MAAM,SAAS,OAAO;AACrC,UAAM,OAAO,MAAM,SAAS;AAC5B,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,QAAI;AACF,WAAM,OAAO,MAAM,WAAW;YACxB;AAER,QAAI,KAAK,YAAY,MAAM,EAAE;AAC3B,UAAK,SAAS,mBAAmB;AACjC,YAAO,SAAS;AAChB,WAAM,KAAK,mBAAmB;KAE9B,MAAM,cAAc,OADJ,MAAM,KAAK,SAAS,EACF,SAAS;AAC3C,SAAI;AACF,YAAM,OAAO,MAAM,QAAQ;MAC3B,MAAM,SAAS,MAAM,SAAS,YAAY;AAC1C,YAAM,OAAO,MAAM,SAAS;AAC5B,WAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,aAAO;cACA,YAAY;AACnB,UAAI;AACF,aAAM,YAAY,MAAM,WAAW;cAC7B;AACR,YAAM;eACE;AACR,kBAAY,SAAS;;;AAKzB,QAAI,KAAK,iBAAiB,MAAM,IAAI,aAAa,GAAG;AAClD,UAAK,SAAS,0BAA0B;AACxC,YAAO,SAAS;AAChB,WAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;AACxD,YAAO,MAAM,KAAK,YAAe,UAAU,aAAa,EAAE;;AAE5D,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAE9C,QAAI,iBAAiB,YACnB,OAAM;AAER,UAAM,gBAAgB,kBAAkB,MAAe;aAC/C;AACR,WAAO,SAAS;IAChB,MAAM,WAAW,KAAK,KAAK,GAAG;AAC9B,SAAK,iBAAiB,WAAW,IAAI,EAAE;AACvC,SAAK,iBAAiB,cAAc,OAAO,SAAS;AACpD,SAAK,KAAK;;IAGf;;;CAIH,MAAM,cAAgC;AACpC,SAAO,KAAK,UAAU,gBACpB,wBACA,EAAE,EACF,OAAO,SAAS;AACd,OAAI;IAIF,MAAM,WAHS,MAAM,KAAK,MACxB,qBACD,EACsB,KAAK,IAAI,WAAW;AAC3C,SAAK,aAAa,cAAc,QAAQ;AACxC,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;WACD;AACN,SAAK,aAAa,cAAc,MAAM;AACtC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,WAAO;aACC;AACR,SAAK,KAAK;;IAGf;;;CAIH,MAAM,QAAuB;AAC3B,MAAI,KAAK,MAAM;AACb,SAAM,KAAK,KAAK,KAAK,CAAC,OAAO,UAAmB;AAC9C,WAAO,MAAM,qCAAqC,MAAM;KACxD;AACF,QAAK,OAAO;;AAEd,OAAK,cAAc;;;CAIrB,WAAiB;AACf,UAAQ,GAAG,iBAAiB,KAAK,OAAO,CAAC;AACzC,UAAQ,GAAG,gBAAgB,KAAK,OAAO,CAAC;AACxC,OAAK,OAAO;;;CAId,AAAQ,qBAAsC;AAC5C,MAAI,KAAK,OAAO,gBACd,QAAO,KAAK,OAAO;AAGrB,MAAI;GACF,MAAM,EAAE,oBAAoB;GAC5
B,MAAM,SAAS,WAAW;AAG1B,QAAK,OAAO,kBAAkB;AAC9B,UAAO;WACA,QAAQ;AACf,SAAM,gBAAgB,kBACpB,+BACA,mEACD;;;;CAKL,MAAc,UAA4B;AACxC,MAAI,CAAC,KAAK,iBACR,OAAM,mBAAmB,kBACvB,YACA,kGACD;AAGH,MAAI,CAAC,KAAK,MAAM;GACd,MAAM,QAAQ,MAAM,KAAK,gBAAgB;AACzC,QAAK,OAAO,KAAK,WAAW,MAAM;;AAEpC,SAAO,KAAK;;;CAId,AAAQ,WAAW,aAGP;EACV,MAAM,EAAE,MAAM,UAAU,MAAM,YAAY,KAAK;EAE/C,MAAM,OAAO,IAAI,GAAG,KAAK;GACvB;GACA;GACA;GACA,MAAM,YAAY;GAClB,UAAU,YAAY;GACtB,KAAK,KAAK,OAAO;GACjB,mBAAmB,KAAK,OAAO;GAC/B,yBAAyB,KAAK,OAAO;GACrC,KAAK,YAAY,YAAY,EAAE,oBAAoB,MAAM,GAAG;GAC7D,CAAC;AAEF,OAAK,GAAG,UAAU,UAAqC;AACrD,UAAO,MACL,wCACA,MAAM,SACN,MAAM,KACP;IACD;AAEF,SAAO;;;CAIT,MAAc,iBAGX;EACD,MAAM,MAAM,KAAK,KAAK;AAGtB,MACE,KAAK,eACL,MAAM,KAAK,YAAY,YAAY,KAAK,gBAExC,QAAO,KAAK;EAId,MAAM,WAAW,MAAM,KAAK,eAAe;EAC3C,MAAM,EAAE,OAAO,cAAc,MAAM,KAAK,eAAe;AAEvD,OAAK,cAAc;GACjB;GACA,UAAU;GACV;GACD;AAED,SAAO;GAAE;GAAU,UAAU;GAAO;;;CAItC,MAAc,oBAAmC;AAE/C,OAAK,cAAc;AAEnB,MAAI,KAAK,MAAM;GACb,MAAM,UAAU,KAAK;AACrB,QAAK,OAAO;AACZ,WAAQ,KAAK,CAAC,OAAO,UAAmB;AACtC,WAAO,MACL,yDACA,MACD;KACD;;;;CAKN,MAAc,gBAAiC;EAE7C,MAAM,OAAO,MADW,KAAK,oBAAoB,CACd,YAAY,IAAI;AACnD,MAAI,CAAC,KAAK,SACR,OAAM,oBAAoB,kBAAkB;AAE9C,SAAO,KAAK;;;CAId,MAAc,gBAA+D;EAG3E,MAAM,YAAY,IAAI,UADP,IAAI,OAAO,EAAE,MADJ,KAAK,oBAAoB,CACC,OAAO,MAAM,CAAC,CACzB;AAEvC,MAAI,CAAC,KAAK,iBAAiB,QACzB,OAAM,mBAAmB,iBAAiB,oBAAoB;EAGhE,MAAM,cAAc,MAAM,UAAU,QAAQ;GAC1C,MAAM;GACN,QAAQ;GACR,SAAS,IAAI,SAAS;GACtB,KAAK;GACL,SAAS;IACP,gBAAgB,CAAC,KAAK,iBAAiB,QAAQ;IAC/C,YAAY,YAAY;IACzB;GACF,CAAC;AAEF,MAAI,CAAC,KAAK,oBAAoB,YAAY,CACxC,OAAM,oBAAoB,kBACxB,KAAK,iBAAiB,QACvB;EAGH,MAAM,YAAY,IAAI,KAAK,YAAY,gBAAgB,CAAC,SAAS;AAEjE,SAAO;GAAE,OAAO,YAAY;GAAO;GAAW;;;CAIhD,AAAQ,YAAY,OAAyB;AAC3C,SACE,OAAO,UAAU,YACjB,UAAU,QACV,UAAU,SACT,MAAc,SAAS;;;CAK5B,AAAQ,iBAAiB,OAAyB;AAChD,MAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,EAAE,UAAU,OAC7D,QAAO;EAGT,MAAM,OAAQ,MAAc;AAC5B,SACE,SAAS,gBACT,SAAS,kBACT,SAAS,eACT,SAAS,WACT,SAAS,WACT,SAAS,WACT,SAAS,WACT,SAAS;;;CAKb,AAAQ,oBACN,OACqD;AACrD,MAAI,OAAO,UAAU,YAAY,UAAU,KACzC,QAAO;EAGT,MAAM,cAAc;AACpB,SACE,WAAW,eACX,OAAO,YAAY,UAAU,YAC7B,qBAAqB,eACrB,OAAO,YAAY,oBAAoB,YACvC,IAAI,KAAK,YAAY,gBAAgB,CAAC,SAAS,GAAG,KAAK,KAAK;;;CAKhE,AAAQ,wBAAkD;AACxD,MAAI,KAAK,OAAO,iBACd,QAAO,KAAK,sBAAsB,KAAK,OAAO,iBAAiB;AAIjE,MAAI,KAAK,OAAO,QAAQ,KAAK,OAAO,YAAY,KAAK,OAAO,QAC1D,QAAO;GACL,MAAM,KAAK,OAAO;GAClB,UAAU,KAAK,OAAO;GACtB,MAAM,KAAK,OAAO,QAAQ;GAC1B,SAAS,KAAK,OAAO,WAAW;GAChC,SAAS,KAAK,OAAO;GACtB;EAIH,MAAM,SAAS,QAAQ,IAAI;EAC3B,MAAM,aAAa,QAAQ,IAAI;EAC/B,MAAM,YAAY,QAAQ,IAAI;AAC9B,MAAI,CAAC,UAAU,CAAC,cAAc,CAAC,UAC7B,OAAM,mBAAmB,kBACvB,YACA,mHACD;EAEH,MAAM,SAAS,QAAQ,IAAI;EAC3B,MAAM,OAAO,SAAS,SAAS,QAAQ,GAAG,GAAG;AAE7C,MAAI,OAAO,MAAM,KAAK,CACpB,OAAM,gBAAgB,aAAa,QAAQ,QAAQ,WAAW;AAOhE,SAAO;GACL,MAAM;GACN,UAAU;GACV;GACA,SARgB,QAAQ,IAAI,aAEuB;GAOnD,SAAS;GACV;;CAGH,AAAQ,sBACN,kBAC0B;EAC1B,MAAM,MAAM,IAAI,IAAI,iBAAiB;EACrC,MAAM,UAAU,IAAI,aAAa,IAAI,UAAU;AAC/C,MAAI,CAAC,QACH,OAAM,mBAAmB,uBAAuB,UAAU;AAG5D,SAAO;GACL,MAAM,IAAI;GACV,UAAU,IAAI,SAAS,MAAM,EAAE;GAC/B,MAAM,IAAI,OAAO,SAAS,IAAI,MAAM,GAAG,GAAG;GAC1C,SACG,IAAI,aAAa,IAAI,UAAU,IAChC;GACO;GACV"}
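The regenerated source map above embeds the updated lakebase client.ts, which now throws typed errors (ValidationError, ConfigurationError, ConnectionError, AuthenticationError) and logs through createLogger("connectors:lakebase") instead of console.error. A usage sketch drawn from the JSDoc examples embedded in that source; the import path from the package root is an assumption.

```typescript
// Usage sketch based on the JSDoc examples embedded in the source map above.
// With no config, LakebaseConnector reads PGHOST, PGDATABASE and PGAPPNAME
// (plus optional PGPORT/PGSSLMODE) from the environment.
import { LakebaseConnector } from "@databricks/appkit"; // assumed export

async function main() {
  const connector = new LakebaseConnector();

  // Parameterized query; auth failures (28P01) trigger credential rotation and a retry.
  const users = await connector.query("SELECT * FROM users WHERE id = $1", [123]);
  console.log(users.rows);

  // BEGIN/COMMIT/ROLLBACK are managed by transaction().
  await connector.transaction(async (client) => {
    await client.query("INSERT INTO accounts (name) VALUES ($1)", ["Alice"]);
    await client.query("INSERT INTO logs (action) VALUES ($1)", ["Created Alice"]);
  });

  await connector.close(); // drain the pool on shutdown
}

main().catch(console.error);
```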
@@ -1,10 +1,18 @@
|
|
|
1
|
+
import { createLogger } from "../../logging/logger.js";
|
|
1
2
|
import { TelemetryManager } from "../../telemetry/telemetry-manager.js";
|
|
2
3
|
import { SpanKind, SpanStatusCode } from "../../telemetry/index.js";
|
|
4
|
+
import { AppKitError } from "../../errors/base.js";
|
|
5
|
+
import { ConnectionError } from "../../errors/connection.js";
|
|
6
|
+
import { ExecutionError } from "../../errors/execution.js";
|
|
7
|
+
import { ValidationError } from "../../errors/validation.js";
|
|
8
|
+
import { init_errors } from "../../errors/index.js";
|
|
3
9
|
import { ArrowStreamProcessor } from "../../stream/arrow-stream-processor.js";
|
|
4
10
|
import { executeStatementDefaults } from "./defaults.js";
|
|
5
11
|
import { Context } from "@databricks/sdk-experimental";
|
|
6
12
|
|
|
7
13
|
//#region src/connectors/sql-warehouse/client.ts
|
|
14
|
+
init_errors();
|
|
15
|
+
const logger = createLogger("connectors:sql-warehouse");
|
|
8
16
|
var SQLWarehouseConnector = class {
|
|
9
17
|
constructor(config) {
|
|
10
18
|
this.name = "sql-warehouse";
|
|
@@ -37,6 +45,7 @@ var SQLWarehouseConnector = class {
|
|
|
37
45
|
async executeStatement(workspaceClient, input, signal) {
|
|
38
46
|
const startTime = Date.now();
|
|
39
47
|
let success = false;
|
|
48
|
+
if (signal?.aborted) throw ExecutionError.canceled();
|
|
40
49
|
return this.telemetry.startActiveSpan("sql.query", {
|
|
41
50
|
kind: SpanKind.CLIENT,
|
|
42
51
|
attributes: {
|
|
@@ -48,9 +57,24 @@ var SQLWarehouseConnector = class {
|
|
|
48
57
|
"db.has_parameters": !!input.parameters
|
|
49
58
|
}
|
|
50
59
|
}, async (span) => {
|
|
60
|
+
let abortHandler;
|
|
61
|
+
let isAborted = false;
|
|
62
|
+
if (signal) {
|
|
63
|
+
abortHandler = () => {
|
|
64
|
+
if (!span.isRecording()) return;
|
|
65
|
+
isAborted = true;
|
|
66
|
+
span.setAttribute("cancelled", true);
|
|
67
|
+
span.setStatus({
|
|
68
|
+
code: SpanStatusCode.ERROR,
|
|
69
|
+
message: "Query cancelled by client"
|
|
70
|
+
});
|
|
71
|
+
span.end();
|
|
72
|
+
};
|
|
73
|
+
signal.addEventListener("abort", abortHandler, { once: true });
|
|
74
|
+
}
|
|
51
75
|
try {
|
|
52
|
-
if (!input.statement) throw
|
|
53
|
-
if (!input.warehouse_id) throw
|
|
76
|
+
if (!input.statement) throw ValidationError.missingField("statement");
|
|
77
|
+
if (!input.warehouse_id) throw ValidationError.missingField("warehouse_id");
|
|
54
78
|
const body = {
|
|
55
79
|
statement: input.statement,
|
|
56
80
|
parameters: input.parameters,
|
|
@@ -66,7 +90,7 @@ var SQLWarehouseConnector = class {
|
|
|
66
90
|
};
|
|
67
91
|
span.addEvent("statement.submitting", { "db.warehouse_id": input.warehouse_id });
|
|
68
92
|
const response = await workspaceClient.statementExecution.executeStatement(body, this._createContext(signal));
|
|
69
|
-
if (!response) throw
|
|
93
|
+
if (!response) throw ConnectionError.apiFailure("SQL Warehouse");
|
|
70
94
|
const status = response.status;
|
|
71
95
|
const statementId = response.statement_id;
|
|
72
96
|
span.setAttribute("db.statement_id", statementId);
|
|
@@ -84,27 +108,37 @@ var SQLWarehouseConnector = class {
|
|
|
84
108
|
case "SUCCEEDED":
|
|
85
109
|
result = this._transformDataArray(response);
|
|
86
110
|
break;
|
|
87
|
-
case "FAILED": throw
|
|
88
|
-
case "CANCELED": throw
|
|
89
|
-
case "CLOSED": throw
|
|
90
|
-
default: throw
|
|
111
|
+
case "FAILED": throw ExecutionError.statementFailed(status.error?.message);
|
|
112
|
+
case "CANCELED": throw ExecutionError.canceled();
|
|
113
|
+
case "CLOSED": throw ExecutionError.resultsClosed();
|
|
114
|
+
default: throw ExecutionError.unknownState(String(status?.state ?? "unknown"));
|
|
91
115
|
}
|
|
92
116
|
const resultData = result.result;
|
|
93
|
-
|
|
94
|
-
|
|
117
|
+
const rowCount = resultData?.data?.length ?? resultData?.data_array?.length ?? 0;
|
|
118
|
+
if (rowCount > 0) span.setAttribute("db.result.row_count", rowCount);
|
|
119
|
+
const duration = Date.now() - startTime;
|
|
120
|
+
logger.event()?.setContext("sql-warehouse", {
|
|
121
|
+
warehouse_id: input.warehouse_id,
|
|
122
|
+
rows_returned: rowCount,
|
|
123
|
+
query_duration_ms: duration
|
|
124
|
+
});
|
|
95
125
|
success = true;
|
|
96
|
-
span.setStatus({ code: SpanStatusCode.OK });
|
|
126
|
+
if (!isAborted) span.setStatus({ code: SpanStatusCode.OK });
|
|
97
127
|
return result;
|
|
98
128
|
} catch (error) {
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
129
|
+
if (!isAborted) {
|
|
130
|
+
span.recordException(error);
|
|
131
|
+
span.setStatus({
|
|
132
|
+
code: SpanStatusCode.ERROR,
|
|
133
|
+
message: error instanceof Error ? error.message : String(error)
|
|
134
|
+
});
|
|
135
|
+
}
|
|
136
|
+
if (error instanceof AppKitError) throw error;
|
|
137
|
+
throw ExecutionError.statementFailed(error instanceof Error ? error.message : String(error));
|
|
105
138
|
} finally {
|
|
139
|
+
if (abortHandler && signal) signal.removeEventListener("abort", abortHandler);
|
|
106
140
|
const duration = Date.now() - startTime;
|
|
107
|
-
span.end();
|
|
141
|
+
if (!isAborted) span.end();
|
|
108
142
|
const attributes = {
|
|
109
143
|
"db.warehouse_id": input.warehouse_id,
|
|
110
144
|
"db.catalog": input.catalog ?? "",
|
|
@@ -135,13 +169,13 @@ var SQLWarehouseConnector = class {
|
|
|
135
169
|
span.setAttribute("db.polling.current_attempt", pollCount);
|
|
136
170
|
const elapsedTime = Date.now() - startTime;
|
|
137
171
|
if (elapsedTime > timeout) {
|
|
138
|
-
const error =
|
|
172
|
+
const error = ExecutionError.statementFailed(`Polling timeout exceeded after ${timeout}ms (elapsed: ${elapsedTime}ms)`);
|
|
139
173
|
span.recordException(error);
|
|
140
174
|
span.setStatus({ code: SpanStatusCode.ERROR });
|
|
141
175
|
throw error;
|
|
142
176
|
}
|
|
143
177
|
if (signal?.aborted) {
|
|
144
|
-
const error =
|
|
178
|
+
const error = ExecutionError.canceled();
|
|
145
179
|
span.recordException(error);
|
|
146
180
|
span.setStatus({ code: SpanStatusCode.ERROR });
|
|
147
181
|
throw error;
|
|
@@ -152,7 +186,7 @@ var SQLWarehouseConnector = class {
|
|
|
152
186
|
"poll.elapsed_ms": elapsedTime
|
|
153
187
|
});
|
|
154
188
|
const response = await workspaceClient.statementExecution.getStatement({ statement_id: statementId }, this._createContext(signal));
|
|
155
|
-
if (!response) throw
|
|
189
|
+
if (!response) throw ConnectionError.apiFailure("SQL Warehouse");
|
|
156
190
|
const status = response.status;
|
|
157
191
|
span.addEvent("polling.status_check", {
|
|
158
192
|
"db.status": status?.state,
|
|
@@ -170,10 +204,10 @@ var SQLWarehouseConnector = class {
|
|
|
170
204
|
});
|
|
171
205
|
span.setStatus({ code: SpanStatusCode.OK });
|
|
172
206
|
return this._transformDataArray(response);
|
|
173
|
-
case "FAILED": throw
|
|
174
|
-
case "CANCELED": throw
|
|
175
|
-
case "CLOSED": throw
|
|
176
|
-
default: throw
|
|
207
|
+
case "FAILED": throw ExecutionError.statementFailed(status.error?.message);
|
|
208
|
+
case "CANCELED": throw ExecutionError.canceled();
|
|
209
|
+
case "CLOSED": throw ExecutionError.resultsClosed();
|
|
210
|
+
default: throw ExecutionError.unknownState(String(status?.state ?? "unknown"));
|
|
177
211
|
}
|
|
178
212
|
await new Promise((resolve) => setTimeout(resolve, delay));
|
|
179
213
|
delay = Math.min(delay * 2, maxDelayBetweenPolls);
|
|
@@ -184,7 +218,8 @@ var SQLWarehouseConnector = class {
|
|
|
184
218
|
code: SpanStatusCode.ERROR,
|
|
185
219
|
message: error instanceof Error ? error.message : String(error)
|
|
186
220
|
});
|
|
187
|
-
throw error;
|
|
221
|
+
if (error instanceof AppKitError) throw error;
|
|
222
|
+
throw ExecutionError.statementFailed(error instanceof Error ? error.message : String(error));
|
|
188
223
|
} finally {
|
|
189
224
|
span.end();
|
|
190
225
|
}
|
|
@@ -242,7 +277,7 @@ var SQLWarehouseConnector = class {
242   277     const response = await workspaceClient.statementExecution.getStatement({ statement_id: jobId }, this._createContext(signal));
243   278     const chunks = response.result?.external_links;
244   279     const schema = response.manifest?.schema;
245        -  if (!chunks || !schema) throw
      280  +  if (!chunks || !schema) throw ExecutionError.missingData("chunks or schema");
246   281     span.setAttribute("arrow.chunk_count", chunks.length);
247   282     const result = await this.arrowProcessor.processChunks(chunks, schema, signal);
248   283     span.setAttribute("arrow.data_size_bytes", result.data.length);
@@ -252,6 +287,10 @@ var SQLWarehouseConnector = class {
252   287     operation: "arrow.getData",
253   288     status: "success"
254   289     });
      290  +  logger.event()?.setContext("sql-warehouse", {
      291  +  arrow_data_size_bytes: result.data.length,
      292  +  arrow_job_id: jobId
      293  +  });
255   294     return result;
256   295     } catch (error) {
257   296     span.setStatus({
@@ -264,8 +303,9 @@ var SQLWarehouseConnector = class {
264   303     operation: "arrow.getData",
265   304     status: "error"
266   305     });
267        -
268        -  throw error;
      306  +  logger.error("Failed Arrow job: %s %O", jobId, error);
      307  +  if (error instanceof AppKitError) throw error;
      308  +  throw ExecutionError.statementFailed(error instanceof Error ? error.message : String(error));
269   309     }
270   310     });
271   311     }
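Alongside the span attributes, the Arrow path now reports through the module's wide-event logger (the rebuilt source creates it with `createLogger("connectors:sql-warehouse")`). A minimal sketch of the same pattern; whether `createLogger` is exported for application code is an assumption, since this diff only shows the internal `../../logging/logger` import:

```typescript
import { createLogger } from "@databricks/appkit"; // assumed export; only the internal path is visible here

declare function fetchRows(jobId: string): Promise<unknown[]>; // hypothetical data-access call

const logger = createLogger("my-app:reports");

async function loadReport(jobId: string) {
  try {
    const rows = await fetchRows(jobId);
    // Attach structured context to the current wide event, if one is active.
    logger.event()?.setContext("reports", {
      job_id: jobId,
      row_count: rows.length,
    });
    return rows;
  } catch (error) {
    // printf-style formatting, matching the connector's "%s %O" usage.
    logger.error("Failed report job: %s %O", jobId, error);
    throw error;
  }
}
```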
@@ -1 +1 @@
  1        -  {"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/sql-warehouse/client.ts"], ...}
        1  +  {"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/sql-warehouse/client.ts"], ...}
@@ -1,4 +1,8 @@
  1     1     import { __esmMin } from "../_virtual/rolldown_runtime.js";
        2  +  import { AuthenticationError } from "../errors/authentication.js";
        3  +  import { ConfigurationError } from "../errors/configuration.js";
        4  +  import { InitializationError } from "../errors/initialization.js";
        5  +  import { init_errors } from "../errors/index.js";
  2     6     import { name, version } from "../appkit/package.js";
  3     7     import { WorkspaceClient } from "@databricks/sdk-experimental";
  4     8     import { coerce } from "semver";
@@ -14,6 +18,7 @@ function getClientOptions() {
 14    18     }
 15    19     var ServiceContext;
 16    20     var init_service_context = __esmMin((() => {
       21  +  init_errors();
 17    22     ServiceContext = class ServiceContext {
 18    23     static {
 19    24     this.instance = null;
@@ -37,7 +42,7 @@ var init_service_context = __esmMin((() => {
 37    42     * @throws Error if not initialized
 38    43     */
 39    44     static get() {
 40        -  if (!ServiceContext.instance) throw
       45  +  if (!ServiceContext.instance) throw InitializationError.notInitialized("ServiceContext", "Call ServiceContext.initialize() first");
 41    46     return ServiceContext.instance;
 42    47     }
 43    48     /**
@@ -55,9 +60,9 @@ var init_service_context = __esmMin((() => {
 55    60     * @throws Error if token is not provided
 56    61     */
 57    62     static createUserContext(token, userId, userName) {
 58        -  if (!token) throw
       63  +  if (!token) throw AuthenticationError.missingToken("user token");
 59    64     const host = process.env.DATABRICKS_HOST;
 60        -  if (!host) throw
       65  +  if (!host) throw ConfigurationError.missingEnvVar("DATABRICKS_HOST");
 61    66     const serviceCtx = ServiceContext.get();
 62    67     return {
 63    68     client: new WorkspaceClient({
@@ -84,7 +89,7 @@ var init_service_context = __esmMin((() => {
 84    89     const warehouseId = ServiceContext.getWarehouseId(client);
 85    90     const workspaceId = ServiceContext.getWorkspaceId(client);
 86    91     const currentUser = await client.currentUser.me();
 87        -  if (!currentUser.id) throw
       92  +  if (!currentUser.id) throw ConfigurationError.resourceNotFound("Service user ID");
 88    93     return {
 89    94     client,
 90    95     serviceUserId: currentUser.id,
@@ -102,7 +107,7 @@ var init_service_context = __esmMin((() => {
102   107     query: {},
103   108     responseHeaders: ["x-databricks-org-id"]
104   109     });
105        -  if (!response["x-databricks-org-id"]) throw
      110  +  if (!response["x-databricks-org-id"]) throw ConfigurationError.resourceNotFound("Workspace ID");
106   111     return response["x-databricks-org-id"];
107   112     }
108   113     static async getWarehouseId(client) {
@@ -126,12 +131,12 @@ var init_service_context = __esmMin((() => {
126   131     const warehouses = (response.warehouses || []).sort((a, b) => {
127   132     return priorities[a.state] - priorities[b.state];
128   133     });
129        -  if (response.warehouses.length === 0) throw
      134  +  if (response.warehouses.length === 0) throw ConfigurationError.resourceNotFound("Warehouse ID", "Please configure the DATABRICKS_WAREHOUSE_ID environment variable");
130   135     const firstWarehouse = warehouses[0];
131        -  if (firstWarehouse.state === "DELETED" || firstWarehouse.state === "DELETING" || !firstWarehouse.id) throw
      136  +  if (firstWarehouse.state === "DELETED" || firstWarehouse.state === "DELETING" || !firstWarehouse.id) throw ConfigurationError.resourceNotFound("Warehouse ID", "Please configure the DATABRICKS_WAREHOUSE_ID environment variable");
132   137     return firstWarehouse.id;
133   138     }
134        -  throw
      139  +  throw ConfigurationError.resourceNotFound("Warehouse ID", "Please configure the DATABRICKS_WAREHOUSE_ID environment variable");
135   140     }
136   141     /**
137   142     * Reset the service context. Only for testing purposes.
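Every failure path in `ServiceContext` now raises a typed error (`InitializationError`, `AuthenticationError`, `ConfigurationError`) instead of a bare `throw`. A hedged sketch of the bootstrap flow those guards protect; the package-root import and the `x-forwarded-*` header names follow the conventions documented in the underlying TypeScript source and are not confirmed by this diff:

```typescript
import { ServiceContext } from "@databricks/appkit"; // assumed export of the compiled module shown above

async function bootstrap() {
  // Once at startup: resolves the service-principal client plus warehouse and workspace IDs.
  // Misconfiguration surfaces as ConfigurationError.resourceNotFound("Warehouse ID", ...).
  await ServiceContext.initialize();
}

// Per request: build a user-scoped context from the forwarded OAuth token.
// An empty token raises AuthenticationError.missingToken("user token"); a missing
// DATABRICKS_HOST raises ConfigurationError.missingEnvVar("DATABRICKS_HOST").
function userContextFromHeaders(headers: Headers) {
  const token = headers.get("x-forwarded-access-token") ?? "";
  const userId = headers.get("x-forwarded-user") ?? "";
  return ServiceContext.createUserContext(token, userId);
}
```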
@@ -1 +1 @@
  1        -  {"version":3,"file":"service-context.js","names":["productName","productVersion"],"sources":["../../src/context/service-context.ts"], ...}
        1  +  {"version":3,"file":"service-context.js","names":["productName","productVersion"],"sources":["../../src/context/service-context.ts"], ...}
@@ -0,0 +1,38 @@
        1  +  import { AppKitError } from "./base.js";
        2  +
        3  +  //#region src/errors/authentication.d.ts
        4  +
        5  +  /**
        6  +  * Error thrown when authentication fails.
        7  +  * Use for missing tokens, invalid credentials, or authorization failures.
        8  +  *
        9  +  * @example
       10  +  * ```typescript
       11  +  * throw new AuthenticationError("User token is required");
       12  +  * throw new AuthenticationError("Failed to generate credentials", { cause: originalError });
       13  +  * ```
       14  +  */
       15  +  declare class AuthenticationError extends AppKitError {
       16  +  readonly code = "AUTHENTICATION_ERROR";
       17  +  readonly statusCode = 401;
       18  +  readonly isRetryable = false;
       19  +  /**
       20  +  * Create an authentication error for missing token
       21  +  */
       22  +  static missingToken(tokenType?: string): AuthenticationError;
       23  +  /**
       24  +  * Create an authentication error for missing user identity
       25  +  */
       26  +  static missingUserId(): AuthenticationError;
       27  +  /**
       28  +  * Create an authentication error for credential generation failure
       29  +  */
       30  +  static credentialsFailed(instance: string, cause?: Error): AuthenticationError;
       31  +  /**
       32  +  * Create an authentication error for failed user lookup
       33  +  */
       34  +  static userLookupFailed(cause?: Error): AuthenticationError;
       35  +  }
       36  +  //#endregion
       37  +  export { AuthenticationError };
       38  +  //# sourceMappingURL=authentication.d.ts.map
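This declaration shows the shape shared by the new error classes: a stable `code`, an HTTP-friendly `statusCode` (401 here), an `isRetryable` flag, and static factory helpers. A minimal sketch of translating these errors at a server boundary; the package-root imports are assumed, and the generic 500 fallback is this sketch's choice, not appkit behaviour:

```typescript
import { AppKitError, AuthenticationError } from "@databricks/appkit"; // assumed re-exports

// Translate an unknown thrown value into an HTTP status and message.
function toHttpError(error: unknown): { status: number; message: string } {
  if (error instanceof AuthenticationError) {
    // code === "AUTHENTICATION_ERROR", statusCode === 401 (see the declaration above)
    return { status: error.statusCode, message: "Authentication required" };
  }
  if (error instanceof AppKitError) {
    // Other subclasses declare their own code/statusCode; fall back generically here.
    return { status: 500, message: error.message };
  }
  return { status: 500, message: "Internal server error" };
}

// Example guard using one of the static factories.
function requireToken(token: string | undefined): string {
  if (!token) throw AuthenticationError.missingToken("user token");
  return token;
}
```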
@@ -0,0 +1 @@
        1  +  {"version":3,"file":"authentication.d.ts","names":[],"sources":["../../src/errors/authentication.ts"],"sourcesContent":[],"mappings":";;;;;;AAYA;;;;;;;;AAAyC,cAA5B,mBAAA,SAA4B,WAAA,CAAA;EAAW,SAAA,IAAA,GAAA,sBAAA;;;;;;2CAQD;;;;0BASzB;;;;qDAYd,QACP;;;;kCAU6B,QAAQ"}