@databricks/appkit 0.28.0 → 0.30.0

This diff shows the content of publicly available package versions released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as published in that registry.
@@ -0,0 +1 @@
+ {"version":3,"file":"databricks.js","names":["servingStream"],"sources":["../../src/agents/databricks.ts"],"sourcesContent":["import type {\n AgentAdapter,\n AgentEvent,\n AgentInput,\n AgentRunContext,\n AgentToolDefinition,\n} from \"shared\";\nimport { stream as servingStream } from \"../connectors/serving/client\";\n\n/** Default cap for a single incomplete SSE line tail (DoS guard). */\nconst DEFAULT_MAX_SSE_LINE_CHARS = 1024 * 1024;\n\n/** Default cap for accumulated assistant text from `delta.content`. */\nconst DEFAULT_MAX_STREAM_TEXT_CHARS = 4 * 1024 * 1024;\n\n/** Default cap for accumulated JSON arguments per streamed tool call index. */\nconst DEFAULT_MAX_TOOL_ARGUMENT_CHARS = 2 * 1024 * 1024;\n\n/** Cap text length before running Python-style tool-call regex (ReDoS guard). */\nconst PYTHON_STYLE_TOOL_PARSE_MAX_INPUT = 64 * 1024;\n\n/** Fallback HTTP timeout when the raw fetch adapter path receives no AbortSignal from the runner. */\nconst RAW_FETCH_DEFAULT_TIMEOUT_MS = 120_000;\n\nfunction isRecord(value: unknown): value is Record<string, unknown> {\n return typeof value === \"object\" && value !== null;\n}\n\nfunction extractLlamaToolJsonSlice(text: string): string | undefined {\n const start = text.indexOf(\"[{\");\n if (start < 0) return undefined;\n const endBracket = text.lastIndexOf(\"}]\");\n if (endBracket < start) return undefined;\n return text.slice(start, endBracket + 2);\n}\n\n/** OpenAI SSE payload: `{ choices: [{ delta }] }`. */\nfunction openAiChoicesDelta(parsed: unknown): unknown {\n if (!isRecord(parsed)) return undefined;\n const choices = parsed.choices;\n if (!Array.isArray(choices) || choices.length < 1) return undefined;\n const first = choices[0];\n if (!isRecord(first)) return undefined;\n return first.delta;\n}\n\nfunction isStreamingDeltaToolCall(value: unknown): value is DeltaToolCall {\n if (!isRecord(value)) return false;\n return typeof value.index === \"number\";\n}\n\nfunction throwIfExceedsStreamLimit(\n label: string,\n currentLength: number,\n chunk: string,\n max: number,\n): void {\n if (currentLength + chunk.length > max) {\n throw new Error(\n `DatabricksAdapter: ${label} exceeds configured limit (${max} UTF-16 code units)`,\n );\n }\n}\n\n/**\n * Transport shim: given an OpenAI-compatible request body, returns the raw\n * SSE byte stream from the serving endpoint. Injected at construction time so\n * callers can swap in the workspace SDK (factory paths), a bare `fetch`\n * (the raw constructor), or a test fake.\n */\ntype StreamBody = (\n body: Record<string, unknown>,\n signal?: AbortSignal,\n) => Promise<ReadableStream<Uint8Array>>;\n\n/**\n * Escape-hatch options: provide an `endpointUrl` + `authenticate()` and the\n * adapter uses a bare `fetch()` to call it. Useful for tests and for pointing\n * the adapter at non-workspace endpoints (reverse proxies, mocks).\n */\ninterface RawFetchAdapterOptions {\n endpointUrl: string;\n authenticate: () => Promise<Record<string, string>>;\n maxSteps?: number;\n maxTokens?: number;\n /** Max length of one SSE line (including an incomplete tail in the buffer). */\n maxSseLineChars?: number;\n /** Max total length of assistant `delta.content` across the stream. */\n maxStreamTextChars?: number;\n /** Max length of streamed `function.arguments` per tool call index. 
*/\n maxToolArgumentsChars?: number;\n}\n\n/**\n * Preferred options: caller provides the transport function directly.\n * The `fromServingEndpoint` / `fromModelServing` factories use this to route\n * through `connectors/serving/stream`, which centralises URL encoding, auth\n * via the SDK's `apiClient.request`, and any future retries/telemetry.\n */\ninterface StreamBodyAdapterOptions {\n streamBody: StreamBody;\n maxSteps?: number;\n maxTokens?: number;\n maxSseLineChars?: number;\n maxStreamTextChars?: number;\n maxToolArgumentsChars?: number;\n}\n\ntype DatabricksAdapterOptions =\n | RawFetchAdapterOptions\n | StreamBodyAdapterOptions;\n\nfunction isStreamBodyOptions(\n o: DatabricksAdapterOptions,\n): o is StreamBodyAdapterOptions {\n return \"streamBody\" in o;\n}\n\n/**\n * Duck-typed subset of the Databricks SDK `WorkspaceClient`. Callers of\n * `fromServingEndpoint` and `fromModelServing` pass a real `WorkspaceClient`,\n * but we only need the `apiClient.request` surface — so we declare the minimal\n * interface rather than importing the SDK type directly. This keeps the adapter\n * free of a hard compile-time dependency on `@databricks/sdk-experimental`.\n */\ninterface WorkspaceClientLike {\n apiClient: {\n request(options: Record<string, unknown>): Promise<unknown>;\n };\n}\n\ninterface ServingEndpointOptions {\n workspaceClient: WorkspaceClientLike;\n endpointName: string;\n maxSteps?: number;\n maxTokens?: number;\n maxSseLineChars?: number;\n maxStreamTextChars?: number;\n maxToolArgumentsChars?: number;\n}\n\ninterface ModelServingOptions {\n maxSteps?: number;\n maxTokens?: number;\n workspaceClient?: WorkspaceClientLike;\n maxSseLineChars?: number;\n maxStreamTextChars?: number;\n maxToolArgumentsChars?: number;\n}\n\ninterface OpenAIMessage {\n role: \"system\" | \"user\" | \"assistant\" | \"tool\";\n content: string | null;\n tool_calls?: OpenAIToolCall[];\n tool_call_id?: string;\n}\n\ninterface OpenAIToolCall {\n id: string;\n type: \"function\";\n function: { name: string; arguments: string };\n}\n\ninterface OpenAITool {\n type: \"function\";\n function: {\n name: string;\n description: string;\n parameters: unknown;\n };\n}\n\ninterface DeltaToolCall {\n index: number;\n id?: string;\n type?: string;\n function?: { name?: string; arguments?: string };\n}\n\n/**\n * Adapter that talks directly to Databricks Model Serving `/invocations` endpoint.\n *\n * No dependency on the Vercel AI SDK or LangChain. Uses raw `fetch()` to POST\n * OpenAI-compatible payloads and parses the SSE stream itself. 
Calls\n * `authenticate()` per-request so tokens are always fresh.\n *\n * Handles both structured `tool_calls` responses and text-based tool call\n * fallback parsing for models that output tool calls as text.\n *\n * @example Using the factory (recommended)\n * ```ts\n * import { createApp, createAgent, agents } from \"@databricks/appkit\";\n * import { DatabricksAdapter } from \"@databricks/appkit/beta\";\n * import { WorkspaceClient } from \"@databricks/sdk-experimental\";\n *\n * const adapter = DatabricksAdapter.fromServingEndpoint({\n * workspaceClient: new WorkspaceClient({}),\n * endpointName: \"my-endpoint\",\n * });\n *\n * await createApp({\n * plugins: [\n * agents({\n * agents: {\n * assistant: createAgent({\n * instructions: \"You are a helpful assistant.\",\n * model: adapter,\n * }),\n * },\n * }),\n * ],\n * });\n * ```\n *\n * @example Using the raw constructor\n * ```ts\n * const adapter = new DatabricksAdapter({\n * endpointUrl: \"https://host/serving-endpoints/my-endpoint/invocations\",\n * authenticate: async () => ({ Authorization: `Bearer ${token}` }),\n * });\n * ```\n */\nexport class DatabricksAdapter implements AgentAdapter {\n private streamBody: StreamBody;\n private maxSteps: number;\n private maxTokens: number;\n private maxSseLineChars: number;\n private maxStreamTextChars: number;\n private maxToolArgumentsChars: number;\n\n constructor(options: DatabricksAdapterOptions) {\n this.maxSteps = options.maxSteps ?? 10;\n this.maxTokens = options.maxTokens ?? 4096;\n this.maxSseLineChars =\n options.maxSseLineChars ?? DEFAULT_MAX_SSE_LINE_CHARS;\n this.maxStreamTextChars =\n options.maxStreamTextChars ?? DEFAULT_MAX_STREAM_TEXT_CHARS;\n this.maxToolArgumentsChars =\n options.maxToolArgumentsChars ?? DEFAULT_MAX_TOOL_ARGUMENT_CHARS;\n\n if (isStreamBodyOptions(options)) {\n this.streamBody = options.streamBody;\n } else {\n const { endpointUrl, authenticate } = options;\n this.streamBody = async (body, signal) => {\n const fetchSignal =\n signal ?? AbortSignal.timeout(RAW_FETCH_DEFAULT_TIMEOUT_MS);\n const authHeaders = await authenticate();\n const response = await fetch(endpointUrl, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n ...authHeaders,\n },\n body: JSON.stringify(body),\n signal: fetchSignal,\n });\n if (!response.ok) {\n const errorText = await response.text().catch(() => \"Unknown error\");\n throw new Error(\n `Databricks API error (${response.status}): ${errorText}`,\n );\n }\n if (!response.body) throw new Error(\"No response body\");\n return response.body;\n };\n }\n }\n\n /**\n * Creates a DatabricksAdapter for a Databricks Model Serving endpoint.\n *\n * Routes through the shared `connectors/serving/stream` helper, which\n * delegates to the SDK's `apiClient.request({ raw: true })`. 
That gives the\n * adapter centralised URL encoding + authentication with the rest of the\n * serving surface — no bespoke `fetch()` + `authenticate()` plumbing.\n */\n static async fromServingEndpoint(\n options: ServingEndpointOptions,\n ): Promise<DatabricksAdapter> {\n const {\n workspaceClient,\n endpointName,\n maxSteps,\n maxTokens,\n maxSseLineChars,\n maxStreamTextChars,\n maxToolArgumentsChars,\n } = options;\n return new DatabricksAdapter({\n streamBody: (body, signal) =>\n // Cast through the structural shape: the connector types\n // `workspaceClient` as the SDK's concrete `WorkspaceClient`, but we\n // only need `apiClient.request`.\n servingStream(\n workspaceClient as unknown as Parameters<typeof servingStream>[0],\n endpointName,\n body,\n signal,\n ),\n maxSteps,\n maxTokens,\n maxSseLineChars,\n maxStreamTextChars,\n maxToolArgumentsChars,\n });\n }\n\n /**\n * Creates a DatabricksAdapter from a Model Serving endpoint name.\n * Auto-creates a WorkspaceClient internally. Reads the endpoint name\n * from the argument or the `DATABRICKS_SERVING_ENDPOINT_NAME` env var.\n *\n * @example\n * ```ts\n * // Reads endpoint from DATABRICKS_SERVING_ENDPOINT_NAME env var\n * const adapter = await DatabricksAdapter.fromModelServing();\n *\n * // Explicit endpoint\n * const adapter = await DatabricksAdapter.fromModelServing(\"my-endpoint\");\n *\n * // With options\n * const adapter = await DatabricksAdapter.fromModelServing(\"my-endpoint\", {\n * maxSteps: 5,\n * maxTokens: 2048,\n * });\n * ```\n */\n static async fromModelServing(\n endpointName?: string,\n options?: ModelServingOptions,\n ): Promise<DatabricksAdapter> {\n const resolvedEndpoint =\n endpointName ?? process.env.DATABRICKS_SERVING_ENDPOINT_NAME;\n\n if (!resolvedEndpoint) {\n throw new Error(\n \"No endpoint name provided and DATABRICKS_SERVING_ENDPOINT_NAME env var is not set. \" +\n \"Pass an endpoint name or set DATABRICKS_SERVING_ENDPOINT_NAME.\",\n );\n }\n\n let workspaceClient: WorkspaceClientLike | undefined =\n options?.workspaceClient;\n if (!workspaceClient) {\n const sdk = await import(\"@databricks/sdk-experimental\");\n workspaceClient = new sdk.WorkspaceClient(\n {},\n ) as unknown as WorkspaceClientLike;\n }\n\n return DatabricksAdapter.fromServingEndpoint({\n workspaceClient,\n endpointName: resolvedEndpoint,\n maxSteps: options?.maxSteps,\n maxTokens: options?.maxTokens,\n maxSseLineChars: options?.maxSseLineChars,\n maxStreamTextChars: options?.maxStreamTextChars,\n maxToolArgumentsChars: options?.maxToolArgumentsChars,\n });\n }\n\n async *run(\n input: AgentInput,\n context: AgentRunContext,\n ): AsyncGenerator<AgentEvent, void, unknown> {\n // Databricks API requires tool names to match [a-zA-Z0-9_-].\n // Our tool names use dots (e.g. 
\"analytics.query\"), so we swap dots\n // for double-underscores in the wire format and map back on receipt.\n const nameToWire = new Map<string, string>();\n const wireToName = new Map<string, string>();\n for (const tool of input.tools) {\n const wire = tool.name.replace(/\\./g, \"__\");\n if (wireToName.has(wire) && wireToName.get(wire) !== tool.name) {\n throw new Error(\n `Tool name collision: '${tool.name}' and '${wireToName.get(wire)}' both map to wire name '${wire}'`,\n );\n }\n nameToWire.set(tool.name, wire);\n wireToName.set(wire, tool.name);\n }\n\n const tools = this.buildTools(input.tools, nameToWire);\n const messages = this.buildMessages(input.messages, nameToWire);\n\n yield { type: \"status\", status: \"running\" };\n\n for (let step = 0; step < this.maxSteps; step++) {\n if (context.signal?.aborted) break;\n\n const { text, toolCalls } = yield* this.streamCompletion(\n messages,\n tools,\n context,\n );\n\n if (toolCalls.length === 0) {\n const parsed = parseTextToolCalls(text);\n if (parsed.length > 0) {\n yield* this.executeToolCalls(parsed, messages, context, nameToWire);\n continue;\n }\n break;\n }\n\n messages.push({\n role: \"assistant\",\n content: text || null,\n tool_calls: toolCalls,\n });\n\n for (const tc of toolCalls) {\n const wireName = tc.function.name;\n const originalName = wireToName.get(wireName) ?? wireName;\n yield* this.executeSingleTool(tc, originalName, messages, context);\n }\n }\n }\n\n /** Parse wire arguments, emit tool_call / tool_result, append tool messages. */\n private async *executeSingleTool(\n tc: OpenAIToolCall,\n originalName: string,\n messages: OpenAIMessage[],\n context: AgentRunContext,\n ): AsyncGenerator<AgentEvent, void, unknown> {\n let args: unknown;\n try {\n args = JSON.parse(tc.function.arguments);\n } catch {\n args = {};\n }\n\n yield { type: \"tool_call\", callId: tc.id, name: originalName, args };\n\n try {\n const result = await context.executeTool(originalName, args);\n const resultStr =\n typeof result === \"string\" ? result : JSON.stringify(result);\n\n yield { type: \"tool_result\", callId: tc.id, result };\n\n messages.push({\n role: \"tool\",\n content: resultStr,\n tool_call_id: tc.id,\n });\n } catch (error) {\n const errMsg =\n error instanceof Error ? error.message : \"Tool execution failed\";\n\n yield {\n type: \"tool_result\",\n callId: tc.id,\n result: null,\n error: errMsg,\n };\n\n messages.push({\n role: \"tool\",\n content: JSON.stringify({ error: errMsg }),\n tool_call_id: tc.id,\n });\n }\n }\n\n private async *streamCompletion(\n messages: OpenAIMessage[],\n tools: OpenAITool[],\n context: AgentRunContext,\n ): AsyncGenerator<\n AgentEvent,\n { text: string; toolCalls: OpenAIToolCall[] },\n unknown\n > {\n const body: Record<string, unknown> = {\n messages,\n stream: true,\n max_tokens: this.maxTokens,\n };\n\n if (tools.length > 0) {\n body.tools = tools;\n }\n\n let responseBody: ReadableStream<Uint8Array>;\n try {\n responseBody = await this.streamBody(body, context.signal);\n } catch (err) {\n const msg = err instanceof Error ? 
err.message : \"Stream request failed\";\n yield { type: \"status\", status: \"error\", error: msg };\n throw err;\n }\n\n const reader = responseBody.getReader();\n\n const decoder = new TextDecoder();\n let buffer = \"\";\n let fullText = \"\";\n const toolCallAccumulator = new Map<\n number,\n { id: string; name: string; arguments: string }\n >();\n\n try {\n while (true) {\n if (context.signal?.aborted) break;\n\n const { done, value } = await reader.read();\n if (done) break;\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split(\"\\n\");\n buffer = lines.pop() ?? \"\";\n\n if (buffer.length > this.maxSseLineChars) {\n throw new Error(\n `DatabricksAdapter: SSE line buffer exceeds configured limit (${this.maxSseLineChars} UTF-16 code units)`,\n );\n }\n\n for (const line of lines) {\n if (line.length > this.maxSseLineChars) {\n throw new Error(\n `DatabricksAdapter: SSE line exceeds configured limit (${this.maxSseLineChars} UTF-16 code units)`,\n );\n }\n\n const trimmed = line.trim();\n if (!trimmed.startsWith(\"data: \")) continue;\n const data = trimmed.slice(6);\n if (data === \"[DONE]\") continue;\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(data);\n } catch (parseErr) {\n console.debug(\n \"[DatabricksAdapter] malformed SSE data line JSON\",\n { line: `${data.slice(0, 256)}${data.length > 256 ? \"…\" : \"\"}` },\n parseErr,\n );\n continue;\n }\n\n const deltaUnknown = openAiChoicesDelta(parsed);\n if (!isRecord(deltaUnknown)) continue;\n\n if (typeof deltaUnknown.content === \"string\") {\n const content = deltaUnknown.content;\n throwIfExceedsStreamLimit(\n \"streamed assistant text\",\n fullText.length,\n content,\n this.maxStreamTextChars,\n );\n fullText += content;\n yield { type: \"message_delta\" as const, content };\n }\n\n const toolCallsRaw = deltaUnknown.tool_calls;\n if (!Array.isArray(toolCallsRaw)) continue;\n\n for (const tc of toolCallsRaw) {\n if (!isStreamingDeltaToolCall(tc)) continue;\n const existing = toolCallAccumulator.get(tc.index);\n if (existing) {\n if (tc.function?.arguments) {\n throwIfExceedsStreamLimit(\n \"tool call arguments\",\n existing.arguments.length,\n tc.function.arguments,\n this.maxToolArgumentsChars,\n );\n existing.arguments += tc.function.arguments;\n }\n } else {\n const initial = tc.function?.arguments ?? \"\";\n if (initial.length > this.maxToolArgumentsChars) {\n throw new Error(\n `DatabricksAdapter: tool call arguments exceed configured limit (${this.maxToolArgumentsChars} UTF-16 code units)`,\n );\n }\n toolCallAccumulator.set(tc.index, {\n id: tc.id ?? `call_${tc.index}`,\n name: tc.function?.name ?? 
\"\",\n arguments: initial,\n });\n }\n }\n }\n }\n } finally {\n try {\n await reader.cancel();\n } catch (cancelErr) {\n console.debug(\n \"[DatabricksAdapter] reader.cancel() failed during teardown\",\n cancelErr,\n );\n }\n try {\n reader.releaseLock();\n } catch (unlockErr) {\n console.debug(\n \"[DatabricksAdapter] reader.releaseLock() failed during teardown\",\n unlockErr,\n );\n }\n }\n\n const toolCalls: OpenAIToolCall[] = Array.from(\n toolCallAccumulator.values(),\n ).map((tc) => ({\n id: tc.id,\n type: \"function\" as const,\n function: { name: tc.name, arguments: tc.arguments || \"{}\" },\n }));\n\n return { text: fullText, toolCalls };\n }\n\n private async *executeToolCalls(\n calls: Array<{ name: string; args: unknown }>,\n messages: OpenAIMessage[],\n context: AgentRunContext,\n nameToWire: Map<string, string>,\n ): AsyncGenerator<AgentEvent, void, unknown> {\n const wireToolName = (name: string) =>\n nameToWire.get(name) ?? name.replace(/\\./g, \"__\");\n\n const toolCallObjs: OpenAIToolCall[] = calls.map((c, i) => ({\n id: `text_call_${i}`,\n type: \"function\" as const,\n function: {\n name: wireToolName(c.name),\n arguments: JSON.stringify(c.args),\n },\n }));\n\n messages.push({\n role: \"assistant\",\n content: null,\n tool_calls: toolCallObjs,\n });\n\n for (let i = 0; i < toolCallObjs.length; i++) {\n const tc = toolCallObjs[i];\n const originalName = calls[i]?.name ?? tc.function.name;\n yield* this.executeSingleTool(tc, originalName, messages, context);\n }\n }\n\n /**\n * Maps AppKit {@link AgentInput} messages into OpenAI-compatible wire messages.\n * Preserves multi-turn tool state (`toolCalls` → `tool_calls`, `toolCallId` →\n * `tool_call_id`) so resumed threads and hydrated history reach the model.\n */\n private buildMessages(\n messages: AgentInput[\"messages\"],\n nameToWire: Map<string, string>,\n ): OpenAIMessage[] {\n const wireToolName = (name: string) =>\n nameToWire.get(name) ?? name.replace(/\\./g, \"__\");\n\n return messages.map((m) => {\n let content: string | null = m.content;\n if (\n m.role === \"assistant\" &&\n m.toolCalls &&\n m.toolCalls.length > 0 &&\n (!m.content || m.content.trim() === \"\")\n ) {\n content = null;\n }\n\n const out: OpenAIMessage = {\n role: m.role as OpenAIMessage[\"role\"],\n content,\n };\n\n if (m.toolCallId) {\n out.tool_call_id = m.toolCallId;\n }\n\n if (m.toolCalls && m.toolCalls.length > 0) {\n out.tool_calls = m.toolCalls.map((tc) => ({\n id: tc.id,\n type: \"function\" as const,\n function: {\n name: wireToolName(tc.name),\n arguments:\n typeof tc.args === \"string\"\n ? tc.args\n : JSON.stringify(tc.args ?? {}),\n },\n }));\n }\n\n return out;\n });\n }\n\n private buildTools(\n definitions: AgentToolDefinition[],\n nameToWire: Map<string, string>,\n ): OpenAITool[] {\n return definitions.map((def) => ({\n type: \"function\" as const,\n function: {\n name: nameToWire.get(def.name) ?? def.name,\n description: def.description,\n parameters: def.parameters,\n },\n }));\n }\n}\n\n// ---------------------------------------------------------------------------\n// Text-based tool call parsing (fallback)\n// ---------------------------------------------------------------------------\n\n/**\n * Parses text-based tool calls from model output.\n *\n * Handles two formats:\n * 1. Llama native: `[{\"name\": \"tool_name\", \"parameters\": {\"arg\": \"val\"}}]`\n * 2. 
Python-style: `[tool_name(arg1='val1', arg2='val2')]`\n */\nexport function parseTextToolCalls(\n text: string,\n): Array<{ name: string; args: unknown }> {\n const trimmed = text.trim();\n\n const jsonResult = tryParseLlamaJsonToolCalls(trimmed);\n if (jsonResult.length > 0) return jsonResult;\n\n const pyResult = tryParsePythonStyleToolCalls(trimmed);\n if (pyResult.length > 0) return pyResult;\n\n return [];\n}\n\nfunction isLlamaToolJsonItem(value: unknown): value is Record<\n string,\n unknown\n> & {\n name: string;\n} {\n if (!isRecord(value)) return false;\n return typeof value.name === \"string\";\n}\n\nfunction tryParseLlamaJsonToolCalls(\n text: string,\n): Array<{ name: string; args: unknown }> {\n const slice = extractLlamaToolJsonSlice(text);\n if (!slice) return [];\n\n try {\n const parsed: unknown = JSON.parse(slice);\n if (!Array.isArray(parsed)) return [];\n\n return parsed.filter(isLlamaToolJsonItem).map((item) => ({\n name: item.name,\n args: item.parameters ?? item.arguments ?? item.args ?? {},\n }));\n } catch {\n return [];\n }\n}\n\nfunction tryParsePythonStyleToolCalls(\n text: string,\n): Array<{ name: string; args: unknown }> {\n if (text.length > PYTHON_STYLE_TOOL_PARSE_MAX_INPUT) {\n return [];\n }\n\n const pattern = /\\[?([a-zA-Z_][\\w.]*)\\(([^)]*)\\)\\]?/g;\n const results: Array<{ name: string; args: unknown }> = [];\n\n for (const match of text.matchAll(pattern)) {\n const name = match[1];\n const argsStr = match[2];\n\n const args: Record<string, unknown> = {};\n const argPattern = /(\\w+)\\s*=\\s*(?:'([^']*)'|\"([^\"]*)\"|(\\S+))/g;\n for (const argMatch of argsStr.matchAll(argPattern)) {\n const key = argMatch[1];\n const value = argMatch[2] ?? argMatch[3] ?? argMatch[4];\n args[key] = value;\n }\n\n results.push({ name, args });\n }\n\n return 
results;\n}\n"],"mappings":";;;;AAUA,MAAM,6BAA6B,OAAO;;AAG1C,MAAM,gCAAgC,IAAI,OAAO;;AAGjD,MAAM,kCAAkC,IAAI,OAAO;;AAGnD,MAAM,oCAAoC,KAAK;;AAG/C,MAAM,+BAA+B;AAErC,SAAS,SAAS,OAAkD;AAClE,QAAO,OAAO,UAAU,YAAY,UAAU;;AAGhD,SAAS,0BAA0B,MAAkC;CACnE,MAAM,QAAQ,KAAK,QAAQ,KAAK;AAChC,KAAI,QAAQ,EAAG,QAAO;CACtB,MAAM,aAAa,KAAK,YAAY,KAAK;AACzC,KAAI,aAAa,MAAO,QAAO;AAC/B,QAAO,KAAK,MAAM,OAAO,aAAa,EAAE;;;AAI1C,SAAS,mBAAmB,QAA0B;AACpD,KAAI,CAAC,SAAS,OAAO,CAAE,QAAO;CAC9B,MAAM,UAAU,OAAO;AACvB,KAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI,QAAQ,SAAS,EAAG,QAAO;CAC1D,MAAM,QAAQ,QAAQ;AACtB,KAAI,CAAC,SAAS,MAAM,CAAE,QAAO;AAC7B,QAAO,MAAM;;AAGf,SAAS,yBAAyB,OAAwC;AACxE,KAAI,CAAC,SAAS,MAAM,CAAE,QAAO;AAC7B,QAAO,OAAO,MAAM,UAAU;;AAGhC,SAAS,0BACP,OACA,eACA,OACA,KACM;AACN,KAAI,gBAAgB,MAAM,SAAS,IACjC,OAAM,IAAI,MACR,sBAAsB,MAAM,6BAA6B,IAAI,qBAC9D;;AAoDL,SAAS,oBACP,GAC+B;AAC/B,QAAO,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA2GzB,IAAa,oBAAb,MAAa,kBAA0C;CACrD,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CAER,YAAY,SAAmC;AAC7C,OAAK,WAAW,QAAQ,YAAY;AACpC,OAAK,YAAY,QAAQ,aAAa;AACtC,OAAK,kBACH,QAAQ,mBAAmB;AAC7B,OAAK,qBACH,QAAQ,sBAAsB;AAChC,OAAK,wBACH,QAAQ,yBAAyB;AAEnC,MAAI,oBAAoB,QAAQ,CAC9B,MAAK,aAAa,QAAQ;OACrB;GACL,MAAM,EAAE,aAAa,iBAAiB;AACtC,QAAK,aAAa,OAAO,MAAM,WAAW;IACxC,MAAM,cACJ,UAAU,YAAY,QAAQ,6BAA6B;IAC7D,MAAM,cAAc,MAAM,cAAc;IACxC,MAAM,WAAW,MAAM,MAAM,aAAa;KACxC,QAAQ;KACR,SAAS;MACP,gBAAgB;MAChB,GAAG;MACJ;KACD,MAAM,KAAK,UAAU,KAAK;KAC1B,QAAQ;KACT,CAAC;AACF,QAAI,CAAC,SAAS,IAAI;KAChB,MAAM,YAAY,MAAM,SAAS,MAAM,CAAC,YAAY,gBAAgB;AACpE,WAAM,IAAI,MACR,yBAAyB,SAAS,OAAO,KAAK,YAC/C;;AAEH,QAAI,CAAC,SAAS,KAAM,OAAM,IAAI,MAAM,mBAAmB;AACvD,WAAO,SAAS;;;;;;;;;;;;CAatB,aAAa,oBACX,SAC4B;EAC5B,MAAM,EACJ,iBACA,cACA,UACA,WACA,iBACA,oBACA,0BACE;AACJ,SAAO,IAAI,kBAAkB;GAC3B,aAAa,MAAM,WAIjBA,OACE,iBACA,cACA,MACA,OACD;GACH;GACA;GACA;GACA;GACA;GACD,CAAC;;;;;;;;;;;;;;;;;;;;;;CAuBJ,aAAa,iBACX,cACA,SAC4B;EAC5B,MAAM,mBACJ,gBAAgB,QAAQ,IAAI;AAE9B,MAAI,CAAC,iBACH,OAAM,IAAI,MACR,oJAED;EAGH,IAAI,kBACF,SAAS;AACX,MAAI,CAAC,gBAEH,mBAAkB,KADN,OAAM,OAAO,kCACC,gBACxB,EAAE,CACH;AAGH,SAAO,kBAAkB,oBAAoB;GAC3C;GACA,cAAc;GACd,UAAU,SAAS;GACnB,WAAW,SAAS;GACpB,iBAAiB,SAAS;GAC1B,oBAAoB,SAAS;GAC7B,uBAAuB,SAAS;GACjC,CAAC;;CAGJ,OAAO,IACL,OACA,SAC2C;EAI3C,MAAM,6BAAa,IAAI,KAAqB;EAC5C,MAAM,6BAAa,IAAI,KAAqB;AAC5C,OAAK,MAAM,QAAQ,MAAM,OAAO;GAC9B,MAAM,OAAO,KAAK,KAAK,QAAQ,OAAO,KAAK;AAC3C,OAAI,WAAW,IAAI,KAAK,IAAI,WAAW,IAAI,KAAK,KAAK,KAAK,KACxD,OAAM,IAAI,MACR,yBAAyB,KAAK,KAAK,SAAS,WAAW,IAAI,KAAK,CAAC,2BAA2B,KAAK,GAClG;AAEH,cAAW,IAAI,KAAK,MAAM,KAAK;AAC/B,cAAW,IAAI,MAAM,KAAK,KAAK;;EAGjC,MAAM,QAAQ,KAAK,WAAW,MAAM,OAAO,WAAW;EACtD,MAAM,WAAW,KAAK,cAAc,MAAM,UAAU,WAAW;AAE/D,QAAM;GAAE,MAAM;GAAU,QAAQ;GAAW;AAE3C,OAAK,IAAI,OAAO,GAAG,OAAO,KAAK,UAAU,QAAQ;AAC/C,OAAI,QAAQ,QAAQ,QAAS;GAE7B,MAAM,EAAE,MAAM,cAAc,OAAO,KAAK,iBACtC,UACA,OACA,QACD;AAED,OAAI,UAAU,WAAW,GAAG;IAC1B,MAAM,SAAS,mBAAmB,KAAK;AACvC,QAAI,OAAO,SAAS,GAAG;AACrB,YAAO,KAAK,iBAAiB,QAAQ,UAAU,SAAS,WAAW;AACnE;;AAEF;;AAGF,YAAS,KAAK;IACZ,MAAM;IACN,SAAS,QAAQ;IACjB,YAAY;IACb,CAAC;AAEF,QAAK,MAAM,MAAM,WAAW;IAC1B,MAAM,WAAW,GAAG,SAAS;IAC7B,MAAM,eAAe,WAAW,IAAI,SAAS,IAAI;AACjD,WAAO,KAAK,kBAAkB,IAAI,cAAc,UAAU,QAAQ;;;;;CAMxE,OAAe,kBACb,IACA,cACA,UACA,SAC2C;EAC3C,IAAI;AACJ,MAAI;AACF,UAAO,KAAK,MAAM,GAAG,SAAS,UAAU;UAClC;AACN,UAAO,EAAE;;AAGX,QAAM;GAAE,MAAM;GAAa,QAAQ,GAAG;GAAI,MAAM;GAAc;GAAM;AAEpE,MAAI;GACF,MAAM,SAAS,MAAM,QAAQ,YAAY,cAAc,KAAK;GAC5D,MAAM,YACJ,OAAO,WAAW,WAAW,SAAS,KAAK,UAAU,OAAO;AAE9D,SAAM;IAAE,MAAM;IAAe,QAAQ,GAAG;IAAI;IAAQ;AAEpD,YAAS,KAAK;IACZ,MAAM;IACN,SAAS;IACT,cAAc,GAAG;IAClB,CAAC;WACK,OAAO;GACd,MAAM,SACJ,iBAAiB,QAAQ,MAAM,UAAU;AAE3C,SAAM;IACJ,MAAM;IACN,QAAQ,GAAG;I
ACX,QAAQ;IACR,OAAO;IACR;AAED,YAAS,KAAK;IACZ,MAAM;IACN,SAAS,KAAK,UAAU,EAAE,OAAO,QAAQ,CAAC;IAC1C,cAAc,GAAG;IAClB,CAAC;;;CAIN,OAAe,iBACb,UACA,OACA,SAKA;EACA,MAAM,OAAgC;GACpC;GACA,QAAQ;GACR,YAAY,KAAK;GAClB;AAED,MAAI,MAAM,SAAS,EACjB,MAAK,QAAQ;EAGf,IAAI;AACJ,MAAI;AACF,kBAAe,MAAM,KAAK,WAAW,MAAM,QAAQ,OAAO;WACnD,KAAK;AAEZ,SAAM;IAAE,MAAM;IAAU,QAAQ;IAAS,OAD7B,eAAe,QAAQ,IAAI,UAAU;IACI;AACrD,SAAM;;EAGR,MAAM,SAAS,aAAa,WAAW;EAEvC,MAAM,UAAU,IAAI,aAAa;EACjC,IAAI,SAAS;EACb,IAAI,WAAW;EACf,MAAM,sCAAsB,IAAI,KAG7B;AAEH,MAAI;AACF,UAAO,MAAM;AACX,QAAI,QAAQ,QAAQ,QAAS;IAE7B,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,QAAI,KAAM;AAEV,cAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;IACjD,MAAM,QAAQ,OAAO,MAAM,KAAK;AAChC,aAAS,MAAM,KAAK,IAAI;AAExB,QAAI,OAAO,SAAS,KAAK,gBACvB,OAAM,IAAI,MACR,gEAAgE,KAAK,gBAAgB,qBACtF;AAGH,SAAK,MAAM,QAAQ,OAAO;AACxB,SAAI,KAAK,SAAS,KAAK,gBACrB,OAAM,IAAI,MACR,yDAAyD,KAAK,gBAAgB,qBAC/E;KAGH,MAAM,UAAU,KAAK,MAAM;AAC3B,SAAI,CAAC,QAAQ,WAAW,SAAS,CAAE;KACnC,MAAM,OAAO,QAAQ,MAAM,EAAE;AAC7B,SAAI,SAAS,SAAU;KAEvB,IAAI;AACJ,SAAI;AACF,eAAS,KAAK,MAAM,KAAK;cAClB,UAAU;AACjB,cAAQ,MACN,oDACA,EAAE,MAAM,GAAG,KAAK,MAAM,GAAG,IAAI,GAAG,KAAK,SAAS,MAAM,MAAM,MAAM,EAChE,SACD;AACD;;KAGF,MAAM,eAAe,mBAAmB,OAAO;AAC/C,SAAI,CAAC,SAAS,aAAa,CAAE;AAE7B,SAAI,OAAO,aAAa,YAAY,UAAU;MAC5C,MAAM,UAAU,aAAa;AAC7B,gCACE,2BACA,SAAS,QACT,SACA,KAAK,mBACN;AACD,kBAAY;AACZ,YAAM;OAAE,MAAM;OAA0B;OAAS;;KAGnD,MAAM,eAAe,aAAa;AAClC,SAAI,CAAC,MAAM,QAAQ,aAAa,CAAE;AAElC,UAAK,MAAM,MAAM,cAAc;AAC7B,UAAI,CAAC,yBAAyB,GAAG,CAAE;MACnC,MAAM,WAAW,oBAAoB,IAAI,GAAG,MAAM;AAClD,UAAI,UACF;WAAI,GAAG,UAAU,WAAW;AAC1B,kCACE,uBACA,SAAS,UAAU,QACnB,GAAG,SAAS,WACZ,KAAK,sBACN;AACD,iBAAS,aAAa,GAAG,SAAS;;aAE/B;OACL,MAAM,UAAU,GAAG,UAAU,aAAa;AAC1C,WAAI,QAAQ,SAAS,KAAK,sBACxB,OAAM,IAAI,MACR,mEAAmE,KAAK,sBAAsB,qBAC/F;AAEH,2BAAoB,IAAI,GAAG,OAAO;QAChC,IAAI,GAAG,MAAM,QAAQ,GAAG;QACxB,MAAM,GAAG,UAAU,QAAQ;QAC3B,WAAW;QACZ,CAAC;;;;;YAKF;AACR,OAAI;AACF,UAAM,OAAO,QAAQ;YACd,WAAW;AAClB,YAAQ,MACN,8DACA,UACD;;AAEH,OAAI;AACF,WAAO,aAAa;YACb,WAAW;AAClB,YAAQ,MACN,mEACA,UACD;;;EAIL,MAAM,YAA8B,MAAM,KACxC,oBAAoB,QAAQ,CAC7B,CAAC,KAAK,QAAQ;GACb,IAAI,GAAG;GACP,MAAM;GACN,UAAU;IAAE,MAAM,GAAG;IAAM,WAAW,GAAG,aAAa;IAAM;GAC7D,EAAE;AAEH,SAAO;GAAE,MAAM;GAAU;GAAW;;CAGtC,OAAe,iBACb,OACA,UACA,SACA,YAC2C;EAC3C,MAAM,gBAAgB,SACpB,WAAW,IAAI,KAAK,IAAI,KAAK,QAAQ,OAAO,KAAK;EAEnD,MAAM,eAAiC,MAAM,KAAK,GAAG,OAAO;GAC1D,IAAI,aAAa;GACjB,MAAM;GACN,UAAU;IACR,MAAM,aAAa,EAAE,KAAK;IAC1B,WAAW,KAAK,UAAU,EAAE,KAAK;IAClC;GACF,EAAE;AAEH,WAAS,KAAK;GACZ,MAAM;GACN,SAAS;GACT,YAAY;GACb,CAAC;AAEF,OAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;GAC5C,MAAM,KAAK,aAAa;GACxB,MAAM,eAAe,MAAM,IAAI,QAAQ,GAAG,SAAS;AACnD,UAAO,KAAK,kBAAkB,IAAI,cAAc,UAAU,QAAQ;;;;;;;;CAStE,AAAQ,cACN,UACA,YACiB;EACjB,MAAM,gBAAgB,SACpB,WAAW,IAAI,KAAK,IAAI,KAAK,QAAQ,OAAO,KAAK;AAEnD,SAAO,SAAS,KAAK,MAAM;GACzB,IAAI,UAAyB,EAAE;AAC/B,OACE,EAAE,SAAS,eACX,EAAE,aACF,EAAE,UAAU,SAAS,MACpB,CAAC,EAAE,WAAW,EAAE,QAAQ,MAAM,KAAK,IAEpC,WAAU;GAGZ,MAAM,MAAqB;IACzB,MAAM,EAAE;IACR;IACD;AAED,OAAI,EAAE,WACJ,KAAI,eAAe,EAAE;AAGvB,OAAI,EAAE,aAAa,EAAE,UAAU,SAAS,EACtC,KAAI,aAAa,EAAE,UAAU,KAAK,QAAQ;IACxC,IAAI,GAAG;IACP,MAAM;IACN,UAAU;KACR,MAAM,aAAa,GAAG,KAAK;KAC3B,WACE,OAAO,GAAG,SAAS,WACf,GAAG,OACH,KAAK,UAAU,GAAG,QAAQ,EAAE,CAAC;KACpC;IACF,EAAE;AAGL,UAAO;IACP;;CAGJ,AAAQ,WACN,aACA,YACc;AACd,SAAO,YAAY,KAAK,SAAS;GAC/B,MAAM;GACN,UAAU;IACR,MAAM,WAAW,IAAI,IAAI,KAAK,IAAI,IAAI;IACtC,aAAa,IAAI;IACjB,YAAY,IAAI;IACjB;GACF,EAAE;;;;;;;;;;AAeP,SAAgB,mBACd,MACwC;CACxC,MAAM,UAAU,KAAK,MAAM;CAE3B,MAAM,aAAa,2BAA2B,QAAQ;AACtD,KAAI,WAAW,SAAS,EAAG,QAAO;CAElC,MAAM,WAAW,6BAA6B,QAAQ;AACtD,KAAI,SAAS,SAAS,EAAG,QAAO;AAEhC,QAAO,EAAE;;AAGX,SAAS,oBAAoB,OAK3B;AACA
,KAAI,CAAC,SAAS,MAAM,CAAE,QAAO;AAC7B,QAAO,OAAO,MAAM,SAAS;;AAG/B,SAAS,2BACP,MACwC;CACxC,MAAM,QAAQ,0BAA0B,KAAK;AAC7C,KAAI,CAAC,MAAO,QAAO,EAAE;AAErB,KAAI;EACF,MAAM,SAAkB,KAAK,MAAM,MAAM;AACzC,MAAI,CAAC,MAAM,QAAQ,OAAO,CAAE,QAAO,EAAE;AAErC,SAAO,OAAO,OAAO,oBAAoB,CAAC,KAAK,UAAU;GACvD,MAAM,KAAK;GACX,MAAM,KAAK,cAAc,KAAK,aAAa,KAAK,QAAQ,EAAE;GAC3D,EAAE;SACG;AACN,SAAO,EAAE;;;AAIb,SAAS,6BACP,MACwC;AACxC,KAAI,KAAK,SAAS,kCAChB,QAAO,EAAE;CAGX,MAAM,UAAU;CAChB,MAAM,UAAkD,EAAE;AAE1D,MAAK,MAAM,SAAS,KAAK,SAAS,QAAQ,EAAE;EAC1C,MAAM,OAAO,MAAM;EACnB,MAAM,UAAU,MAAM;EAEtB,MAAM,OAAgC,EAAE;AAExC,OAAK,MAAM,YAAY,QAAQ,SADZ,6CACgC,EAAE;GACnD,MAAM,MAAM,SAAS;AAErB,QAAK,OADS,SAAS,MAAM,SAAS,MAAM,SAAS;;AAIvD,UAAQ,KAAK;GAAE;GAAM;GAAM,CAAC;;AAG9B,QAAO"}
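The newly added module above also exports a text-based fallback parser, `parseTextToolCalls`, for models that emit tool calls as plain text instead of structured `tool_calls`. Based on the embedded source, a minimal sketch of the two formats it recognizes (the tool and argument names here are hypothetical):

```ts
import { parseTextToolCalls } from "@databricks/appkit/beta";

// 1. Llama-native JSON: [{"name": ..., "parameters": {...}}]
parseTextToolCalls('[{"name": "analytics__query", "parameters": {"sql": "SELECT 1"}}]');
// → [{ name: "analytics__query", args: { sql: "SELECT 1" } }]

// 2. Python-style: [tool_name(arg1='val1', arg2='val2')]
parseTextToolCalls("[get_weather(city='Paris', unit='celsius')]");
// → [{ name: "get_weather", args: { city: "Paris", unit: "celsius" } }]
```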
@@ -1,6 +1,6 @@
  //#region package.json
  var name = "@databricks/appkit";
- var version = "0.28.0";
+ var version = "0.30.0";

  //#endregion
  export { name, version };
package/dist/beta.d.ts CHANGED
@@ -1 +1,2 @@
- export { };
+ import { DatabricksAdapter, parseTextToolCalls } from "./agents/databricks.js";
+ export { DatabricksAdapter, parseTextToolCalls };
package/dist/beta.js CHANGED
@@ -1 +1,3 @@
- export { };
+ import { DatabricksAdapter, parseTextToolCalls } from "./agents/databricks.js";
+
+ export { DatabricksAdapter, parseTextToolCalls };
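The `beta` entry point now re-exports the adapter added above. Its embedded JSDoc documents factory-based wiring through the agents plugin; the sketch below follows that example, with `await` added since `fromServingEndpoint` is async, and `"my-endpoint"` as a placeholder endpoint name:

```ts
import { createApp, createAgent, agents } from "@databricks/appkit";
import { DatabricksAdapter } from "@databricks/appkit/beta";
import { WorkspaceClient } from "@databricks/sdk-experimental";

// Route requests through the SDK's apiClient (centralised auth and URL encoding).
const adapter = await DatabricksAdapter.fromServingEndpoint({
  workspaceClient: new WorkspaceClient({}),
  endpointName: "my-endpoint",
});

await createApp({
  plugins: [
    agents({
      agents: {
        assistant: createAgent({
          instructions: "You are a helpful assistant.",
          model: adapter,
        }),
      },
    }),
  ],
});
```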
@@ -1,4 +1,5 @@
  import { createLogger } from "../../logging/logger.js";
+ import { init_errors } from "../../errors/index.js";
  import "../../telemetry/index.js";
  import "../../context/index.js";
  import { randomUUID } from "node:crypto";
@@ -6,6 +7,7 @@ import "@databricks/sdk-experimental";
  import "pg";

  //#region src/connectors/lakebase-v1/client.ts
+ init_errors();
  const logger = createLogger("connectors:lakebase-v1");

  //#endregion
@@ -1 +1 @@
- {"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/lakebase-v1/client.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { ApiClient, Config } from \"@databricks/sdk-experimental\";\nimport pg from \"pg\";\nimport {\n AppKitError,\n AuthenticationError,\n ConfigurationError,\n ConnectionError,\n ValidationError,\n} from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport {\n type Counter,\n type Histogram,\n SpanStatusCode,\n TelemetryManager,\n type TelemetryProvider,\n} from \"../../telemetry\";\nimport { deepMerge } from \"../../utils\";\nimport { lakebaseV1Defaults } from \"./defaults\";\nimport type {\n LakebaseV1Config,\n LakebaseV1ConnectionConfig,\n LakebaseV1Credentials,\n} from \"./types\";\n\nconst logger = createLogger(\"connectors:lakebase-v1\");\n\n/**\n * Enterprise-grade connector for Databricks Lakebase Provisioned\n *\n * @deprecated This connector is for Lakebase Provisioned only.\n * For new projects, use Lakebase Autoscaling instead: https://docs.databricks.com/aws/en/oltp/projects/\n *\n * This connector is compatible with Lakebase Provisioned: https://docs.databricks.com/aws/en/oltp/instances/\n *\n * Lakebase Autoscaling offers:\n * - Automatic compute scaling\n * - Scale-to-zero for cost optimization\n * - Database branching for development\n * - Instant restore capabilities\n *\n * Use the new LakebaseConnector (coming in a future release) for Lakebase Autoscaling support.\n *\n * @example Simplest - everything from env/context\n * ```typescript\n * const connector = new LakebaseV1Connector();\n * await connector.query('SELECT * FROM users');\n * ```\n *\n * @example With explicit connection string\n * ```typescript\n * const connector = new LakebaseV1Connector({\n * connectionString: 'postgresql://...'\n * });\n * ```\n */\nexport class LakebaseV1Connector {\n private readonly name: string = \"lakebase-v1\";\n private readonly CACHE_BUFFER_MS = 2 * 60 * 1000;\n private readonly config: LakebaseV1Config;\n private readonly connectionConfig: LakebaseV1ConnectionConfig;\n private pool: pg.Pool | null = null;\n private credentials: LakebaseV1Credentials | null = null;\n\n // telemetry\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n queryCount: Counter;\n queryDuration: Histogram;\n };\n\n constructor(userConfig?: Partial<LakebaseV1Config>) {\n this.config = deepMerge(lakebaseV1Defaults, userConfig);\n this.connectionConfig = this.parseConnectionConfig();\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n queryCount: this.telemetry\n .getMeter()\n .createCounter(\"lakebase.v1.query.count\", {\n description: \"Total number of queries executed\",\n unit: \"1\",\n }),\n queryDuration: this.telemetry\n .getMeter()\n .createHistogram(\"lakebase.v1.query.duration\", {\n description: \"Duration of queries executed\",\n unit: \"ms\",\n }),\n };\n\n // validate configuration\n if (this.config.maxPoolSize < 1) {\n throw ValidationError.invalidValue(\n \"maxPoolSize\",\n this.config.maxPoolSize,\n \"at least 1\",\n );\n }\n }\n\n /**\n * Execute a SQL query\n *\n * @example\n * ```typescript\n * const users = await connector.query('SELECT * FROM users');\n * const user = await connector.query('SELECT * FROM users WHERE id = $1', [123]);\n * ```\n */\n async query<T extends pg.QueryResultRow>(\n sql: string,\n 
params?: any[],\n retryCount: number = 0,\n ): Promise<pg.QueryResult<T>> {\n const startTime = Date.now();\n\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.query\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.statement\": sql.substring(0, 500),\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n try {\n const pool = await this.getPool();\n const result = await pool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const result = await newPool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transient_error_retry\");\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.query<T>(sql, params, retryCount + 1);\n }\n\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n logger.error(\n \"Query execution failed: %s (code=%s)\",\n error instanceof Error ? error.message : String(error),\n (error as any)?.code,\n );\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.queryFailed(error as Error);\n } finally {\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /**\n * Execute a transaction\n *\n * COMMIT and ROLLBACK are automatically managed by the transaction function.\n *\n * @param callback - Callback function to execute within the transaction context\n * @example\n * ```typescript\n * await connector.transaction(async (client) => {\n * await client.query('INSERT INTO accounts (name) VALUES ($1)', ['Alice']);\n * await client.query('INSERT INTO logs (action) VALUES ($1)', ['Created Alice']);\n * });\n * ```\n */\n async transaction<T>(\n callback: (client: pg.PoolClient) => Promise<T>,\n retryCount: number = 0,\n ): Promise<T> {\n const startTime = Date.now();\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.transaction\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n const pool = await this.getPool();\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(client);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n try {\n await client.query(\"ROLLBACK\");\n } catch {}\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n client.release();\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const retryClient = await newPool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(retryClient);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (retryError) {\n try {\n await retryClient.query(\"ROLLBACK\");\n } catch {}\n throw retryError;\n } finally {\n retryClient.release();\n }\n }\n\n // retry on transient errors, but only once\n if 
(this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transaction_error_retry\");\n client.release();\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.transaction<T>(callback, retryCount + 1);\n }\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n logger.error(\n \"Transaction execution failed: %s (code=%s)\",\n error instanceof Error ? error.message : String(error),\n (error as any)?.code,\n );\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.transactionFailed(error as Error);\n } finally {\n client.release();\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /** Check if database connection is healthy */\n async healthCheck(): Promise<boolean> {\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.healthCheck\",\n {},\n async (span) => {\n try {\n const result = await this.query<{ result: number }>(\n \"SELECT 1 as result\",\n );\n const healthy = result.rows[0]?.result === 1;\n span.setAttribute(\"db.healthy\", healthy);\n span.setStatus({ code: SpanStatusCode.OK });\n return healthy;\n } catch {\n span.setAttribute(\"db.healthy\", false);\n span.setStatus({ code: SpanStatusCode.ERROR });\n return false;\n } finally {\n span.end();\n }\n },\n );\n }\n\n /** Close connection pool (call on shutdown) */\n async close(): Promise<void> {\n if (this.pool) {\n await this.pool.end().catch((error: unknown) => {\n logger.error(\"Error closing connection pool: %O\", error);\n });\n this.pool = null;\n }\n this.credentials = null;\n }\n\n /** Setup graceful shutdown to close connection pools */\n shutdown(): void {\n process.on(\"SIGTERM\", () => this.close());\n process.on(\"SIGINT\", () => this.close());\n this.close();\n }\n\n /** Get Databricks workspace client - from config or execution context */\n private getWorkspaceClient(): WorkspaceClient {\n if (this.config.workspaceClient) {\n return this.config.workspaceClient;\n }\n\n try {\n const { getWorkspaceClient: getClient } = require(\"../../context\");\n const client = getClient();\n\n // cache it for subsequent calls\n this.config.workspaceClient = client;\n return client;\n } catch (_error) {\n throw ConnectionError.clientUnavailable(\n \"Databricks workspace client\",\n \"Either pass it in config or ensure ServiceContext is initialized\",\n );\n }\n }\n\n /** Get or create connection pool */\n private async getPool(): Promise<pg.Pool> {\n if (!this.connectionConfig) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config\",\n );\n }\n\n if (!this.pool) {\n const creds = await this.getCredentials();\n this.pool = this.createPool(creds);\n }\n return this.pool;\n }\n\n /** Create PostgreSQL pool */\n private createPool(credentials: {\n username: string;\n password: string;\n }): pg.Pool {\n const { host, database, port, sslMode } = this.connectionConfig;\n\n const pool = new pg.Pool({\n host,\n port,\n database,\n user: credentials.username,\n password: credentials.password,\n max: this.config.maxPoolSize,\n idleTimeoutMillis: this.config.idleTimeoutMs,\n connectionTimeoutMillis: this.config.connectionTimeoutMs,\n ssl: sslMode === \"require\" ? 
{ rejectUnauthorized: true } : false,\n });\n\n pool.on(\"error\", (error: Error & { code?: string }) => {\n logger.error(\n \"Connection pool error: %s (code: %s)\",\n error.message,\n error.code,\n );\n });\n\n return pool;\n }\n\n /** Get or fetch credentials with caching */\n private async getCredentials(): Promise<{\n username: string;\n password: string;\n }> {\n const now = Date.now();\n\n // return cached if still valid\n if (\n this.credentials &&\n now < this.credentials.expiresAt - this.CACHE_BUFFER_MS\n ) {\n return this.credentials;\n }\n\n // fetch new credentials\n const username = await this.fetchUsername();\n const { token, expiresAt } = await this.fetchPassword();\n\n this.credentials = {\n username,\n password: token,\n expiresAt,\n };\n\n return { username, password: token };\n }\n\n /** Rotate credentials and recreate pool */\n private async rotateCredentials(): Promise<void> {\n // clear cached credentials\n this.credentials = null;\n\n if (this.pool) {\n const oldPool = this.pool;\n this.pool = null;\n oldPool.end().catch((error: unknown) => {\n logger.error(\n \"Error closing old connection pool during rotation: %O\",\n error,\n );\n });\n }\n }\n\n /** Fetch username from Databricks */\n private async fetchUsername(): Promise<string> {\n const workspaceClient = this.getWorkspaceClient();\n const user = await workspaceClient.currentUser.me();\n if (!user.userName) {\n throw AuthenticationError.userLookupFailed();\n }\n return user.userName;\n }\n\n /** Fetch password (OAuth token) from Databricks */\n private async fetchPassword(): Promise<{ token: string; expiresAt: number }> {\n const workspaceClient = this.getWorkspaceClient();\n const config = new Config({ host: workspaceClient.config.host });\n const apiClient = new ApiClient(config);\n\n if (!this.connectionConfig.appName) {\n throw ConfigurationError.resourceNotFound(\"Database app name\");\n }\n\n const credentials = await apiClient.request({\n path: `/api/2.0/database/credentials`,\n method: \"POST\",\n headers: new Headers(),\n raw: false,\n payload: {\n instance_names: [this.connectionConfig.appName],\n request_id: randomUUID(),\n },\n });\n\n if (!this.validateCredentials(credentials)) {\n throw AuthenticationError.credentialsFailed(\n this.connectionConfig.appName,\n );\n }\n\n const expiresAt = new Date(credentials.expiration_time).getTime();\n\n return { token: credentials.token, expiresAt };\n }\n\n /** Check if error is auth failure */\n private isAuthError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n (error as any).code === \"28P01\"\n );\n }\n\n /** Check if error is transient */\n private isTransientError(error: unknown): boolean {\n if (typeof error !== \"object\" || error === null || !(\"code\" in error)) {\n return false;\n }\n\n const code = (error as any).code;\n return (\n code === \"ECONNRESET\" ||\n code === \"ECONNREFUSED\" ||\n code === \"ETIMEDOUT\" ||\n code === \"57P01\" || // admin_shutdown\n code === \"57P03\" || // cannot_connect_now\n code === \"08006\" || // connection_failure\n code === \"08003\" || // connection_does_not_exist\n code === \"08000\" // connection_exception\n );\n }\n\n /** Type guard for credentials */\n private validateCredentials(\n value: unknown,\n ): value is { token: string; expiration_time: string } {\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n const credentials = value as { token: string; expiration_time: string };\n return (\n \"token\" in 
credentials &&\n typeof credentials.token === \"string\" &&\n \"expiration_time\" in credentials &&\n typeof credentials.expiration_time === \"string\" &&\n new Date(credentials.expiration_time).getTime() > Date.now()\n );\n }\n\n /** Parse connection configuration from config or environment */\n private parseConnectionConfig(): LakebaseV1ConnectionConfig {\n if (this.config.connectionString) {\n return this.parseConnectionString(this.config.connectionString);\n }\n\n // get connection from config\n if (this.config.host && this.config.database && this.config.appName) {\n return {\n host: this.config.host,\n database: this.config.database,\n port: this.config.port ?? 5432,\n sslMode: this.config.sslMode ?? \"require\",\n appName: this.config.appName,\n };\n }\n\n // get connection from environment variables\n const pgHost = process.env.PGHOST;\n const pgDatabase = process.env.PGDATABASE;\n const pgAppName = process.env.PGAPPNAME;\n if (!pgHost || !pgDatabase || !pgAppName) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Required env vars: PGHOST, PGDATABASE, PGAPPNAME. Optional: PGPORT (default: 5432), PGSSLMODE (default: require)\",\n );\n }\n const pgPort = process.env.PGPORT;\n const port = pgPort ? parseInt(pgPort, 10) : 5432;\n\n if (Number.isNaN(port)) {\n throw ValidationError.invalidValue(\"port\", pgPort, \"a number\");\n }\n\n const pgSSLMode = process.env.PGSSLMODE;\n const sslMode =\n (pgSSLMode as \"require\" | \"disable\" | \"prefer\") || \"require\";\n\n return {\n host: pgHost,\n database: pgDatabase,\n port,\n sslMode,\n appName: pgAppName,\n };\n }\n\n private parseConnectionString(\n connectionString: string,\n ): LakebaseV1ConnectionConfig {\n const url = new URL(connectionString);\n const appName = url.searchParams.get(\"appName\");\n if (!appName) {\n throw ConfigurationError.missingConnectionParam(\"appName\");\n }\n\n return {\n host: url.hostname,\n database: url.pathname.slice(1), // remove leading slash\n port: url.port ? parseInt(url.port, 10) : 5432,\n sslMode:\n (url.searchParams.get(\"sslmode\") as \"require\" | \"disable\" | \"prefer\") ??\n \"require\",\n appName: appName,\n };\n }\n}\n"],"mappings":";;;;;;;;AA2BA,MAAM,SAAS,aAAa,yBAAyB"}
+ {"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/lakebase-v1/client.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { ApiClient, Config } from \"@databricks/sdk-experimental\";\nimport pg from \"pg\";\nimport {\n AppKitError,\n AuthenticationError,\n ConfigurationError,\n ConnectionError,\n ValidationError,\n} from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport {\n type Counter,\n type Histogram,\n SpanStatusCode,\n TelemetryManager,\n type TelemetryProvider,\n} from \"../../telemetry\";\nimport { deepMerge } from \"../../utils\";\nimport { lakebaseV1Defaults } from \"./defaults\";\nimport type {\n LakebaseV1Config,\n LakebaseV1ConnectionConfig,\n LakebaseV1Credentials,\n} from \"./types\";\n\nconst logger = createLogger(\"connectors:lakebase-v1\");\n\n/**\n * Enterprise-grade connector for Databricks Lakebase Provisioned\n *\n * @deprecated This connector is for Lakebase Provisioned only.\n * For new projects, use Lakebase Autoscaling instead: https://docs.databricks.com/aws/en/oltp/projects/\n *\n * This connector is compatible with Lakebase Provisioned: https://docs.databricks.com/aws/en/oltp/instances/\n *\n * Lakebase Autoscaling offers:\n * - Automatic compute scaling\n * - Scale-to-zero for cost optimization\n * - Database branching for development\n * - Instant restore capabilities\n *\n * Use the new LakebaseConnector (coming in a future release) for Lakebase Autoscaling support.\n *\n * @example Simplest - everything from env/context\n * ```typescript\n * const connector = new LakebaseV1Connector();\n * await connector.query('SELECT * FROM users');\n * ```\n *\n * @example With explicit connection string\n * ```typescript\n * const connector = new LakebaseV1Connector({\n * connectionString: 'postgresql://...'\n * });\n * ```\n */\nexport class LakebaseV1Connector {\n private readonly name: string = \"lakebase-v1\";\n private readonly CACHE_BUFFER_MS = 2 * 60 * 1000;\n private readonly config: LakebaseV1Config;\n private readonly connectionConfig: LakebaseV1ConnectionConfig;\n private pool: pg.Pool | null = null;\n private credentials: LakebaseV1Credentials | null = null;\n\n // telemetry\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n queryCount: Counter;\n queryDuration: Histogram;\n };\n\n constructor(userConfig?: Partial<LakebaseV1Config>) {\n this.config = deepMerge(lakebaseV1Defaults, userConfig);\n this.connectionConfig = this.parseConnectionConfig();\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n queryCount: this.telemetry\n .getMeter()\n .createCounter(\"lakebase.v1.query.count\", {\n description: \"Total number of queries executed\",\n unit: \"1\",\n }),\n queryDuration: this.telemetry\n .getMeter()\n .createHistogram(\"lakebase.v1.query.duration\", {\n description: \"Duration of queries executed\",\n unit: \"ms\",\n }),\n };\n\n // validate configuration\n if (this.config.maxPoolSize < 1) {\n throw ValidationError.invalidValue(\n \"maxPoolSize\",\n this.config.maxPoolSize,\n \"at least 1\",\n );\n }\n }\n\n /**\n * Execute a SQL query\n *\n * @example\n * ```typescript\n * const users = await connector.query('SELECT * FROM users');\n * const user = await connector.query('SELECT * FROM users WHERE id = $1', [123]);\n * ```\n */\n async query<T extends pg.QueryResultRow>(\n sql: string,\n 
params?: any[],\n retryCount: number = 0,\n ): Promise<pg.QueryResult<T>> {\n const startTime = Date.now();\n\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.query\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.statement\": sql.substring(0, 500),\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n try {\n const pool = await this.getPool();\n const result = await pool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const result = await newPool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transient_error_retry\");\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.query<T>(sql, params, retryCount + 1);\n }\n\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n logger.error(\n \"Query execution failed: %s (code=%s)\",\n error instanceof Error ? error.message : String(error),\n (error as any)?.code,\n );\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.queryFailed(error as Error);\n } finally {\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /**\n * Execute a transaction\n *\n * COMMIT and ROLLBACK are automatically managed by the transaction function.\n *\n * @param callback - Callback function to execute within the transaction context\n * @example\n * ```typescript\n * await connector.transaction(async (client) => {\n * await client.query('INSERT INTO accounts (name) VALUES ($1)', ['Alice']);\n * await client.query('INSERT INTO logs (action) VALUES ($1)', ['Created Alice']);\n * });\n * ```\n */\n async transaction<T>(\n callback: (client: pg.PoolClient) => Promise<T>,\n retryCount: number = 0,\n ): Promise<T> {\n const startTime = Date.now();\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.transaction\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n const pool = await this.getPool();\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(client);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n try {\n await client.query(\"ROLLBACK\");\n } catch {}\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n client.release();\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const retryClient = await newPool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(retryClient);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (retryError) {\n try {\n await retryClient.query(\"ROLLBACK\");\n } catch {}\n throw retryError;\n } finally {\n retryClient.release();\n }\n }\n\n // retry on transient errors, but only once\n if 
(this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transaction_error_retry\");\n client.release();\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.transaction<T>(callback, retryCount + 1);\n }\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n logger.error(\n \"Transaction execution failed: %s (code=%s)\",\n error instanceof Error ? error.message : String(error),\n (error as any)?.code,\n );\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.transactionFailed(error as Error);\n } finally {\n client.release();\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /** Check if database connection is healthy */\n async healthCheck(): Promise<boolean> {\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.healthCheck\",\n {},\n async (span) => {\n try {\n const result = await this.query<{ result: number }>(\n \"SELECT 1 as result\",\n );\n const healthy = result.rows[0]?.result === 1;\n span.setAttribute(\"db.healthy\", healthy);\n span.setStatus({ code: SpanStatusCode.OK });\n return healthy;\n } catch {\n span.setAttribute(\"db.healthy\", false);\n span.setStatus({ code: SpanStatusCode.ERROR });\n return false;\n } finally {\n span.end();\n }\n },\n );\n }\n\n /** Close connection pool (call on shutdown) */\n async close(): Promise<void> {\n if (this.pool) {\n await this.pool.end().catch((error: unknown) => {\n logger.error(\"Error closing connection pool: %O\", error);\n });\n this.pool = null;\n }\n this.credentials = null;\n }\n\n /** Setup graceful shutdown to close connection pools */\n shutdown(): void {\n process.on(\"SIGTERM\", () => this.close());\n process.on(\"SIGINT\", () => this.close());\n this.close();\n }\n\n /** Get Databricks workspace client - from config or execution context */\n private getWorkspaceClient(): WorkspaceClient {\n if (this.config.workspaceClient) {\n return this.config.workspaceClient;\n }\n\n try {\n const { getWorkspaceClient: getClient } = require(\"../../context\");\n const client = getClient();\n\n // cache it for subsequent calls\n this.config.workspaceClient = client;\n return client;\n } catch (_error) {\n throw ConnectionError.clientUnavailable(\n \"Databricks workspace client\",\n \"Either pass it in config or ensure ServiceContext is initialized\",\n );\n }\n }\n\n /** Get or create connection pool */\n private async getPool(): Promise<pg.Pool> {\n if (!this.connectionConfig) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config\",\n );\n }\n\n if (!this.pool) {\n const creds = await this.getCredentials();\n this.pool = this.createPool(creds);\n }\n return this.pool;\n }\n\n /** Create PostgreSQL pool */\n private createPool(credentials: {\n username: string;\n password: string;\n }): pg.Pool {\n const { host, database, port, sslMode } = this.connectionConfig;\n\n const pool = new pg.Pool({\n host,\n port,\n database,\n user: credentials.username,\n password: credentials.password,\n max: this.config.maxPoolSize,\n idleTimeoutMillis: this.config.idleTimeoutMs,\n connectionTimeoutMillis: this.config.connectionTimeoutMs,\n ssl: sslMode === \"require\" ? 
{ rejectUnauthorized: true } : false,\n });\n\n pool.on(\"error\", (error: Error & { code?: string }) => {\n logger.error(\n \"Connection pool error: %s (code: %s)\",\n error.message,\n error.code,\n );\n });\n\n return pool;\n }\n\n /** Get or fetch credentials with caching */\n private async getCredentials(): Promise<{\n username: string;\n password: string;\n }> {\n const now = Date.now();\n\n // return cached if still valid\n if (\n this.credentials &&\n now < this.credentials.expiresAt - this.CACHE_BUFFER_MS\n ) {\n return this.credentials;\n }\n\n // fetch new credentials\n const username = await this.fetchUsername();\n const { token, expiresAt } = await this.fetchPassword();\n\n this.credentials = {\n username,\n password: token,\n expiresAt,\n };\n\n return { username, password: token };\n }\n\n /** Rotate credentials and recreate pool */\n private async rotateCredentials(): Promise<void> {\n // clear cached credentials\n this.credentials = null;\n\n if (this.pool) {\n const oldPool = this.pool;\n this.pool = null;\n oldPool.end().catch((error: unknown) => {\n logger.error(\n \"Error closing old connection pool during rotation: %O\",\n error,\n );\n });\n }\n }\n\n /** Fetch username from Databricks */\n private async fetchUsername(): Promise<string> {\n const workspaceClient = this.getWorkspaceClient();\n const user = await workspaceClient.currentUser.me();\n if (!user.userName) {\n throw AuthenticationError.userLookupFailed();\n }\n return user.userName;\n }\n\n /** Fetch password (OAuth token) from Databricks */\n private async fetchPassword(): Promise<{ token: string; expiresAt: number }> {\n const workspaceClient = this.getWorkspaceClient();\n const config = new Config({ host: workspaceClient.config.host });\n const apiClient = new ApiClient(config);\n\n if (!this.connectionConfig.appName) {\n throw ConfigurationError.resourceNotFound(\"Database app name\");\n }\n\n const credentials = await apiClient.request({\n path: `/api/2.0/database/credentials`,\n method: \"POST\",\n headers: new Headers(),\n raw: false,\n payload: {\n instance_names: [this.connectionConfig.appName],\n request_id: randomUUID(),\n },\n });\n\n if (!this.validateCredentials(credentials)) {\n throw AuthenticationError.credentialsFailed(\n this.connectionConfig.appName,\n );\n }\n\n const expiresAt = new Date(credentials.expiration_time).getTime();\n\n return { token: credentials.token, expiresAt };\n }\n\n /** Check if error is auth failure */\n private isAuthError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n (error as any).code === \"28P01\"\n );\n }\n\n /** Check if error is transient */\n private isTransientError(error: unknown): boolean {\n if (typeof error !== \"object\" || error === null || !(\"code\" in error)) {\n return false;\n }\n\n const code = (error as any).code;\n return (\n code === \"ECONNRESET\" ||\n code === \"ECONNREFUSED\" ||\n code === \"ETIMEDOUT\" ||\n code === \"57P01\" || // admin_shutdown\n code === \"57P03\" || // cannot_connect_now\n code === \"08006\" || // connection_failure\n code === \"08003\" || // connection_does_not_exist\n code === \"08000\" // connection_exception\n );\n }\n\n /** Type guard for credentials */\n private validateCredentials(\n value: unknown,\n ): value is { token: string; expiration_time: string } {\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n const credentials = value as { token: string; expiration_time: string };\n return (\n \"token\" in 
credentials &&\n typeof credentials.token === \"string\" &&\n \"expiration_time\" in credentials &&\n typeof credentials.expiration_time === \"string\" &&\n new Date(credentials.expiration_time).getTime() > Date.now()\n );\n }\n\n /** Parse connection configuration from config or environment */\n private parseConnectionConfig(): LakebaseV1ConnectionConfig {\n if (this.config.connectionString) {\n return this.parseConnectionString(this.config.connectionString);\n }\n\n // get connection from config\n if (this.config.host && this.config.database && this.config.appName) {\n return {\n host: this.config.host,\n database: this.config.database,\n port: this.config.port ?? 5432,\n sslMode: this.config.sslMode ?? \"require\",\n appName: this.config.appName,\n };\n }\n\n // get connection from environment variables\n const pgHost = process.env.PGHOST;\n const pgDatabase = process.env.PGDATABASE;\n const pgAppName = process.env.PGAPPNAME;\n if (!pgHost || !pgDatabase || !pgAppName) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Required env vars: PGHOST, PGDATABASE, PGAPPNAME. Optional: PGPORT (default: 5432), PGSSLMODE (default: require)\",\n );\n }\n const pgPort = process.env.PGPORT;\n const port = pgPort ? parseInt(pgPort, 10) : 5432;\n\n if (Number.isNaN(port)) {\n throw ValidationError.invalidValue(\"port\", pgPort, \"a number\");\n }\n\n const pgSSLMode = process.env.PGSSLMODE;\n const sslMode =\n (pgSSLMode as \"require\" | \"disable\" | \"prefer\") || \"require\";\n\n return {\n host: pgHost,\n database: pgDatabase,\n port,\n sslMode,\n appName: pgAppName,\n };\n }\n\n private parseConnectionString(\n connectionString: string,\n ): LakebaseV1ConnectionConfig {\n const url = new URL(connectionString);\n const appName = url.searchParams.get(\"appName\");\n if (!appName) {\n throw ConfigurationError.missingConnectionParam(\"appName\");\n }\n\n return {\n host: url.hostname,\n database: url.pathname.slice(1), // remove leading slash\n port: url.port ? parseInt(url.port, 10) : 5432,\n sslMode:\n (url.searchParams.get(\"sslmode\") as \"require\" | \"disable\" | \"prefer\") ??\n \"require\",\n appName: appName,\n };\n }\n}\n"],"mappings":";;;;;;;;;aAUsB;AAiBtB,MAAM,SAAS,aAAa,yBAAyB"}
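The source map above embeds the Lakebase v1 connector source (query/transaction retry, credential rotation, health check). Below is a minimal usage sketch; the connector class name is not visible in this diff, so the structural `LakebaseLikeConnector` type and the table names are hypothetical. Note that in the embedded `transaction()` source, the auth-retry branch appears to issue `BEGIN`/`COMMIT` on the original `client` rather than `retryClient`; the sketch exercises only the documented paths.

```typescript
// Hedged usage sketch for the Lakebase connector embedded in the map above.
// The structural type below is an assumption; the real class is not shown here.
import type * as pg from "pg";

interface LakebaseLikeConnector {
  query<T extends pg.QueryResultRow>(sql: string, params?: any[]): Promise<pg.QueryResult<T>>;
  transaction<T>(cb: (client: pg.PoolClient) => Promise<T>): Promise<T>;
  healthCheck(): Promise<boolean>;
  close(): Promise<void>;
}

async function example(connector: LakebaseLikeConnector) {
  // Auth failures (Postgres code 28P01) rotate credentials and retry the
  // statement; transient errors (ECONNRESET, 57P01, 08006, ...) retry once.
  const result = await connector.query<{ id: number }>(
    "SELECT id FROM accounts WHERE name = $1",
    ["Alice"],
  );
  console.log(result.rowCount, result.rows);

  // BEGIN/COMMIT/ROLLBACK are issued by transaction() itself.
  await connector.transaction(async (client) => {
    await client.query("INSERT INTO accounts (name) VALUES ($1)", ["Alice"]);
    await client.query("INSERT INTO logs (action) VALUES ($1)", ["Created Alice"]);
  });

  // SELECT 1 round-trip; resolves false on any failure.
  if (!(await connector.healthCheck())) {
    await connector.close();
  }
}
```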
@@ -1,8 +1,32 @@
1
1
  import { createLogger } from "../../logging/logger.js";
2
+ import { Context } from "@databricks/sdk-experimental";
2
3
 
3
4
  //#region src/connectors/serving/client.ts
4
5
  const logger = createLogger("connectors:serving");
5
6
  /**
7
+ * Bridges {@link AbortSignal} to the SDK's {@link CancellationToken} so
8
+ * `apiClient.request` can abort the outbound HTTP request (and stop pulling
9
+ * the SSE body) when the agent run is cancelled.
10
+ */
11
+ function cancellationTokenFromAbortSignal(signal) {
12
+ const listeners = /* @__PURE__ */ new Set();
13
+ const fire = () => {
14
+ for (const cb of listeners) try {
15
+ cb();
16
+ } catch {}
17
+ };
18
+ signal.addEventListener("abort", fire, { passive: true });
19
+ return {
20
+ get isCancellationRequested() {
21
+ return signal.aborted;
22
+ },
23
+ onCancellationRequested(callback) {
24
+ listeners.add(callback);
25
+ if (signal.aborted) callback();
26
+ }
27
+ };
28
+ }
29
+ /**
6
30
  * Invokes a serving endpoint using the SDK's high-level query API.
7
31
  * Returns a typed QueryEndpointResponse.
8
32
  */
@@ -22,9 +46,10 @@ async function invoke(client, endpointName, body) {
22
46
  * the high-level `servingEndpoints.query()` returns `Promise<QueryEndpointResponse>`
23
47
  * and does not support SSE streaming.
24
48
  */
25
- async function stream(client, endpointName, body) {
49
+ async function stream(client, endpointName, body, signal) {
26
50
  const { stream: _stream, ...cleanBody } = body;
27
51
  logger.debug("Streaming from endpoint %s", endpointName);
52
+ const context = signal ? new Context({ cancellationToken: cancellationTokenFromAbortSignal(signal) }) : void 0;
28
53
  const response = await client.apiClient.request({
29
54
  path: `/serving-endpoints/${encodeURIComponent(endpointName)}/invocations`,
30
55
  method: "POST",
@@ -37,7 +62,7 @@ async function stream(client, endpointName, body) {
37
62
  stream: true
38
63
  },
39
64
  raw: true
40
- });
65
+ }, context);
41
66
  if (!response.contents) throw new Error("Response body is null — streaming not supported");
42
67
  return response.contents;
43
68
  }
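The hunk above threads an optional `AbortSignal` through the serving `stream()` helper and bridges it to the SDK's `CancellationToken`. A hedged sketch of how a caller might cancel an in-flight SSE request follows; the endpoint name is a placeholder and the import path is illustrative.

```typescript
// Sketch: cancelling a serving-endpoint SSE stream via the new `signal`
// parameter. Endpoint name and import path are assumptions for illustration.
import { WorkspaceClient } from "@databricks/sdk-experimental";
import { stream } from "./connectors/serving/client";

async function run() {
  const client = new WorkspaceClient({});
  const controller = new AbortController();

  // Abort after 30s: the bridged CancellationToken lets apiClient.request
  // drop the outbound HTTP request and stop pulling the SSE body.
  const timer = setTimeout(() => controller.abort(), 30_000);

  const body = await stream(
    client,
    "my-chat-endpoint",
    { messages: [{ role: "user", content: "hello" }] },
    controller.signal,
  );

  const reader = body.getReader();
  const decoder = new TextDecoder();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done || !value) break;
      process.stdout.write(decoder.decode(value, { stream: true }));
    }
  } finally {
    clearTimeout(timer);
  }
}
```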
@@ -1 +1 @@
1
- {"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/serving/client.ts"],"sourcesContent":["import type { serving, WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { createLogger } from \"../../logging/logger\";\n\nconst logger = createLogger(\"connectors:serving\");\n\n/**\n * Invokes a serving endpoint using the SDK's high-level query API.\n * Returns a typed QueryEndpointResponse.\n */\nexport async function invoke(\n client: WorkspaceClient,\n endpointName: string,\n body: Record<string, unknown>,\n): Promise<serving.QueryEndpointResponse> {\n // Strip `stream` from the body — the connector controls this\n const { stream: _stream, ...cleanBody } = body;\n\n logger.debug(\"Invoking endpoint %s\", endpointName);\n\n return client.servingEndpoints.query({\n name: endpointName,\n ...cleanBody,\n } as serving.QueryEndpointInput);\n}\n\n/**\n * Returns the raw SSE byte stream from a serving endpoint.\n * No parsing is performed — bytes are passed through as-is.\n *\n * Uses the SDK's low-level `apiClient.request({ raw: true })` because\n * the high-level `servingEndpoints.query()` returns `Promise<QueryEndpointResponse>`\n * and does not support SSE streaming.\n */\nexport async function stream(\n client: WorkspaceClient,\n endpointName: string,\n body: Record<string, unknown>,\n): Promise<ReadableStream<Uint8Array>> {\n const { stream: _stream, ...cleanBody } = body;\n\n logger.debug(\"Streaming from endpoint %s\", endpointName);\n\n const response = (await client.apiClient.request({\n path: `/serving-endpoints/${encodeURIComponent(endpointName)}/invocations`,\n method: \"POST\",\n headers: new Headers({\n \"Content-Type\": \"application/json\",\n Accept: \"text/event-stream\",\n }),\n payload: { ...cleanBody, stream: true },\n raw: true,\n })) as { contents: ReadableStream<Uint8Array> };\n\n if (!response.contents) {\n throw new Error(\"Response body is null — streaming not supported\");\n }\n\n return response.contents;\n}\n"],"mappings":";;;AAGA,MAAM,SAAS,aAAa,qBAAqB;;;;;AAMjD,eAAsB,OACpB,QACA,cACA,MACwC;CAExC,MAAM,EAAE,QAAQ,SAAS,GAAG,cAAc;AAE1C,QAAO,MAAM,wBAAwB,aAAa;AAElD,QAAO,OAAO,iBAAiB,MAAM;EACnC,MAAM;EACN,GAAG;EACJ,CAA+B;;;;;;;;;;AAWlC,eAAsB,OACpB,QACA,cACA,MACqC;CACrC,MAAM,EAAE,QAAQ,SAAS,GAAG,cAAc;AAE1C,QAAO,MAAM,8BAA8B,aAAa;CAExD,MAAM,WAAY,MAAM,OAAO,UAAU,QAAQ;EAC/C,MAAM,sBAAsB,mBAAmB,aAAa,CAAC;EAC7D,QAAQ;EACR,SAAS,IAAI,QAAQ;GACnB,gBAAgB;GAChB,QAAQ;GACT,CAAC;EACF,SAAS;GAAE,GAAG;GAAW,QAAQ;GAAM;EACvC,KAAK;EACN,CAAC;AAEF,KAAI,CAAC,SAAS,SACZ,OAAM,IAAI,MAAM,kDAAkD;AAGpE,QAAO,SAAS"}
1
+ {"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/serving/client.ts"],"sourcesContent":["import type {\n CancellationToken,\n serving,\n WorkspaceClient,\n} from \"@databricks/sdk-experimental\";\nimport { Context } from \"@databricks/sdk-experimental\";\nimport { createLogger } from \"../../logging/logger\";\n\nconst logger = createLogger(\"connectors:serving\");\n\n/**\n * Bridges {@link AbortSignal} to the SDK's {@link CancellationToken} so\n * `apiClient.request` can abort the outbound HTTP request (and stop pulling\n * the SSE body) when the agent run is cancelled.\n */\nfunction cancellationTokenFromAbortSignal(\n signal: AbortSignal,\n): CancellationToken {\n const listeners = new Set<() => void>();\n const fire = () => {\n for (const cb of listeners) {\n try {\n cb();\n } catch {\n // ignore listener failures — abort must stay best-effort\n }\n }\n };\n signal.addEventListener(\"abort\", fire, { passive: true });\n\n return {\n get isCancellationRequested() {\n return signal.aborted;\n },\n onCancellationRequested(callback: (e?: unknown) => unknown) {\n listeners.add(callback as () => void);\n if (signal.aborted) {\n void callback();\n }\n },\n };\n}\n\n/**\n * Invokes a serving endpoint using the SDK's high-level query API.\n * Returns a typed QueryEndpointResponse.\n */\nexport async function invoke(\n client: WorkspaceClient,\n endpointName: string,\n body: Record<string, unknown>,\n): Promise<serving.QueryEndpointResponse> {\n // Strip `stream` from the body — the connector controls this\n const { stream: _stream, ...cleanBody } = body;\n\n logger.debug(\"Invoking endpoint %s\", endpointName);\n\n return client.servingEndpoints.query({\n name: endpointName,\n ...cleanBody,\n } as serving.QueryEndpointInput);\n}\n\n/**\n * Returns the raw SSE byte stream from a serving endpoint.\n * No parsing is performed — bytes are passed through as-is.\n *\n * Uses the SDK's low-level `apiClient.request({ raw: true })` because\n * the high-level `servingEndpoints.query()` returns `Promise<QueryEndpointResponse>`\n * and does not support SSE streaming.\n */\nexport async function stream(\n client: WorkspaceClient,\n endpointName: string,\n body: Record<string, unknown>,\n signal?: AbortSignal,\n): Promise<ReadableStream<Uint8Array>> {\n const { stream: _stream, ...cleanBody } = body;\n\n logger.debug(\"Streaming from endpoint %s\", endpointName);\n\n const context = signal\n ? 
new Context({\n cancellationToken: cancellationTokenFromAbortSignal(signal),\n })\n : undefined;\n\n const response = (await client.apiClient.request(\n {\n path: `/serving-endpoints/${encodeURIComponent(endpointName)}/invocations`,\n method: \"POST\",\n headers: new Headers({\n \"Content-Type\": \"application/json\",\n Accept: \"text/event-stream\",\n }),\n payload: { ...cleanBody, stream: true },\n raw: true,\n },\n context,\n )) as { contents: ReadableStream<Uint8Array> };\n\n if (!response.contents) {\n throw new Error(\"Response body is null — streaming not supported\");\n }\n\n return response.contents;\n}\n"],"mappings":";;;;AAQA,MAAM,SAAS,aAAa,qBAAqB;;;;;;AAOjD,SAAS,iCACP,QACmB;CACnB,MAAM,4BAAY,IAAI,KAAiB;CACvC,MAAM,aAAa;AACjB,OAAK,MAAM,MAAM,UACf,KAAI;AACF,OAAI;UACE;;AAKZ,QAAO,iBAAiB,SAAS,MAAM,EAAE,SAAS,MAAM,CAAC;AAEzD,QAAO;EACL,IAAI,0BAA0B;AAC5B,UAAO,OAAO;;EAEhB,wBAAwB,UAAoC;AAC1D,aAAU,IAAI,SAAuB;AACrC,OAAI,OAAO,QACT,CAAK,UAAU;;EAGpB;;;;;;AAOH,eAAsB,OACpB,QACA,cACA,MACwC;CAExC,MAAM,EAAE,QAAQ,SAAS,GAAG,cAAc;AAE1C,QAAO,MAAM,wBAAwB,aAAa;AAElD,QAAO,OAAO,iBAAiB,MAAM;EACnC,MAAM;EACN,GAAG;EACJ,CAA+B;;;;;;;;;;AAWlC,eAAsB,OACpB,QACA,cACA,MACA,QACqC;CACrC,MAAM,EAAE,QAAQ,SAAS,GAAG,cAAc;AAE1C,QAAO,MAAM,8BAA8B,aAAa;CAExD,MAAM,UAAU,SACZ,IAAI,QAAQ,EACV,mBAAmB,iCAAiC,OAAO,EAC5D,CAAC,GACF;CAEJ,MAAM,WAAY,MAAM,OAAO,UAAU,QACvC;EACE,MAAM,sBAAsB,mBAAmB,aAAa,CAAC;EAC7D,QAAQ;EACR,SAAS,IAAI,QAAQ;GACnB,gBAAgB;GAChB,QAAQ;GACT,CAAC;EACF,SAAS;GAAE,GAAG;GAAW,QAAQ;GAAM;EACvC,KAAK;EACN,EACD,QACD;AAED,KAAI,CAAC,SAAS,SACZ,OAAM,IAAI,MAAM,kDAAkD;AAGpE,QAAO,SAAS"}
@@ -1 +1 @@
1
- {"version":3,"file":"service-context.d.ts","names":[],"sources":["../../src/context/service-context.ts"],"mappings":";;;;;;AAqBA;;UAAiB,mBAAA;EAEP;EAAR,MAAA,EAAQ,eAAA;EAMK;EAJb,aAAA;EAIoB;EAFpB,WAAA,GAAc,OAAA;EAJN;EAMR,WAAA,EAAa,OAAA;AAAA"}
1
+ {"version":3,"file":"service-context.d.ts","names":[],"sources":["../../src/context/service-context.ts"],"mappings":";;;;;;AAsBA;;UAAiB,mBAAA;EAEP;EAAR,MAAA,EAAQ,eAAA;EAMK;EAJb,aAAA;EAIoB;EAFpB,WAAA,GAAc,OAAA;EAJN;EAMR,WAAA,EAAa,OAAA;AAAA"}
@@ -4,7 +4,7 @@ import { ConfigurationError } from "../errors/configuration.js";
4
4
  import { InitializationError } from "../errors/initialization.js";
5
5
  import { init_errors } from "../errors/index.js";
6
6
  import { name, version } from "../appkit/package.js";
7
- import { WorkspaceClient } from "@databricks/sdk-experimental";
7
+ import { ConfigError, WorkspaceClient } from "@databricks/sdk-experimental";
8
8
  import { coerce } from "semver";
9
9
 
10
10
  //#region src/context/service-context.ts
@@ -85,17 +85,25 @@ var init_service_context = __esmMin((() => {
85
85
  return getClientOptions();
86
86
  }
87
87
  static async createContext(options, client) {
88
- const wsClient = client ?? new WorkspaceClient({}, getClientOptions());
89
- const warehouseId = options?.warehouseId ? ServiceContext.getWarehouseId(wsClient) : void 0;
90
- const workspaceId = ServiceContext.getWorkspaceId(wsClient);
91
- const currentUser = await wsClient.currentUser.me();
92
- if (!currentUser.id) throw ConfigurationError.resourceNotFound("Service user ID");
93
- return {
94
- client: wsClient,
95
- serviceUserId: currentUser.id,
96
- warehouseId,
97
- workspaceId
98
- };
88
+ try {
89
+ const wsClient = client ?? new WorkspaceClient({}, getClientOptions());
90
+ const [resolvedWorkspaceId, currentUser, resolvedWarehouseId] = await Promise.all([
91
+ ServiceContext.getWorkspaceId(wsClient),
92
+ wsClient.currentUser.me(),
93
+ options?.warehouseId ? ServiceContext.getWarehouseId(wsClient) : Promise.resolve(void 0)
94
+ ]);
95
+ if (!currentUser.id) throw ConfigurationError.resourceNotFound("Service user ID");
96
+ const warehouseId = options?.warehouseId && resolvedWarehouseId !== void 0 ? Promise.resolve(resolvedWarehouseId) : void 0;
97
+ return {
98
+ client: wsClient,
99
+ serviceUserId: currentUser.id,
100
+ warehouseId,
101
+ workspaceId: Promise.resolve(resolvedWorkspaceId)
102
+ };
103
+ } catch (e) {
104
+ if (e instanceof ConfigError) throw ConfigurationError.databricksAuthenticationSetupFailed(e.baseMessage, { cause: e });
105
+ throw e;
106
+ }
99
107
  }
100
108
  static async getWorkspaceId(client) {
101
109
  if (process.env.DATABRICKS_WORKSPACE_ID) return process.env.DATABRICKS_WORKSPACE_ID;
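The reworked `createContext` resolves the workspace ID, current user, and (when requested) warehouse ID concurrently and converts the SDK's `ConfigError` into a user-facing `ConfigurationError`. A startup sketch under those assumptions; import paths are illustrative.

```typescript
// Sketch: app startup against the reworked ServiceContext.createContext.
// Import paths are illustrative; within the package these live under
// src/context and src/errors.
import { ServiceContext } from "./context/service-context";
import { ConfigurationError } from "./errors/configuration";

async function startup() {
  try {
    // Workspace ID, current user, and (when requested) warehouse ID are now
    // resolved in parallel via Promise.all inside createContext.
    await ServiceContext.initialize({ warehouseId: true });

    const ctx = ServiceContext.get();
    const workspaceId = await ctx.workspaceId;  // Promise<string>
    const warehouseId = await ctx.warehouseId;  // Promise<string> | undefined
    console.log("service user:", ctx.serviceUserId, workspaceId, warehouseId);
  } catch (error) {
    // A ConfigError thrown while creating the WorkspaceClient is rethrown as
    // a ConfigurationError with a short, user-facing auth message.
    if (error instanceof ConfigurationError) {
      console.error(error);
      process.exit(1);
    }
    throw error;
  }
}
```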
@@ -1 +1 @@
1
- {"version":3,"file":"service-context.js","names":["productName","productVersion"],"sources":["../../src/context/service-context.ts"],"sourcesContent":["import {\n type ClientOptions,\n type sql,\n WorkspaceClient,\n} from \"@databricks/sdk-experimental\";\nimport { coerce } from \"semver\";\nimport {\n name as productName,\n version as productVersion,\n} from \"../../package.json\";\nimport {\n AuthenticationError,\n ConfigurationError,\n InitializationError,\n} from \"../errors\";\nimport type { UserContext } from \"./user-context\";\n\n/**\n * Service context holds the service principal client and shared resources.\n * This is initialized once at app startup and shared across all requests.\n */\nexport interface ServiceContextState {\n /** WorkspaceClient authenticated as the service principal */\n client: WorkspaceClient;\n /** The service principal's user ID */\n serviceUserId: string;\n /** Promise that resolves to the warehouse ID (only present when a plugin requires `SQL_WAREHOUSE` resource) */\n warehouseId?: Promise<string>;\n /** Promise that resolves to the workspace ID */\n workspaceId: Promise<string>;\n}\n\nfunction getClientOptions(): ClientOptions {\n const isDev = process.env.NODE_ENV === \"development\";\n const semver = coerce(productVersion);\n const normalizedVersion = (semver?.version ??\n productVersion) as ClientOptions[\"productVersion\"];\n\n return {\n product: productName,\n productVersion: normalizedVersion,\n ...(isDev && { userAgentExtra: { mode: \"dev\" } }),\n };\n}\n\n/**\n * ServiceContext is a singleton that manages the service principal's\n * WorkspaceClient and shared resources like warehouse/workspace IDs.\n *\n * It's initialized once at app startup and provides the foundation\n * for both service principal and user context execution.\n */\nexport class ServiceContext {\n private static instance: ServiceContextState | null = null;\n private static initPromise: Promise<ServiceContextState> | null = null;\n\n /**\n * Initialize the service context. 
Should be called once at app startup.\n * Safe to call multiple times - will return the same instance.\n *\n * @param options - Which shared resources to resolve (derived from plugin manifests).\n * @param client - Optional pre-configured WorkspaceClient to use instead\n * of creating one from environment credentials.\n */\n static async initialize(\n options?: { warehouseId?: boolean },\n client?: WorkspaceClient,\n ): Promise<ServiceContextState> {\n if (ServiceContext.instance) {\n return ServiceContext.instance;\n }\n\n if (ServiceContext.initPromise) {\n return ServiceContext.initPromise;\n }\n\n ServiceContext.initPromise = ServiceContext.createContext(options, client);\n ServiceContext.instance = await ServiceContext.initPromise;\n return ServiceContext.instance;\n }\n\n /**\n * Get the initialized service context.\n * @throws Error if not initialized\n */\n static get(): ServiceContextState {\n if (!ServiceContext.instance) {\n throw InitializationError.notInitialized(\n \"ServiceContext\",\n \"Call ServiceContext.initialize() first\",\n );\n }\n return ServiceContext.instance;\n }\n\n /**\n * Check if the service context has been initialized.\n */\n static isInitialized(): boolean {\n return ServiceContext.instance !== null;\n }\n\n /**\n * Create a user context from request headers.\n *\n * @param token - The user's access token from x-forwarded-access-token header\n * @param userId - The user's ID from x-forwarded-user header\n * @param userName - Optional user name\n * @throws Error if token is not provided\n */\n static createUserContext(\n token: string,\n userId: string,\n userName?: string,\n ): UserContext {\n if (!token) {\n throw AuthenticationError.missingToken(\"user token\");\n }\n\n const host = process.env.DATABRICKS_HOST;\n if (!host) {\n throw ConfigurationError.missingEnvVar(\"DATABRICKS_HOST\");\n }\n\n const serviceCtx = ServiceContext.get();\n\n // Create user client with the OAuth token from Databricks Apps\n // Note: We use authType: \"pat\" because the token is passed as a Bearer token\n // just like a PAT, even though it's technically an OAuth token\n const userClient = new WorkspaceClient(\n {\n token,\n host,\n authType: \"pat\",\n },\n getClientOptions(),\n );\n\n return {\n client: userClient,\n userId,\n userName,\n warehouseId: serviceCtx.warehouseId,\n workspaceId: serviceCtx.workspaceId,\n isUserContext: true,\n };\n }\n\n /**\n * Get the client options for WorkspaceClient.\n * Exposed for testing purposes.\n */\n static getClientOptions(): ClientOptions {\n return getClientOptions();\n }\n\n private static async createContext(\n options?: { warehouseId?: boolean },\n client?: WorkspaceClient,\n ): Promise<ServiceContextState> {\n const wsClient = client ?? new WorkspaceClient({}, getClientOptions());\n\n const warehouseId = options?.warehouseId\n ? 
ServiceContext.getWarehouseId(wsClient)\n : undefined;\n\n const workspaceId = ServiceContext.getWorkspaceId(wsClient);\n const currentUser = await wsClient.currentUser.me();\n\n if (!currentUser.id) {\n throw ConfigurationError.resourceNotFound(\"Service user ID\");\n }\n\n return {\n client: wsClient,\n serviceUserId: currentUser.id,\n warehouseId,\n workspaceId,\n };\n }\n\n private static async getWorkspaceId(\n client: WorkspaceClient,\n ): Promise<string> {\n if (process.env.DATABRICKS_WORKSPACE_ID) {\n return process.env.DATABRICKS_WORKSPACE_ID;\n }\n\n const response = (await client.apiClient.request({\n path: \"/api/2.0/preview/scim/v2/Me\",\n method: \"GET\",\n headers: new Headers(),\n raw: false,\n query: {},\n responseHeaders: [\"x-databricks-org-id\"],\n })) as { \"x-databricks-org-id\": string };\n\n if (!response[\"x-databricks-org-id\"]) {\n throw ConfigurationError.resourceNotFound(\"Workspace ID\");\n }\n\n return response[\"x-databricks-org-id\"];\n }\n\n private static async getWarehouseId(\n client: WorkspaceClient,\n ): Promise<string> {\n if (process.env.DATABRICKS_WAREHOUSE_ID) {\n return process.env.DATABRICKS_WAREHOUSE_ID;\n }\n\n if (process.env.NODE_ENV === \"development\") {\n const response = (await client.apiClient.request({\n path: \"/api/2.0/sql/warehouses\",\n method: \"GET\",\n headers: new Headers(),\n raw: false,\n query: {\n skip_cannot_use: \"true\",\n },\n })) as { warehouses: sql.EndpointInfo[] };\n\n const priorities: Record<sql.State, number> = {\n RUNNING: 0,\n STOPPED: 1,\n STARTING: 2,\n STOPPING: 3,\n DELETED: 99,\n DELETING: 99,\n };\n\n const warehouses = (response.warehouses || []).sort((a, b) => {\n return (\n priorities[a.state as sql.State] - priorities[b.state as sql.State]\n );\n });\n\n if (response.warehouses.length === 0) {\n throw ConfigurationError.resourceNotFound(\n \"Warehouse ID\",\n \"Please configure the DATABRICKS_WAREHOUSE_ID environment variable\",\n );\n }\n\n const firstWarehouse = warehouses[0];\n if (\n firstWarehouse.state === \"DELETED\" ||\n firstWarehouse.state === \"DELETING\" ||\n !firstWarehouse.id\n ) {\n throw ConfigurationError.resourceNotFound(\n \"Warehouse ID\",\n \"Please configure the DATABRICKS_WAREHOUSE_ID environment variable\",\n );\n }\n\n return firstWarehouse.id;\n }\n\n throw ConfigurationError.resourceNotFound(\n \"Warehouse ID\",\n \"Please configure the DATABRICKS_WAREHOUSE_ID environment variable\",\n );\n }\n\n /**\n * Reset the service context. 
Only for testing purposes.\n */\n static reset(): void {\n ServiceContext.instance = null;\n ServiceContext.initPromise = null;\n }\n}\n"],"mappings":";;;;;;;;;;AAgCA,SAAS,mBAAkC;CACzC,MAAM,QAAQ,QAAQ,IAAI,aAAa;AAKvC,QAAO;EACL,SAASA;EACT,gBANa,OAAOC,QAAe,EACF,WACjCA;EAKA,GAAI,SAAS,EAAE,gBAAgB,EAAE,MAAM,OAAO,EAAE;EACjD;;;;cA5BgB;CAsCN,iBAAb,MAAa,eAAe;EAC1B,OAAe,WAAuC;EACtD,OAAe,cAAmD;;;;;;;;;EAUlE,aAAa,WACX,SACA,QAC8B;AAC9B,OAAI,eAAe,SACjB,QAAO,eAAe;AAGxB,OAAI,eAAe,YACjB,QAAO,eAAe;AAGxB,kBAAe,cAAc,eAAe,cAAc,SAAS,OAAO;AAC1E,kBAAe,WAAW,MAAM,eAAe;AAC/C,UAAO,eAAe;;;;;;EAOxB,OAAO,MAA2B;AAChC,OAAI,CAAC,eAAe,SAClB,OAAM,oBAAoB,eACxB,kBACA,yCACD;AAEH,UAAO,eAAe;;;;;EAMxB,OAAO,gBAAyB;AAC9B,UAAO,eAAe,aAAa;;;;;;;;;;EAWrC,OAAO,kBACL,OACA,QACA,UACa;AACb,OAAI,CAAC,MACH,OAAM,oBAAoB,aAAa,aAAa;GAGtD,MAAM,OAAO,QAAQ,IAAI;AACzB,OAAI,CAAC,KACH,OAAM,mBAAmB,cAAc,kBAAkB;GAG3D,MAAM,aAAa,eAAe,KAAK;AAcvC,UAAO;IACL,QAViB,IAAI,gBACrB;KACE;KACA;KACA,UAAU;KACX,EACD,kBAAkB,CACnB;IAIC;IACA;IACA,aAAa,WAAW;IACxB,aAAa,WAAW;IACxB,eAAe;IAChB;;;;;;EAOH,OAAO,mBAAkC;AACvC,UAAO,kBAAkB;;EAG3B,aAAqB,cACnB,SACA,QAC8B;GAC9B,MAAM,WAAW,UAAU,IAAI,gBAAgB,EAAE,EAAE,kBAAkB,CAAC;GAEtE,MAAM,cAAc,SAAS,cACzB,eAAe,eAAe,SAAS,GACvC;GAEJ,MAAM,cAAc,eAAe,eAAe,SAAS;GAC3D,MAAM,cAAc,MAAM,SAAS,YAAY,IAAI;AAEnD,OAAI,CAAC,YAAY,GACf,OAAM,mBAAmB,iBAAiB,kBAAkB;AAG9D,UAAO;IACL,QAAQ;IACR,eAAe,YAAY;IAC3B;IACA;IACD;;EAGH,aAAqB,eACnB,QACiB;AACjB,OAAI,QAAQ,IAAI,wBACd,QAAO,QAAQ,IAAI;GAGrB,MAAM,WAAY,MAAM,OAAO,UAAU,QAAQ;IAC/C,MAAM;IACN,QAAQ;IACR,SAAS,IAAI,SAAS;IACtB,KAAK;IACL,OAAO,EAAE;IACT,iBAAiB,CAAC,sBAAsB;IACzC,CAAC;AAEF,OAAI,CAAC,SAAS,uBACZ,OAAM,mBAAmB,iBAAiB,eAAe;AAG3D,UAAO,SAAS;;EAGlB,aAAqB,eACnB,QACiB;AACjB,OAAI,QAAQ,IAAI,wBACd,QAAO,QAAQ,IAAI;AAGrB,OAAI,QAAQ,IAAI,aAAa,eAAe;IAC1C,MAAM,WAAY,MAAM,OAAO,UAAU,QAAQ;KAC/C,MAAM;KACN,QAAQ;KACR,SAAS,IAAI,SAAS;KACtB,KAAK;KACL,OAAO,EACL,iBAAiB,QAClB;KACF,CAAC;IAEF,MAAM,aAAwC;KAC5C,SAAS;KACT,SAAS;KACT,UAAU;KACV,UAAU;KACV,SAAS;KACT,UAAU;KACX;IAED,MAAM,cAAc,SAAS,cAAc,EAAE,EAAE,MAAM,GAAG,MAAM;AAC5D,YACE,WAAW,EAAE,SAAsB,WAAW,EAAE;MAElD;AAEF,QAAI,SAAS,WAAW,WAAW,EACjC,OAAM,mBAAmB,iBACvB,gBACA,oEACD;IAGH,MAAM,iBAAiB,WAAW;AAClC,QACE,eAAe,UAAU,aACzB,eAAe,UAAU,cACzB,CAAC,eAAe,GAEhB,OAAM,mBAAmB,iBACvB,gBACA,oEACD;AAGH,WAAO,eAAe;;AAGxB,SAAM,mBAAmB,iBACvB,gBACA,oEACD;;;;;EAMH,OAAO,QAAc;AACnB,kBAAe,WAAW;AAC1B,kBAAe,cAAc"}
1
+ {"version":3,"file":"service-context.js","names":["productName","productVersion"],"sources":["../../src/context/service-context.ts"],"sourcesContent":["import {\n type ClientOptions,\n ConfigError,\n type sql,\n WorkspaceClient,\n} from \"@databricks/sdk-experimental\";\nimport { coerce } from \"semver\";\nimport {\n name as productName,\n version as productVersion,\n} from \"../../package.json\";\nimport {\n AuthenticationError,\n ConfigurationError,\n InitializationError,\n} from \"../errors\";\nimport type { UserContext } from \"./user-context\";\n\n/**\n * Service context holds the service principal client and shared resources.\n * This is initialized once at app startup and shared across all requests.\n */\nexport interface ServiceContextState {\n /** WorkspaceClient authenticated as the service principal */\n client: WorkspaceClient;\n /** The service principal's user ID */\n serviceUserId: string;\n /** Promise that resolves to the warehouse ID (only present when a plugin requires `SQL_WAREHOUSE` resource) */\n warehouseId?: Promise<string>;\n /** Promise that resolves to the workspace ID */\n workspaceId: Promise<string>;\n}\n\nfunction getClientOptions(): ClientOptions {\n const isDev = process.env.NODE_ENV === \"development\";\n const semver = coerce(productVersion);\n const normalizedVersion = (semver?.version ??\n productVersion) as ClientOptions[\"productVersion\"];\n\n return {\n product: productName,\n productVersion: normalizedVersion,\n ...(isDev && { userAgentExtra: { mode: \"dev\" } }),\n };\n}\n\n/**\n * ServiceContext is a singleton that manages the service principal's\n * WorkspaceClient and shared resources like warehouse/workspace IDs.\n *\n * It's initialized once at app startup and provides the foundation\n * for both service principal and user context execution.\n */\nexport class ServiceContext {\n private static instance: ServiceContextState | null = null;\n private static initPromise: Promise<ServiceContextState> | null = null;\n\n /**\n * Initialize the service context. 
Should be called once at app startup.\n * Safe to call multiple times - will return the same instance.\n *\n * @param options - Which shared resources to resolve (derived from plugin manifests).\n * @param client - Optional pre-configured WorkspaceClient to use instead\n * of creating one from environment credentials.\n */\n static async initialize(\n options?: { warehouseId?: boolean },\n client?: WorkspaceClient,\n ): Promise<ServiceContextState> {\n if (ServiceContext.instance) {\n return ServiceContext.instance;\n }\n\n if (ServiceContext.initPromise) {\n return ServiceContext.initPromise;\n }\n\n ServiceContext.initPromise = ServiceContext.createContext(options, client);\n ServiceContext.instance = await ServiceContext.initPromise;\n return ServiceContext.instance;\n }\n\n /**\n * Get the initialized service context.\n * @throws Error if not initialized\n */\n static get(): ServiceContextState {\n if (!ServiceContext.instance) {\n throw InitializationError.notInitialized(\n \"ServiceContext\",\n \"Call ServiceContext.initialize() first\",\n );\n }\n return ServiceContext.instance;\n }\n\n /**\n * Check if the service context has been initialized.\n */\n static isInitialized(): boolean {\n return ServiceContext.instance !== null;\n }\n\n /**\n * Create a user context from request headers.\n *\n * @param token - The user's access token from x-forwarded-access-token header\n * @param userId - The user's ID from x-forwarded-user header\n * @param userName - Optional user name\n * @throws Error if token is not provided\n */\n static createUserContext(\n token: string,\n userId: string,\n userName?: string,\n ): UserContext {\n if (!token) {\n throw AuthenticationError.missingToken(\"user token\");\n }\n\n const host = process.env.DATABRICKS_HOST;\n if (!host) {\n throw ConfigurationError.missingEnvVar(\"DATABRICKS_HOST\");\n }\n\n const serviceCtx = ServiceContext.get();\n\n // Create user client with the OAuth token from Databricks Apps\n // Note: We use authType: \"pat\" because the token is passed as a Bearer token\n // just like a PAT, even though it's technically an OAuth token\n const userClient = new WorkspaceClient(\n {\n token,\n host,\n authType: \"pat\",\n },\n getClientOptions(),\n );\n\n return {\n client: userClient,\n userId,\n userName,\n warehouseId: serviceCtx.warehouseId,\n workspaceId: serviceCtx.workspaceId,\n isUserContext: true,\n };\n }\n\n /**\n * Get the client options for WorkspaceClient.\n * Exposed for testing purposes.\n */\n static getClientOptions(): ClientOptions {\n return getClientOptions();\n }\n\n private static async createContext(\n options?: { warehouseId?: boolean },\n client?: WorkspaceClient,\n ): Promise<ServiceContextState> {\n try {\n const wsClient = client ?? new WorkspaceClient({}, getClientOptions());\n\n const [resolvedWorkspaceId, currentUser, resolvedWarehouseId] =\n await Promise.all([\n ServiceContext.getWorkspaceId(wsClient),\n wsClient.currentUser.me(),\n options?.warehouseId\n ? ServiceContext.getWarehouseId(wsClient)\n : Promise.resolve(undefined as string | undefined),\n ]);\n\n if (!currentUser.id) {\n throw ConfigurationError.resourceNotFound(\"Service user ID\");\n }\n\n const warehouseId =\n options?.warehouseId && resolvedWarehouseId !== undefined\n ? 
Promise.resolve(resolvedWarehouseId)\n : undefined;\n\n return {\n client: wsClient,\n serviceUserId: currentUser.id,\n warehouseId,\n workspaceId: Promise.resolve(resolvedWorkspaceId),\n };\n } catch (e) {\n if (e instanceof ConfigError) {\n throw ConfigurationError.databricksAuthenticationSetupFailed(\n e.baseMessage,\n { cause: e },\n );\n }\n throw e;\n }\n }\n\n private static async getWorkspaceId(\n client: WorkspaceClient,\n ): Promise<string> {\n if (process.env.DATABRICKS_WORKSPACE_ID) {\n return process.env.DATABRICKS_WORKSPACE_ID;\n }\n\n const response = (await client.apiClient.request({\n path: \"/api/2.0/preview/scim/v2/Me\",\n method: \"GET\",\n headers: new Headers(),\n raw: false,\n query: {},\n responseHeaders: [\"x-databricks-org-id\"],\n })) as { \"x-databricks-org-id\": string };\n\n if (!response[\"x-databricks-org-id\"]) {\n throw ConfigurationError.resourceNotFound(\"Workspace ID\");\n }\n\n return response[\"x-databricks-org-id\"];\n }\n\n private static async getWarehouseId(\n client: WorkspaceClient,\n ): Promise<string> {\n if (process.env.DATABRICKS_WAREHOUSE_ID) {\n return process.env.DATABRICKS_WAREHOUSE_ID;\n }\n\n if (process.env.NODE_ENV === \"development\") {\n const response = (await client.apiClient.request({\n path: \"/api/2.0/sql/warehouses\",\n method: \"GET\",\n headers: new Headers(),\n raw: false,\n query: {\n skip_cannot_use: \"true\",\n },\n })) as { warehouses: sql.EndpointInfo[] };\n\n const priorities: Record<sql.State, number> = {\n RUNNING: 0,\n STOPPED: 1,\n STARTING: 2,\n STOPPING: 3,\n DELETED: 99,\n DELETING: 99,\n };\n\n const warehouses = (response.warehouses || []).sort((a, b) => {\n return (\n priorities[a.state as sql.State] - priorities[b.state as sql.State]\n );\n });\n\n if (response.warehouses.length === 0) {\n throw ConfigurationError.resourceNotFound(\n \"Warehouse ID\",\n \"Please configure the DATABRICKS_WAREHOUSE_ID environment variable\",\n );\n }\n\n const firstWarehouse = warehouses[0];\n if (\n firstWarehouse.state === \"DELETED\" ||\n firstWarehouse.state === \"DELETING\" ||\n !firstWarehouse.id\n ) {\n throw ConfigurationError.resourceNotFound(\n \"Warehouse ID\",\n \"Please configure the DATABRICKS_WAREHOUSE_ID environment variable\",\n );\n }\n\n return firstWarehouse.id;\n }\n\n throw ConfigurationError.resourceNotFound(\n \"Warehouse ID\",\n \"Please configure the DATABRICKS_WAREHOUSE_ID environment variable\",\n );\n }\n\n /**\n * Reset the service context. 
Only for testing purposes.\n */\n static reset(): void {\n ServiceContext.instance = null;\n ServiceContext.initPromise = null;\n }\n}\n"],"mappings":";;;;;;;;;;AAiCA,SAAS,mBAAkC;CACzC,MAAM,QAAQ,QAAQ,IAAI,aAAa;AAKvC,QAAO;EACL,SAASA;EACT,gBANa,OAAOC,QAAe,EACF,WACjCA;EAKA,GAAI,SAAS,EAAE,gBAAgB,EAAE,MAAM,OAAO,EAAE;EACjD;;;;cA5BgB;CAsCN,iBAAb,MAAa,eAAe;EAC1B,OAAe,WAAuC;EACtD,OAAe,cAAmD;;;;;;;;;EAUlE,aAAa,WACX,SACA,QAC8B;AAC9B,OAAI,eAAe,SACjB,QAAO,eAAe;AAGxB,OAAI,eAAe,YACjB,QAAO,eAAe;AAGxB,kBAAe,cAAc,eAAe,cAAc,SAAS,OAAO;AAC1E,kBAAe,WAAW,MAAM,eAAe;AAC/C,UAAO,eAAe;;;;;;EAOxB,OAAO,MAA2B;AAChC,OAAI,CAAC,eAAe,SAClB,OAAM,oBAAoB,eACxB,kBACA,yCACD;AAEH,UAAO,eAAe;;;;;EAMxB,OAAO,gBAAyB;AAC9B,UAAO,eAAe,aAAa;;;;;;;;;;EAWrC,OAAO,kBACL,OACA,QACA,UACa;AACb,OAAI,CAAC,MACH,OAAM,oBAAoB,aAAa,aAAa;GAGtD,MAAM,OAAO,QAAQ,IAAI;AACzB,OAAI,CAAC,KACH,OAAM,mBAAmB,cAAc,kBAAkB;GAG3D,MAAM,aAAa,eAAe,KAAK;AAcvC,UAAO;IACL,QAViB,IAAI,gBACrB;KACE;KACA;KACA,UAAU;KACX,EACD,kBAAkB,CACnB;IAIC;IACA;IACA,aAAa,WAAW;IACxB,aAAa,WAAW;IACxB,eAAe;IAChB;;;;;;EAOH,OAAO,mBAAkC;AACvC,UAAO,kBAAkB;;EAG3B,aAAqB,cACnB,SACA,QAC8B;AAC9B,OAAI;IACF,MAAM,WAAW,UAAU,IAAI,gBAAgB,EAAE,EAAE,kBAAkB,CAAC;IAEtE,MAAM,CAAC,qBAAqB,aAAa,uBACvC,MAAM,QAAQ,IAAI;KAChB,eAAe,eAAe,SAAS;KACvC,SAAS,YAAY,IAAI;KACzB,SAAS,cACL,eAAe,eAAe,SAAS,GACvC,QAAQ,QAAQ,OAAgC;KACrD,CAAC;AAEJ,QAAI,CAAC,YAAY,GACf,OAAM,mBAAmB,iBAAiB,kBAAkB;IAG9D,MAAM,cACJ,SAAS,eAAe,wBAAwB,SAC5C,QAAQ,QAAQ,oBAAoB,GACpC;AAEN,WAAO;KACL,QAAQ;KACR,eAAe,YAAY;KAC3B;KACA,aAAa,QAAQ,QAAQ,oBAAoB;KAClD;YACM,GAAG;AACV,QAAI,aAAa,YACf,OAAM,mBAAmB,oCACvB,EAAE,aACF,EAAE,OAAO,GAAG,CACb;AAEH,UAAM;;;EAIV,aAAqB,eACnB,QACiB;AACjB,OAAI,QAAQ,IAAI,wBACd,QAAO,QAAQ,IAAI;GAGrB,MAAM,WAAY,MAAM,OAAO,UAAU,QAAQ;IAC/C,MAAM;IACN,QAAQ;IACR,SAAS,IAAI,SAAS;IACtB,KAAK;IACL,OAAO,EAAE;IACT,iBAAiB,CAAC,sBAAsB;IACzC,CAAC;AAEF,OAAI,CAAC,SAAS,uBACZ,OAAM,mBAAmB,iBAAiB,eAAe;AAG3D,UAAO,SAAS;;EAGlB,aAAqB,eACnB,QACiB;AACjB,OAAI,QAAQ,IAAI,wBACd,QAAO,QAAQ,IAAI;AAGrB,OAAI,QAAQ,IAAI,aAAa,eAAe;IAC1C,MAAM,WAAY,MAAM,OAAO,UAAU,QAAQ;KAC/C,MAAM;KACN,QAAQ;KACR,SAAS,IAAI,SAAS;KACtB,KAAK;KACL,OAAO,EACL,iBAAiB,QAClB;KACF,CAAC;IAEF,MAAM,aAAwC;KAC5C,SAAS;KACT,SAAS;KACT,UAAU;KACV,UAAU;KACV,SAAS;KACT,UAAU;KACX;IAED,MAAM,cAAc,SAAS,cAAc,EAAE,EAAE,MAAM,GAAG,MAAM;AAC5D,YACE,WAAW,EAAE,SAAsB,WAAW,EAAE;MAElD;AAEF,QAAI,SAAS,WAAW,WAAW,EACjC,OAAM,mBAAmB,iBACvB,gBACA,oEACD;IAGH,MAAM,iBAAiB,WAAW;AAClC,QACE,eAAe,UAAU,aACzB,eAAe,UAAU,cACzB,CAAC,eAAe,GAEhB,OAAM,mBAAmB,iBACvB,gBACA,oEACD;AAGH,WAAO,eAAe;;AAGxB,SAAM,mBAAmB,iBACvB,gBACA,oEACD;;;;;EAMH,OAAO,QAAc;AACnB,kBAAe,WAAW;AAC1B,kBAAe,cAAc"}
@@ -31,6 +31,17 @@ declare class ConfigurationError extends AppKitError {
31
31
  * Create a configuration error for missing connection string parameter
32
32
  */
33
33
  static missingConnectionParam(param: string): ConfigurationError;
34
+ /**
35
+ * Databricks CLI / token auth failed while creating the workspace client.
36
+ *
37
+ * By default the message is short; key lines use **picocolors** when the
38
+ * terminal supports it (also respects `NO_COLOR`). `console.error` won’t show
39
+ * stacks or `{ code, context, … }`. Set `APPKIT_VERBOSE_AUTH_ERRORS=1` for full
40
+ * `cause`, stack, and the raw SDK message (verbose appendix is unstyled).
41
+ */
42
+ static databricksAuthenticationSetupFailed(detail: string, options?: {
43
+ cause?: Error;
44
+ }): ConfigurationError;
34
45
  }
35
46
  //#endregion
36
47
  export { ConfigurationError };
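A hedged sketch of the new factory in isolation; the import path and the sample SDK message are illustrative, and the `$ databricks ...` line is what the CLI-suggestion regex looks for.

```typescript
// Sketch: surfacing a failed CLI/token auth setup with the new factory.
// The error below stands in for the SDK's ConfigError; import path assumed.
import { ConfigurationError } from "@databricks/appkit";

const sdkError = new Error(
  "default auth: cannot configure default credentials.\n $ databricks auth login --host https://example.cloud.databricks.com",
);

const err = ConfigurationError.databricksAuthenticationSetupFailed(
  sdkError.message,
  { cause: sdkError },
);

// Default: a short, colored message; stack and { code, context } are hidden.
// With APPKIT_VERBOSE_AUTH_ERRORS=1 the raw SDK text, cause, and stack appear.
console.error(err);
```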
@@ -1 +1 @@
1
- {"version":3,"file":"configuration.d.ts","names":[],"sources":["../../src/errors/configuration.ts"],"mappings":";;;;;AAYA;;;;;;;;cAAa,kBAAA,SAA2B,WAAA;EAAA,SAC7B,IAAA;EAAA,SACA,UAAA;EAAA,SACA,WAAA;EADA;;;EAAA,OAMF,aAAA,CAAc,OAAA,WAAkB,kBAAA;EAAA;;;EAAA,OAUhC,gBAAA,CAAiB,QAAA,UAAkB,IAAA,YAAgB,kBAAA;EAAA;;;EAAA,OAUnD,iBAAA,CACL,OAAA,UACA,OAAA,YACC,kBAAA;EAAA;;;EAAA,OAUI,sBAAA,CAAuB,KAAA,WAAgB,kBAAA;AAAA"}
1
+ {"version":3,"file":"configuration.d.ts","names":[],"sources":["../../src/errors/configuration.ts"],"mappings":";;;;;AA2CA;;;;;;;;cAAa,kBAAA,SAA2B,WAAA;EAAA,SAC7B,IAAA;EAAA,SACA,UAAA;EAAA,SACA,WAAA;EAH6B;;;EAAA,OAQ/B,aAAA,CAAc,OAAA,WAAkB,kBAAA;EAAhC;;;EAAA,OAUA,gBAAA,CAAiB,QAAA,UAAkB,IAAA,YAAgB,kBAAA;EAAlC;;;EAAA,OAUjB,iBAAA,CACL,OAAA,UACA,OAAA,YACC,kBAAA;EAFD;;;EAAA,OAYK,sBAAA,CAAuB,KAAA,WAAgB,kBAAA;EAAhB;;;;;;;;EAAA,OAevB,mCAAA,CACL,MAAA,UACA,OAAA;IAAY,KAAA,GAAQ,KAAA;EAAA,IACnB,kBAAA;AAAA"}
@@ -1,7 +1,31 @@
1
1
  import { __esmMin } from "../_virtual/_rolldown/runtime.js";
2
2
  import { AppKitError, init_base } from "./base.js";
3
+ import pc from "picocolors";
3
4
 
4
5
  //#region src/errors/configuration.ts
6
+ function authSetupVerbose() {
7
+ return process.env.APPKIT_VERBOSE_AUTH_ERRORS === "1" || process.env.APPKIT_VERBOSE_AUTH_ERRORS === "true";
8
+ }
9
+ /** Pulls ` $ databricks ...` from SDK text when present. */
10
+ function suggestedDatabricksCliCommand(detail) {
11
+ return detail.match(/\$\s*(databricks[^\n]+)/)?.[1]?.trim();
12
+ }
13
+ /** Makes `console.error` show only the message (no stack, no extra fields). */
14
+ function pinUserFacingAuthError(err) {
15
+ Object.defineProperty(err, "stack", {
16
+ value: "",
17
+ configurable: true,
18
+ enumerable: false,
19
+ writable: true
20
+ });
21
+ Object.defineProperty(err, Symbol.for("nodejs.util.inspect.custom"), {
22
+ value: function() {
23
+ return this.message;
24
+ },
25
+ enumerable: false,
26
+ configurable: true
27
+ });
28
+ }
5
29
  var ConfigurationError;
6
30
  var init_configuration = __esmMin((() => {
7
31
  init_base();
@@ -33,6 +57,35 @@ var init_configuration = __esmMin((() => {
33
57
  static missingConnectionParam(param) {
34
58
  return new ConfigurationError(`Connection string must include ${param} parameter`, { context: { parameter: param } });
35
59
  }
60
+ /**
61
+ * Databricks CLI / token auth failed while creating the workspace client.
62
+ *
63
+ * By default the message is short; key lines use **picocolors** when the
64
+ * terminal supports it (also respects `NO_COLOR`). `console.error` won’t show
65
+ * stacks or `{ code, context, … }`. Set `APPKIT_VERBOSE_AUTH_ERRORS=1` for full
66
+ * `cause`, stack, and the raw SDK message (verbose appendix is unstyled).
67
+ */
68
+ static databricksAuthenticationSetupFailed(detail, options) {
69
+ const verbose = authSetupVerbose();
70
+ const host = process.env.DATABRICKS_HOST ?? "(not set)";
71
+ const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID;
72
+ const d = detail.trim();
73
+ const cli = suggestedDatabricksCliCommand(d);
74
+ const lines = [
75
+ pc.bold(pc.red("Databricks authentication failed.")),
76
+ "",
77
+ cli ? `${pc.bold("Run this, then try again:")}\n ${pc.cyan(cli)}` : pc.yellow("Log in with the Databricks CLI (for example, databricks auth login for this workspace), then try again."),
78
+ "",
79
+ pc.dim("Or set DATABRICKS_TOKEN and DATABRICKS_HOST instead of CLI-based auth."),
80
+ "",
81
+ `${pc.green("DATABRICKS_HOST")}: ${host}`
82
+ ];
83
+ if (warehouseId) lines.push(`${pc.green("DATABRICKS_WAREHOUSE_ID")}: ${warehouseId}`);
84
+ if (verbose) lines.push("", d);
85
+ const err = new ConfigurationError(lines.join("\n"), { cause: verbose ? options?.cause : void 0 });
86
+ if (!verbose) pinUserFacingAuthError(err);
87
+ return err;
88
+ }
36
89
  };
37
90
  }));
38
91
 
@@ -1 +1 @@
1
- {"version":3,"file":"configuration.js","names":[],"sources":["../../src/errors/configuration.ts"],"sourcesContent":["import { AppKitError } from \"./base\";\n\n/**\n * Error thrown when configuration is missing or invalid.\n * Use for missing environment variables, invalid settings, or setup issues.\n *\n * @example\n * ```typescript\n * throw new ConfigurationError(\"DATABRICKS_HOST environment variable is required\");\n * throw new ConfigurationError(\"Warehouse ID not found\", { context: { env: \"production\" } });\n * ```\n */\nexport class ConfigurationError extends AppKitError {\n readonly code = \"CONFIGURATION_ERROR\";\n readonly statusCode = 500;\n readonly isRetryable = false;\n\n /**\n * Create a configuration error for missing environment variable\n */\n static missingEnvVar(varName: string): ConfigurationError {\n return new ConfigurationError(\n `${varName} environment variable is required`,\n { context: { envVar: varName } },\n );\n }\n\n /**\n * Create a configuration error for missing resource\n */\n static resourceNotFound(resource: string, hint?: string): ConfigurationError {\n const message = hint\n ? `${resource} not found. ${hint}`\n : `${resource} not found`;\n return new ConfigurationError(message, { context: { resource } });\n }\n\n /**\n * Create a configuration error for invalid connection config\n */\n static invalidConnection(\n service: string,\n details?: string,\n ): ConfigurationError {\n const message = details\n ? `${service} connection not configured. ${details}`\n : `${service} connection not configured`;\n return new ConfigurationError(message, { context: { service } });\n }\n\n /**\n * Create a configuration error for missing connection string parameter\n */\n static missingConnectionParam(param: string): ConfigurationError {\n return new ConfigurationError(\n `Connection string must include ${param} parameter`,\n { context: { parameter: param } },\n );\n }\n}\n"],"mappings":";;;;;;YAAqC;CAYxB,qBAAb,MAAa,2BAA2B,YAAY;EAClD,AAAS,OAAO;EAChB,AAAS,aAAa;EACtB,AAAS,cAAc;;;;EAKvB,OAAO,cAAc,SAAqC;AACxD,UAAO,IAAI,mBACT,GAAG,QAAQ,oCACX,EAAE,SAAS,EAAE,QAAQ,SAAS,EAAE,CACjC;;;;;EAMH,OAAO,iBAAiB,UAAkB,MAAmC;AAI3E,UAAO,IAAI,mBAHK,OACZ,GAAG,SAAS,cAAc,SAC1B,GAAG,SAAS,aACuB,EAAE,SAAS,EAAE,UAAU,EAAE,CAAC;;;;;EAMnE,OAAO,kBACL,SACA,SACoB;AAIpB,UAAO,IAAI,mBAHK,UACZ,GAAG,QAAQ,8BAA8B,YACzC,GAAG,QAAQ,6BACwB,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC;;;;;EAMlE,OAAO,uBAAuB,OAAmC;AAC/D,UAAO,IAAI,mBACT,kCAAkC,MAAM,aACxC,EAAE,SAAS,EAAE,WAAW,OAAO,EAAE,CAClC"}
1
+ {"version":3,"file":"configuration.js","names":[],"sources":["../../src/errors/configuration.ts"],"sourcesContent":["import pc from \"picocolors\";\nimport { AppKitError } from \"./base\";\n\nfunction authSetupVerbose(): boolean {\n return (\n process.env.APPKIT_VERBOSE_AUTH_ERRORS === \"1\" ||\n process.env.APPKIT_VERBOSE_AUTH_ERRORS === \"true\"\n );\n}\n\n/** Pulls ` $ databricks ...` from SDK text when present. */\nfunction suggestedDatabricksCliCommand(detail: string): string | undefined {\n const m = detail.match(/\\$\\s*(databricks[^\\n]+)/);\n return m?.[1]?.trim();\n}\n\n/** Makes `console.error` show only the message (no stack, no extra fields). */\nfunction pinUserFacingAuthError(err: ConfigurationError): void {\n Object.defineProperty(err, \"stack\", {\n value: \"\",\n configurable: true,\n enumerable: false,\n writable: true,\n });\n Object.defineProperty(err, Symbol.for(\"nodejs.util.inspect.custom\"), {\n value: function (this: ConfigurationError): string {\n return this.message;\n },\n enumerable: false,\n configurable: true,\n });\n}\n\n/**\n * Error thrown when configuration is missing or invalid.\n * Use for missing environment variables, invalid settings, or setup issues.\n *\n * @example\n * ```typescript\n * throw new ConfigurationError(\"DATABRICKS_HOST environment variable is required\");\n * throw new ConfigurationError(\"Warehouse ID not found\", { context: { env: \"production\" } });\n * ```\n */\nexport class ConfigurationError extends AppKitError {\n readonly code = \"CONFIGURATION_ERROR\";\n readonly statusCode = 500;\n readonly isRetryable = false;\n\n /**\n * Create a configuration error for missing environment variable\n */\n static missingEnvVar(varName: string): ConfigurationError {\n return new ConfigurationError(\n `${varName} environment variable is required`,\n { context: { envVar: varName } },\n );\n }\n\n /**\n * Create a configuration error for missing resource\n */\n static resourceNotFound(resource: string, hint?: string): ConfigurationError {\n const message = hint\n ? `${resource} not found. ${hint}`\n : `${resource} not found`;\n return new ConfigurationError(message, { context: { resource } });\n }\n\n /**\n * Create a configuration error for invalid connection config\n */\n static invalidConnection(\n service: string,\n details?: string,\n ): ConfigurationError {\n const message = details\n ? `${service} connection not configured. ${details}`\n : `${service} connection not configured`;\n return new ConfigurationError(message, { context: { service } });\n }\n\n /**\n * Create a configuration error for missing connection string parameter\n */\n static missingConnectionParam(param: string): ConfigurationError {\n return new ConfigurationError(\n `Connection string must include ${param} parameter`,\n { context: { parameter: param } },\n );\n }\n\n /**\n * Databricks CLI / token auth failed while creating the workspace client.\n *\n * By default the message is short; key lines use **picocolors** when the\n * terminal supports it (also respects `NO_COLOR`). `console.error` won’t show\n * stacks or `{ code, context, … }`. Set `APPKIT_VERBOSE_AUTH_ERRORS=1` for full\n * `cause`, stack, and the raw SDK message (verbose appendix is unstyled).\n */\n static databricksAuthenticationSetupFailed(\n detail: string,\n options?: { cause?: Error },\n ): ConfigurationError {\n const verbose = authSetupVerbose();\n const host = process.env.DATABRICKS_HOST ?? 
\"(not set)\";\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID;\n const d = detail.trim();\n const cli = suggestedDatabricksCliCommand(d);\n\n const title = pc.bold(pc.red(\"Databricks authentication failed.\"));\n const action = cli\n ? `${pc.bold(\"Run this, then try again:\")}\\n ${pc.cyan(cli)}`\n : pc.yellow(\n \"Log in with the Databricks CLI (for example, databricks auth login for this workspace), then try again.\",\n );\n const tokenHint = pc.dim(\n \"Or set DATABRICKS_TOKEN and DATABRICKS_HOST instead of CLI-based auth.\",\n );\n\n const lines: string[] = [\n title,\n \"\",\n action,\n \"\",\n tokenHint,\n \"\",\n `${pc.green(\"DATABRICKS_HOST\")}: ${host}`,\n ];\n if (warehouseId) {\n lines.push(`${pc.green(\"DATABRICKS_WAREHOUSE_ID\")}: ${warehouseId}`);\n }\n if (verbose) {\n lines.push(\"\", d);\n }\n\n const err = new ConfigurationError(lines.join(\"\\n\"), {\n cause: verbose ? options?.cause : undefined,\n });\n\n if (!verbose) {\n pinUserFacingAuthError(err);\n }\n return err;\n }\n}\n"],"mappings":";;;;;AAGA,SAAS,mBAA4B;AACnC,QACE,QAAQ,IAAI,+BAA+B,OAC3C,QAAQ,IAAI,+BAA+B;;;AAK/C,SAAS,8BAA8B,QAAoC;AAEzE,QADU,OAAO,MAAM,0BAA0B,GACtC,IAAI,MAAM;;;AAIvB,SAAS,uBAAuB,KAA+B;AAC7D,QAAO,eAAe,KAAK,SAAS;EAClC,OAAO;EACP,cAAc;EACd,YAAY;EACZ,UAAU;EACX,CAAC;AACF,QAAO,eAAe,KAAK,OAAO,IAAI,6BAA6B,EAAE;EACnE,OAAO,WAA4C;AACjD,UAAO,KAAK;;EAEd,YAAY;EACZ,cAAc;EACf,CAAC;;;;YA7BiC;CA0CxB,qBAAb,MAAa,2BAA2B,YAAY;EAClD,AAAS,OAAO;EAChB,AAAS,aAAa;EACtB,AAAS,cAAc;;;;EAKvB,OAAO,cAAc,SAAqC;AACxD,UAAO,IAAI,mBACT,GAAG,QAAQ,oCACX,EAAE,SAAS,EAAE,QAAQ,SAAS,EAAE,CACjC;;;;;EAMH,OAAO,iBAAiB,UAAkB,MAAmC;AAI3E,UAAO,IAAI,mBAHK,OACZ,GAAG,SAAS,cAAc,SAC1B,GAAG,SAAS,aACuB,EAAE,SAAS,EAAE,UAAU,EAAE,CAAC;;;;;EAMnE,OAAO,kBACL,SACA,SACoB;AAIpB,UAAO,IAAI,mBAHK,UACZ,GAAG,QAAQ,8BAA8B,YACzC,GAAG,QAAQ,6BACwB,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC;;;;;EAMlE,OAAO,uBAAuB,OAAmC;AAC/D,UAAO,IAAI,mBACT,kCAAkC,MAAM,aACxC,EAAE,SAAS,EAAE,WAAW,OAAO,EAAE,CAClC;;;;;;;;;;EAWH,OAAO,oCACL,QACA,SACoB;GACpB,MAAM,UAAU,kBAAkB;GAClC,MAAM,OAAO,QAAQ,IAAI,mBAAmB;GAC5C,MAAM,cAAc,QAAQ,IAAI;GAChC,MAAM,IAAI,OAAO,MAAM;GACvB,MAAM,MAAM,8BAA8B,EAAE;GAY5C,MAAM,QAAkB;IAVV,GAAG,KAAK,GAAG,IAAI,oCAAoC,CAAC;IAYhE;IAXa,MACX,GAAG,GAAG,KAAK,4BAA4B,CAAC,MAAM,GAAG,KAAK,IAAI,KAC1D,GAAG,OACD,0GACD;IASH;IARgB,GAAG,IACnB,yEACD;IAQC;IACA,GAAG,GAAG,MAAM,kBAAkB,CAAC,IAAI;IACpC;AACD,OAAI,YACF,OAAM,KAAK,GAAG,GAAG,MAAM,0BAA0B,CAAC,IAAI,cAAc;AAEtE,OAAI,QACF,OAAM,KAAK,IAAI,EAAE;GAGnB,MAAM,MAAM,IAAI,mBAAmB,MAAM,KAAK,KAAK,EAAE,EACnD,OAAO,UAAU,SAAS,QAAQ,QACnC,CAAC;AAEF,OAAI,CAAC,QACH,wBAAuB,IAAI;AAE7B,UAAO"}
@@ -1,7 +1,7 @@
1
1
  import crypto from "node:crypto";
2
+ import pc from "picocolors";
2
3
  import path from "node:path";
3
4
  import fs from "node:fs";
4
- import pc from "picocolors";
5
5
 
6
6
  //#region src/plugins/server/utils.ts
7
7
  function parseCookies(req) {
@@ -0,0 +1,72 @@
1
+ import { JSONSchema7 } from "json-schema";
2
+
3
+ //#region ../shared/src/agent.d.ts
4
+ interface ToolAnnotations {
5
+ readOnly?: boolean;
6
+ destructive?: boolean;
7
+ idempotent?: boolean;
8
+ requiresUserContext?: boolean;
9
+ }
10
+ interface AgentToolDefinition {
11
+ name: string;
12
+ description: string;
13
+ parameters: JSONSchema7;
14
+ annotations?: ToolAnnotations;
15
+ }
16
+ interface Message {
17
+ id: string;
18
+ role: "user" | "assistant" | "system" | "tool";
19
+ content: string;
20
+ toolCallId?: string;
21
+ toolCalls?: ToolCall[];
22
+ createdAt: Date;
23
+ }
24
+ interface ToolCall {
25
+ id: string;
26
+ name: string;
27
+ args: unknown;
28
+ }
29
+ type AgentEvent = {
30
+ type: "message_delta";
31
+ content: string;
32
+ } | {
33
+ type: "message";
34
+ content: string;
35
+ } | {
36
+ type: "tool_call";
37
+ callId: string;
38
+ name: string;
39
+ args: unknown;
40
+ } | {
41
+ type: "tool_result";
42
+ callId: string;
43
+ result: unknown;
44
+ error?: string;
45
+ } | {
46
+ type: "thinking";
47
+ content: string;
48
+ } | {
49
+ type: "status";
50
+ status: "running" | "waiting" | "complete" | "error";
51
+ error?: string;
52
+ } | {
53
+ type: "metadata";
54
+ data: Record<string, unknown>;
55
+ };
56
+ interface AgentInput {
57
+ messages: Message[];
58
+ tools: AgentToolDefinition[];
59
+ threadId: string;
60
+ signal?: AbortSignal;
61
+ }
62
+ interface AgentRunContext {
63
+ /** Tool implementations should sanitize failure text — errors become `tool_result.error` and can flow back into the LLM transcript. */
64
+ executeTool: (name: string, args: unknown) => Promise<unknown>;
65
+ signal?: AbortSignal;
66
+ }
67
+ interface AgentAdapter {
68
+ run(input: AgentInput, context: AgentRunContext): AsyncGenerator<AgentEvent, void, unknown>;
69
+ }
70
+ //#endregion
71
+ export { AgentAdapter, AgentEvent, AgentInput, AgentRunContext, AgentToolDefinition, Message, ToolAnnotations, ToolCall };
72
+ //# sourceMappingURL=agent.d.ts.map
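The new `agent.d.ts` surface above defines the adapter contract consumed by the runner. A minimal, purely illustrative adapter that satisfies it is sketched below; the import path is an assumption, and the echo behavior stands in for a real model call.

```typescript
// Minimal sketch of an AgentAdapter conforming to the new shared agent types.
// It echoes the last user message; illustrative only, not a real adapter.
import type {
  AgentAdapter,
  AgentEvent,
  AgentInput,
  AgentRunContext,
} from "@databricks/appkit"; // illustrative import path

class EchoAdapter implements AgentAdapter {
  async *run(
    input: AgentInput,
    _context: AgentRunContext,
  ): AsyncGenerator<AgentEvent, void, unknown> {
    yield { type: "status", status: "running" };

    const lastUser = [...input.messages].reverse().find((m) => m.role === "user");
    const reply = `You said: ${lastUser?.content ?? ""}`;

    // Stream the reply as deltas, then emit the final message.
    for (const chunk of reply.match(/.{1,8}/g) ?? []) {
      if (input.signal?.aborted) break;
      yield { type: "message_delta", content: chunk };
    }
    yield { type: "message", content: reply };
    yield { type: "status", status: "complete" };
  }
}
```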
@@ -0,0 +1 @@
1
+ {"version":3,"file":"agent.d.ts","names":[],"sources":["../../../../shared/src/agent.ts"],"mappings":";;;UAMiB,eAAA;EACf,QAAA;EACA,WAAA;EACA,UAAA;EACA,mBAAA;AAAA;AAAA,UAGe,mBAAA;EACf,IAAA;EACA,WAAA;EACA,UAAA,EAAY,WAAA;EACZ,WAAA,GAAc,eAAA;AAAA;AAAA,UAgBC,OAAA;EACf,EAAA;EACA,IAAA;EACA,OAAA;EACA,UAAA;EACA,SAAA,GAAY,QAAA;EACZ,SAAA,EAAW,IAAA;AAAA;AAAA,UAGI,QAAA;EACf,EAAA;EACA,IAAA;EACA,IAAA;AAAA;AAAA,KA2BU,UAAA;EACN,IAAA;EAAuB,OAAA;AAAA;EACvB,IAAA;EAAiB,OAAA;AAAA;EACjB,IAAA;EAAmB,MAAA;EAAgB,IAAA;EAAc,IAAA;AAAA;EAEjD,IAAA;EACA,MAAA;EACA,MAAA;EACA,KAAA;AAAA;EAEA,IAAA;EAAkB,OAAA;AAAA;EAElB,IAAA;EACA,MAAA;EACA,KAAA;AAAA;EAEA,IAAA;EAAkB,IAAA,EAAM,MAAA;AAAA;AAAA,UA0Gb,UAAA;EACf,QAAA,EAAU,OAAA;EACV,KAAA,EAAO,mBAAA;EACP,QAAA;EACA,MAAA,GAAS,WAAA;AAAA;AAAA,UAGM,eAAA;;EAEf,WAAA,GAAc,IAAA,UAAc,IAAA,cAAkB,OAAA;EAC9C,MAAA,GAAS,WAAA;AAAA;AAAA,UAGM,YAAA;EACf,GAAA,CACE,KAAA,EAAO,UAAA,EACP,OAAA,EAAS,eAAA,GACR,cAAA,CAAe,UAAA;AAAA"}
@@ -1,3 +1,4 @@
1
+ import { AgentAdapter, AgentEvent, AgentInput, AgentRunContext, AgentToolDefinition, Message, ToolAnnotations, ToolCall } from "./agent.js";
1
2
  import { ResourceFieldEntry } from "./schemas/plugin-manifest.generated.js";
2
3
  import { BasePlugin, BasePluginConfig, HttpMethod, IAppRequest, IAppResponse, IAppRouter, PluginConstructor, PluginData, PluginEndpointMap, PluginExports, PluginManifest, PluginMap, PluginPhase, ResourceRequirement, RouteConfig, TelemetryOptions, ToPlugin, WithAsUser } from "./plugin.js";
3
4
  import { CacheConfig, CacheEntry, CacheStorage } from "./cache.js";
@@ -1,8 +1,8 @@
1
1
  import { createLogger } from "../logging/logger.js";
2
+ import pc from "picocolors";
2
3
  import fs from "node:fs/promises";
3
4
  import path from "node:path";
4
5
  import fs$1 from "node:fs";
5
- import pc from "picocolors";
6
6
 
7
7
  //#region src/type-generator/migration.ts
8
8
  const logger = createLogger("type-generator:migration");