@bitofsky/databricks-sql 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
+ [Generated source map (version 3) for the compiled bundle, emitted as a single minified JSON line. It records the original sources (../src/index.ts, ../src/errors.ts, ../src/util.ts, ../src/http.ts, ../src/databricks-api.ts, ../src/api/executeStatement.ts, ../src/api/fetchRow.ts, ../src/createRowMapper.ts, ../src/api/fetchStream.ts, ../src/api/fetchAll.ts, ../src/api/mergeExternalLinks.ts) together with their embedded contents and the base64-VLQ mappings; the machine-generated payload is not reproduced here.]
@@ -0,0 +1,250 @@
+ import { Readable } from 'node:stream';
+
+ /** Authentication information for Databricks API */
+ type AuthInfo = {
+     /** Databricks Personal Access Token */
+     token: string;
+     /** Databricks workspace host (e.g., ...cloud.databricks.com) */
+     host: string;
+     /** SQL warehouse HTTP path (e.g., /sql/1.0/warehouses/abc123) */
+     httpPath: string;
+ };
+ /** Statement execution states */
+ type StatementState = 'PENDING' | 'RUNNING' | 'SUCCEEDED' | 'FAILED' | 'CANCELED' | 'CLOSED';
+ /** Statement status */
+ type StatementStatus = {
+     state: StatementState;
+     error?: {
+         error_code: string;
+         message: string;
+     };
+ };
+ /** Column schema information */
+ type ColumnInfo = {
+     name: string;
+     type_text: string;
+     type_name: string;
+     position: number;
+     type_precision?: number;
+     type_scale?: number;
+ };
+ /** Chunk information */
+ type ChunkInfo = {
+     chunk_index: number;
+     row_offset: number;
+     row_count: number;
+     byte_count?: number;
+ };
+ /** Result manifest (schema and chunk info) */
+ type StatementManifest = {
+     format: 'JSON_ARRAY' | 'ARROW_STREAM' | 'CSV';
+     schema: {
+         column_count: number;
+         columns: ColumnInfo[];
+     };
+     total_chunk_count: number;
+     total_row_count?: number;
+     total_byte_count?: number;
+     truncated?: boolean;
+     chunks?: ChunkInfo[];
+ };
+ /** External link for chunked results */
+ type ExternalLinkInfo = {
+     chunk_index: number;
+     row_offset: number;
+     row_count: number;
+     byte_count: number;
+     external_link: string;
+     expiration: string;
+ };
+ /** Inline result data */
+ type InlineResultData = {
+     data_array?: unknown[][];
+     external_links?: never;
+ };
+ /** External links result data */
+ type ExternalLinksResultData = {
+     data_array?: never;
+     external_links?: ExternalLinkInfo[];
+ };
+ /** Result data (inline or external links) */
+ type ResultData = InlineResultData | ExternalLinksResultData;
+ /**
+  * Statement result from API
+  * @see https://docs.databricks.com/api/workspace/statementexecution/getstatement
+  */
+ type StatementResult = {
+     statement_id: string;
+     status: StatementStatus;
+     manifest?: StatementManifest;
+     result?: ResultData;
+ };
+ /** Statement parameter */
+ type StatementParameter = {
+     name: string;
+     type?: 'STRING' | 'LONG' | 'DOUBLE' | 'BOOLEAN';
+     value?: string | number | boolean;
+ };
+ /**
+  * Options for executeStatement
+  * @see https://docs.databricks.com/api/workspace/statementexecution/executestatement
+  */
+ type ExecuteStatementOptions = {
+     /** Progress callback (called on each poll) */
+     onProgress?: (status: StatementStatus) => void;
+     /** Abort signal for cancellation */
+     signal?: AbortSignal;
+     /** Result byte limit */
+     byte_limit?: number;
+     /** Catalog name */
+     catalog?: string;
+     /** Result disposition */
+     disposition?: 'INLINE' | 'EXTERNAL_LINKS';
+     /** Result format */
+     format?: 'JSON_ARRAY' | 'ARROW_STREAM' | 'CSV';
+     /** Behavior on wait timeout */
+     on_wait_timeout?: 'CONTINUE' | 'CANCEL';
+     /** Query parameters */
+     parameters?: StatementParameter[];
+     /** Row limit */
+     row_limit?: number;
+     /** Schema name */
+     schema?: string;
+     /** Server wait timeout (e.g., '10s', '50s') */
+     wait_timeout?: string;
+     /** Warehouse ID (can be extracted from httpPath) */
+     warehouse_id?: string;
+ };
+ /** Base options with abort signal support */
+ type SignalOptions = {
+     /** Abort signal for cancellation */
+     signal?: AbortSignal;
+ };
+ /** Row data as array */
+ type RowArray = unknown[];
+ /** Row data as JSON object */
+ type RowObject = Record<string, unknown>;
+ /** Format for fetchRow/fetchAll */
+ type FetchRowFormat = 'JSON_ARRAY' | 'JSON_OBJECT';
+ /** Options for fetchStream */
+ type FetchStreamOptions = SignalOptions;
+ /** Options for fetchRow */
+ type FetchRowsOptions = SignalOptions & {
+     /** Callback for each row */
+     onEachRow?: (row: RowArray | RowObject) => void;
+     /** Row format (default: JSON_ARRAY) */
+     format?: FetchRowFormat;
+ };
+ /** Options for fetchAll */
+ type FetchAllOptions = SignalOptions & {
+     /** Row format (default: JSON_ARRAY) */
+     format?: FetchRowFormat;
+ };
+ /** Result from mergeStreamToExternalLink callback */
+ type MergeExternalLinksResult = {
+     /** Uploaded external link URL */
+     externalLink: string;
+     /** Uploaded byte count (actual size after compression like gzip) */
+     byte_count: number;
+     /** Link expiration time (ISO string) */
+     expiration: string;
+ };
+ /** Options for mergeExternalLinks */
+ type MergeExternalLinksOptions = SignalOptions & {
+     /** Callback to upload merged stream to external link */
+     mergeStreamToExternalLink: (stream: Readable) => Promise<MergeExternalLinksResult>;
+ };
+ /**
+  * API request for executeStatement
+  * @see https://docs.databricks.com/api/workspace/statementexecution/executestatement
+  */
+ type ExecuteStatementRequest = {
+     warehouse_id: string;
+     statement: string;
+     byte_limit?: number;
+     catalog?: string;
+     disposition?: 'INLINE' | 'EXTERNAL_LINKS';
+     format?: 'JSON_ARRAY' | 'ARROW_STREAM' | 'CSV';
+     on_wait_timeout?: 'CONTINUE' | 'CANCEL';
+     parameters?: StatementParameter[];
+     row_limit?: number;
+     schema?: string;
+     wait_timeout?: string;
+ };
+ /**
+  * API response for getChunk
+  * @see https://docs.databricks.com/api/workspace/statementexecution/getstatementresultchunkn
+  */
+ type GetChunkResponse = {
+     chunk_index: number;
+     row_offset: number;
+     row_count: number;
+     data_array?: unknown[][];
+     external_links?: ExternalLinkInfo[];
+     next_chunk_index?: number;
+     next_chunk_internal_link?: string;
+ };
+
+ /** Base error for Databricks SQL operations */
+ declare class DatabricksSqlError extends Error {
+     readonly code: string;
+     readonly statementId: string | undefined;
+     constructor(message: string, code?: string, statementId?: string);
+ }
+ /** Error when statement is cancelled */
+ declare class StatementCancelledError extends DatabricksSqlError {
+     constructor(statementId: string);
+ }
+ /** Error when operation is aborted via AbortSignal */
+ declare class AbortError extends DatabricksSqlError {
+     constructor(message?: string);
+ }
+ /** HTTP error from API calls */
+ declare class HttpError extends DatabricksSqlError {
+     readonly status: number;
+     readonly statusText: string;
+     constructor(status: number, statusText: string, message?: string);
+ }
+ /** Authentication error (401) */
+ declare class AuthenticationError extends HttpError {
+     constructor();
+ }
+ /** Rate limit error (429) */
+ declare class RateLimitError extends HttpError {
+     readonly retryAfter: number | undefined;
+     constructor(retryAfter?: number);
+ }
+
+ /**
+  * Execute SQL statement and poll until completion
+  */
+ declare function executeStatement(query: string, auth: AuthInfo, options?: ExecuteStatementOptions): Promise<StatementResult>;
+
+ /**
+  * Process each row from statement result with a callback.
+  * Supports INLINE results and JSON_ARRAY external links.
+  */
+ declare function fetchRow(statementResult: StatementResult, auth: AuthInfo, options?: FetchRowsOptions): Promise<void>;
+
+ /**
+  * Fetch all rows from statement result as an array.
+  * Only supports INLINE results or JSON_ARRAY external links.
+  */
+ declare function fetchAll(statementResult: StatementResult, auth: AuthInfo, options?: FetchAllOptions): Promise<Array<RowArray | RowObject>>;
+
+ /**
+  * Create a readable stream from statement result.
+  * Merges all external link chunks into a single binary stream,
+  * preserving the original format (JSON_ARRAY, CSV, ARROW_STREAM).
+  */
+ declare function fetchStream(statementResult: StatementResult, auth: AuthInfo, options?: FetchStreamOptions): Readable;
+
+ /**
+  * Merge external links from StatementResult into a single stream,
+  * upload it via the provided callback, and return updated StatementResult.
+  *
+  * If the result is not external links (inline data or empty), returns the original as-is.
+  */
+ declare function mergeExternalLinks(statementResult: StatementResult, auth: AuthInfo, options: MergeExternalLinksOptions): Promise<StatementResult>;
+
+ export { AbortError, type AuthInfo, AuthenticationError, type ChunkInfo, type ColumnInfo, DatabricksSqlError, type ExecuteStatementOptions, type ExecuteStatementRequest, type ExternalLinkInfo, type ExternalLinksResultData, type FetchAllOptions, type FetchRowFormat, type FetchRowsOptions, type FetchStreamOptions, type GetChunkResponse, HttpError, type InlineResultData, type MergeExternalLinksOptions, type MergeExternalLinksResult, RateLimitError, type ResultData, type RowArray, type RowObject, type SignalOptions, StatementCancelledError, type StatementManifest, type StatementParameter, type StatementResult, type StatementState, type StatementStatus, executeStatement, fetchAll, fetchRow, fetchStream, mergeExternalLinks };
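
The declarations above compose into a short workflow: build an AuthInfo, run a query with executeStatement (which submits the statement and polls it to a terminal state), then read rows with fetchAll, or row by row with fetchRow. Below is a minimal usage sketch based only on these declarations; the host, warehouse path, token, and query are placeholders.

    import {
      executeStatement,
      fetchAll,
      RateLimitError,
      StatementCancelledError,
      type AuthInfo,
    } from '@bitofsky/databricks-sql'

    // Placeholder credentials: substitute a real workspace host, PAT, and warehouse path.
    const auth: AuthInfo = {
      token: process.env.DATABRICKS_TOKEN ?? '',
      host: 'my-workspace.cloud.databricks.com',
      httpPath: '/sql/1.0/warehouses/abc123def456',
    }

    try {
      // warehouse_id is optional here; per the comment above, it can be extracted from httpPath.
      const result = await executeStatement('SELECT 1 AS one', auth, {
        wait_timeout: '10s',
        onProgress: (status) => console.log('state:', status.state),
      })

      // format: 'JSON_OBJECT' keys each row by column name instead of returning positional arrays.
      const rows = await fetchAll(result, auth, { format: 'JSON_OBJECT' })
      console.log(rows) // e.g. [{ one: 1 }]
    } catch (err) {
      if (err instanceof RateLimitError)
        console.warn(`rate limited; Retry-After: ${err.retryAfter ?? 'unknown'}s`)
      else if (err instanceof StatementCancelledError)
        console.warn('statement was cancelled')
      else throw err
    }

fetchRow takes the same arguments plus an onEachRow callback for processing rows without accumulating them in memory, and every call accepts an optional AbortSignal via options.signal.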
@@ -0,0 +1,250 @@
1
+ import { Readable } from 'node:stream';
2
+
3
+ /** Authentication information for Databricks API */
4
+ type AuthInfo = {
5
+ /** Databricks Personal Access Token */
6
+ token: string;
7
+ /** Databricks workspace host (e.g., ...cloud.databricks.com) */
8
+ host: string;
9
+ /** SQL warehouse HTTP path (e.g., /sql/1.0/warehouses/abc123) */
10
+ httpPath: string;
11
+ };
+ /** Statement execution states */
+ type StatementState = 'PENDING' | 'RUNNING' | 'SUCCEEDED' | 'FAILED' | 'CANCELED' | 'CLOSED';
+ /** Statement status */
+ type StatementStatus = {
+     state: StatementState;
+     error?: {
+         error_code: string;
+         message: string;
+     };
+ };
+ /** Column schema information */
+ type ColumnInfo = {
+     name: string;
+     type_text: string;
+     type_name: string;
+     position: number;
+     type_precision?: number;
+     type_scale?: number;
+ };
+ /** Chunk information */
+ type ChunkInfo = {
+     chunk_index: number;
+     row_offset: number;
+     row_count: number;
+     byte_count?: number;
+ };
+ /** Result manifest (schema and chunk info) */
+ type StatementManifest = {
+     format: 'JSON_ARRAY' | 'ARROW_STREAM' | 'CSV';
+     schema: {
+         column_count: number;
+         columns: ColumnInfo[];
+     };
+     total_chunk_count: number;
+     total_row_count?: number;
+     total_byte_count?: number;
+     truncated?: boolean;
+     chunks?: ChunkInfo[];
+ };
+ /** External link for chunked results */
+ type ExternalLinkInfo = {
+     chunk_index: number;
+     row_offset: number;
+     row_count: number;
+     byte_count: number;
+     external_link: string;
+     expiration: string;
+ };
+ /** Inline result data */
+ type InlineResultData = {
+     data_array?: unknown[][];
+     external_links?: never;
+ };
+ /** External links result data */
+ type ExternalLinksResultData = {
+     data_array?: never;
+     external_links?: ExternalLinkInfo[];
+ };
+ /** Result data (inline or external links) */
+ type ResultData = InlineResultData | ExternalLinksResultData;
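
Because the two union members declare each other's fields as `never`, a plain property check narrows ResultData. A small sketch; the isExternal helper is illustrative, not something the package exports.

// Illustrative narrowing helper, not part of the package API.
function isExternal(result: ResultData): result is ExternalLinksResultData {
    return Array.isArray(result.external_links)
}
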
+ /**
+  * Statement result from API
+  * @see https://docs.databricks.com/api/workspace/statementexecution/getstatement
+  */
+ type StatementResult = {
+     statement_id: string;
+     status: StatementStatus;
+     manifest?: StatementManifest;
+     result?: ResultData;
+ };
+ /** Statement parameter */
+ type StatementParameter = {
+     name: string;
+     type?: 'STRING' | 'LONG' | 'DOUBLE' | 'BOOLEAN';
+     value?: string | number | boolean;
+ };
+ /**
+  * Options for executeStatement
+  * @see https://docs.databricks.com/api/workspace/statementexecution/executestatement
+  */
+ type ExecuteStatementOptions = {
+     /** Progress callback (called on each poll) */
+     onProgress?: (status: StatementStatus) => void;
+     /** Abort signal for cancellation */
+     signal?: AbortSignal;
+     /** Result byte limit */
+     byte_limit?: number;
+     /** Catalog name */
+     catalog?: string;
+     /** Result disposition */
+     disposition?: 'INLINE' | 'EXTERNAL_LINKS';
+     /** Result format */
+     format?: 'JSON_ARRAY' | 'ARROW_STREAM' | 'CSV';
+     /** Behavior on wait timeout */
+     on_wait_timeout?: 'CONTINUE' | 'CANCEL';
+     /** Query parameters */
+     parameters?: StatementParameter[];
+     /** Row limit */
+     row_limit?: number;
+     /** Schema name */
+     schema?: string;
+     /** Server wait timeout (e.g., '10s', '50s') */
+     wait_timeout?: string;
+     /** Warehouse ID (can be extracted from httpPath) */
+     warehouse_id?: string;
+ };
+ /** Base options with abort signal support */
+ type SignalOptions = {
+     /** Abort signal for cancellation */
+     signal?: AbortSignal;
+ };
+ /** Row data as array */
+ type RowArray = unknown[];
+ /** Row data as JSON object */
+ type RowObject = Record<string, unknown>;
+ /** Format for fetchRow/fetchAll */
+ type FetchRowFormat = 'JSON_ARRAY' | 'JSON_OBJECT';
+ /** Options for fetchStream */
+ type FetchStreamOptions = SignalOptions;
+ /** Options for fetchRow */
+ type FetchRowsOptions = SignalOptions & {
+     /** Callback for each row */
+     onEachRow?: (row: RowArray | RowObject) => void;
+     /** Row format (default: JSON_ARRAY) */
+     format?: FetchRowFormat;
+ };
+ /** Options for fetchAll */
+ type FetchAllOptions = SignalOptions & {
+     /** Row format (default: JSON_ARRAY) */
+     format?: FetchRowFormat;
+ };
+ /** Result from mergeStreamToExternalLink callback */
+ type MergeExternalLinksResult = {
+     /** Uploaded external link URL */
+     externalLink: string;
+     /** Uploaded byte count (actual size after compression like gzip) */
+     byte_count: number;
+     /** Link expiration time (ISO string) */
+     expiration: string;
+ };
+ /** Options for mergeExternalLinks */
+ type MergeExternalLinksOptions = SignalOptions & {
+     /** Callback to upload merged stream to external link */
+     mergeStreamToExternalLink: (stream: Readable) => Promise<MergeExternalLinksResult>;
+ };
+ /**
+  * API request for executeStatement
+  * @see https://docs.databricks.com/api/workspace/statementexecution/executestatement
+  */
+ type ExecuteStatementRequest = {
+     warehouse_id: string;
+     statement: string;
+     byte_limit?: number;
+     catalog?: string;
+     disposition?: 'INLINE' | 'EXTERNAL_LINKS';
+     format?: 'JSON_ARRAY' | 'ARROW_STREAM' | 'CSV';
+     on_wait_timeout?: 'CONTINUE' | 'CANCEL';
+     parameters?: StatementParameter[];
+     row_limit?: number;
+     schema?: string;
+     wait_timeout?: string;
+ };
+ /**
+  * API response for getChunk
+  * @see https://docs.databricks.com/api/workspace/statementexecution/getstatementresultchunkn
+  */
+ type GetChunkResponse = {
+     chunk_index: number;
+     row_offset: number;
+     row_count: number;
+     data_array?: unknown[][];
+     external_links?: ExternalLinkInfo[];
+     next_chunk_index?: number;
+     next_chunk_internal_link?: string;
+ };
+
+ /** Base error for Databricks SQL operations */
+ declare class DatabricksSqlError extends Error {
+     readonly code: string;
+     readonly statementId: string | undefined;
+     constructor(message: string, code?: string, statementId?: string);
+ }
+ /** Error when statement is cancelled */
+ declare class StatementCancelledError extends DatabricksSqlError {
+     constructor(statementId: string);
+ }
+ /** Error when operation is aborted via AbortSignal */
+ declare class AbortError extends DatabricksSqlError {
+     constructor(message?: string);
+ }
+ /** HTTP error from API calls */
+ declare class HttpError extends DatabricksSqlError {
+     readonly status: number;
+     readonly statusText: string;
+     constructor(status: number, statusText: string, message?: string);
+ }
+ /** Authentication error (401) */
+ declare class AuthenticationError extends HttpError {
+     constructor();
+ }
+ /** Rate limit error (429) */
+ declare class RateLimitError extends HttpError {
+     readonly retryAfter: number | undefined;
+     constructor(retryAfter?: number);
+ }
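
These subclasses make instanceof-based handling straightforward. A hedged sketch of a retry-once wrapper; the run callback, the one-retry policy, and the assumption that retryAfter is in seconds are all illustrative, not documented package behavior.

// Illustrative error handling; the retry policy is not part of the package.
async function withRateLimitRetry<T>(run: () => Promise<T>): Promise<T> {
    try {
        return await run()
    } catch (err) {
        if (err instanceof RateLimitError) {
            // Assumes retryAfter is seconds (as in an HTTP Retry-After header).
            const waitMs = (err.retryAfter ?? 1) * 1000
            await new Promise((resolve) => setTimeout(resolve, waitMs))
            return run()
        }
        throw err
    }
}
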
+
+ /**
+  * Execute SQL statement and poll until completion
+  */
+ declare function executeStatement(query: string, auth: AuthInfo, options?: ExecuteStatementOptions): Promise<StatementResult>;
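
A minimal usage sketch, assuming the auth value from above; the orders table and :status parameter are hypothetical.

// Execute a parameterized query and request external-link results.
const result = await executeStatement(
    'SELECT * FROM orders WHERE status = :status', // hypothetical table
    auth,
    {
        parameters: [{ name: 'status', type: 'STRING', value: 'shipped' }],
        disposition: 'EXTERNAL_LINKS',
        format: 'JSON_ARRAY',
        onProgress: (status) => console.log(status.state), // PENDING -> RUNNING -> SUCCEEDED
    },
)
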
+
+ /**
+  * Process each row from statement result with a callback.
+  * Supports INLINE results and JSON_ARRAY external links.
+  */
+ declare function fetchRow(statementResult: StatementResult, auth: AuthInfo, options?: FetchRowsOptions): Promise<void>;
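
Row-by-row processing, sketched against the result from the executeStatement example above:

// Visits each row without materializing the whole result set.
await fetchRow(result, auth, {
    format: 'JSON_OBJECT',
    onEachRow: (row) => console.log(row), // each row keyed by column name
})
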
+
+ /**
+  * Fetch all rows from statement result as an array.
+  * Only supports INLINE results or JSON_ARRAY external links.
+  */
+ declare function fetchAll(statementResult: StatementResult, auth: AuthInfo, options?: FetchAllOptions): Promise<Array<RowArray | RowObject>>;
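
Convenient for small result sets; a sketch using the same result. Note this buffers every row in memory, so fetchRow or fetchStream is likely the better fit for large results.

// Collect all rows into an array of objects.
const rows = await fetchAll(result, auth, { format: 'JSON_OBJECT' })
console.log(rows.length, rows[0]) // row count and the first row
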
+
+ /**
+  * Create a readable stream from statement result.
+  * Merges all external link chunks into a single binary stream,
+  * preserving the original format (JSON_ARRAY, CSV, ARROW_STREAM).
+  */
+ declare function fetchStream(statementResult: StatementResult, auth: AuthInfo, options?: FetchStreamOptions): Readable;
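
Because the return value is a Node Readable, it pipes directly into any writable. A sketch writing a CSV result to disk; the output path is arbitrary, and it assumes the statement was executed with format: 'CSV'.

import { createWriteStream } from 'node:fs'
import { pipeline } from 'node:stream/promises'

// Stream the merged chunks straight to a file.
await pipeline(fetchStream(result, auth), createWriteStream('./result.csv'))
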
+
+ /**
+  * Merge external links from StatementResult into a single stream,
+  * upload it via the provided callback, and return updated StatementResult.
+  *
+  * If the result is not external links (inline data or empty), returns the original as-is.
+  */
+ declare function mergeExternalLinks(statementResult: StatementResult, auth: AuthInfo, options: MergeExternalLinksOptions): Promise<StatementResult>;
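
The upload itself is delegated to the caller through mergeStreamToExternalLink. A sketch with a hypothetical uploadToStorage helper standing in for S3, GCS, or similar object storage:

// uploadToStorage is a hypothetical helper; it must return a reachable
// (e.g., presigned) URL plus the uploaded size and an ISO expiration time.
const merged = await mergeExternalLinks(result, auth, {
    mergeStreamToExternalLink: async (stream) => {
        const { url, size, expiresAt } = await uploadToStorage(stream)
        return { externalLink: url, byte_count: size, expiration: expiresAt }
    },
})
// Per the doc comment above, merged should now reference the single uploaded link.
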
+
+ export { AbortError, type AuthInfo, AuthenticationError, type ChunkInfo, type ColumnInfo, DatabricksSqlError, type ExecuteStatementOptions, type ExecuteStatementRequest, type ExternalLinkInfo, type ExternalLinksResultData, type FetchAllOptions, type FetchRowFormat, type FetchRowsOptions, type FetchStreamOptions, type GetChunkResponse, HttpError, type InlineResultData, type MergeExternalLinksOptions, type MergeExternalLinksResult, RateLimitError, type ResultData, type RowArray, type RowObject, type SignalOptions, StatementCancelledError, type StatementManifest, type StatementParameter, type StatementResult, type StatementState, type StatementStatus, executeStatement, fetchAll, fetchRow, fetchStream, mergeExternalLinks };