@bitofsky/databricks-sql 1.0.3 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -1
- package/dist/index.cjs +53 -36
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +18 -3
- package/dist/index.d.ts +18 -3
- package/dist/index.js +53 -36
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/README.md
CHANGED

@@ -209,6 +209,8 @@ function fetchRow(
 ```
 - Streams each row to `options.onEachRow`.
 - Use `format: 'JSON_OBJECT'` to map rows into schema-based objects.
+- Use `encodeBigInt` to customize BIGINT/LONG conversions when using `JSON_OBJECT`.
+- Use `encodeTimestamp` to customize TIMESTAMP* conversions when using `JSON_OBJECT`.
 - Supports `INLINE` results or `JSON_ARRAY` formatted `EXTERNAL_LINKS` only.
 - If only a subset of external links is returned, missing chunk metadata is fetched by index.
 
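Taken together, the new README bullets describe how the encoders plug into row streaming. A minimal usage sketch (the workspace values are hypothetical placeholders; function and option names follow the README and the type definitions in the next hunk):

```ts
import { executeStatement, fetchRow } from '@bitofsky/databricks-sql'

// Hypothetical workspace credentials.
const auth = {
  host: 'https://example.cloud.databricks.com',
  token: process.env.DATABRICKS_TOKEN!,
  httpPath: '/sql/1.0/warehouses/abc123def456',
}

const result = await executeStatement('SELECT id, created_at FROM events', auth)

await fetchRow(result, auth, {
  format: 'JSON_OBJECT',
  // BIGINT/LONG cells reach the encoder as bigint; stringify to stay JSON-safe.
  encodeBigInt: (value) => value.toString(),
  // TIMESTAMP* cells reach the encoder as strings; convert to Date if desired.
  encodeTimestamp: (value) => new Date(value),
  onEachRow: (row) => console.log(row),
})
```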
@@ -274,15 +276,19 @@ type ExecuteStatementOptions = {
 
 type FetchRowsOptions = {
   signal?: AbortSignal
-  onEachRow?: (row: RowArray | RowObject) => void
   format?: 'JSON_ARRAY' | 'JSON_OBJECT'
   logger?: Logger
+  onEachRow?: (row: RowArray | RowObject) => void
+  encodeBigInt?: (value: bigint) => unknown
+  encodeTimestamp?: (value: string) => unknown
 }
 
 type FetchAllOptions = {
   signal?: AbortSignal
   format?: 'JSON_ARRAY' | 'JSON_OBJECT'
   logger?: Logger
+  encodeBigInt?: (value: bigint) => unknown
+  encodeTimestamp?: (value: string) => unknown
 }
 
 type FetchStreamOptions = {
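The same encoder options were added to `FetchAllOptions` and are forwarded to `fetchRow` internally (see the `fetchAll` hunk at the end of `index.cjs`). A sketch, reusing the hypothetical `result` and `auth` from the example above:

```ts
import { fetchAll } from '@bitofsky/databricks-sql'

const rows = await fetchAll(result, auth, {
  format: 'JSON_OBJECT',
  // Without encodeBigInt, BIGINT/LONG values come back as native bigint,
  // which JSON.stringify rejects; encoding to string sidesteps that.
  encodeBigInt: (value) => value.toString(),
  encodeTimestamp: (value) => new Date(value),
})
console.log(rows.length)
```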
package/dist/index.cjs
CHANGED

@@ -272,12 +272,8 @@ var TERMINAL_STATES = /* @__PURE__ */ new Set([
 ]);
 var POLL_INTERVAL_MS = 5e3;
 async function fetchMetrics(auth, statementId, signal) {
-  try {
-    const queryInfo = await getQueryMetrics(auth, statementId, signal);
-    return queryInfo.metrics;
-  } catch {
-    return void 0;
-  }
+  const queryInfo = await getQueryMetrics(auth, statementId, signal);
+  return queryInfo.metrics;
 }
 async function executeStatement(query, auth, options = {}) {
   const warehouseId = options.warehouse_id ?? extractWarehouseId(auth.httpPath);
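The `try`/`catch` that used to swallow metrics errors inside `fetchMetrics` is gone; failures now propagate to the caller, which logs them and falls back to `undefined` (visible in the `emitProgress` hunk below). A sketch of the new caller-side contract:

```ts
// fetchMetrics now rejects on failure; each call site decides how to degrade.
const metrics = await fetchMetrics(auth, statementId, signal).catch((e) => {
  logger?.error?.(`Failed to fetch query metrics: ${String(e)}`)
  return undefined
})
```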
@@ -285,7 +281,13 @@ async function executeStatement(query, auth, options = {}) {
   const waitTimeout = options.wait_timeout ?? (onProgress ? "0s" : "50s");
   let cancelIssued = false;
   throwIfAborted(signal, "executeStatement");
-  const emitProgress = onProgress ? async (statementId) => onProgress(result, enableMetrics ? await fetchMetrics(auth, statementId, signal) : void 0) : void 0;
+  const emitProgress = onProgress ? async () => result ? onProgress(
+    result,
+    enableMetrics ? await fetchMetrics(auth, result.statement_id, signal).catch((e) => {
+      logger?.error?.(`executeStatement Failed to fetch query metrics for statement ${result?.statement_id}: ${String(e)}`, { statementId: result?.statement_id });
+      return void 0;
+    }) : void 0
+  ) : void 0 : void 0;
   const request = Object.fromEntries(
     Object.entries({
       warehouse_id: warehouseId,
@@ -321,9 +323,9 @@
     signal?.addEventListener("abort", onAbort, { once: true });
     while (!TERMINAL_STATES.has(result.status.state)) {
       logger?.info?.(`executeStatement Statement ${result.statement_id} in state ${result.status.state}; polling for status...`);
-      await emitProgress?.(result.statement_id);
       await delay(POLL_INTERVAL_MS, signal);
       result = await getStatement(auth, result.statement_id, signal);
+      await emitProgress?.();
     }
   } catch (err) {
     if (err instanceof AbortError || signal?.aborted) {

@@ -337,7 +339,7 @@
     logger?.info?.(`executeStatement Statement ${result.statement_id} reached final state: ${result.status.state}`);
     signal?.removeEventListener("abort", onAbort);
   }
-  await emitProgress?.(result.statement_id);
+  await emitProgress?.();
   if (result.status.state === "SUCCEEDED")
     return result;
   if (result.status.state === "CANCELED")
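Progress emission moved after `getStatement`, so each `onProgress` call now observes the freshly polled state rather than the previous iteration's. A usage sketch of the progress API (the query text is a placeholder; `onProgress` and `enableMetrics` come from `ExecuteStatementOptions`):

```ts
const result = await executeStatement('SELECT * FROM big_table', auth, {
  enableMetrics: true,
  // Invoked once per poll cycle and once after the terminal state is reached.
  onProgress: (statement, metrics) => {
    console.log(statement.status.state, metrics)
  },
})
```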
@@ -358,20 +360,18 @@ var INTEGER_TYPES = /* @__PURE__ */ new Set(["TINYINT", "SMALLINT", "INT"]);
 var BIGINT_TYPES = /* @__PURE__ */ new Set(["BIGINT", "LONG"]);
 var FLOAT_TYPES = /* @__PURE__ */ new Set(["FLOAT", "DOUBLE"]);
 var BOOLEAN_TYPES = /* @__PURE__ */ new Set(["BOOLEAN"]);
+var TIMESTAMP_TYPES = /* @__PURE__ */ new Set(["TIMESTAMP", "TIMESTAMP_NTZ", "TIMESTAMP_LTZ"]);
 var STRING_TYPES = /* @__PURE__ */ new Set([
   "STRING",
   "DATE",
-  "TIMESTAMP",
-  "TIMESTAMP_NTZ",
-  "TIMESTAMP_LTZ",
   "TIME"
 ]);
-function createRowMapper(manifest, format) {
+function createRowMapper(manifest, format, options = {}) {
   if (format !== "JSON_OBJECT")
     return (row) => row;
   const columnConverters = manifest.schema.columns.map((column) => ({
     name: column.name,
-    convert: createColumnConverter(column)
+    convert: createColumnConverter(column, options)
   }));
   return (row) => {
     const mapped = {};
@@ -386,9 +386,9 @@ function createRowMapper(manifest, format) {
     return mapped;
   };
 }
-function createColumnConverter(column) {
+function createColumnConverter(column, options) {
   const descriptor = parseColumnType(column);
-  return (value) => convertValue(descriptor, value);
+  return (value) => convertValue(descriptor, value, options);
 }
 function parseColumnType(column) {
   if (column.type_name === "STRUCT" || column.type_name === "ARRAY" || column.type_name === "MAP")
@@ -529,45 +529,47 @@ function stripNotNull(typeText) {
     trimmed = trimmed.slice(0, -"NOT NULL".length).trim();
   return trimmed;
 }
-function convertValue(descriptor, value) {
+function convertValue(descriptor, value, options) {
   if (value === null || value === void 0)
     return value;
   if (descriptor.typeName === "STRUCT" && descriptor.fields)
-    return convertStructValue(descriptor.fields, value);
+    return convertStructValue(descriptor.fields, value, options);
   if (descriptor.typeName === "ARRAY" && descriptor.elementType)
-    return convertArrayValue(descriptor.elementType, value);
+    return convertArrayValue(descriptor.elementType, value, options);
   if (descriptor.typeName === "MAP" && descriptor.keyType && descriptor.valueType)
-    return convertMapValue(descriptor.keyType, descriptor.valueType, value);
+    return convertMapValue(descriptor.keyType, descriptor.valueType, value, options);
   if (descriptor.typeName === "DECIMAL")
     return convertNumber(value);
   if (INTEGER_TYPES.has(descriptor.typeName))
     return convertNumber(value);
   if (BIGINT_TYPES.has(descriptor.typeName))
-    return convertInteger(value);
+    return convertInteger(value, options.encodeBigInt);
   if (FLOAT_TYPES.has(descriptor.typeName))
     return convertNumber(value);
   if (BOOLEAN_TYPES.has(descriptor.typeName))
     return convertBoolean(value);
+  if (TIMESTAMP_TYPES.has(descriptor.typeName))
+    return convertTimestamp(value, options.encodeTimestamp);
   if (STRING_TYPES.has(descriptor.typeName))
     return value;
   return value;
 }
-function convertStructValue(fields, value) {
+function convertStructValue(fields, value, options) {
   const raw = parseStructValue(value);
   if (!raw || typeof raw !== "object" || Array.isArray(raw))
     return value;
   const mapped = {};
   for (const field of fields)
-    mapped[field.name] = convertValue(field.type, raw[field.name]);
+    mapped[field.name] = convertValue(field.type, raw[field.name], options);
   return mapped;
 }
-function convertArrayValue(elementType, value) {
+function convertArrayValue(elementType, value, options) {
   const raw = parseJsonValue(value);
   if (!Array.isArray(raw))
     return value;
-  return raw.map((entry) => convertValue(elementType, entry));
+  return raw.map((entry) => convertValue(elementType, entry, options));
 }
-function convertMapValue(keyType, valueType, value) {
+function convertMapValue(keyType, valueType, value, options) {
   const raw = parseJsonValue(value);
   if (!raw || typeof raw !== "object")
     return value;
@@ -576,15 +578,15 @@ function convertMapValue(keyType, valueType, value) {
     for (const entry of raw) {
       if (!Array.isArray(entry) || entry.length < 2)
         continue;
-      const convertedKey = convertValue(keyType, entry[0]);
-      mapped2[String(convertedKey)] = convertValue(valueType, entry[1]);
+      const convertedKey = convertValue(keyType, entry[0], options);
+      mapped2[String(convertedKey)] = convertValue(valueType, entry[1], options);
     }
     return mapped2;
   }
   const mapped = {};
   for (const [key, entryValue] of Object.entries(raw)) {
-    const convertedKey = convertValue(keyType, key);
-    mapped[String(convertedKey)] = convertValue(valueType, entryValue);
+    const convertedKey = convertValue(keyType, key, options);
+    mapped[String(convertedKey)] = convertValue(valueType, entryValue, options);
   }
   return mapped;
 }
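Since `options` now threads through the struct, array, and map converters above, the encoders apply at any nesting depth, not just to top-level columns. An illustrative sketch (the table schema and values are hypothetical):

```ts
// For a table with columns (id BIGINT, tags ARRAY<BIGINT>):
const rows = await fetchAll(result, auth, {
  format: 'JSON_OBJECT',
  encodeBigInt: (value) => value.toString(),
})
// Nested elements are encoded too, e.g.:
//   rows[0] => { id: '42', tags: ['1', '2', '3'] }
// Without encodeBigInt the same row would hold native bigint values.
```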
@@ -613,23 +615,31 @@ function convertNumber(value) {
   }
   return value;
 }
-function convertInteger(value) {
+function convertInteger(value, encodeBigInt) {
   if (typeof value === "bigint")
-    return value;
+    return encodeBigInt ? encodeBigInt(value) : value;
   if (typeof value === "number") {
-    if (Number.isInteger(value))
-      return BigInt(value);
+    if (Number.isInteger(value)) {
+      const bigintValue = BigInt(value);
+      return encodeBigInt ? encodeBigInt(bigintValue) : bigintValue;
+    }
     return value;
   }
   if (typeof value === "string") {
     try {
-      return BigInt(value);
+      const bigintValue = BigInt(value);
+      return encodeBigInt ? encodeBigInt(bigintValue) : bigintValue;
     } catch {
       return value;
     }
   }
   return value;
 }
+function convertTimestamp(value, encodeTimestamp) {
+  if (typeof value !== "string")
+    return value;
+  return encodeTimestamp ? encodeTimestamp(value) : value;
+}
 function convertBoolean(value) {
   if (typeof value === "boolean")
     return value;
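`convertTimestamp` is pass-through by default, so `JSON_OBJECT` output keeps timestamps as wire strings unless the caller opts in; `encodeBigInt` likewise defaults to the existing bigint behavior. A sketch of opting in (the literal timestamp shape is illustrative):

```ts
const rows = await fetchAll(result, auth, {
  format: 'JSON_OBJECT',
  // e.g. '2024-01-01T00:00:00Z' (string) -> Date instance
  encodeTimestamp: (value) => new Date(value),
})
```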
@@ -759,7 +769,10 @@ async function fetchRow(statementResult, auth, options = {}) {
   const manifest = validateSucceededResult(statementResult);
   const statementId = statementResult.statement_id;
   const logContext = { statementId, manifest, requestedFormat: format };
-  const mapRow = createRowMapper(manifest, format);
+  const mapRow = createRowMapper(manifest, format, {
+    ...options.encodeBigInt ? { encodeBigInt: options.encodeBigInt } : {},
+    ...options.encodeTimestamp ? { encodeTimestamp: options.encodeTimestamp } : {}
+  });
   logger?.info?.(`fetchRow fetching rows for statement ${statementId}.`, {
     ...logContext,
     resultType: statementResult.result?.external_links ? "EXTERNAL_LINKS" : "INLINE"
@@ -855,6 +868,10 @@ async function fetchAll(statementResult, auth, options = {}) {
     fetchOptions.format = options.format;
   if (options.logger)
     fetchOptions.logger = options.logger;
+  if (options.encodeBigInt)
+    fetchOptions.encodeBigInt = options.encodeBigInt;
+  if (options.encodeTimestamp)
+    fetchOptions.encodeTimestamp = options.encodeTimestamp;
   await fetchRow(statementResult, auth, fetchOptions);
   logger?.info?.(`fetchAll fetched ${rows.length} rows for statement ${statementId}.`, {
     ...logContext,
package/dist/index.cjs.map
CHANGED

@@ -1 +1 @@
(Regenerated single-line source map; its embedded `sourcesContent` carries the updated TypeScript sources, including the new `encodeBigInt`/`encodeTimestamp` handling. The full minified, machine-generated map content is not reproduced here.)
2D,OAAO,GAAG,CAAC,EAAE;AACxF,UAAM;AAAA,EACR,UAAE;AACA,YAAQ,OAAO,8BAA8B,OAAO,YAAY,yBAAyB,OAAO,OAAO,KAAK,EAAE;AAC9G,YAAQ,oBAAoB,SAAS,OAAO;AAAA,EAC9C;AAGA,QAAM,eAAe,OAAO,YAAY;AAGxC,MAAI,OAAO,OAAO,UAAU;AAC1B,WAAO;AAET,MAAI,OAAO,OAAO,UAAU;AAC1B,UAAM,IAAI,wBAAwB,OAAO,YAAY;AAGvD,QAAM,IAAI;AAAA,IACR,OAAO,OAAO,OAAO,WAAW;AAAA,IAChC,OAAO,OAAO,OAAO;AAAA,IACrB,OAAO;AAAA,EACT;AACF;;;AC9HA,yBAAuB;AACvB,yBAA4B;;;ACkB5B,IAAM,gBAAgB,oBAAI,IAAI,CAAC,WAAW,YAAY,KAAK,CAAC;AAC5D,IAAM,eAAe,oBAAI,IAAI,CAAC,UAAU,MAAM,CAAC;AAC/C,IAAM,cAAc,oBAAI,IAAI,CAAC,SAAS,QAAQ,CAAC;AAC/C,IAAM,gBAAgB,oBAAI,IAAI,CAAC,SAAS,CAAC;AACzC,IAAM,eAAe,oBAAI,IAAI;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAOM,SAAS,gBACd,UACA,QACW;AACX,MAAI,WAAW;AACb,WAAO,CAAC,QAAQ;AAGlB,QAAM,mBAAmB,SAAS,OAAO,QAAQ,IAAI,CAAC,YAAwB;AAAA,IAC5E,MAAM,OAAO;AAAA,IACb,SAAS,sBAAsB,MAAM;AAAA,EACvC,EAAE;AAEF,SAAO,CAAC,QAAQ;AACd,UAAM,SAAoB,CAAC;AAC3B,aAAS,QAAQ,GAAG,QAAQ,iBAAiB,QAAQ,SAAS;AAC5D,YAAM,YAAY,iBAAiB,KAAK;AACxC,UAAI,CAAC;AACH;AAEF,YAAM,EAAE,MAAM,QAAQ,IAAI;AAC1B,UAAI;AACF,eAAO,IAAI,IAAI,QAAQ,IAAI,KAAK,CAAC;AAAA,IACrC;AACA,WAAO;AAAA,EACT;AACF;AAEA,SAAS,sBAAsB,QAAiD;AAC9E,QAAM,aAAa,gBAAgB,MAAM;AACzC,SAAO,CAAC,UAAU,aAAa,YAAY,KAAK;AAClD;AAEA,SAAS,gBAAgB,QAAoC;AAC3D,MAAI,OAAO,cAAc,YAAY,OAAO,cAAc,WAAW,OAAO,cAAc;AACxF,WAAO,oBAAoB,OAAO,SAAS;AAE7C,MAAI,OAAO,cAAc;AAEvB,WAAO,wBAAwB;AAAA,MAC7B,UAAU,OAAO;AAAA,MACjB,UAAU,OAAO;AAAA,IACnB,GAAG,OAAO,gBAAgB,OAAO,UAAU;AAE7C,SAAO;AAAA,IACL,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,EACnB;AACF;AAEA,SAAS,oBAAoB,UAAkC;AAC7D,QAAM,UAAU,SAAS,KAAK;AAC9B,QAAM,WAAW,YAAY,OAAO;AAEpC,MAAI,aAAa;AAEf,WAAO;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV,QAAQ,kBAAkB,OAAO;AAAA,IACnC;AAEF,MAAI,aAAa,SAAS;AACxB,UAAM,kBAAkB,wBAAwB,OAAO;AACvD,UAAM,aAA6B;AAAA,MACjC;AAAA,MACA,UAAU;AAAA,IACZ;AACA,QAAI;AACF,iBAAW,cAAc,oBAAoB,eAAe;AAC9D,WAAO;AAAA,EACT;AAEA,MAAI,aAAa,OAAO;AACtB,UAAM,CAAC,aAAa,aAAa,IAAI,mBAAmB,SAAS,CAAC;AAClE,UAAM,aAA6B;AAAA,MACjC;AAAA,MACA,UAAU;AAAA,IACZ;AACA,QAAI;AACF,iBAAW,UAAU,oBAAoB,WAAW;AACtD,QAAI;AACF,iBAAW,YAAY,oBAAoB,aAAa;AAC1D,WAAO;AAAA,EACT;AAEA,MAAI,aAAa,WAAW;AAE1B,UAAM,EAAE,WAAW,MAAM,IAAI,iBAAiB,OAAO;AACrD,WAAO,wBAAwB,EAAE,UAAU,UAAU,QAAQ,GAAG,WAAW,KAAK;AAAA,EAClF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,UAAU;AAAA,EACZ;AACF;AAEA,SAAS,YAAY,UAA0B;AAC7C,SAAO,SAAS,MAAM,UAAU,IAAI,CAAC,KAAK;AAC5C;AAEA,SAAS,iBAAiB,UAA0D;AAClF,QAAM,QAAQ,SAAS,MAAM,2BAA2B;AACxD,MAAI,CAAC;AACH,WAAO,CAAC;AAEV,SAAO;AAAA,IACL,WAAW,OAAO,MAAM,CAAC,CAAC;AAAA,IAC1B,OAAO,OAAO,MAAM,CAAC,CAAC;AAAA,EACxB;AACF;AAEA,SAAS,wBACP,MACA,WACA,OACgB;AAChB,QAAM,aAA6B,EAAE,GAAG,KAAK;AAC7C,MAAI,cAAc;AAChB,eAAW,YAAY;AACzB,MAAI,UAAU;AACZ,eAAW,QAAQ;AACrB,SAAO;AACT;AAEA,SAAS,kBAAkB,UAAiC;AAC1D,QAAM,QAAQ,SAAS,QAAQ,GAAG;AAClC,QAAM,MAAM,SAAS,YAAY,GAAG;AACpC,MAAI,UAAU,MAAM,QAAQ,MAAM,OAAO;AACvC,WAAO,CAAC;AAEV,QAAM,QAAQ,SAAS,MAAM,QAAQ,GAAG,GAAG;AAE3C,QAAM,QAAQ,cAAc,KAAK;AACjC,QAAM,SAAwB,CAAC;AAE/B,aAAW,QAAQ,OAAO;AACxB,UAAM,iBAAiB,KAAK,QAAQ,GAAG;AACvC,QAAI,mBAAmB;AACrB;AAEF,UAAM,OAAO,KAAK,MAAM,GAAG,cAAc,EAAE,KAAK;AAChD,QAAI,gBAAgB,KAAK,MAAM,iBAAiB,CAAC,EAAE,KAAK;AACxD,oBAAgB,aAAa,aAAa;AAE1C,QAAI,CAAC;AACH;AAEF,WAAO,KAAK;AAAA,MACV;AAAA,MACA,MAAM,oBAAoB,aAAa;AAAA,IACzC,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAEA,SAAS,wBAAwB,UAAiC;AAChE,QAAM,CAAC,GAAG,IAAI,mBAAmB,UAAU,CAAC;AAC5C,SAAO,OAAO;AAChB;AAEA,SAAS,mBAAmB,UAAkB,eAAkD;AAC9F,QAAM,QAAQ,SAAS,QAAQ,GAAG;AAClC,QAAM,MAAM,SAAS,YAAY,GAAG;AACpC,MAAI,UAAU,MAAM,QAAQ,MAAM,OAAO;AACvC,WAAO,CAAC;AAEV,QAAM,QAAQ,SAAS,MAAM,QAAQ,GAAG,GAAG;AAC3C,QAAM,QAAQ,cAAc,KAAK;AACjC,MAAI,MAAM,SAAS;AACjB,WAAO;AAET,SAAO,MAAM,MAAM,GAAG,aAAa,EAAE,IAAI,CAAC,SAAS,aAAa,KAAK,KAAK,CAAC,CAAC;AA
C9E;AAEA,SAAS,cAAc,OAAyB;AAC9C,QAAM,SAAmB,CAAC;AAC1B,MAAI,UAAU;AACd,MAAI,aAAa;AACjB,MAAI,aAAa;AAEjB,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,IAAK;AAClB,QAAI,SAAS,IAAK;AAClB,QAAI,SAAS,IAAK;AAClB,QAAI,SAAS,IAAK;AAElB,QAAI,SAAS,OAAO,eAAe,KAAK,eAAe,GAAG;AACxD,aAAO,KAAK,QAAQ,KAAK,CAAC;AAC1B,gBAAU;AACV;AAAA,IACF;AAEA,eAAW;AAAA,EACb;AAEA,MAAI,QAAQ,KAAK,EAAE,SAAS;AAC1B,WAAO,KAAK,QAAQ,KAAK,CAAC;AAE5B,SAAO;AACT;AAEA,SAAS,aAAa,UAA0B;AAC9C,MAAI,UAAU,SAAS,KAAK;AAC5B,SAAO,QAAQ,SAAS,UAAU;AAChC,cAAU,QAAQ,MAAM,GAAG,CAAC,WAAW,MAAM,EAAE,KAAK;AACtD,SAAO;AACT;AAEA,SAAS,aAAa,YAA4B,OAAyB;AACzE,MAAI,UAAU,QAAQ,UAAU;AAC9B,WAAO;AAET,MAAI,WAAW,aAAa,YAAY,WAAW;AAEjD,WAAO,mBAAmB,WAAW,QAAQ,KAAK;AAEpD,MAAI,WAAW,aAAa,WAAW,WAAW;AAChD,WAAO,kBAAkB,WAAW,aAAa,KAAK;AAExD,MAAI,WAAW,aAAa,SAAS,WAAW,WAAW,WAAW;AACpE,WAAO,gBAAgB,WAAW,SAAS,WAAW,WAAW,KAAK;AAExE,MAAI,WAAW,aAAa;AAC1B,WAAO,cAAc,KAAK;AAE5B,MAAI,cAAc,IAAI,WAAW,QAAQ;AACvC,WAAO,cAAc,KAAK;AAE5B,MAAI,aAAa,IAAI,WAAW,QAAQ;AACtC,WAAO,eAAe,KAAK;AAE7B,MAAI,YAAY,IAAI,WAAW,QAAQ;AACrC,WAAO,cAAc,KAAK;AAE5B,MAAI,cAAc,IAAI,WAAW,QAAQ;AACvC,WAAO,eAAe,KAAK;AAE7B,MAAI,aAAa,IAAI,WAAW,QAAQ;AACtC,WAAO;AAET,SAAO;AACT;AAEA,SAAS,mBAAmB,QAAuB,OAAyB;AAC1E,QAAM,MAAM,iBAAiB,KAAK;AAClC,MAAI,CAAC,OAAO,OAAO,QAAQ,YAAY,MAAM,QAAQ,GAAG;AACtD,WAAO;AAGT,QAAM,SAAoB,CAAC;AAC3B,aAAW,SAAS;AAClB,WAAO,MAAM,IAAI,IAAI,aAAa,MAAM,MAAO,IAAkB,MAAM,IAAI,CAAC;AAE9E,SAAO;AACT;AAEA,SAAS,kBAAkB,aAA6B,OAAyB;AAC/E,QAAM,MAAM,eAAe,KAAK;AAChC,MAAI,CAAC,MAAM,QAAQ,GAAG;AACpB,WAAO;AAET,SAAO,IAAI,IAAI,CAAC,UAAU,aAAa,aAAa,KAAK,CAAC;AAC5D;AAEA,SAAS,gBACP,SACA,WACA,OACS;AACT,QAAM,MAAM,eAAe,KAAK;AAChC,MAAI,CAAC,OAAO,OAAO,QAAQ;AACzB,WAAO;AAET,MAAI,MAAM,QAAQ,GAAG,GAAG;AACtB,UAAMC,UAAoB,CAAC;AAC3B,eAAW,SAAS,KAAK;AACvB,UAAI,CAAC,MAAM,QAAQ,KAAK,KAAK,MAAM,SAAS;AAC1C;AACF,YAAM,eAAe,aAAa,SAAS,MAAM,CAAC,CAAC;AACnD,MAAAA,QAAO,OAAO,YAAY,CAAC,IAAI,aAAa,WAAW,MAAM,CAAC,CAAC;AAAA,IACjE;AACA,WAAOA;AAAA,EACT;AAEA,QAAM,SAAoB,CAAC;AAC3B,aAAW,CAAC,KAAK,UAAU,KAAK,OAAO,QAAQ,GAAG,GAAG;AACnD,UAAM,eAAe,aAAa,SAAS,GAAG;AAC9C,WAAO,OAAO,YAAY,CAAC,IAAI,aAAa,WAAW,UAAU;AAAA,EACnE;AAEA,SAAO;AACT;AAEA,SAAS,iBAAiB,OAAkC;AAC1D,QAAM,SAAS,eAAe,KAAK;AACnC,MAAI,UAAU,OAAO,WAAW,YAAY,CAAC,MAAM,QAAQ,MAAM;AAC/D,WAAO;AAET,SAAO;AACT;AAEA,SAAS,eAAe,OAAyB;AAC/C,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI;AACF,aAAO,KAAK,MAAM,KAAK;AAAA,IACzB,QAAQ;AACN,YAAM,IAAI,mBAAmB,8BAA8B,cAAc;AAAA,IAC3E;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,cAAc,OAAyB;AAC9C,MAAI,OAAO,UAAU;AACnB,WAAO;AAET,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,SAAS,OAAO,KAAK;AAC3B,WAAO,OAAO,MAAM,MAAM,IAAI,QAAQ;AAAA,EACxC;AAEA,SAAO;AACT;AAEA,SAAS,eAAe,OAAyB;AAC/C,MAAI,OAAO,UAAU;AACnB,WAAO;AAET,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI,OAAO,UAAU,KAAK;AACxB,aAAO,OAAO,KAAK;AACrB,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI;AAEF,aAAO,OAAO,KAAK;AAAA,IACrB,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,eAAe,OAAyB;AAC/C,MAAI,OAAO,UAAU;AACnB,WAAO;AAET,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI,UAAU,OAAQ,QAAO;AAC7B,QAAI,UAAU,QAAS,QAAO;AAAA,EAChC;AAEA,SAAO;AACT;;;AC7YA,IAAAC,sBAAsC;AAEtC,2BAAqC;AAW9B,SAAS,YACd,iBACA,MACA,UAA8B,CAAC,GACrB;AACV,QAAM,EAAE,QAAQ,YAAY,OAAO,IAAI;AACvC,QAAM,WAAW,wBAAwB,eAAe;AACxD,QAAM,SAAS,SAAS;AACxB,QAAM,cAAc,gBAAgB;AACpC,QAAM,UAAU,EAAE,aAAa,UAAU,QAAQ,WAAW;AAE5D,MAAI,gBAAgB,QAAQ,YAAY;AACtC,YAAQ;AAAA,MACN,kEAAkE,WAAW;AAAA,MAC7E,EAAE,GAAG,SAAS,cAAc,KAAK;AAAA,IACnC;AACA,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,UAAQ,OAAO,6CAA6C,WAAW,KAAK;AAAA,IAC1E,GAAG;AAAA,IACH,kBAAkB,QAAQ,gBAAgB,QAAQ,gBAAgB,MAAM;AAAA,EAC1E,CAAC;AAGD,QAAM,SAAS,IAAI,gCAAY;AAG/B,MAAI,QAAQ;AACV,UAAM,UAAU,MAAM;AACpB,cAAQ,OAAO,+DAA+D,WAAW,KAAK,OAAO;AACrG,aAAO,QAAQ,IAAI,WAAW,gBA
AgB,CAAC;AAAA,IACjD;AACA,WAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AACxD,WAAO,KAAK,SAAS,MAAM,OAAO,oBAAoB,SAAS,OAAO,CAAC;AAAA,EACzE;AAGA,SAAO,GAAG,SAAS,CAAC,QAAQ;AAC1B,QAAI,eAAe;AACjB;AACF,QAAI,OAAO,cAAc,OAAO,MAAM;AACpC,YAAM;AAAA,EACV,CAAC;AAID,sBAAoB,iBAAiB,MAAM,UAAU,QAAQ,QAAQ,QAAQ,YAAY,MAAM,EAC5F,MAAM,CAAC,QAAQ;AACd,YAAQ,QAAQ,+CAA+C,WAAW,KAAK;AAAA,MAC7E,GAAG;AAAA,MACH,OAAO;AAAA,IACT,CAAC;AACD,WAAO,QAAQ,GAAY;AAAA,EAC7B,CAAC;AAEH,SAAO;AACT;AAKA,eAAe,oBACb,iBACA,MACA,UACA,QACA,QACA,QACA,YACA,QACe;AACf,QAAM,cAAc,gBAAgB;AACpC,QAAM,UAAU,EAAE,aAAa,UAAU,QAAQ,WAAW;AAC5D,UAAQ,OAAO,uDAAuD,WAAW,KAAK,OAAO;AAC7F,QAAM,OAAO,MAAM,oBAAoB,iBAAiB,MAAM,UAAU,MAAM;AAG9E,MAAI,KAAK,WAAW,GAAG;AACrB,YAAQ,OAAO,qDAAqD,WAAW,KAAK,OAAO;AAC3F,WAAO,KAAK,OAAO,IAAI;AAAA,EACzB;AAGA,MAAI,KAAK,WAAW,KAAK,CAAC,YAAY;AACpC,YAAQ,OAAO,yDAAyD,WAAW,KAAK;AAAA,MACtF,GAAG;AAAA,MACH,UAAU,KAAK;AAAA,IACjB,CAAC;AAED,WAAO,gBAAgB,KAAK,CAAC,GAAI,QAAQ,MAAM;AAAA,EACjD;AAGA,UAAQ,OAAO,uBAAuB,KAAK,MAAM,iCAAiC,WAAW,KAAK;AAAA,IAChG,GAAG;AAAA,IACH,UAAU,KAAK;AAAA,EACjB,CAAC;AACD,aAAO,2CAAqB,QAAQ,SAAS,EAAE,MAAM,QAAQ,OAAO,IAAI,EAAE,MAAM,OAAO,CAAC;AAC1F;AAEA,eAAe,oBACb,iBACA,MACA,UACA,QACmB;AACnB,QAAM,YAAY,oBAAI,IAAsB;AAE5C,gBAAc,WAAW,gBAAgB,QAAQ,cAAc;AAE/D,MAAI,CAAC,SAAS;AACZ,WAAO,iBAAiB,SAAS;AAEnC,WAAS,IAAI,GAAG,IAAI,SAAS,mBAAmB,KAAK;AACnD,QAAI,UAAU,IAAI,CAAC;AACjB;AACF,QAAI,QAAQ;AACV,YAAM,IAAI,WAAW,+BAA+B;AAGtD,UAAM,YAAY,MAAM,SAAS,MAAM,gBAAgB,cAAc,GAAG,MAAM;AAC9E,kBAAc,WAAW,UAAU,cAAc;AAAA,EACnD;AAEA,SAAO,iBAAiB,SAAS;AACnC;AAEA,SAAS,cACP,WACA,eACM;AACN,MAAI,CAAC;AACH;AAEF,aAAW,QAAQ,eAAe;AAChC,QAAI,CAAC,iBAAiB,KAAK,aAAa;AACtC;AAEF,UAAM,WAAW,UAAU,IAAI,KAAK,WAAW;AAC/C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK,aAAa;AAAA,IAClC,OAAO;AACL,gBAAU,IAAI,KAAK,aAAa,CAAC,KAAK,aAAa,CAAC;AAAA,IACtD;AAAA,EACF;AACF;AAEA,SAAS,iBAAiB,WAA4C;AACpE,MAAI,UAAU,SAAS;AACrB,WAAO,CAAC;AAEV,QAAM,SAAS,CAAC,GAAG,UAAU,QAAQ,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC;AAChE,QAAM,OAAiB,CAAC;AACxB,aAAW,CAAC,EAAE,KAAK,KAAK,QAAQ;AAC9B,SAAK,KAAK,GAAG,KAAK;AAAA,EACpB;AACA,SAAO;AACT;AAEA,SAAS,iBAAiB,OAAiC;AACzD,SAAO,OAAO,UAAU,YAAY,MAAM,SAAS;AACrD;;;AFtKA,eAAsB,SACpB,iBACA,MACA,UAA4B,CAAC,GACd;AACf,QAAM,EAAE,QAAQ,WAAW,QAAQ,OAAO,IAAI;AAC9C,QAAM,WAAW,wBAAwB,eAAe;AACxD,QAAM,cAAc,gBAAgB;AACpC,QAAM,aAAa,EAAE,aAAa,UAAU,iBAAiB,OAAO;AAEpE,QAAM,SAAS,gBAAgB,UAAU,MAAM;AAE/C,UAAQ,OAAO,wCAAwC,WAAW,KAAK;AAAA,IACrE,GAAG;AAAA,IACH,YAAY,gBAAgB,QAAQ,iBAAiB,mBAAmB;AAAA,EAC1E,CAAC;AAED,MAAI,gBAAgB,QAAQ,gBAAgB;AAC1C,QAAI,SAAS,WAAW,cAAc;AACpC,cAAQ,QAAQ,6DAA6D,SAAS,MAAM,KAAK,UAAU;AAC3G,YAAM,IAAI;AAAA,QACR,mEAAmE,SAAS,MAAM;AAAA,QAClF;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,OAAO,mDAAmD,WAAW,KAAK,UAAU;AAC5F,UAAM,SAAS,YAAY,iBAAiB,MAAM;AAAA,MAChD,GAAG,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,MAC1B,GAAG,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,IAC5B,CAAC;AACD,UAAM,uBAAuB,QAAQ,QAAQ,WAAW,QAAQ,QAAQ,UAAU;AAClF;AAAA,EACF;AAEA,QAAM,cAAc,SAAS;AAG7B,QAAM,YAAY,gBAAgB,QAAQ;AAC1C,MAAI,WAAW;AACb,YAAQ,OAAO,iDAAiD,WAAW,KAAK;AAAA,MAC9E,GAAG;AAAA,MACH,YAAY,UAAU;AAAA,IACxB,CAAC;AACD,eAAW,OAAO,WAAW;AAC3B,UAAI,QAAQ,QAAS,OAAM,IAAI,WAAW,SAAS;AAEnD,kBAAY,OAAO,GAAe,CAAC;AAAA,IACrC;AAAA,EACF;AAGA,MAAI,cAAc,GAAG;AACnB,YAAQ,OAAO,uBAAuB,WAAW,yBAAyB,WAAW,KAAK,UAAU;AACpG,aAAS,aAAa,GAAG,aAAa,aAAa,cAAc;AAC/D,UAAI,QAAQ,QAAS,OAAM,IAAI,WAAW,SAAS;AAEnD,YAAM,QAAQ,MAAM,SAAS,MAAM,aAAa,YAAY,MAAM;AAGlE,UAAI,MAAM;AACR,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAEF,UAAI,MAAM,YAAY;AACpB,mBAAW,OAAO,MAAM,YAAY;AAClC,cAAI,QAAQ,QAAS,OAAM,IAAI,WAAW,SAAS;AAEnD,sBAAY,OAAO,GAAe,CAAC;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,eAAe,uBACb,QACA,QACA,WACA,QACA,QACA,YACe;AAEf,QAAM,aAA
a,OAAO,SAAK,2BAAO,CAAC,EAAE,SAAK,gCAAY,CAAC;AAE3D,mBAAiB,QAAQ,YAAY;AACnC,QAAI,QAAQ,SAAS;AACnB,cAAQ,OAAO,4DAA4D;AAAA,QACzE,GAAG;AAAA,QACH,SAAS,OAAO;AAAA,MAClB,CAAC;AACD,aAAO,QAAQ,IAAI,WAAW,SAAS,CAAC;AACxC,YAAM,IAAI,WAAW,SAAS;AAAA,IAChC;AAEA,UAAM,MAAM,KAAK;AACjB,QAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,gBAAY,OAAO,GAAG,CAAC;AAAA,EACzB;AACF;;;AGrHA,eAAsB,SACpB,iBACA,MACA,UAA2B,CAAC,GACU;AACtC,QAAM,OAAoC,CAAC;AAC3C,QAAM,cAAc,gBAAgB;AACpC,QAAM,WAAW,gBAAgB;AACjC,QAAM,aAAa,EAAE,aAAa,UAAU,iBAAiB,QAAQ,OAAO;AAC5E,QAAM,eAAiC;AAAA;AAAA,IAErC,WAAW,CAAC,QAAQ;AAClB,WAAK,KAAK,GAAG;AAAA,IACf;AAAA,EACF;AACA,QAAM,EAAE,OAAO,IAAI;AAEnB,UAAQ,OAAO,4CAA4C,WAAW,KAAK,UAAU;AAErF,MAAI,QAAQ;AACV,iBAAa,SAAS,QAAQ;AAEhC,MAAI,QAAQ;AACV,iBAAa,SAAS,QAAQ;AAEhC,MAAI,QAAQ;AACV,iBAAa,SAAS,QAAQ;AAEhC,QAAM,SAAS,iBAAiB,MAAM,YAAY;AAClD,UAAQ,OAAO,oBAAoB,KAAK,MAAM,uBAAuB,WAAW,KAAK;AAAA,IACnF,GAAG;AAAA,IACH,UAAU,KAAK;AAAA,IACf,gBAAgB,QAAQ,UAAU,UAAU;AAAA,EAC9C,CAAC;AACD,SAAO;AACT;;;ACnCA,eAAsB,mBACpB,iBACA,MACA,SAC0B;AAC1B,QAAM,EAAE,QAAQ,2BAA2B,YAAY,OAAO,IAAI;AAClE,QAAM,cAAc,gBAAgB;AACpC,QAAM,WAAW,gBAAgB;AACjC,QAAM,gBAAgB,gBAAgB,QAAQ;AAC9C,QAAM,cAAc,UAAU,qBAAqB;AACnD,QAAM,aAAa,EAAE,aAAa,UAAU,aAAa,WAAW;AAGpE,MAAI,CAAC,eAAe;AAClB,YAAQ,OAAO,+DAA+D,WAAW,KAAK,UAAU;AACxG,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,YAAY;AACf,UAAM,gBAAgB,eAAe;AAGrC,QAAI,eAAe;AACjB,cAAQ,OAAO,2EAA2E,WAAW,KAAK;AAAA,QACxG,GAAG;AAAA,QACH;AAAA,MACF,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AAGA,UAAQ,OAAO,2DAA2D,WAAW,KAAK,UAAU;AACpG,QAAM,SAAS,YAAY,iBAAiB,MAAM;AAAA,IAChD,GAAG,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,IAC1B,GAAG,eAAe,SAAY,EAAE,WAAW,IAAI,CAAC;AAAA,IAChD,GAAG,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC5B,CAAC;AAGD,UAAQ,OAAO,mEAAmE,WAAW,KAAK,UAAU;AAC5G,QAAM,eAAe,MAAM,0BAA0B,MAAM;AAC3D,UAAQ,OAAO,kEAAkE,WAAW,KAAK;AAAA,IAC/F,GAAG;AAAA,IACH,WAAW,aAAa;AAAA,IACxB,YAAY,aAAa;AAAA,EAC3B,CAAC;AAID,QAAM,oBAAoB,wBAAwB,eAAe;AACjE,QAAM,gBAAgB,kBAAkB,mBAAmB;AAE3D,SAAO;AAAA,IACL,cAAc,gBAAgB;AAAA,IAC9B,QAAQ,gBAAgB;AAAA,IACxB,UAAU;AAAA,MACR,GAAG;AAAA,MACH,mBAAmB;AAAA,MACnB,kBAAkB,aAAa;AAAA,MAC/B,QAAQ;AAAA,QACN;AAAA,UACE,aAAa;AAAA,UACb,YAAY;AAAA,UACZ,WAAW;AAAA,UACX,YAAY,aAAa;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,gBAAgB;AAAA,QACd;AAAA,UACE,aAAa;AAAA,UACb,YAAY;AAAA,UACZ,WAAW;AAAA,UACX,YAAY,aAAa;AAAA,UACzB,eAAe,aAAa;AAAA,UAC5B,YAAY,aAAa;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;","names":["errorBody","mapped","import_node_stream"]}
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/errors.ts","../src/util.ts","../src/http.ts","../src/databricks-api.ts","../src/api/executeStatement.ts","../src/api/fetchRow.ts","../src/createRowMapper.ts","../src/api/fetchStream.ts","../src/api/fetchAll.ts","../src/api/mergeExternalLinks.ts"],"sourcesContent":["// Types\nexport type * from './types.js'\n\n// Errors\nexport * from './errors.js'\n\n// Core functions\nexport * from './api/index.js'\n","/** Base error for Databricks SQL operations */\nexport class DatabricksSqlError extends Error {\n readonly code: string\n readonly statementId: string | undefined\n\n constructor(message: string, code?: string, statementId?: string) {\n super(message)\n this.name = 'DatabricksSqlError'\n this.code = code ?? 'UNKNOWN_ERROR'\n this.statementId = statementId\n Error.captureStackTrace?.(this, DatabricksSqlError)\n }\n}\n\n/** Error when statement is cancelled */\nexport class StatementCancelledError extends DatabricksSqlError {\n constructor(statementId: string) {\n super(`Statement ${statementId} was cancelled`, 'CANCELLED', statementId)\n this.name = 'StatementCancelledError'\n }\n}\n\n/** Error when operation is aborted via AbortSignal */\nexport class AbortError extends DatabricksSqlError {\n constructor(message: string = 'Operation was aborted') {\n super(message, 'ABORTED')\n this.name = 'AbortError'\n }\n}\n\n/** HTTP error from API calls */\nexport class HttpError extends DatabricksSqlError {\n readonly status: number\n readonly statusText: string\n\n constructor(status: number, statusText: string, message?: string) {\n super(message ?? `HTTP ${status}: ${statusText}`, `HTTP_${status}`)\n this.name = 'HttpError'\n this.status = status\n this.statusText = statusText\n }\n}\n\n/** Authentication error (401) */\nexport class AuthenticationError extends HttpError {\n constructor() {\n super(401, 'Unauthorized', 'Authentication failed. 
Check your token.')\n this.name = 'AuthenticationError'\n }\n}\n\n/** Rate limit error (429) */\nexport class RateLimitError extends HttpError {\n readonly retryAfter: number | undefined\n\n constructor(retryAfter?: number) {\n super(429, 'Too Many Requests', 'Rate limit exceeded')\n this.name = 'RateLimitError'\n this.retryAfter = retryAfter\n }\n}\n","import { Readable } from 'node:stream'\nimport { pipeline } from 'node:stream/promises'\nimport type { ReadableStream as WebReadableStream } from 'node:stream/web'\nimport type { StatementResult, StatementManifest } from './types.js'\nimport { AbortError, DatabricksSqlError } from './errors.js'\n\n/**\n * Extract warehouse_id from httpPath\n * @example \"/sql/1.0/warehouses/abc123def456\" -> \"abc123def456\"\n */\nexport function extractWarehouseId(httpPath: string): string {\n const match = httpPath.match(/\\/sql\\/\\d+\\.\\d+\\/warehouses\\/([a-zA-Z0-9]+)/)\n if (!match?.[1])\n throw new Error(`Cannot extract warehouse_id from httpPath: ${httpPath}`)\n return match[1]\n}\n\n/**\n * Throw AbortError if signal is aborted\n */\nexport function throwIfAborted(signal: AbortSignal | undefined, context: string): void {\n if (signal?.aborted)\n throw new AbortError(`[${context}] Aborted`)\n}\n\n/**\n * Delay for specified milliseconds with AbortSignal support\n */\nexport async function delay(ms: number, signal?: AbortSignal): Promise<void> {\n return new Promise((resolve, reject) => {\n if (signal?.aborted)\n return reject(new AbortError('Aborted before delay'))\n\n let settled = false\n\n const onAbort = () => {\n if (settled) return\n settled = true\n clearTimeout(timer)\n reject(new AbortError('Aborted during delay'))\n }\n\n const timer = setTimeout(() => {\n if (settled) return\n settled = true\n signal?.removeEventListener('abort', onAbort)\n resolve()\n }, ms)\n\n signal?.addEventListener('abort', onAbort, { once: true })\n })\n}\n\n/**\n * Build full URL from host and path\n */\nexport function buildUrl(host: string, path: string): string {\n const base = host.startsWith('https://') ? host : `https://${host}`\n return new URL(path, base).href\n}\n\n/**\n * Validate statement result is in SUCCEEDED state with manifest.\n * Returns the manifest for convenience.\n * @throws {DatabricksSqlError} If state is not SUCCEEDED or manifest is missing\n */\nexport function validateSucceededResult(\n statementResult: StatementResult\n): StatementManifest {\n if (statementResult.status.state !== 'SUCCEEDED')\n throw new DatabricksSqlError(\n `Cannot fetch from non-succeeded statement: ${statementResult.status.state}`,\n 'INVALID_STATE',\n statementResult.statement_id\n )\n\n if (!statementResult.manifest)\n throw new DatabricksSqlError(\n 'Statement result has no manifest',\n 'MISSING_MANIFEST',\n statementResult.statement_id\n )\n\n return statementResult.manifest\n}\n\nfunction isWebReadableStream(body: unknown): body is WebReadableStream {\n return typeof (body as WebReadableStream).getReader === 'function'\n}\n\nexport async function pipeUrlToOutput(\n url: string,\n output: NodeJS.WritableStream,\n signal?: AbortSignal\n): Promise<void> {\n // Uses Node 20+ global fetch with Web streams.\n if (signal?.aborted)\n throw new AbortError('Aborted while streaming')\n\n const response = await fetch(url, signal ? 
{ signal } : undefined)\n if (!response.ok) {\n throw new Error(\n `Failed to fetch external link: ${response.status} ${response.statusText}`\n )\n }\n\n if (!response.body)\n return void output.end()\n\n const body = response.body\n const input = isWebReadableStream(body)\n ? Readable.fromWeb(body)\n : (body as NodeJS.ReadableStream)\n\n await pipeline(input, output)\n}\n","import type { AuthInfo } from './types.js'\nimport {\n HttpError,\n AuthenticationError,\n RateLimitError,\n AbortError,\n} from './errors.js'\nimport { buildUrl, delay } from './util.js'\n\nconst MAX_RETRIES = 3\nconst INITIAL_RETRY_DELAY_MS = 1000\n\ntype HttpMethod = 'GET' | 'POST' | 'DELETE'\n\ntype HttpRequestOptions = {\n method: HttpMethod\n path: string\n body?: unknown\n signal?: AbortSignal\n}\n\n/**\n * HTTP request wrapper with retry and error handling\n */\nexport async function httpRequest<T>(\n auth: AuthInfo,\n options: HttpRequestOptions\n): Promise<T> {\n const { method, path, body, signal } = options\n const url = buildUrl(auth.host, path)\n\n let lastError: Error | undefined\n let retryDelay = INITIAL_RETRY_DELAY_MS\n\n for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {\n if (signal?.aborted)\n throw new AbortError()\n\n try {\n // Build a minimal fetch init, skipping undefined values.\n const fetchInit = Object.fromEntries(\n Object.entries({\n method,\n headers: {\n Authorization: `Bearer ${auth.token}`,\n 'Content-Type': 'application/json',\n Accept: 'application/json',\n },\n body: body ? JSON.stringify(body) : undefined,\n signal,\n }).filter(([, v]) => v !== undefined)\n ) as RequestInit\n\n const response = await fetch(url, fetchInit)\n\n // Success\n if (response.ok)\n return (await response.json()) as T\n\n // Authentication error (no retry)\n if (response.status === 401)\n throw new AuthenticationError()\n\n // Rate limit\n if (response.status === 429) {\n const retryAfterHeader = response.headers.get('Retry-After')\n const retryAfter = retryAfterHeader\n ? parseInt(retryAfterHeader, 10)\n : undefined\n const error = new RateLimitError(\n isNaN(retryAfter as number) ? undefined : retryAfter\n )\n\n if (error.retryAfter && attempt < MAX_RETRIES) {\n await delay(error.retryAfter * 1000, signal)\n continue\n }\n\n throw error\n }\n\n // Server error (can retry)\n if (response.status >= 500) {\n const errorBody = await response.text().catch(() => '')\n lastError = new HttpError(response.status, response.statusText, errorBody)\n\n if (attempt < MAX_RETRIES) {\n // Exponential backoff for transient server errors.\n await delay(retryDelay, signal)\n retryDelay *= 2\n continue\n }\n }\n\n // Other client errors\n const errorBody = await response.text().catch(() => '')\n\n throw new HttpError(response.status, response.statusText, errorBody)\n\n } catch (err) {\n // Re-throw known errors\n if (\n err instanceof AbortError ||\n err instanceof AuthenticationError ||\n err instanceof HttpError\n )\n throw err\n\n // Network error\n if (err instanceof TypeError && err.message.includes('fetch')) {\n lastError = err\n if (attempt < MAX_RETRIES) {\n // Network errors are retried with backoff.\n await delay(retryDelay, signal)\n retryDelay *= 2\n continue\n }\n }\n\n throw err\n }\n }\n\n throw lastError ?? 
new Error('Request failed after retries')\n}\n","import type {\n AuthInfo,\n ExecuteStatementRequest,\n StatementResult,\n GetChunkResponse,\n QueryInfo,\n} from './types.js'\nimport { httpRequest } from './http.js'\n\n// Base path for Databricks SQL Statement Execution API.\nconst BASE_PATH = '/api/2.0/sql/statements'\n// Base path for Query History API.\nconst HISTORY_BASE_PATH = '/api/2.0/sql/history/queries'\n\n/**\n * Execute SQL statement\n * POST /api/2.0/sql/statements\n */\nexport async function postStatement(\n auth: AuthInfo,\n request: ExecuteStatementRequest,\n signal?: AbortSignal\n): Promise<StatementResult> {\n return httpRequest<StatementResult>(auth, {\n method: 'POST',\n path: BASE_PATH,\n body: request,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Get statement status and result\n * GET /api/2.0/sql/statements/{statement_id}\n */\nexport async function getStatement(\n auth: AuthInfo,\n statementId: string,\n signal?: AbortSignal\n): Promise<StatementResult> {\n return httpRequest<StatementResult>(auth, {\n method: 'GET',\n path: `${BASE_PATH}/${statementId}`,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Cancel statement execution\n * POST /api/2.0/sql/statements/{statement_id}/cancel\n */\nexport async function cancelStatement(\n auth: AuthInfo,\n statementId: string,\n signal?: AbortSignal\n): Promise<void> {\n await httpRequest<unknown>(auth, {\n method: 'POST',\n path: `${BASE_PATH}/${statementId}/cancel`,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Get result chunk by index\n * GET /api/2.0/sql/statements/{statement_id}/result/chunks/{chunk_index}\n */\nexport async function getChunk(\n auth: AuthInfo,\n statementId: string,\n chunkIndex: number,\n signal?: AbortSignal\n): Promise<GetChunkResponse> {\n return httpRequest<GetChunkResponse>(auth, {\n method: 'GET',\n path: `${BASE_PATH}/${statementId}/result/chunks/${chunkIndex}`,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Get query metrics from Query History API\n * GET /api/2.0/sql/history/queries/{query_id}?include_metrics=true\n */\nexport async function getQueryMetrics(\n auth: AuthInfo,\n queryId: string,\n signal?: AbortSignal\n): Promise<QueryInfo> {\n return httpRequest<QueryInfo>(auth, {\n method: 'GET',\n path: `${HISTORY_BASE_PATH}/${queryId}?include_metrics=true`,\n ...(signal ? { signal } : {}),\n })\n}\n","import type {\n AuthInfo,\n ExecuteStatementOptions,\n ExecuteStatementRequest,\n StatementResult,\n StatementState,\n QueryMetrics,\n} from '../types.js'\nimport { postStatement, getStatement, cancelStatement, getQueryMetrics } from '../databricks-api.js'\nimport { extractWarehouseId, throwIfAborted, delay } from '../util.js'\nimport {\n DatabricksSqlError,\n StatementCancelledError,\n AbortError,\n} from '../errors.js'\n\nconst TERMINAL_STATES = new Set<StatementState>([\n 'SUCCEEDED',\n 'FAILED',\n 'CANCELED',\n 'CLOSED',\n])\nconst POLL_INTERVAL_MS = 5000\n\nasync function fetchMetrics(\n auth: AuthInfo,\n statementId: string,\n signal?: AbortSignal\n): Promise<QueryMetrics | undefined> {\n const queryInfo = await getQueryMetrics(auth, statementId, signal)\n return queryInfo.metrics\n}\n\n/**\n * Execute SQL statement and poll until completion\n */\nexport async function executeStatement(\n query: string,\n auth: AuthInfo,\n options: ExecuteStatementOptions = {}\n): Promise<StatementResult> {\n const warehouseId = options.warehouse_id ?? 
extractWarehouseId(auth.httpPath)\n const { signal, onProgress, enableMetrics, logger } = options\n const waitTimeout = options.wait_timeout ?? (onProgress ? '0s' : '50s')\n let cancelIssued = false\n\n // Check if already aborted\n throwIfAborted(signal, 'executeStatement')\n\n // Helper to call onProgress with optional metrics\n const emitProgress = onProgress\n ? async () => result ? onProgress(\n result,\n enableMetrics ? await fetchMetrics(auth, result.statement_id, signal).catch(e => {\n logger?.error?.(`executeStatement Failed to fetch query metrics for statement ${result?.statement_id}: ${String(e)}`, { statementId: result?.statement_id })\n return undefined\n }) : undefined\n ) : undefined\n : undefined\n\n // 1. Build request (filter out undefined values)\n const request = Object.fromEntries(\n Object.entries({\n warehouse_id: warehouseId,\n statement: query,\n byte_limit: options.byte_limit,\n disposition: options.disposition,\n format: options.format,\n on_wait_timeout: options.on_wait_timeout ?? 'CONTINUE',\n wait_timeout: waitTimeout,\n row_limit: options.row_limit,\n catalog: options.catalog,\n schema: options.schema,\n parameters: options.parameters,\n }).filter(([, v]) => v !== undefined)\n ) as ExecuteStatementRequest\n\n logger?.info?.(`executeStatement Executing statement on warehouse ${warehouseId}...`)\n\n // 2. Submit statement execution request\n let result = await postStatement(auth, request, signal)\n const cancelStatementSafely = async () => {\n if (cancelIssued) return\n logger?.info?.('executeStatement Abort signal received during executeStatement.')\n cancelIssued = true\n await cancelStatement(auth, result.statement_id).catch((err) => {\n logger?.error?.('executeStatement Failed to cancel statement after abort.', err)\n })\n }\n\n if (signal?.aborted) {\n await cancelStatementSafely()\n throw new AbortError('Aborted during polling')\n }\n\n const onAbort = () => cancelStatementSafely().catch(() => { })\n\n try {\n signal?.addEventListener('abort', onAbort, { once: true })\n\n // 3. Poll until terminal state\n while (!TERMINAL_STATES.has(result.status.state)) {\n logger?.info?.(`executeStatement Statement ${result.statement_id} in state ${result.status.state}; polling for status...`)\n await delay(POLL_INTERVAL_MS, signal)\n result = await getStatement(auth, result.statement_id, signal)\n await emitProgress?.()\n }\n } catch (err) {\n if (err instanceof AbortError || signal?.aborted) {\n logger?.info?.('executeStatement Abort detected in executeStatement polling loop.')\n await cancelStatementSafely()\n throw new AbortError('Aborted during polling')\n }\n logger?.error?.(`executeStatement Error during executeStatement polling: ${String(err)}`)\n throw err\n } finally {\n logger?.info?.(`executeStatement Statement ${result.statement_id} reached final state: ${result.status.state}`)\n signal?.removeEventListener('abort', onAbort)\n }\n\n // 4. Final progress callback\n await emitProgress?.()\n\n // 5. Handle terminal states\n if (result.status.state === 'SUCCEEDED')\n return result\n\n if (result.status.state === 'CANCELED')\n throw new StatementCancelledError(result.statement_id)\n\n // FAILED or CLOSED\n throw new DatabricksSqlError(\n result.status.error?.message ?? 
'Statement execution failed',\n result.status.error?.error_code,\n result.statement_id\n )\n}\n","import type { Readable } from 'node:stream'\nimport type {\n AuthInfo,\n FetchRowsOptions,\n RowArray,\n RowObject,\n StatementResult,\n} from '../types.js'\n\nimport { parser } from 'stream-json'\nimport { streamArray } from 'stream-json/streamers/StreamArray'\n\nimport { getChunk } from '../databricks-api.js'\nimport { createRowMapper } from '../createRowMapper.js'\nimport { AbortError, DatabricksSqlError } from '../errors.js'\nimport { validateSucceededResult } from '../util.js'\nimport { fetchStream } from './fetchStream.js'\n\n/**\n * Process each row from statement result with a callback.\n * Supports INLINE results and JSON_ARRAY external links.\n */\nexport async function fetchRow(\n statementResult: StatementResult,\n auth: AuthInfo,\n options: FetchRowsOptions = {}\n): Promise<void> {\n const { signal, onEachRow, format, logger } = options\n const manifest = validateSucceededResult(statementResult)\n const statementId = statementResult.statement_id\n const logContext = { statementId, manifest, requestedFormat: format }\n // Map JSON_ARRAY rows to JSON_OBJECT when requested.\n const mapRow = createRowMapper(manifest, format, {\n ...options.encodeBigInt ? { encodeBigInt: options.encodeBigInt } : {},\n ...options.encodeTimestamp ? { encodeTimestamp: options.encodeTimestamp } : {},\n })\n\n logger?.info?.(`fetchRow fetching rows for statement ${statementId}.`, {\n ...logContext,\n resultType: statementResult.result?.external_links ? 'EXTERNAL_LINKS' : 'INLINE',\n })\n\n if (statementResult.result?.external_links) {\n if (manifest.format !== 'JSON_ARRAY') {\n logger?.error?.(`fetchRow only supports JSON_ARRAY for external_links; got ${manifest.format}.`, logContext)\n throw new DatabricksSqlError(\n `fetchRow only supports JSON_ARRAY for external_links. Received: ${manifest.format}`,\n 'UNSUPPORTED_FORMAT',\n statementId\n )\n }\n\n logger?.info?.(`fetchRow streaming external links for statement ${statementId}.`, logContext)\n const stream = fetchStream(statementResult, auth, {\n ...signal ? { signal } : {},\n ...logger ? { logger } : {},\n })\n await consumeJsonArrayStream(stream, mapRow, onEachRow, signal, logger, logContext)\n return\n }\n\n const totalChunks = manifest.total_chunk_count\n\n // Process first chunk (inline data_array)\n const dataArray = statementResult.result?.data_array\n if (dataArray) {\n logger?.info?.(`fetchRow processing inline rows for statement ${statementId}.`, {\n ...logContext,\n inlineRows: dataArray.length,\n })\n for (const row of dataArray) {\n if (signal?.aborted) throw new AbortError('Aborted')\n // Convert row to requested shape before callback.\n onEachRow?.(mapRow(row as RowArray))\n }\n }\n\n // Process additional chunks if any\n if (totalChunks > 1) {\n logger?.info?.(`fetchRow processing ${totalChunks} chunks for statement ${statementId}.`, logContext)\n for (let chunkIndex = 1; chunkIndex < totalChunks; chunkIndex++) {\n if (signal?.aborted) throw new AbortError('Aborted')\n\n const chunk = await getChunk(auth, statementId, chunkIndex, signal)\n\n // Additional chunks should also be data_array (INLINE)\n if (chunk.external_links)\n throw new DatabricksSqlError(\n 'fetchRow only supports INLINE results. 
Chunk contains external_links.',\n 'UNSUPPORTED_FORMAT',\n statementId\n )\n\n if (chunk.data_array) {\n for (const row of chunk.data_array) {\n if (signal?.aborted) throw new AbortError('Aborted')\n // Apply the same mapping for each chunked row.\n onEachRow?.(mapRow(row as RowArray))\n }\n }\n }\n }\n}\n\nasync function consumeJsonArrayStream(\n stream: Readable,\n mapRow: (row: RowArray) => RowArray | RowObject,\n onEachRow: ((row: RowArray | RowObject) => void) | undefined,\n signal: AbortSignal | undefined,\n logger: FetchRowsOptions['logger'],\n logContext: Record<string, unknown>\n): Promise<void> {\n // Stream JSON_ARRAY as individual rows to avoid buffering whole payloads.\n const jsonStream = stream.pipe(parser()).pipe(streamArray())\n\n for await (const item of jsonStream) {\n if (signal?.aborted) {\n logger?.info?.('fetchRow abort detected while streaming JSON_ARRAY rows.', {\n ...logContext,\n aborted: signal.aborted,\n })\n stream.destroy(new AbortError('Aborted'))\n throw new AbortError('Aborted')\n }\n\n const row = item.value\n if (!Array.isArray(row)) {\n throw new DatabricksSqlError(\n 'Expected JSON_ARRAY rows to be arrays',\n 'INVALID_FORMAT'\n )\n }\n\n onEachRow?.(mapRow(row))\n }\n}\n","import { DatabricksSqlError } from './errors.js'\nimport type {\n ColumnInfo,\n FetchRowsOptions,\n RowArray,\n RowObject,\n RowMapperOptions,\n StatementManifest,\n} from './types.js'\n\ntype RowMapper = (row: RowArray) => RowArray | RowObject\n\ntype TypeDescriptor = {\n typeName: string\n typeText: string\n precision?: number\n scale?: number\n fields?: StructField[]\n elementType?: TypeDescriptor\n keyType?: TypeDescriptor\n valueType?: TypeDescriptor\n}\n\ntype StructField = {\n name: string\n type: TypeDescriptor\n}\n\n// Type buckets used for value conversion decisions.\nconst INTEGER_TYPES = new Set(['TINYINT', 'SMALLINT', 'INT'])\nconst BIGINT_TYPES = new Set(['BIGINT', 'LONG'])\nconst FLOAT_TYPES = new Set(['FLOAT', 'DOUBLE'])\nconst BOOLEAN_TYPES = new Set(['BOOLEAN'])\nconst TIMESTAMP_TYPES = new Set(['TIMESTAMP', 'TIMESTAMP_NTZ', 'TIMESTAMP_LTZ'])\nconst STRING_TYPES = new Set([\n 'STRING',\n 'DATE',\n 'TIME',\n])\n\n/**\n * Create a row mapper that converts JSON_ARRAY rows into JSON_OBJECTs.\n * Datetime-like fields are preserved as strings to avoid locale/zone surprises.\n * DECIMAL values are converted to numbers to match the Databricks SDK behavior.\n */\nexport function createRowMapper(\n manifest: StatementManifest,\n format: FetchRowsOptions['format'],\n options: RowMapperOptions = {}\n): RowMapper {\n if (format !== 'JSON_OBJECT')\n return (row) => row\n\n // Precompute per-column converters for fast row mapping.\n const columnConverters = manifest.schema.columns.map((column: ColumnInfo) => ({\n name: column.name,\n convert: createColumnConverter(column, options),\n }))\n\n return (row) => {\n const mapped: RowObject = {}\n for (let index = 0; index < columnConverters.length; index++) {\n const converter = columnConverters[index]\n if (!converter)\n continue\n\n const { name, convert } = converter\n if (name)\n mapped[name] = convert(row[index])\n }\n return mapped\n }\n}\n\nfunction createColumnConverter(\n column: ColumnInfo,\n options: RowMapperOptions\n): (value: unknown) => unknown {\n const descriptor = parseColumnType(column)\n return (value) => convertValue(descriptor, value, options)\n}\n\nfunction parseColumnType(column: ColumnInfo): TypeDescriptor {\n if (column.type_name === 'STRUCT' || column.type_name === 'ARRAY' || column.type_name === 'MAP')\n 
return parseTypeDescriptor(column.type_text)\n\n if (column.type_name === 'DECIMAL')\n // Prefer precision/scale provided by the API when available.\n return createDecimalDescriptor({\n typeName: column.type_name,\n typeText: column.type_text,\n }, column.type_precision, column.type_scale)\n\n return {\n typeName: column.type_name,\n typeText: column.type_text,\n }\n}\n\nfunction parseTypeDescriptor(typeText: string): TypeDescriptor {\n const trimmed = typeText.trim()\n const typeName = getTypeName(trimmed)\n\n if (typeName === 'STRUCT')\n // STRUCT fields are parsed recursively from type_text.\n return {\n typeName,\n typeText: trimmed,\n fields: parseStructFields(trimmed),\n }\n\n if (typeName === 'ARRAY') {\n const elementTypeText = parseSingleTypeArgument(trimmed)\n const descriptor: TypeDescriptor = {\n typeName,\n typeText: trimmed,\n }\n if (elementTypeText)\n descriptor.elementType = parseTypeDescriptor(elementTypeText)\n return descriptor\n }\n\n if (typeName === 'MAP') {\n const [keyTypeText, valueTypeText] = parseTypeArguments(trimmed, 2)\n const descriptor: TypeDescriptor = {\n typeName,\n typeText: trimmed,\n }\n if (keyTypeText)\n descriptor.keyType = parseTypeDescriptor(keyTypeText)\n if (valueTypeText)\n descriptor.valueType = parseTypeDescriptor(valueTypeText)\n return descriptor\n }\n\n if (typeName === 'DECIMAL') {\n // DECIMAL(precision, scale) needs explicit parsing for integer conversion.\n const { precision, scale } = parseDecimalInfo(trimmed)\n return createDecimalDescriptor({ typeName, typeText: trimmed }, precision, scale)\n }\n\n return {\n typeName,\n typeText: trimmed,\n }\n}\n\nfunction getTypeName(typeText: string): string {\n return typeText.match(/^[A-Z_]+/)?.[0] ?? typeText\n}\n\nfunction parseDecimalInfo(typeText: string): { precision?: number; scale?: number } {\n const match = typeText.match(/DECIMAL\\((\\d+),\\s*(\\d+)\\)/)\n if (!match)\n return {}\n\n return {\n precision: Number(match[1]),\n scale: Number(match[2]),\n }\n}\n\nfunction createDecimalDescriptor(\n base: Omit<TypeDescriptor, 'precision' | 'scale'>,\n precision?: number,\n scale?: number\n): TypeDescriptor {\n const descriptor: TypeDescriptor = { ...base }\n if (precision !== undefined)\n descriptor.precision = precision\n if (scale !== undefined)\n descriptor.scale = scale\n return descriptor\n}\n\nfunction parseStructFields(typeText: string): StructField[] {\n const start = typeText.indexOf('<')\n const end = typeText.lastIndexOf('>')\n if (start === -1 || end === -1 || end <= start)\n return []\n\n const inner = typeText.slice(start + 1, end)\n // Split by commas only at the top level of nested type definitions.\n const parts = splitTopLevel(inner)\n const fields: StructField[] = []\n\n for (const part of parts) {\n const separatorIndex = part.indexOf(':')\n if (separatorIndex === -1)\n continue\n\n const name = part.slice(0, separatorIndex).trim()\n let fieldTypeText = part.slice(separatorIndex + 1).trim()\n fieldTypeText = stripNotNull(fieldTypeText)\n\n if (!name)\n continue\n\n fields.push({\n name,\n type: parseTypeDescriptor(fieldTypeText),\n })\n }\n\n return fields\n}\n\nfunction parseSingleTypeArgument(typeText: string): string | null {\n const [arg] = parseTypeArguments(typeText, 1)\n return arg ?? 
null\n}\n\nfunction parseTypeArguments(typeText: string, expectedCount: number): Array<string | undefined> {\n const start = typeText.indexOf('<')\n const end = typeText.lastIndexOf('>')\n if (start === -1 || end === -1 || end <= start)\n return []\n\n const inner = typeText.slice(start + 1, end)\n const parts = splitTopLevel(inner)\n if (parts.length < expectedCount)\n return parts\n\n return parts.slice(0, expectedCount).map((part) => stripNotNull(part.trim()))\n}\n\nfunction splitTopLevel(value: string): string[] {\n const result: string[] = []\n let current = ''\n let angleDepth = 0\n let parenDepth = 0\n\n for (const char of value) {\n if (char === '<') angleDepth++\n if (char === '>') angleDepth--\n if (char === '(') parenDepth++\n if (char === ')') parenDepth--\n\n if (char === ',' && angleDepth === 0 && parenDepth === 0) {\n result.push(current.trim())\n current = ''\n continue\n }\n\n current += char\n }\n\n if (current.trim().length > 0)\n result.push(current.trim())\n\n return result\n}\n\nfunction stripNotNull(typeText: string): string {\n let trimmed = typeText.trim()\n while (trimmed.endsWith('NOT NULL'))\n trimmed = trimmed.slice(0, -'NOT NULL'.length).trim()\n return trimmed\n}\n\nfunction convertValue(\n descriptor: TypeDescriptor,\n value: unknown,\n options: RowMapperOptions\n): unknown {\n if (value === null || value === undefined)\n return value\n\n if (descriptor.typeName === 'STRUCT' && descriptor.fields)\n // STRUCT values are JSON strings in JSON_ARRAY format.\n return convertStructValue(descriptor.fields, value, options)\n\n if (descriptor.typeName === 'ARRAY' && descriptor.elementType)\n return convertArrayValue(descriptor.elementType, value, options)\n\n if (descriptor.typeName === 'MAP' && descriptor.keyType && descriptor.valueType)\n return convertMapValue(descriptor.keyType, descriptor.valueType, value, options)\n\n if (descriptor.typeName === 'DECIMAL')\n return convertNumber(value)\n\n if (INTEGER_TYPES.has(descriptor.typeName))\n return convertNumber(value)\n\n if (BIGINT_TYPES.has(descriptor.typeName))\n return convertInteger(value, options.encodeBigInt)\n\n if (FLOAT_TYPES.has(descriptor.typeName))\n return convertNumber(value)\n\n if (BOOLEAN_TYPES.has(descriptor.typeName))\n return convertBoolean(value)\n\n if (TIMESTAMP_TYPES.has(descriptor.typeName))\n return convertTimestamp(value, options.encodeTimestamp)\n\n if (STRING_TYPES.has(descriptor.typeName))\n return value\n\n return value\n}\n\nfunction convertStructValue(\n fields: StructField[],\n value: unknown,\n options: RowMapperOptions\n): unknown {\n const raw = parseStructValue(value)\n if (!raw || typeof raw !== 'object' || Array.isArray(raw))\n return value\n\n // Apply nested field conversions based on the parsed STRUCT schema.\n const mapped: RowObject = {}\n for (const field of fields)\n mapped[field.name] = convertValue(field.type, (raw as RowObject)[field.name], options)\n\n return mapped\n}\n\nfunction convertArrayValue(\n elementType: TypeDescriptor,\n value: unknown,\n options: RowMapperOptions\n): unknown {\n const raw = parseJsonValue(value)\n if (!Array.isArray(raw))\n return value\n\n return raw.map((entry) => convertValue(elementType, entry, options))\n}\n\nfunction convertMapValue(\n keyType: TypeDescriptor,\n valueType: TypeDescriptor,\n value: unknown,\n options: RowMapperOptions\n): unknown {\n const raw = parseJsonValue(value)\n if (!raw || typeof raw !== 'object')\n return value\n\n if (Array.isArray(raw)) {\n const mapped: RowObject = {}\n for (const entry of raw) {\n if 
(!Array.isArray(entry) || entry.length < 2)\n continue\n const convertedKey = convertValue(keyType, entry[0], options)\n mapped[String(convertedKey)] = convertValue(valueType, entry[1], options)\n }\n return mapped\n }\n\n const mapped: RowObject = {}\n for (const [key, entryValue] of Object.entries(raw)) {\n const convertedKey = convertValue(keyType, key, options)\n mapped[String(convertedKey)] = convertValue(valueType, entryValue, options)\n }\n\n return mapped\n}\n\nfunction parseStructValue(value: unknown): RowObject | null {\n const parsed = parseJsonValue(value)\n if (parsed && typeof parsed === 'object' && !Array.isArray(parsed))\n return parsed as RowObject\n\n return parsed as RowObject | null\n}\n\nfunction parseJsonValue(value: unknown): unknown {\n if (typeof value === 'string') {\n try {\n return JSON.parse(value)\n } catch {\n throw new DatabricksSqlError('Failed to parse JSON value', 'INVALID_JSON')\n }\n }\n\n return value\n}\n\nfunction convertNumber(value: unknown): unknown {\n if (typeof value === 'number')\n return value\n\n if (typeof value === 'string') {\n const parsed = Number(value)\n return Number.isNaN(parsed) ? value : parsed\n }\n\n return value\n}\n\nfunction convertInteger(value: unknown, encodeBigInt?: (value: bigint) => unknown): unknown {\n if (typeof value === 'bigint')\n return encodeBigInt ? encodeBigInt(value) : value\n\n if (typeof value === 'number') {\n if (Number.isInteger(value)) {\n const bigintValue = BigInt(value)\n return encodeBigInt ? encodeBigInt(bigintValue) : bigintValue\n }\n return value\n }\n\n if (typeof value === 'string') {\n try {\n // Preserve integer semantics for BIGINT/DECIMAL(scale=0) by returning bigint.\n const bigintValue = BigInt(value)\n return encodeBigInt ? encodeBigInt(bigintValue) : bigintValue\n } catch {\n return value\n }\n }\n\n return value\n}\n\nfunction convertTimestamp(\n value: unknown,\n encodeTimestamp?: (value: string) => unknown\n): unknown {\n if (typeof value !== 'string')\n return value\n\n return encodeTimestamp ? 
encodeTimestamp(value) : value\n}\n\nfunction convertBoolean(value: unknown): unknown {\n if (typeof value === 'boolean')\n return value\n\n if (typeof value === 'string') {\n if (value === 'true') return true\n if (value === 'false') return false\n }\n\n return value\n}\n","import type { MergeFormat } from '@bitofsky/merge-streams'\nimport type {\n AuthInfo,\n ExternalLinkInfo,\n FetchStreamOptions,\n StatementManifest,\n StatementResult,\n} from '../types.js'\n\nimport { PassThrough, Readable } from 'node:stream'\n\nimport { mergeStreamsFromUrls } from '@bitofsky/merge-streams'\n\nimport { getChunk } from '../databricks-api.js'\nimport { AbortError, DatabricksSqlError } from '../errors.js'\nimport { pipeUrlToOutput, validateSucceededResult } from '../util.js'\n\n/**\n * Create a readable stream from statement result.\n * Merges all external link chunks into a single binary stream,\n * preserving the original format (JSON_ARRAY, CSV, ARROW_STREAM).\n */\nexport function fetchStream(\n statementResult: StatementResult,\n auth: AuthInfo,\n options: FetchStreamOptions = {}\n): Readable {\n const { signal, forceMerge, logger } = options\n const manifest = validateSucceededResult(statementResult)\n const format = manifest.format as MergeFormat\n const statementId = statementResult.statement_id\n const baseLog = { statementId, manifest, format, forceMerge }\n\n if (statementResult.result?.data_array) {\n logger?.error?.(\n `fetchStream only supports EXTERNAL_LINKS results for statement ${statementId}.`,\n { ...baseLog, hasDataArray: true }\n )\n throw new DatabricksSqlError(\n 'fetchStream only supports EXTERNAL_LINKS results',\n 'UNSUPPORTED_FORMAT',\n statementId\n )\n }\n\n logger?.info?.(`fetchStream creating stream for statement ${statementId}.`, {\n ...baseLog,\n hasExternalLinks: Boolean(statementResult.result?.external_links?.length),\n })\n\n // Create PassThrough as output (readable by consumer)\n const output = new PassThrough()\n\n // Handle AbortSignal\n if (signal) {\n const onAbort = () => {\n logger?.info?.(`fetchStream abort signal received while streaming statement ${statementId}.`, baseLog)\n output.destroy(new AbortError('Stream aborted'))\n }\n signal.addEventListener('abort', onAbort, { once: true })\n output.once('close', () => signal.removeEventListener('abort', onAbort))\n }\n\n // Prevent AbortError from becoming an uncaught exception when no error handler is attached.\n output.on('error', (err) => {\n if (err instanceof AbortError)\n return\n if (output.listenerCount('error') === 1)\n throw err\n })\n\n // Start async merge process\n // Errors are forwarded to the stream consumer via destroy.\n mergeChunksToStream(statementResult, auth, manifest, format, output, signal, forceMerge, logger)\n .catch((err) => {\n logger?.error?.(`fetchStream error while streaming statement ${statementId}.`, {\n ...baseLog,\n error: err,\n })\n output.destroy(err as Error)\n })\n\n return output\n}\n\n/**\n * Collect all external link URLs and merge them into output stream\n */\nasync function mergeChunksToStream(\n statementResult: StatementResult,\n auth: AuthInfo,\n manifest: StatementManifest,\n format: MergeFormat,\n output: PassThrough,\n signal?: AbortSignal,\n forceMerge?: boolean,\n logger?: FetchStreamOptions['logger']\n): Promise<void> {\n const statementId = statementResult.statement_id\n const baseLog = { statementId, manifest, format, forceMerge }\n logger?.info?.(`fetchStream collecting external links for statement ${statementId}.`, baseLog)\n const urls = await 
… (remainder of `index.cjs.map` omitted: the regenerated single-line source map is machine-generated build output whose `sourcesContent`, `mappings`, and `names` payloads simply track the `index.cjs` changes shown above)
package/dist/index.d.cts
CHANGED
@@ -210,6 +210,13 @@ type SignalOptions = {
 type RowArray = unknown[];
 /** Row data as JSON object */
 type RowObject = Record<string, unknown>;
+/** Options for row mapping */
+type RowMapperOptions = {
+    /** Hook to transform bigint values (e.g., to string or number) */
+    encodeBigInt?: (value: bigint) => unknown;
+    /** Hook to transform timestamp-like values (e.g., to Date) */
+    encodeTimestamp?: (value: string) => unknown;
+};
 /** Format for fetchRow/fetchAll */
 type FetchRowFormat = 'JSON_ARRAY' | 'JSON_OBJECT';
 /** Options for fetchStream */
@@ -221,12 +228,16 @@ type FetchStreamOptions = SignalOptions & {
 };
 /** Options for fetchRow */
 type FetchRowsOptions = SignalOptions & {
-    /** Callback for each row */
-    onEachRow?: (row: RowArray | RowObject) => void;
     /** Row format (default: JSON_ARRAY) */
     format?: FetchRowFormat;
     /** Optional logger for lifecycle events */
     logger?: Logger;
+    /** Callback for each row */
+    onEachRow?: (row: RowArray | RowObject) => void;
+    /** Customize bigint conversion for JSON_OBJECT rows */
+    encodeBigInt?: RowMapperOptions['encodeBigInt'];
+    /** Customize TIMESTAMP* conversion for JSON_OBJECT rows */
+    encodeTimestamp?: RowMapperOptions['encodeTimestamp'];
 };
 /** Options for fetchAll */
 type FetchAllOptions = SignalOptions & {
@@ -234,6 +245,10 @@ type FetchAllOptions = SignalOptions & {
     format?: FetchRowFormat;
     /** Optional logger for lifecycle events */
     logger?: Logger;
+    /** Customize bigint conversion for JSON_OBJECT rows */
+    encodeBigInt?: RowMapperOptions['encodeBigInt'];
+    /** Customize TIMESTAMP* conversion for JSON_OBJECT rows */
+    encodeTimestamp?: RowMapperOptions['encodeTimestamp'];
 };
 /** Result from mergeStreamToExternalLink callback */
 type MergeExternalLinksResult = {
@@ -346,4 +361,4 @@ declare function fetchStream(statementResult: StatementResult, auth: AuthInfo, o
  */
 declare function mergeExternalLinks(statementResult: StatementResult, auth: AuthInfo, options: MergeExternalLinksOptions): Promise<StatementResult>;
 
-export { AbortError, type AuthInfo, AuthenticationError, type ChunkInfo, type ColumnInfo, DatabricksSqlError, type ExecuteStatementOptions, type ExecuteStatementRequest, type ExternalLinkInfo, type ExternalLinksResultData, type FetchAllOptions, type FetchRowFormat, type FetchRowsOptions, type FetchStreamOptions, type GetChunkResponse, HttpError, type InlineResultData, type Logger, type MergeExternalLinksOptions, type MergeExternalLinksResult, type QueryInfo, type QueryMetrics, RateLimitError, type ResultData, type RowArray, type RowObject, type SignalOptions, StatementCancelledError, type StatementManifest, type StatementParameter, type StatementResult, type StatementState, type StatementStatus, executeStatement, fetchAll, fetchRow, fetchStream, mergeExternalLinks };
+export { AbortError, type AuthInfo, AuthenticationError, type ChunkInfo, type ColumnInfo, DatabricksSqlError, type ExecuteStatementOptions, type ExecuteStatementRequest, type ExternalLinkInfo, type ExternalLinksResultData, type FetchAllOptions, type FetchRowFormat, type FetchRowsOptions, type FetchStreamOptions, type GetChunkResponse, HttpError, type InlineResultData, type Logger, type MergeExternalLinksOptions, type MergeExternalLinksResult, type QueryInfo, type QueryMetrics, RateLimitError, type ResultData, type RowArray, type RowMapperOptions, type RowObject, type SignalOptions, StatementCancelledError, type StatementManifest, type StatementParameter, type StatementResult, type StatementState, type StatementStatus, executeStatement, fetchAll, fetchRow, fetchStream, mergeExternalLinks };
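The two hooks added to the declarations above are easiest to read through a consumer-side sketch. This is illustrative only: the warehouse host, path, token, and query are placeholders, and the `AuthInfo` field names (`host`, `httpPath`, `token`) are inferred from the source shipped in this diff.

```ts
import { executeStatement, fetchAll } from '@bitofsky/databricks-sql'

// Placeholder credentials; AuthInfo fields inferred from this package's source.
const auth = {
  host: 'dbc-example.cloud.databricks.com',
  httpPath: '/sql/1.0/warehouses/abc123def456',
  token: process.env.DATABRICKS_TOKEN ?? '',
}

const result = await executeStatement('SELECT id, created_at FROM events', auth)

// JSON_OBJECT mapping with the new hooks: BIGINT/LONG -> string, TIMESTAMP* -> Date.
const rows = await fetchAll(result, auth, {
  format: 'JSON_OBJECT',
  encodeBigInt: (value) => value.toString(),
  encodeTimestamp: (value) => new Date(value),
})
console.log(rows[0])
```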
package/dist/index.d.ts
CHANGED
@@ -210,6 +210,13 @@ type SignalOptions = {
 type RowArray = unknown[];
 /** Row data as JSON object */
 type RowObject = Record<string, unknown>;
+/** Options for row mapping */
+type RowMapperOptions = {
+    /** Hook to transform bigint values (e.g., to string or number) */
+    encodeBigInt?: (value: bigint) => unknown;
+    /** Hook to transform timestamp-like values (e.g., to Date) */
+    encodeTimestamp?: (value: string) => unknown;
+};
 /** Format for fetchRow/fetchAll */
 type FetchRowFormat = 'JSON_ARRAY' | 'JSON_OBJECT';
 /** Options for fetchStream */
@@ -221,12 +228,16 @@ type FetchStreamOptions = SignalOptions & {
 };
 /** Options for fetchRow */
 type FetchRowsOptions = SignalOptions & {
-    /** Callback for each row */
-    onEachRow?: (row: RowArray | RowObject) => void;
     /** Row format (default: JSON_ARRAY) */
     format?: FetchRowFormat;
     /** Optional logger for lifecycle events */
     logger?: Logger;
+    /** Callback for each row */
+    onEachRow?: (row: RowArray | RowObject) => void;
+    /** Customize bigint conversion for JSON_OBJECT rows */
+    encodeBigInt?: RowMapperOptions['encodeBigInt'];
+    /** Customize TIMESTAMP* conversion for JSON_OBJECT rows */
+    encodeTimestamp?: RowMapperOptions['encodeTimestamp'];
 };
 /** Options for fetchAll */
 type FetchAllOptions = SignalOptions & {
@@ -234,6 +245,10 @@ type FetchAllOptions = SignalOptions & {
     format?: FetchRowFormat;
     /** Optional logger for lifecycle events */
     logger?: Logger;
+    /** Customize bigint conversion for JSON_OBJECT rows */
+    encodeBigInt?: RowMapperOptions['encodeBigInt'];
+    /** Customize TIMESTAMP* conversion for JSON_OBJECT rows */
+    encodeTimestamp?: RowMapperOptions['encodeTimestamp'];
 };
 /** Result from mergeStreamToExternalLink callback */
 type MergeExternalLinksResult = {
@@ -346,4 +361,4 @@ declare function fetchStream(statementResult: StatementResult, auth: AuthInfo, o
  */
 declare function mergeExternalLinks(statementResult: StatementResult, auth: AuthInfo, options: MergeExternalLinksOptions): Promise<StatementResult>;
 
-export { AbortError, type AuthInfo, AuthenticationError, type ChunkInfo, type ColumnInfo, DatabricksSqlError, type ExecuteStatementOptions, type ExecuteStatementRequest, type ExternalLinkInfo, type ExternalLinksResultData, type FetchAllOptions, type FetchRowFormat, type FetchRowsOptions, type FetchStreamOptions, type GetChunkResponse, HttpError, type InlineResultData, type Logger, type MergeExternalLinksOptions, type MergeExternalLinksResult, type QueryInfo, type QueryMetrics, RateLimitError, type ResultData, type RowArray, type RowObject, type SignalOptions, StatementCancelledError, type StatementManifest, type StatementParameter, type StatementResult, type StatementState, type StatementStatus, executeStatement, fetchAll, fetchRow, fetchStream, mergeExternalLinks };
+export { AbortError, type AuthInfo, AuthenticationError, type ChunkInfo, type ColumnInfo, DatabricksSqlError, type ExecuteStatementOptions, type ExecuteStatementRequest, type ExternalLinkInfo, type ExternalLinksResultData, type FetchAllOptions, type FetchRowFormat, type FetchRowsOptions, type FetchStreamOptions, type GetChunkResponse, HttpError, type InlineResultData, type Logger, type MergeExternalLinksOptions, type MergeExternalLinksResult, type QueryInfo, type QueryMetrics, RateLimitError, type ResultData, type RowArray, type RowMapperOptions, type RowObject, type SignalOptions, StatementCancelledError, type StatementManifest, type StatementParameter, type StatementResult, type StatementState, type StatementStatus, executeStatement, fetchAll, fetchRow, fetchStream, mergeExternalLinks };
package/dist/index.js
CHANGED
@@ -236,12 +236,8 @@ var TERMINAL_STATES = /* @__PURE__ */ new Set([
 ]);
 var POLL_INTERVAL_MS = 5e3;
 async function fetchMetrics(auth, statementId, signal) {
-  try {
-    const queryInfo = await getQueryMetrics(auth, statementId, signal);
-    return queryInfo.metrics;
-  } catch {
-    return void 0;
-  }
+  const queryInfo = await getQueryMetrics(auth, statementId, signal);
+  return queryInfo.metrics;
 }
 async function executeStatement(query, auth, options = {}) {
   const warehouseId = options.warehouse_id ?? extractWarehouseId(auth.httpPath);
@@ -249,7 +245,13 @@ async function executeStatement(query, auth, options = {}) {
   const waitTimeout = options.wait_timeout ?? (onProgress ? "0s" : "50s");
   let cancelIssued = false;
   throwIfAborted(signal, "executeStatement");
-  const emitProgress = onProgress ? async (
+  const emitProgress = onProgress ? async () => result ? onProgress(
+    result,
+    enableMetrics ? await fetchMetrics(auth, result.statement_id, signal).catch((e) => {
+      logger?.error?.(`executeStatement Failed to fetch query metrics for statement ${result?.statement_id}: ${String(e)}`, { statementId: result?.statement_id });
+      return void 0;
+    }) : void 0
+  ) : void 0 : void 0;
   const request = Object.fromEntries(
     Object.entries({
       warehouse_id: warehouseId,
@@ -285,9 +287,9 @@ async function executeStatement(query, auth, options = {}) {
     signal?.addEventListener("abort", onAbort, { once: true });
     while (!TERMINAL_STATES.has(result.status.state)) {
       logger?.info?.(`executeStatement Statement ${result.statement_id} in state ${result.status.state}; polling for status...`);
-      await emitProgress?.(result.statement_id);
       await delay(POLL_INTERVAL_MS, signal);
       result = await getStatement(auth, result.statement_id, signal);
+      await emitProgress?.();
     }
   } catch (err) {
     if (err instanceof AbortError || signal?.aborted) {
@@ -301,7 +303,7 @@ async function executeStatement(query, auth, options = {}) {
     logger?.info?.(`executeStatement Statement ${result.statement_id} reached final state: ${result.status.state}`);
     signal?.removeEventListener("abort", onAbort);
   }
-  await emitProgress?.(result.statement_id);
+  await emitProgress?.();
   if (result.status.state === "SUCCEEDED")
     return result;
   if (result.status.state === "CANCELED")
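As the three hunks above show, `emitProgress` now takes no arguments, reads the shared `result` binding directly, and surfaces metrics failures through the logger instead of silently discarding them inside `fetchMetrics`. A hedged sketch of what a caller sees (option names come from this diff; the query is a placeholder and `auth` is as in the earlier sketch):

```ts
// Progress polling sketch; onProgress receives the latest statement and optional metrics.
const statement = await executeStatement('SELECT count(*) FROM events', auth, {
  enableMetrics: true, // metrics fetch errors are now logged via `logger`, not thrown
  onProgress: (current, metrics) => {
    console.log(`state=${current.status.state}`, metrics ?? '(metrics unavailable)')
  },
})
```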
@@ -322,20 +324,18 @@ var INTEGER_TYPES = /* @__PURE__ */ new Set(["TINYINT", "SMALLINT", "INT"]);
 var BIGINT_TYPES = /* @__PURE__ */ new Set(["BIGINT", "LONG"]);
 var FLOAT_TYPES = /* @__PURE__ */ new Set(["FLOAT", "DOUBLE"]);
 var BOOLEAN_TYPES = /* @__PURE__ */ new Set(["BOOLEAN"]);
+var TIMESTAMP_TYPES = /* @__PURE__ */ new Set(["TIMESTAMP", "TIMESTAMP_NTZ", "TIMESTAMP_LTZ"]);
 var STRING_TYPES = /* @__PURE__ */ new Set([
   "STRING",
   "DATE",
-  "TIMESTAMP",
-  "TIMESTAMP_NTZ",
-  "TIMESTAMP_LTZ",
   "TIME"
 ]);
-function createRowMapper(manifest, format) {
+function createRowMapper(manifest, format, options = {}) {
   if (format !== "JSON_OBJECT")
     return (row) => row;
   const columnConverters = manifest.schema.columns.map((column) => ({
     name: column.name,
-    convert: createColumnConverter(column)
+    convert: createColumnConverter(column, options)
   }));
   return (row) => {
     const mapped = {};
@@ -350,9 +350,9 @@ function createRowMapper(manifest, format) {
     return mapped;
   };
 }
-function createColumnConverter(column) {
+function createColumnConverter(column, options) {
   const descriptor = parseColumnType(column);
-  return (value) => convertValue(descriptor, value);
+  return (value) => convertValue(descriptor, value, options);
 }
 function parseColumnType(column) {
   if (column.type_name === "STRUCT" || column.type_name === "ARRAY" || column.type_name === "MAP")
@@ -493,45 +493,47 @@ function stripNotNull(typeText) {
   trimmed = trimmed.slice(0, -"NOT NULL".length).trim();
   return trimmed;
 }
-function convertValue(descriptor, value) {
+function convertValue(descriptor, value, options) {
   if (value === null || value === void 0)
     return value;
   if (descriptor.typeName === "STRUCT" && descriptor.fields)
-    return convertStructValue(descriptor.fields, value);
+    return convertStructValue(descriptor.fields, value, options);
   if (descriptor.typeName === "ARRAY" && descriptor.elementType)
-    return convertArrayValue(descriptor.elementType, value);
+    return convertArrayValue(descriptor.elementType, value, options);
   if (descriptor.typeName === "MAP" && descriptor.keyType && descriptor.valueType)
-    return convertMapValue(descriptor.keyType, descriptor.valueType, value);
+    return convertMapValue(descriptor.keyType, descriptor.valueType, value, options);
   if (descriptor.typeName === "DECIMAL")
     return convertNumber(value);
   if (INTEGER_TYPES.has(descriptor.typeName))
     return convertNumber(value);
   if (BIGINT_TYPES.has(descriptor.typeName))
-    return convertInteger(value);
+    return convertInteger(value, options.encodeBigInt);
   if (FLOAT_TYPES.has(descriptor.typeName))
     return convertNumber(value);
   if (BOOLEAN_TYPES.has(descriptor.typeName))
     return convertBoolean(value);
+  if (TIMESTAMP_TYPES.has(descriptor.typeName))
+    return convertTimestamp(value, options.encodeTimestamp);
   if (STRING_TYPES.has(descriptor.typeName))
     return value;
   return value;
 }
-function convertStructValue(fields, value) {
+function convertStructValue(fields, value, options) {
   const raw = parseStructValue(value);
   if (!raw || typeof raw !== "object" || Array.isArray(raw))
     return value;
   const mapped = {};
   for (const field of fields)
-    mapped[field.name] = convertValue(field.type, raw[field.name]);
+    mapped[field.name] = convertValue(field.type, raw[field.name], options);
   return mapped;
 }
-function convertArrayValue(elementType, value) {
+function convertArrayValue(elementType, value, options) {
   const raw = parseJsonValue(value);
   if (!Array.isArray(raw))
     return value;
-  return raw.map((entry) => convertValue(elementType, entry));
+  return raw.map((entry) => convertValue(elementType, entry, options));
 }
-function convertMapValue(keyType, valueType, value) {
+function convertMapValue(keyType, valueType, value, options) {
   const raw = parseJsonValue(value);
   if (!raw || typeof raw !== "object")
     return value;
@@ -540,15 +542,15 @@ function convertMapValue(keyType, valueType, value) {
     for (const entry of raw) {
       if (!Array.isArray(entry) || entry.length < 2)
         continue;
-      const convertedKey = convertValue(keyType, entry[0]);
-      mapped2[String(convertedKey)] = convertValue(valueType, entry[1]);
+      const convertedKey = convertValue(keyType, entry[0], options);
+      mapped2[String(convertedKey)] = convertValue(valueType, entry[1], options);
     }
     return mapped2;
   }
   const mapped = {};
   for (const [key, entryValue] of Object.entries(raw)) {
-    const convertedKey = convertValue(keyType, key);
-    mapped[String(convertedKey)] = convertValue(valueType, entryValue);
+    const convertedKey = convertValue(keyType, key, options);
+    mapped[String(convertedKey)] = convertValue(valueType, entryValue, options);
   }
   return mapped;
 }
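Because `options` is threaded through `convertStructValue`, `convertArrayValue`, and `convertMapValue` in the two hunks above, the hooks also fire for BIGINT and TIMESTAMP* values nested inside complex columns. A sketch under an assumed schema (the `ids ARRAY<BIGINT>` column is hypothetical; `result` and `auth` are from the earlier sketch):

```ts
const rows = await fetchAll(result, auth, {
  format: 'JSON_OBJECT',
  encodeBigInt: (value) => value.toString(), // also invoked per ARRAY/STRUCT/MAP element
})
// e.g. rows[0].ids -> ['9007199254740993', '42'] rather than bigint values
```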
@@ -577,23 +579,31 @@ function convertNumber(value) {
   }
   return value;
 }
-function convertInteger(value) {
+function convertInteger(value, encodeBigInt) {
   if (typeof value === "bigint")
-    return value;
+    return encodeBigInt ? encodeBigInt(value) : value;
   if (typeof value === "number") {
-    if (Number.isInteger(value))
-      return BigInt(value);
+    if (Number.isInteger(value)) {
+      const bigintValue = BigInt(value);
+      return encodeBigInt ? encodeBigInt(bigintValue) : bigintValue;
+    }
     return value;
   }
   if (typeof value === "string") {
     try {
-      return BigInt(value);
+      const bigintValue = BigInt(value);
+      return encodeBigInt ? encodeBigInt(bigintValue) : bigintValue;
     } catch {
       return value;
     }
   }
   return value;
 }
+function convertTimestamp(value, encodeTimestamp) {
+  if (typeof value !== "string")
+    return value;
+  return encodeTimestamp ? encodeTimestamp(value) : value;
+}
 function convertBoolean(value) {
   if (typeof value === "boolean")
     return value;
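For clarity: the default path of `convertInteger` above normalizes bigint, integral numbers, and numeric strings through `BigInt(...)`, falling back to the raw value when parsing fails, while `convertTimestamp` leaves strings untouched unless a hook is supplied. A standalone restatement (the `asBigInt` helper is invented here for illustration and is not part of the package):

```ts
// Mirrors convertInteger's default behavior when no encodeBigInt hook is set.
function asBigInt(value: string | number | bigint): unknown {
  if (typeof value === 'bigint') return value
  if (typeof value === 'number' && !Number.isInteger(value)) return value
  try {
    return BigInt(value) // numeric strings and integral numbers normalize to bigint
  } catch {
    return value // unparseable input falls through unchanged
  }
}

console.log(asBigInt('9007199254740993')) // 9007199254740993n
console.log(asBigInt('not-a-number'))     // 'not-a-number'
```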
@@ -723,7 +733,10 @@ async function fetchRow(statementResult, auth, options = {}) {
   const manifest = validateSucceededResult(statementResult);
   const statementId = statementResult.statement_id;
   const logContext = { statementId, manifest, requestedFormat: format };
-  const mapRow = createRowMapper(manifest, format);
+  const mapRow = createRowMapper(manifest, format, {
+    ...options.encodeBigInt ? { encodeBigInt: options.encodeBigInt } : {},
+    ...options.encodeTimestamp ? { encodeTimestamp: options.encodeTimestamp } : {}
+  });
   logger?.info?.(`fetchRow fetching rows for statement ${statementId}.`, {
     ...logContext,
     resultType: statementResult.result?.external_links ? "EXTERNAL_LINKS" : "INLINE"
@@ -819,6 +832,10 @@ async function fetchAll(statementResult, auth, options = {}) {
     fetchOptions.format = options.format;
   if (options.logger)
     fetchOptions.logger = options.logger;
+  if (options.encodeBigInt)
+    fetchOptions.encodeBigInt = options.encodeBigInt;
+  if (options.encodeTimestamp)
+    fetchOptions.encodeTimestamp = options.encodeTimestamp;
   await fetchRow(statementResult, auth, fetchOptions);
   logger?.info?.(`fetchAll fetched ${rows.length} rows for statement ${statementId}.`, {
     ...logContext,
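One practical reason to set `encodeBigInt`, now that both `fetchAll` and `fetchRow` forward it: `JSON.stringify` throws on native `bigint` values, so converting them to strings keeps mapped rows serializable. Sketch (reusing `result` and `auth` from the earlier example):

```ts
import { fetchRow } from '@bitofsky/databricks-sql'

await fetchRow(result, auth, {
  format: 'JSON_OBJECT',
  encodeBigInt: (value) => value.toString(), // avoids "Do not know how to serialize a BigInt"
  onEachRow: (row) => process.stdout.write(JSON.stringify(row) + '\n'),
})
```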
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
(single-line source map regenerated; the old and new `sourcesContent`/`mappings` payloads are machine-generated build output and are omitted here, since the corresponding source changes are visible in the `index.js` diff above)
typeText\n}\n\nfunction parseDecimalInfo(typeText: string): { precision?: number; scale?: number } {\n const match = typeText.match(/DECIMAL\\((\\d+),\\s*(\\d+)\\)/)\n if (!match)\n return {}\n\n return {\n precision: Number(match[1]),\n scale: Number(match[2]),\n }\n}\n\nfunction createDecimalDescriptor(\n base: Omit<TypeDescriptor, 'precision' | 'scale'>,\n precision?: number,\n scale?: number\n): TypeDescriptor {\n const descriptor: TypeDescriptor = { ...base }\n if (precision !== undefined)\n descriptor.precision = precision\n if (scale !== undefined)\n descriptor.scale = scale\n return descriptor\n}\n\nfunction parseStructFields(typeText: string): StructField[] {\n const start = typeText.indexOf('<')\n const end = typeText.lastIndexOf('>')\n if (start === -1 || end === -1 || end <= start)\n return []\n\n const inner = typeText.slice(start + 1, end)\n // Split by commas only at the top level of nested type definitions.\n const parts = splitTopLevel(inner)\n const fields: StructField[] = []\n\n for (const part of parts) {\n const separatorIndex = part.indexOf(':')\n if (separatorIndex === -1)\n continue\n\n const name = part.slice(0, separatorIndex).trim()\n let fieldTypeText = part.slice(separatorIndex + 1).trim()\n fieldTypeText = stripNotNull(fieldTypeText)\n\n if (!name)\n continue\n\n fields.push({\n name,\n type: parseTypeDescriptor(fieldTypeText),\n })\n }\n\n return fields\n}\n\nfunction parseSingleTypeArgument(typeText: string): string | null {\n const [arg] = parseTypeArguments(typeText, 1)\n return arg ?? null\n}\n\nfunction parseTypeArguments(typeText: string, expectedCount: number): Array<string | undefined> {\n const start = typeText.indexOf('<')\n const end = typeText.lastIndexOf('>')\n if (start === -1 || end === -1 || end <= start)\n return []\n\n const inner = typeText.slice(start + 1, end)\n const parts = splitTopLevel(inner)\n if (parts.length < expectedCount)\n return parts\n\n return parts.slice(0, expectedCount).map((part) => stripNotNull(part.trim()))\n}\n\nfunction splitTopLevel(value: string): string[] {\n const result: string[] = []\n let current = ''\n let angleDepth = 0\n let parenDepth = 0\n\n for (const char of value) {\n if (char === '<') angleDepth++\n if (char === '>') angleDepth--\n if (char === '(') parenDepth++\n if (char === ')') parenDepth--\n\n if (char === ',' && angleDepth === 0 && parenDepth === 0) {\n result.push(current.trim())\n current = ''\n continue\n }\n\n current += char\n }\n\n if (current.trim().length > 0)\n result.push(current.trim())\n\n return result\n}\n\nfunction stripNotNull(typeText: string): string {\n let trimmed = typeText.trim()\n while (trimmed.endsWith('NOT NULL'))\n trimmed = trimmed.slice(0, -'NOT NULL'.length).trim()\n return trimmed\n}\n\nfunction convertValue(descriptor: TypeDescriptor, value: unknown): unknown {\n if (value === null || value === undefined)\n return value\n\n if (descriptor.typeName === 'STRUCT' && descriptor.fields)\n // STRUCT values are JSON strings in JSON_ARRAY format.\n return convertStructValue(descriptor.fields, value)\n\n if (descriptor.typeName === 'ARRAY' && descriptor.elementType)\n return convertArrayValue(descriptor.elementType, value)\n\n if (descriptor.typeName === 'MAP' && descriptor.keyType && descriptor.valueType)\n return convertMapValue(descriptor.keyType, descriptor.valueType, value)\n\n if (descriptor.typeName === 'DECIMAL')\n return convertNumber(value)\n\n if (INTEGER_TYPES.has(descriptor.typeName))\n return convertNumber(value)\n\n if 
(BIGINT_TYPES.has(descriptor.typeName))\n return convertInteger(value)\n\n if (FLOAT_TYPES.has(descriptor.typeName))\n return convertNumber(value)\n\n if (BOOLEAN_TYPES.has(descriptor.typeName))\n return convertBoolean(value)\n\n if (STRING_TYPES.has(descriptor.typeName))\n return value\n\n return value\n}\n\nfunction convertStructValue(fields: StructField[], value: unknown): unknown {\n const raw = parseStructValue(value)\n if (!raw || typeof raw !== 'object' || Array.isArray(raw))\n return value\n\n // Apply nested field conversions based on the parsed STRUCT schema.\n const mapped: RowObject = {}\n for (const field of fields)\n mapped[field.name] = convertValue(field.type, (raw as RowObject)[field.name])\n\n return mapped\n}\n\nfunction convertArrayValue(elementType: TypeDescriptor, value: unknown): unknown {\n const raw = parseJsonValue(value)\n if (!Array.isArray(raw))\n return value\n\n return raw.map((entry) => convertValue(elementType, entry))\n}\n\nfunction convertMapValue(\n keyType: TypeDescriptor,\n valueType: TypeDescriptor,\n value: unknown\n): unknown {\n const raw = parseJsonValue(value)\n if (!raw || typeof raw !== 'object')\n return value\n\n if (Array.isArray(raw)) {\n const mapped: RowObject = {}\n for (const entry of raw) {\n if (!Array.isArray(entry) || entry.length < 2)\n continue\n const convertedKey = convertValue(keyType, entry[0])\n mapped[String(convertedKey)] = convertValue(valueType, entry[1])\n }\n return mapped\n }\n\n const mapped: RowObject = {}\n for (const [key, entryValue] of Object.entries(raw)) {\n const convertedKey = convertValue(keyType, key)\n mapped[String(convertedKey)] = convertValue(valueType, entryValue)\n }\n\n return mapped\n}\n\nfunction parseStructValue(value: unknown): RowObject | null {\n const parsed = parseJsonValue(value)\n if (parsed && typeof parsed === 'object' && !Array.isArray(parsed))\n return parsed as RowObject\n\n return parsed as RowObject | null\n}\n\nfunction parseJsonValue(value: unknown): unknown {\n if (typeof value === 'string') {\n try {\n return JSON.parse(value)\n } catch {\n throw new DatabricksSqlError('Failed to parse JSON value', 'INVALID_JSON')\n }\n }\n\n return value\n}\n\nfunction convertNumber(value: unknown): unknown {\n if (typeof value === 'number')\n return value\n\n if (typeof value === 'string') {\n const parsed = Number(value)\n return Number.isNaN(parsed) ? 
value : parsed\n }\n\n return value\n}\n\nfunction convertInteger(value: unknown): unknown {\n if (typeof value === 'bigint')\n return value\n\n if (typeof value === 'number') {\n if (Number.isInteger(value))\n return BigInt(value)\n return value\n }\n\n if (typeof value === 'string') {\n try {\n // Preserve integer semantics for BIGINT/DECIMAL(scale=0) by returning bigint.\n return BigInt(value)\n } catch {\n return value\n }\n }\n\n return value\n}\n\nfunction convertBoolean(value: unknown): unknown {\n if (typeof value === 'boolean')\n return value\n\n if (typeof value === 'string') {\n if (value === 'true') return true\n if (value === 'false') return false\n }\n\n return value\n}\n","import type { MergeFormat } from '@bitofsky/merge-streams'\nimport type {\n AuthInfo,\n ExternalLinkInfo,\n FetchStreamOptions,\n StatementManifest,\n StatementResult,\n} from '../types.js'\n\nimport { PassThrough, Readable } from 'node:stream'\n\nimport { mergeStreamsFromUrls } from '@bitofsky/merge-streams'\n\nimport { getChunk } from '../databricks-api.js'\nimport { AbortError, DatabricksSqlError } from '../errors.js'\nimport { pipeUrlToOutput, validateSucceededResult } from '../util.js'\n\n/**\n * Create a readable stream from statement result.\n * Merges all external link chunks into a single binary stream,\n * preserving the original format (JSON_ARRAY, CSV, ARROW_STREAM).\n */\nexport function fetchStream(\n statementResult: StatementResult,\n auth: AuthInfo,\n options: FetchStreamOptions = {}\n): Readable {\n const { signal, forceMerge, logger } = options\n const manifest = validateSucceededResult(statementResult)\n const format = manifest.format as MergeFormat\n const statementId = statementResult.statement_id\n const baseLog = { statementId, manifest, format, forceMerge }\n\n if (statementResult.result?.data_array) {\n logger?.error?.(\n `fetchStream only supports EXTERNAL_LINKS results for statement ${statementId}.`,\n { ...baseLog, hasDataArray: true }\n )\n throw new DatabricksSqlError(\n 'fetchStream only supports EXTERNAL_LINKS results',\n 'UNSUPPORTED_FORMAT',\n statementId\n )\n }\n\n logger?.info?.(`fetchStream creating stream for statement ${statementId}.`, {\n ...baseLog,\n hasExternalLinks: Boolean(statementResult.result?.external_links?.length),\n })\n\n // Create PassThrough as output (readable by consumer)\n const output = new PassThrough()\n\n // Handle AbortSignal\n if (signal) {\n const onAbort = () => {\n logger?.info?.(`fetchStream abort signal received while streaming statement ${statementId}.`, baseLog)\n output.destroy(new AbortError('Stream aborted'))\n }\n signal.addEventListener('abort', onAbort, { once: true })\n output.once('close', () => signal.removeEventListener('abort', onAbort))\n }\n\n // Prevent AbortError from becoming an uncaught exception when no error handler is attached.\n output.on('error', (err) => {\n if (err instanceof AbortError)\n return\n if (output.listenerCount('error') === 1)\n throw err\n })\n\n // Start async merge process\n // Errors are forwarded to the stream consumer via destroy.\n mergeChunksToStream(statementResult, auth, manifest, format, output, signal, forceMerge, logger)\n .catch((err) => {\n logger?.error?.(`fetchStream error while streaming statement ${statementId}.`, {\n ...baseLog,\n error: err,\n })\n output.destroy(err as Error)\n })\n\n return output\n}\n\n/**\n * Collect all external link URLs and merge them into output stream\n */\nasync function mergeChunksToStream(\n statementResult: StatementResult,\n auth: AuthInfo,\n 
manifest: StatementManifest,\n format: MergeFormat,\n output: PassThrough,\n signal?: AbortSignal,\n forceMerge?: boolean,\n logger?: FetchStreamOptions['logger']\n): Promise<void> {\n const statementId = statementResult.statement_id\n const baseLog = { statementId, manifest, format, forceMerge }\n logger?.info?.(`fetchStream collecting external links for statement ${statementId}.`, baseLog)\n const urls = await collectExternalUrls(statementResult, auth, manifest, signal)\n\n // No external links - close the stream\n if (urls.length === 0) {\n logger?.info?.(`fetchStream no external links found for statement ${statementId}.`, baseLog)\n return void output.end()\n }\n\n // Single URL - pipe directly to output unless forcing merge\n if (urls.length === 1 && !forceMerge) {\n logger?.info?.(`fetchStream piping single external link for statement ${statementId}.`, {\n ...baseLog,\n urlCount: urls.length,\n })\n // Avoid merge-streams overhead for a single URL unless forced.\n return pipeUrlToOutput(urls[0]!, output, signal)\n }\n\n // Merge all URLs using merge-streams\n logger?.info?.(`fetchStream merging ${urls.length} external links for statement ${statementId}.`, {\n ...baseLog,\n urlCount: urls.length,\n })\n return mergeStreamsFromUrls(format, signal ? { urls, output, signal } : { urls, output })\n}\n\nasync function collectExternalUrls(\n statementResult: StatementResult,\n auth: AuthInfo,\n manifest: StatementManifest,\n signal?: AbortSignal\n): Promise<string[]> {\n const chunkUrls = new Map<number, string[]>()\n\n addChunkLinks(chunkUrls, statementResult.result?.external_links)\n\n if (!manifest.total_chunk_count)\n return flattenChunkUrls(chunkUrls)\n\n for (let i = 0; i < manifest.total_chunk_count; i++) {\n if (chunkUrls.has(i))\n continue\n if (signal?.aborted)\n throw new AbortError('Aborted while collecting URLs')\n\n // Chunk metadata contains external link URLs when results are chunked.\n const chunkData = await getChunk(auth, statementResult.statement_id, i, signal)\n addChunkLinks(chunkUrls, chunkData.external_links)\n }\n\n return flattenChunkUrls(chunkUrls)\n}\n\nfunction addChunkLinks(\n chunkUrls: Map<number, string[]>,\n externalLinks?: ExternalLinkInfo[]\n): void {\n if (!externalLinks)\n return\n\n for (const link of externalLinks) {\n if (!isNonEmptyString(link.external_link))\n continue\n\n const existing = chunkUrls.get(link.chunk_index)\n if (existing) {\n existing.push(link.external_link)\n } else {\n chunkUrls.set(link.chunk_index, [link.external_link])\n }\n }\n}\n\nfunction flattenChunkUrls(chunkUrls: Map<number, string[]>): string[] {\n if (chunkUrls.size === 0)\n return []\n\n const sorted = [...chunkUrls.entries()].sort(([a], [b]) => a - b)\n const urls: string[] = []\n for (const [, links] of sorted) {\n urls.push(...links)\n }\n return urls\n}\n\nfunction isNonEmptyString(value: unknown): value is string {\n return typeof value === 'string' && value.length > 0\n}\n","import type {\n AuthInfo,\n FetchAllOptions,\n FetchRowsOptions,\n RowArray,\n RowObject,\n StatementResult,\n} from '../types.js'\n\nimport { fetchRow } from './fetchRow.js'\n\n/**\n * Fetch all rows from statement result as an array.\n * Only supports INLINE results or JSON_ARRAY external links.\n */\nexport async function fetchAll(\n statementResult: StatementResult,\n auth: AuthInfo,\n options: FetchAllOptions = {}\n): Promise<Array<RowArray | RowObject>> {\n const rows: Array<RowArray | RowObject> = []\n const statementId = statementResult.statement_id\n const manifest = 
statementResult.manifest\n const logContext = { statementId, manifest, requestedFormat: options.format }\n const fetchOptions: FetchRowsOptions = {\n // Collect rows as they are streamed in.\n onEachRow: (row) => {\n rows.push(row)\n },\n }\n const { logger } = options\n\n logger?.info?.(`fetchAll fetching all rows for statement ${statementId}.`, logContext)\n\n if (options.signal)\n fetchOptions.signal = options.signal\n\n if (options.format)\n fetchOptions.format = options.format\n\n if (options.logger)\n fetchOptions.logger = options.logger\n\n await fetchRow(statementResult, auth, fetchOptions)\n logger?.info?.(`fetchAll fetched ${rows.length} rows for statement ${statementId}.`, {\n ...logContext,\n rowCount: rows.length,\n resolvedFormat: options.format ?? manifest?.format,\n })\n return rows\n}\n","import type {\n AuthInfo,\n MergeExternalLinksOptions,\n StatementResult,\n} from '../types.js'\n\nimport { validateSucceededResult } from '../util.js'\nimport { fetchStream } from './fetchStream.js'\n\n/**\n * Merge external links from StatementResult into a single stream,\n * upload it via the provided callback, and return updated StatementResult.\n *\n * If the result is not external links (inline data or empty), returns the original as-is.\n */\nexport async function mergeExternalLinks(\n statementResult: StatementResult,\n auth: AuthInfo,\n options: MergeExternalLinksOptions\n): Promise<StatementResult> {\n const { signal, mergeStreamToExternalLink, forceMerge, logger } = options\n const statementId = statementResult.statement_id\n const manifest = statementResult.manifest\n const externalLinks = statementResult.result?.external_links\n const totalChunks = manifest?.total_chunk_count ?? 0\n const logContext = { statementId, manifest, totalChunks, forceMerge }\n\n // If not external links, return original as-is\n if (!externalLinks) {\n logger?.info?.(`mergeExternalLinks no external links to merge for statement ${statementId}.`, logContext)\n return statementResult\n }\n\n if (!forceMerge) {\n const isSingleChunk = totalChunks <= 1\n\n // Skip merging when a single external link already exists unless forced.\n if (isSingleChunk) {\n logger?.info?.(`mergeExternalLinks skipping merge for single external link in statement ${statementId}.`, {\n ...logContext,\n totalChunks,\n })\n return statementResult\n }\n }\n\n // Get merged stream via fetchStream\n logger?.info?.(`mergeExternalLinks merging external links for statement ${statementId}.`, logContext)\n const stream = fetchStream(statementResult, auth, {\n ...signal ? { signal } : {},\n ...forceMerge !== undefined ? { forceMerge } : {},\n ...logger ? { logger } : {},\n })\n\n // Upload via callback\n logger?.info?.(`mergeExternalLinks uploading merged external link for statement ${statementId}.`, logContext)\n const uploadResult = await mergeStreamToExternalLink(stream)\n logger?.info?.(`mergeExternalLinks uploaded merged external link for statement ${statementId}.`, {\n ...logContext,\n byteCount: uploadResult.byte_count,\n expiration: uploadResult.expiration,\n })\n\n // Build updated StatementResult\n // Manifest must exist for external links; validate before constructing new result.\n const validatedManifest = validateSucceededResult(statementResult)\n const totalRowCount = validatedManifest.total_row_count ?? 
0\n\n return {\n statement_id: statementResult.statement_id,\n status: statementResult.status,\n manifest: {\n ...validatedManifest,\n total_chunk_count: 1,\n total_byte_count: uploadResult.byte_count,\n chunks: [\n {\n chunk_index: 0,\n row_offset: 0,\n row_count: totalRowCount,\n byte_count: uploadResult.byte_count,\n },\n ],\n },\n result: {\n external_links: [\n {\n chunk_index: 0,\n row_offset: 0,\n row_count: totalRowCount,\n byte_count: uploadResult.byte_count,\n external_link: uploadResult.externalLink,\n expiration: uploadResult.expiration,\n },\n ],\n },\n }\n}\n"],"mappings":";AACO,IAAM,qBAAN,MAAM,4BAA2B,MAAM;AAAA,EACnC;AAAA,EACA;AAAA,EAET,YAAY,SAAiB,MAAe,aAAsB;AAChE,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,OAAO,QAAQ;AACpB,SAAK,cAAc;AACnB,UAAM,oBAAoB,MAAM,mBAAkB;AAAA,EACpD;AACF;AAGO,IAAM,0BAAN,cAAsC,mBAAmB;AAAA,EAC9D,YAAY,aAAqB;AAC/B,UAAM,aAAa,WAAW,kBAAkB,aAAa,WAAW;AACxE,SAAK,OAAO;AAAA,EACd;AACF;AAGO,IAAM,aAAN,cAAyB,mBAAmB;AAAA,EACjD,YAAY,UAAkB,yBAAyB;AACrD,UAAM,SAAS,SAAS;AACxB,SAAK,OAAO;AAAA,EACd;AACF;AAGO,IAAM,YAAN,cAAwB,mBAAmB;AAAA,EACvC;AAAA,EACA;AAAA,EAET,YAAY,QAAgB,YAAoB,SAAkB;AAChE,UAAM,WAAW,QAAQ,MAAM,KAAK,UAAU,IAAI,QAAQ,MAAM,EAAE;AAClE,SAAK,OAAO;AACZ,SAAK,SAAS;AACd,SAAK,aAAa;AAAA,EACpB;AACF;AAGO,IAAM,sBAAN,cAAkC,UAAU;AAAA,EACjD,cAAc;AACZ,UAAM,KAAK,gBAAgB,0CAA0C;AACrE,SAAK,OAAO;AAAA,EACd;AACF;AAGO,IAAM,iBAAN,cAA6B,UAAU;AAAA,EACnC;AAAA,EAET,YAAY,YAAqB;AAC/B,UAAM,KAAK,qBAAqB,qBAAqB;AACrD,SAAK,OAAO;AACZ,SAAK,aAAa;AAAA,EACpB;AACF;;;AC5DA,SAAS,gBAAgB;AACzB,SAAS,gBAAgB;AASlB,SAAS,mBAAmB,UAA0B;AAC3D,QAAM,QAAQ,SAAS,MAAM,6CAA6C;AAC1E,MAAI,CAAC,QAAQ,CAAC;AACZ,UAAM,IAAI,MAAM,8CAA8C,QAAQ,EAAE;AAC1E,SAAO,MAAM,CAAC;AAChB;AAKO,SAAS,eAAe,QAAiC,SAAuB;AACrF,MAAI,QAAQ;AACV,UAAM,IAAI,WAAW,IAAI,OAAO,WAAW;AAC/C;AAKA,eAAsB,MAAM,IAAY,QAAqC;AAC3E,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,QAAI,QAAQ;AACV,aAAO,OAAO,IAAI,WAAW,sBAAsB,CAAC;AAEtD,QAAI,UAAU;AAEd,UAAM,UAAU,MAAM;AACpB,UAAI,QAAS;AACb,gBAAU;AACV,mBAAa,KAAK;AAClB,aAAO,IAAI,WAAW,sBAAsB,CAAC;AAAA,IAC/C;AAEA,UAAM,QAAQ,WAAW,MAAM;AAC7B,UAAI,QAAS;AACb,gBAAU;AACV,cAAQ,oBAAoB,SAAS,OAAO;AAC5C,cAAQ;AAAA,IACV,GAAG,EAAE;AAEL,YAAQ,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,EAC3D,CAAC;AACH;AAKO,SAAS,SAAS,MAAc,MAAsB;AAC3D,QAAM,OAAO,KAAK,WAAW,UAAU,IAAI,OAAO,WAAW,IAAI;AACjE,SAAO,IAAI,IAAI,MAAM,IAAI,EAAE;AAC7B;AAOO,SAAS,wBACd,iBACmB;AACnB,MAAI,gBAAgB,OAAO,UAAU;AACnC,UAAM,IAAI;AAAA,MACR,8CAA8C,gBAAgB,OAAO,KAAK;AAAA,MAC1E;AAAA,MACA,gBAAgB;AAAA,IAClB;AAEF,MAAI,CAAC,gBAAgB;AACnB,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA,gBAAgB;AAAA,IAClB;AAEF,SAAO,gBAAgB;AACzB;AAEA,SAAS,oBAAoB,MAA0C;AACrE,SAAO,OAAQ,KAA2B,cAAc;AAC1D;AAEA,eAAsB,gBACpB,KACA,QACA,QACe;AAEf,MAAI,QAAQ;AACV,UAAM,IAAI,WAAW,yBAAyB;AAEhD,QAAM,WAAW,MAAM,MAAM,KAAK,SAAS,EAAE,OAAO,IAAI,MAAS;AACjE,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI;AAAA,MACR,kCAAkC,SAAS,MAAM,IAAI,SAAS,UAAU;AAAA,IAC1E;AAAA,EACF;AAEA,MAAI,CAAC,SAAS;AACZ,WAAO,KAAK,OAAO,IAAI;AAEzB,QAAM,OAAO,SAAS;AACtB,QAAM,QAAQ,oBAAoB,IAAI,IAClC,SAAS,QAAQ,IAAI,IACpB;AAEL,QAAM,SAAS,OAAO,MAAM;AAC9B;;;AC1GA,IAAM,cAAc;AACpB,IAAM,yBAAyB;AAc/B,eAAsB,YACpB,MACA,SACY;AACZ,QAAM,EAAE,QAAQ,MAAM,MAAM,OAAO,IAAI;AACvC,QAAM,MAAM,SAAS,KAAK,MAAM,IAAI;AAEpC,MAAI;AACJ,MAAI,aAAa;AAEjB,WAAS,UAAU,GAAG,WAAW,aAAa,WAAW;AACvD,QAAI,QAAQ;AACV,YAAM,IAAI,WAAW;AAEvB,QAAI;AAEF,YAAM,YAAY,OAAO;AAAA,QACvB,OAAO,QAAQ;AAAA,UACb;AAAA,UACA,SAAS;AAAA,YACP,eAAe,UAAU,KAAK,KAAK;AAAA,YACnC,gBAAgB;AAAA,YAChB,QAAQ;AAAA,UACV;AAAA,UACA,MAAM,OAAO,KAAK,UAAU,IAAI,IAAI;AAAA,UACpC;AAAA,QACF,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS;AAAA,MACtC;AAEA,YAAM,WAAW,MAAM,MAAM,KAAK,SAAS;AAG3C,UAAI,SAAS;AACX,eAAQ,MAAM,SAAS,KAAK;AAG9B,UA
AI,SAAS,WAAW;AACtB,cAAM,IAAI,oBAAoB;AAGhC,UAAI,SAAS,WAAW,KAAK;AAC3B,cAAM,mBAAmB,SAAS,QAAQ,IAAI,aAAa;AAC3D,cAAM,aAAa,mBACf,SAAS,kBAAkB,EAAE,IAC7B;AACJ,cAAM,QAAQ,IAAI;AAAA,UAChB,MAAM,UAAoB,IAAI,SAAY;AAAA,QAC5C;AAEA,YAAI,MAAM,cAAc,UAAU,aAAa;AAC7C,gBAAM,MAAM,MAAM,aAAa,KAAM,MAAM;AAC3C;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAGA,UAAI,SAAS,UAAU,KAAK;AAC1B,cAAMA,aAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACtD,oBAAY,IAAI,UAAU,SAAS,QAAQ,SAAS,YAAYA,UAAS;AAEzE,YAAI,UAAU,aAAa;AAEzB,gBAAM,MAAM,YAAY,MAAM;AAC9B,wBAAc;AACd;AAAA,QACF;AAAA,MACF;AAGA,YAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AAEtD,YAAM,IAAI,UAAU,SAAS,QAAQ,SAAS,YAAY,SAAS;AAAA,IAErE,SAAS,KAAK;AAEZ,UACE,eAAe,cACf,eAAe,uBACf,eAAe;AAEf,cAAM;AAGR,UAAI,eAAe,aAAa,IAAI,QAAQ,SAAS,OAAO,GAAG;AAC7D,oBAAY;AACZ,YAAI,UAAU,aAAa;AAEzB,gBAAM,MAAM,YAAY,MAAM;AAC9B,wBAAc;AACd;AAAA,QACF;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AAEA,QAAM,aAAa,IAAI,MAAM,8BAA8B;AAC7D;;;AClHA,IAAM,YAAY;AAElB,IAAM,oBAAoB;AAM1B,eAAsB,cACpB,MACA,SACA,QAC0B;AAC1B,SAAO,YAA6B,MAAM;AAAA,IACxC,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,IACN,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC7B,CAAC;AACH;AAMA,eAAsB,aACpB,MACA,aACA,QAC0B;AAC1B,SAAO,YAA6B,MAAM;AAAA,IACxC,QAAQ;AAAA,IACR,MAAM,GAAG,SAAS,IAAI,WAAW;AAAA,IACjC,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC7B,CAAC;AACH;AAMA,eAAsB,gBACpB,MACA,aACA,QACe;AACf,QAAM,YAAqB,MAAM;AAAA,IAC/B,QAAQ;AAAA,IACR,MAAM,GAAG,SAAS,IAAI,WAAW;AAAA,IACjC,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC7B,CAAC;AACH;AAMA,eAAsB,SACpB,MACA,aACA,YACA,QAC2B;AAC3B,SAAO,YAA8B,MAAM;AAAA,IACzC,QAAQ;AAAA,IACR,MAAM,GAAG,SAAS,IAAI,WAAW,kBAAkB,UAAU;AAAA,IAC7D,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC7B,CAAC;AACH;AAMA,eAAsB,gBACpB,MACA,SACA,QACoB;AACpB,SAAO,YAAuB,MAAM;AAAA,IAClC,QAAQ;AAAA,IACR,MAAM,GAAG,iBAAiB,IAAI,OAAO;AAAA,IACrC,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC7B,CAAC;AACH;;;AC9EA,IAAM,kBAAkB,oBAAI,IAAoB;AAAA,EAC9C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AACD,IAAM,mBAAmB;AAEzB,eAAe,aACb,MACA,aACA,QACmC;AACnC,MAAI;AACF,UAAM,YAAY,MAAM,gBAAgB,MAAM,aAAa,MAAM;AACjE,WAAO,UAAU;AAAA,EACnB,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,iBACpB,OACA,MACA,UAAmC,CAAC,GACV;AAC1B,QAAM,cAAc,QAAQ,gBAAgB,mBAAmB,KAAK,QAAQ;AAC5E,QAAM,EAAE,QAAQ,YAAY,eAAe,OAAO,IAAI;AACtD,QAAM,cAAc,QAAQ,iBAAiB,aAAa,OAAO;AACjE,MAAI,eAAe;AAGnB,iBAAe,QAAQ,kBAAkB;AAGzC,QAAM,eAAe,aACjB,OAAO,gBAAwB,WAAW,QAAQ,gBAAgB,MAAM,aAAa,MAAM,aAAa,MAAM,IAAI,MAAS,IAC3H;AAGJ,QAAM,UAAU,OAAO;AAAA,IACrB,OAAO,QAAQ;AAAA,MACb,cAAc;AAAA,MACd,WAAW;AAAA,MACX,YAAY,QAAQ;AAAA,MACpB,aAAa,QAAQ;AAAA,MACrB,QAAQ,QAAQ;AAAA,MAChB,iBAAiB,QAAQ,mBAAmB;AAAA,MAC5C,cAAc;AAAA,MACd,WAAW,QAAQ;AAAA,MACnB,SAAS,QAAQ;AAAA,MACjB,QAAQ,QAAQ;AAAA,MAChB,YAAY,QAAQ;AAAA,IACtB,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS;AAAA,EACtC;AAEA,UAAQ,OAAO,qDAAqD,WAAW,KAAK;AAGpF,MAAI,SAAS,MAAM,cAAc,MAAM,SAAS,MAAM;AACtD,QAAM,wBAAwB,YAAY;AACxC,QAAI,aAAc;AAClB,YAAQ,OAAO,iEAAiE;AAChF,mBAAe;AACf,UAAM,gBAAgB,MAAM,OAAO,YAAY,EAAE,MAAM,CAAC,QAAQ;AAC9D,cAAQ,QAAQ,4DAA4D,GAAG;AAAA,IACjF,CAAC;AAAA,EACH;AAEA,MAAI,QAAQ,SAAS;AACnB,UAAM,sBAAsB;AAC5B,UAAM,IAAI,WAAW,wBAAwB;AAAA,EAC/C;AAEA,QAAM,UAAU,MAAM,sBAAsB,EAAE,MAAM,MAAM;AAAA,EAAE,CAAC;AAE7D,MAAI;AACF,YAAQ,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAGzD,WAAO,CAAC,gBAAgB,IAAI,OAAO,OAAO,KAAK,GAAG;AAChD,cAAQ,OAAO,8BAA8B,OAAO,YAAY,aAAa,OAAO,OAAO,KAAK,yBAAyB;AACzH,YAAM,eAAe,OAAO,YAAY;AACxC,YAAM,MAAM,kBAAkB,MAAM;AACpC,eAAS,MAAM,aAAa,MAAM,OAAO,cAAc,MAAM;AAAA,IAC/D;AAAA,EACF,SAAS,KAAK;AACZ,QAAI,eAAe,cAAc,QAAQ,SAAS;AAChD,cAAQ,OAAO,mEAAmE;AAClF,YAAM,sBAAsB;AAC5B,YAAM,IAAI,WAAW,wBAAwB;AAAA,IAC/C;AACA,YAAQ,QAAQ,2DAA2D,OAAO,GAAG,CAAC,EAAE;AACxF,UAAM;AAAA,EACR,UAAE;AACA,YAAQ,OAAO,8BAA8B,OAAO,YAAY,yBAAyB,
OAAO,OAAO,KAAK,EAAE;AAC9G,YAAQ,oBAAoB,SAAS,OAAO;AAAA,EAC9C;AAGA,QAAM,eAAe,OAAO,YAAY;AAGxC,MAAI,OAAO,OAAO,UAAU;AAC1B,WAAO;AAET,MAAI,OAAO,OAAO,UAAU;AAC1B,UAAM,IAAI,wBAAwB,OAAO,YAAY;AAGvD,QAAM,IAAI;AAAA,IACR,OAAO,OAAO,OAAO,WAAW;AAAA,IAChC,OAAO,OAAO,OAAO;AAAA,IACrB,OAAO;AAAA,EACT;AACF;;;AC9HA,SAAS,cAAc;AACvB,SAAS,mBAAmB;;;ACkB5B,IAAM,gBAAgB,oBAAI,IAAI,CAAC,WAAW,YAAY,KAAK,CAAC;AAC5D,IAAM,eAAe,oBAAI,IAAI,CAAC,UAAU,MAAM,CAAC;AAC/C,IAAM,cAAc,oBAAI,IAAI,CAAC,SAAS,QAAQ,CAAC;AAC/C,IAAM,gBAAgB,oBAAI,IAAI,CAAC,SAAS,CAAC;AACzC,IAAM,eAAe,oBAAI,IAAI;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAOM,SAAS,gBACd,UACA,QACW;AACX,MAAI,WAAW;AACb,WAAO,CAAC,QAAQ;AAGlB,QAAM,mBAAmB,SAAS,OAAO,QAAQ,IAAI,CAAC,YAAwB;AAAA,IAC5E,MAAM,OAAO;AAAA,IACb,SAAS,sBAAsB,MAAM;AAAA,EACvC,EAAE;AAEF,SAAO,CAAC,QAAQ;AACd,UAAM,SAAoB,CAAC;AAC3B,aAAS,QAAQ,GAAG,QAAQ,iBAAiB,QAAQ,SAAS;AAC5D,YAAM,YAAY,iBAAiB,KAAK;AACxC,UAAI,CAAC;AACH;AAEF,YAAM,EAAE,MAAM,QAAQ,IAAI;AAC1B,UAAI;AACF,eAAO,IAAI,IAAI,QAAQ,IAAI,KAAK,CAAC;AAAA,IACrC;AACA,WAAO;AAAA,EACT;AACF;AAEA,SAAS,sBAAsB,QAAiD;AAC9E,QAAM,aAAa,gBAAgB,MAAM;AACzC,SAAO,CAAC,UAAU,aAAa,YAAY,KAAK;AAClD;AAEA,SAAS,gBAAgB,QAAoC;AAC3D,MAAI,OAAO,cAAc,YAAY,OAAO,cAAc,WAAW,OAAO,cAAc;AACxF,WAAO,oBAAoB,OAAO,SAAS;AAE7C,MAAI,OAAO,cAAc;AAEvB,WAAO,wBAAwB;AAAA,MAC7B,UAAU,OAAO;AAAA,MACjB,UAAU,OAAO;AAAA,IACnB,GAAG,OAAO,gBAAgB,OAAO,UAAU;AAE7C,SAAO;AAAA,IACL,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,EACnB;AACF;AAEA,SAAS,oBAAoB,UAAkC;AAC7D,QAAM,UAAU,SAAS,KAAK;AAC9B,QAAM,WAAW,YAAY,OAAO;AAEpC,MAAI,aAAa;AAEf,WAAO;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV,QAAQ,kBAAkB,OAAO;AAAA,IACnC;AAEF,MAAI,aAAa,SAAS;AACxB,UAAM,kBAAkB,wBAAwB,OAAO;AACvD,UAAM,aAA6B;AAAA,MACjC;AAAA,MACA,UAAU;AAAA,IACZ;AACA,QAAI;AACF,iBAAW,cAAc,oBAAoB,eAAe;AAC9D,WAAO;AAAA,EACT;AAEA,MAAI,aAAa,OAAO;AACtB,UAAM,CAAC,aAAa,aAAa,IAAI,mBAAmB,SAAS,CAAC;AAClE,UAAM,aAA6B;AAAA,MACjC;AAAA,MACA,UAAU;AAAA,IACZ;AACA,QAAI;AACF,iBAAW,UAAU,oBAAoB,WAAW;AACtD,QAAI;AACF,iBAAW,YAAY,oBAAoB,aAAa;AAC1D,WAAO;AAAA,EACT;AAEA,MAAI,aAAa,WAAW;AAE1B,UAAM,EAAE,WAAW,MAAM,IAAI,iBAAiB,OAAO;AACrD,WAAO,wBAAwB,EAAE,UAAU,UAAU,QAAQ,GAAG,WAAW,KAAK;AAAA,EAClF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,UAAU;AAAA,EACZ;AACF;AAEA,SAAS,YAAY,UAA0B;AAC7C,SAAO,SAAS,MAAM,UAAU,IAAI,CAAC,KAAK;AAC5C;AAEA,SAAS,iBAAiB,UAA0D;AAClF,QAAM,QAAQ,SAAS,MAAM,2BAA2B;AACxD,MAAI,CAAC;AACH,WAAO,CAAC;AAEV,SAAO;AAAA,IACL,WAAW,OAAO,MAAM,CAAC,CAAC;AAAA,IAC1B,OAAO,OAAO,MAAM,CAAC,CAAC;AAAA,EACxB;AACF;AAEA,SAAS,wBACP,MACA,WACA,OACgB;AAChB,QAAM,aAA6B,EAAE,GAAG,KAAK;AAC7C,MAAI,cAAc;AAChB,eAAW,YAAY;AACzB,MAAI,UAAU;AACZ,eAAW,QAAQ;AACrB,SAAO;AACT;AAEA,SAAS,kBAAkB,UAAiC;AAC1D,QAAM,QAAQ,SAAS,QAAQ,GAAG;AAClC,QAAM,MAAM,SAAS,YAAY,GAAG;AACpC,MAAI,UAAU,MAAM,QAAQ,MAAM,OAAO;AACvC,WAAO,CAAC;AAEV,QAAM,QAAQ,SAAS,MAAM,QAAQ,GAAG,GAAG;AAE3C,QAAM,QAAQ,cAAc,KAAK;AACjC,QAAM,SAAwB,CAAC;AAE/B,aAAW,QAAQ,OAAO;AACxB,UAAM,iBAAiB,KAAK,QAAQ,GAAG;AACvC,QAAI,mBAAmB;AACrB;AAEF,UAAM,OAAO,KAAK,MAAM,GAAG,cAAc,EAAE,KAAK;AAChD,QAAI,gBAAgB,KAAK,MAAM,iBAAiB,CAAC,EAAE,KAAK;AACxD,oBAAgB,aAAa,aAAa;AAE1C,QAAI,CAAC;AACH;AAEF,WAAO,KAAK;AAAA,MACV;AAAA,MACA,MAAM,oBAAoB,aAAa;AAAA,IACzC,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAEA,SAAS,wBAAwB,UAAiC;AAChE,QAAM,CAAC,GAAG,IAAI,mBAAmB,UAAU,CAAC;AAC5C,SAAO,OAAO;AAChB;AAEA,SAAS,mBAAmB,UAAkB,eAAkD;AAC9F,QAAM,QAAQ,SAAS,QAAQ,GAAG;AAClC,QAAM,MAAM,SAAS,YAAY,GAAG;AACpC,MAAI,UAAU,MAAM,QAAQ,MAAM,OAAO;AACvC,WAAO,CAAC;AAEV,QAAM,QAAQ,SAAS,MAAM,QAAQ,GAAG,GAAG;AAC3C,QAAM,QAAQ,cAAc,KAAK;AACjC,MAAI,MAAM,SAAS;AACjB,WAAO;AAET,SAAO,MAAM,MAAM,GAAG,aAAa,EAAE,IAAI,CAAC,SAAS,aAAa,KAAK,KAAK,CAAC,CAAC;AAC9E;AAEA,SAAS,cAAc,OAAyB;AAC9C,QAAM,SAAmB,CAAC;AAC1B,MAAI,UAAU;AACd,MAAI,aAAa;AA
CjB,MAAI,aAAa;AAEjB,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,IAAK;AAClB,QAAI,SAAS,IAAK;AAClB,QAAI,SAAS,IAAK;AAClB,QAAI,SAAS,IAAK;AAElB,QAAI,SAAS,OAAO,eAAe,KAAK,eAAe,GAAG;AACxD,aAAO,KAAK,QAAQ,KAAK,CAAC;AAC1B,gBAAU;AACV;AAAA,IACF;AAEA,eAAW;AAAA,EACb;AAEA,MAAI,QAAQ,KAAK,EAAE,SAAS;AAC1B,WAAO,KAAK,QAAQ,KAAK,CAAC;AAE5B,SAAO;AACT;AAEA,SAAS,aAAa,UAA0B;AAC9C,MAAI,UAAU,SAAS,KAAK;AAC5B,SAAO,QAAQ,SAAS,UAAU;AAChC,cAAU,QAAQ,MAAM,GAAG,CAAC,WAAW,MAAM,EAAE,KAAK;AACtD,SAAO;AACT;AAEA,SAAS,aAAa,YAA4B,OAAyB;AACzE,MAAI,UAAU,QAAQ,UAAU;AAC9B,WAAO;AAET,MAAI,WAAW,aAAa,YAAY,WAAW;AAEjD,WAAO,mBAAmB,WAAW,QAAQ,KAAK;AAEpD,MAAI,WAAW,aAAa,WAAW,WAAW;AAChD,WAAO,kBAAkB,WAAW,aAAa,KAAK;AAExD,MAAI,WAAW,aAAa,SAAS,WAAW,WAAW,WAAW;AACpE,WAAO,gBAAgB,WAAW,SAAS,WAAW,WAAW,KAAK;AAExE,MAAI,WAAW,aAAa;AAC1B,WAAO,cAAc,KAAK;AAE5B,MAAI,cAAc,IAAI,WAAW,QAAQ;AACvC,WAAO,cAAc,KAAK;AAE5B,MAAI,aAAa,IAAI,WAAW,QAAQ;AACtC,WAAO,eAAe,KAAK;AAE7B,MAAI,YAAY,IAAI,WAAW,QAAQ;AACrC,WAAO,cAAc,KAAK;AAE5B,MAAI,cAAc,IAAI,WAAW,QAAQ;AACvC,WAAO,eAAe,KAAK;AAE7B,MAAI,aAAa,IAAI,WAAW,QAAQ;AACtC,WAAO;AAET,SAAO;AACT;AAEA,SAAS,mBAAmB,QAAuB,OAAyB;AAC1E,QAAM,MAAM,iBAAiB,KAAK;AAClC,MAAI,CAAC,OAAO,OAAO,QAAQ,YAAY,MAAM,QAAQ,GAAG;AACtD,WAAO;AAGT,QAAM,SAAoB,CAAC;AAC3B,aAAW,SAAS;AAClB,WAAO,MAAM,IAAI,IAAI,aAAa,MAAM,MAAO,IAAkB,MAAM,IAAI,CAAC;AAE9E,SAAO;AACT;AAEA,SAAS,kBAAkB,aAA6B,OAAyB;AAC/E,QAAM,MAAM,eAAe,KAAK;AAChC,MAAI,CAAC,MAAM,QAAQ,GAAG;AACpB,WAAO;AAET,SAAO,IAAI,IAAI,CAAC,UAAU,aAAa,aAAa,KAAK,CAAC;AAC5D;AAEA,SAAS,gBACP,SACA,WACA,OACS;AACT,QAAM,MAAM,eAAe,KAAK;AAChC,MAAI,CAAC,OAAO,OAAO,QAAQ;AACzB,WAAO;AAET,MAAI,MAAM,QAAQ,GAAG,GAAG;AACtB,UAAMC,UAAoB,CAAC;AAC3B,eAAW,SAAS,KAAK;AACvB,UAAI,CAAC,MAAM,QAAQ,KAAK,KAAK,MAAM,SAAS;AAC1C;AACF,YAAM,eAAe,aAAa,SAAS,MAAM,CAAC,CAAC;AACnD,MAAAA,QAAO,OAAO,YAAY,CAAC,IAAI,aAAa,WAAW,MAAM,CAAC,CAAC;AAAA,IACjE;AACA,WAAOA;AAAA,EACT;AAEA,QAAM,SAAoB,CAAC;AAC3B,aAAW,CAAC,KAAK,UAAU,KAAK,OAAO,QAAQ,GAAG,GAAG;AACnD,UAAM,eAAe,aAAa,SAAS,GAAG;AAC9C,WAAO,OAAO,YAAY,CAAC,IAAI,aAAa,WAAW,UAAU;AAAA,EACnE;AAEA,SAAO;AACT;AAEA,SAAS,iBAAiB,OAAkC;AAC1D,QAAM,SAAS,eAAe,KAAK;AACnC,MAAI,UAAU,OAAO,WAAW,YAAY,CAAC,MAAM,QAAQ,MAAM;AAC/D,WAAO;AAET,SAAO;AACT;AAEA,SAAS,eAAe,OAAyB;AAC/C,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI;AACF,aAAO,KAAK,MAAM,KAAK;AAAA,IACzB,QAAQ;AACN,YAAM,IAAI,mBAAmB,8BAA8B,cAAc;AAAA,IAC3E;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,cAAc,OAAyB;AAC9C,MAAI,OAAO,UAAU;AACnB,WAAO;AAET,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,SAAS,OAAO,KAAK;AAC3B,WAAO,OAAO,MAAM,MAAM,IAAI,QAAQ;AAAA,EACxC;AAEA,SAAO;AACT;AAEA,SAAS,eAAe,OAAyB;AAC/C,MAAI,OAAO,UAAU;AACnB,WAAO;AAET,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI,OAAO,UAAU,KAAK;AACxB,aAAO,OAAO,KAAK;AACrB,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI;AAEF,aAAO,OAAO,KAAK;AAAA,IACrB,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,eAAe,OAAyB;AAC/C,MAAI,OAAO,UAAU;AACnB,WAAO;AAET,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI,UAAU,OAAQ,QAAO;AAC7B,QAAI,UAAU,QAAS,QAAO;AAAA,EAChC;AAEA,SAAO;AACT;;;AC7YA,SAAS,mBAA6B;AAEtC,SAAS,4BAA4B;AAW9B,SAAS,YACd,iBACA,MACA,UAA8B,CAAC,GACrB;AACV,QAAM,EAAE,QAAQ,YAAY,OAAO,IAAI;AACvC,QAAM,WAAW,wBAAwB,eAAe;AACxD,QAAM,SAAS,SAAS;AACxB,QAAM,cAAc,gBAAgB;AACpC,QAAM,UAAU,EAAE,aAAa,UAAU,QAAQ,WAAW;AAE5D,MAAI,gBAAgB,QAAQ,YAAY;AACtC,YAAQ;AAAA,MACN,kEAAkE,WAAW;AAAA,MAC7E,EAAE,GAAG,SAAS,cAAc,KAAK;AAAA,IACnC;AACA,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,UAAQ,OAAO,6CAA6C,WAAW,KAAK;AAAA,IAC1E,GAAG;AAAA,IACH,kBAAkB,QAAQ,gBAAgB,QAAQ,gBAAgB,MAAM;AAAA,EAC1E,CAAC;AAGD,QAAM,SAAS,IAAI,YAAY;AAG/B,MAAI,QAAQ;AACV,UAAM,UAAU,MAAM;AACpB,cAAQ,OAAO,+DAA+D,WAAW,KAAK,OAAO;AACrG,aAAO,QAAQ,IAAI,WAAW,gBAAgB,CAAC;AAAA,IACjD;AACA,WAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AACxD,WAAO
,KAAK,SAAS,MAAM,OAAO,oBAAoB,SAAS,OAAO,CAAC;AAAA,EACzE;AAGA,SAAO,GAAG,SAAS,CAAC,QAAQ;AAC1B,QAAI,eAAe;AACjB;AACF,QAAI,OAAO,cAAc,OAAO,MAAM;AACpC,YAAM;AAAA,EACV,CAAC;AAID,sBAAoB,iBAAiB,MAAM,UAAU,QAAQ,QAAQ,QAAQ,YAAY,MAAM,EAC5F,MAAM,CAAC,QAAQ;AACd,YAAQ,QAAQ,+CAA+C,WAAW,KAAK;AAAA,MAC7E,GAAG;AAAA,MACH,OAAO;AAAA,IACT,CAAC;AACD,WAAO,QAAQ,GAAY;AAAA,EAC7B,CAAC;AAEH,SAAO;AACT;AAKA,eAAe,oBACb,iBACA,MACA,UACA,QACA,QACA,QACA,YACA,QACe;AACf,QAAM,cAAc,gBAAgB;AACpC,QAAM,UAAU,EAAE,aAAa,UAAU,QAAQ,WAAW;AAC5D,UAAQ,OAAO,uDAAuD,WAAW,KAAK,OAAO;AAC7F,QAAM,OAAO,MAAM,oBAAoB,iBAAiB,MAAM,UAAU,MAAM;AAG9E,MAAI,KAAK,WAAW,GAAG;AACrB,YAAQ,OAAO,qDAAqD,WAAW,KAAK,OAAO;AAC3F,WAAO,KAAK,OAAO,IAAI;AAAA,EACzB;AAGA,MAAI,KAAK,WAAW,KAAK,CAAC,YAAY;AACpC,YAAQ,OAAO,yDAAyD,WAAW,KAAK;AAAA,MACtF,GAAG;AAAA,MACH,UAAU,KAAK;AAAA,IACjB,CAAC;AAED,WAAO,gBAAgB,KAAK,CAAC,GAAI,QAAQ,MAAM;AAAA,EACjD;AAGA,UAAQ,OAAO,uBAAuB,KAAK,MAAM,iCAAiC,WAAW,KAAK;AAAA,IAChG,GAAG;AAAA,IACH,UAAU,KAAK;AAAA,EACjB,CAAC;AACD,SAAO,qBAAqB,QAAQ,SAAS,EAAE,MAAM,QAAQ,OAAO,IAAI,EAAE,MAAM,OAAO,CAAC;AAC1F;AAEA,eAAe,oBACb,iBACA,MACA,UACA,QACmB;AACnB,QAAM,YAAY,oBAAI,IAAsB;AAE5C,gBAAc,WAAW,gBAAgB,QAAQ,cAAc;AAE/D,MAAI,CAAC,SAAS;AACZ,WAAO,iBAAiB,SAAS;AAEnC,WAAS,IAAI,GAAG,IAAI,SAAS,mBAAmB,KAAK;AACnD,QAAI,UAAU,IAAI,CAAC;AACjB;AACF,QAAI,QAAQ;AACV,YAAM,IAAI,WAAW,+BAA+B;AAGtD,UAAM,YAAY,MAAM,SAAS,MAAM,gBAAgB,cAAc,GAAG,MAAM;AAC9E,kBAAc,WAAW,UAAU,cAAc;AAAA,EACnD;AAEA,SAAO,iBAAiB,SAAS;AACnC;AAEA,SAAS,cACP,WACA,eACM;AACN,MAAI,CAAC;AACH;AAEF,aAAW,QAAQ,eAAe;AAChC,QAAI,CAAC,iBAAiB,KAAK,aAAa;AACtC;AAEF,UAAM,WAAW,UAAU,IAAI,KAAK,WAAW;AAC/C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK,aAAa;AAAA,IAClC,OAAO;AACL,gBAAU,IAAI,KAAK,aAAa,CAAC,KAAK,aAAa,CAAC;AAAA,IACtD;AAAA,EACF;AACF;AAEA,SAAS,iBAAiB,WAA4C;AACpE,MAAI,UAAU,SAAS;AACrB,WAAO,CAAC;AAEV,QAAM,SAAS,CAAC,GAAG,UAAU,QAAQ,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC;AAChE,QAAM,OAAiB,CAAC;AACxB,aAAW,CAAC,EAAE,KAAK,KAAK,QAAQ;AAC9B,SAAK,KAAK,GAAG,KAAK;AAAA,EACpB;AACA,SAAO;AACT;AAEA,SAAS,iBAAiB,OAAiC;AACzD,SAAO,OAAO,UAAU,YAAY,MAAM,SAAS;AACrD;;;AFtKA,eAAsB,SACpB,iBACA,MACA,UAA4B,CAAC,GACd;AACf,QAAM,EAAE,QAAQ,WAAW,QAAQ,OAAO,IAAI;AAC9C,QAAM,WAAW,wBAAwB,eAAe;AACxD,QAAM,cAAc,gBAAgB;AACpC,QAAM,aAAa,EAAE,aAAa,UAAU,iBAAiB,OAAO;AAEpE,QAAM,SAAS,gBAAgB,UAAU,MAAM;AAE/C,UAAQ,OAAO,wCAAwC,WAAW,KAAK;AAAA,IACrE,GAAG;AAAA,IACH,YAAY,gBAAgB,QAAQ,iBAAiB,mBAAmB;AAAA,EAC1E,CAAC;AAED,MAAI,gBAAgB,QAAQ,gBAAgB;AAC1C,QAAI,SAAS,WAAW,cAAc;AACpC,cAAQ,QAAQ,6DAA6D,SAAS,MAAM,KAAK,UAAU;AAC3G,YAAM,IAAI;AAAA,QACR,mEAAmE,SAAS,MAAM;AAAA,QAClF;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,OAAO,mDAAmD,WAAW,KAAK,UAAU;AAC5F,UAAM,SAAS,YAAY,iBAAiB,MAAM;AAAA,MAChD,GAAG,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,MAC1B,GAAG,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,IAC5B,CAAC;AACD,UAAM,uBAAuB,QAAQ,QAAQ,WAAW,QAAQ,QAAQ,UAAU;AAClF;AAAA,EACF;AAEA,QAAM,cAAc,SAAS;AAG7B,QAAM,YAAY,gBAAgB,QAAQ;AAC1C,MAAI,WAAW;AACb,YAAQ,OAAO,iDAAiD,WAAW,KAAK;AAAA,MAC9E,GAAG;AAAA,MACH,YAAY,UAAU;AAAA,IACxB,CAAC;AACD,eAAW,OAAO,WAAW;AAC3B,UAAI,QAAQ,QAAS,OAAM,IAAI,WAAW,SAAS;AAEnD,kBAAY,OAAO,GAAe,CAAC;AAAA,IACrC;AAAA,EACF;AAGA,MAAI,cAAc,GAAG;AACnB,YAAQ,OAAO,uBAAuB,WAAW,yBAAyB,WAAW,KAAK,UAAU;AACpG,aAAS,aAAa,GAAG,aAAa,aAAa,cAAc;AAC/D,UAAI,QAAQ,QAAS,OAAM,IAAI,WAAW,SAAS;AAEnD,YAAM,QAAQ,MAAM,SAAS,MAAM,aAAa,YAAY,MAAM;AAGlE,UAAI,MAAM;AACR,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAEF,UAAI,MAAM,YAAY;AACpB,mBAAW,OAAO,MAAM,YAAY;AAClC,cAAI,QAAQ,QAAS,OAAM,IAAI,WAAW,SAAS;AAEnD,sBAAY,OAAO,GAAe,CAAC;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,eAAe,uBACb,QACA,QACA,WACA,QACA,QACA,YACe;AAEf,QAAM,aAAa,OAAO,KAAK,OAAO,CAAC,EAAE,KAAK,YAAY,CAAC;AAE3D,mBAAiB,QAAQ,YAAY;AACnC,QAAI,Q
AAQ,SAAS;AACnB,cAAQ,OAAO,4DAA4D;AAAA,QACzE,GAAG;AAAA,QACH,SAAS,OAAO;AAAA,MAClB,CAAC;AACD,aAAO,QAAQ,IAAI,WAAW,SAAS,CAAC;AACxC,YAAM,IAAI,WAAW,SAAS;AAAA,IAChC;AAEA,UAAM,MAAM,KAAK;AACjB,QAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,gBAAY,OAAO,GAAG,CAAC;AAAA,EACzB;AACF;;;AGrHA,eAAsB,SACpB,iBACA,MACA,UAA2B,CAAC,GACU;AACtC,QAAM,OAAoC,CAAC;AAC3C,QAAM,cAAc,gBAAgB;AACpC,QAAM,WAAW,gBAAgB;AACjC,QAAM,aAAa,EAAE,aAAa,UAAU,iBAAiB,QAAQ,OAAO;AAC5E,QAAM,eAAiC;AAAA;AAAA,IAErC,WAAW,CAAC,QAAQ;AAClB,WAAK,KAAK,GAAG;AAAA,IACf;AAAA,EACF;AACA,QAAM,EAAE,OAAO,IAAI;AAEnB,UAAQ,OAAO,4CAA4C,WAAW,KAAK,UAAU;AAErF,MAAI,QAAQ;AACV,iBAAa,SAAS,QAAQ;AAEhC,MAAI,QAAQ;AACV,iBAAa,SAAS,QAAQ;AAEhC,MAAI,QAAQ;AACV,iBAAa,SAAS,QAAQ;AAEhC,QAAM,SAAS,iBAAiB,MAAM,YAAY;AAClD,UAAQ,OAAO,oBAAoB,KAAK,MAAM,uBAAuB,WAAW,KAAK;AAAA,IACnF,GAAG;AAAA,IACH,UAAU,KAAK;AAAA,IACf,gBAAgB,QAAQ,UAAU,UAAU;AAAA,EAC9C,CAAC;AACD,SAAO;AACT;;;ACnCA,eAAsB,mBACpB,iBACA,MACA,SAC0B;AAC1B,QAAM,EAAE,QAAQ,2BAA2B,YAAY,OAAO,IAAI;AAClE,QAAM,cAAc,gBAAgB;AACpC,QAAM,WAAW,gBAAgB;AACjC,QAAM,gBAAgB,gBAAgB,QAAQ;AAC9C,QAAM,cAAc,UAAU,qBAAqB;AACnD,QAAM,aAAa,EAAE,aAAa,UAAU,aAAa,WAAW;AAGpE,MAAI,CAAC,eAAe;AAClB,YAAQ,OAAO,+DAA+D,WAAW,KAAK,UAAU;AACxG,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,YAAY;AACf,UAAM,gBAAgB,eAAe;AAGrC,QAAI,eAAe;AACjB,cAAQ,OAAO,2EAA2E,WAAW,KAAK;AAAA,QACxG,GAAG;AAAA,QACH;AAAA,MACF,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AAGA,UAAQ,OAAO,2DAA2D,WAAW,KAAK,UAAU;AACpG,QAAM,SAAS,YAAY,iBAAiB,MAAM;AAAA,IAChD,GAAG,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,IAC1B,GAAG,eAAe,SAAY,EAAE,WAAW,IAAI,CAAC;AAAA,IAChD,GAAG,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC5B,CAAC;AAGD,UAAQ,OAAO,mEAAmE,WAAW,KAAK,UAAU;AAC5G,QAAM,eAAe,MAAM,0BAA0B,MAAM;AAC3D,UAAQ,OAAO,kEAAkE,WAAW,KAAK;AAAA,IAC/F,GAAG;AAAA,IACH,WAAW,aAAa;AAAA,IACxB,YAAY,aAAa;AAAA,EAC3B,CAAC;AAID,QAAM,oBAAoB,wBAAwB,eAAe;AACjE,QAAM,gBAAgB,kBAAkB,mBAAmB;AAE3D,SAAO;AAAA,IACL,cAAc,gBAAgB;AAAA,IAC9B,QAAQ,gBAAgB;AAAA,IACxB,UAAU;AAAA,MACR,GAAG;AAAA,MACH,mBAAmB;AAAA,MACnB,kBAAkB,aAAa;AAAA,MAC/B,QAAQ;AAAA,QACN;AAAA,UACE,aAAa;AAAA,UACb,YAAY;AAAA,UACZ,WAAW;AAAA,UACX,YAAY,aAAa;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,gBAAgB;AAAA,QACd;AAAA,UACE,aAAa;AAAA,UACb,YAAY;AAAA,UACZ,WAAW;AAAA,UACX,YAAY,aAAa;AAAA,UACzB,eAAe,aAAa;AAAA,UAC5B,YAAY,aAAa;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;","names":["errorBody","mapped"]}
|
|
1
|
+
{"version":3,"sources":["../src/errors.ts","../src/util.ts","../src/http.ts","../src/databricks-api.ts","../src/api/executeStatement.ts","../src/api/fetchRow.ts","../src/createRowMapper.ts","../src/api/fetchStream.ts","../src/api/fetchAll.ts","../src/api/mergeExternalLinks.ts"],"sourcesContent":["/** Base error for Databricks SQL operations */\nexport class DatabricksSqlError extends Error {\n readonly code: string\n readonly statementId: string | undefined\n\n constructor(message: string, code?: string, statementId?: string) {\n super(message)\n this.name = 'DatabricksSqlError'\n this.code = code ?? 'UNKNOWN_ERROR'\n this.statementId = statementId\n Error.captureStackTrace?.(this, DatabricksSqlError)\n }\n}\n\n/** Error when statement is cancelled */\nexport class StatementCancelledError extends DatabricksSqlError {\n constructor(statementId: string) {\n super(`Statement ${statementId} was cancelled`, 'CANCELLED', statementId)\n this.name = 'StatementCancelledError'\n }\n}\n\n/** Error when operation is aborted via AbortSignal */\nexport class AbortError extends DatabricksSqlError {\n constructor(message: string = 'Operation was aborted') {\n super(message, 'ABORTED')\n this.name = 'AbortError'\n }\n}\n\n/** HTTP error from API calls */\nexport class HttpError extends DatabricksSqlError {\n readonly status: number\n readonly statusText: string\n\n constructor(status: number, statusText: string, message?: string) {\n super(message ?? `HTTP ${status}: ${statusText}`, `HTTP_${status}`)\n this.name = 'HttpError'\n this.status = status\n this.statusText = statusText\n }\n}\n\n/** Authentication error (401) */\nexport class AuthenticationError extends HttpError {\n constructor() {\n super(401, 'Unauthorized', 'Authentication failed. Check your token.')\n this.name = 'AuthenticationError'\n }\n}\n\n/** Rate limit error (429) */\nexport class RateLimitError extends HttpError {\n readonly retryAfter: number | undefined\n\n constructor(retryAfter?: number) {\n super(429, 'Too Many Requests', 'Rate limit exceeded')\n this.name = 'RateLimitError'\n this.retryAfter = retryAfter\n }\n}\n","import { Readable } from 'node:stream'\nimport { pipeline } from 'node:stream/promises'\nimport type { ReadableStream as WebReadableStream } from 'node:stream/web'\nimport type { StatementResult, StatementManifest } from './types.js'\nimport { AbortError, DatabricksSqlError } from './errors.js'\n\n/**\n * Extract warehouse_id from httpPath\n * @example \"/sql/1.0/warehouses/abc123def456\" -> \"abc123def456\"\n */\nexport function extractWarehouseId(httpPath: string): string {\n const match = httpPath.match(/\\/sql\\/\\d+\\.\\d+\\/warehouses\\/([a-zA-Z0-9]+)/)\n if (!match?.[1])\n throw new Error(`Cannot extract warehouse_id from httpPath: ${httpPath}`)\n return match[1]\n}\n\n/**\n * Throw AbortError if signal is aborted\n */\nexport function throwIfAborted(signal: AbortSignal | undefined, context: string): void {\n if (signal?.aborted)\n throw new AbortError(`[${context}] Aborted`)\n}\n\n/**\n * Delay for specified milliseconds with AbortSignal support\n */\nexport async function delay(ms: number, signal?: AbortSignal): Promise<void> {\n return new Promise((resolve, reject) => {\n if (signal?.aborted)\n return reject(new AbortError('Aborted before delay'))\n\n let settled = false\n\n const onAbort = () => {\n if (settled) return\n settled = true\n clearTimeout(timer)\n reject(new AbortError('Aborted during delay'))\n }\n\n const timer = setTimeout(() => {\n if (settled) return\n settled = true\n 
signal?.removeEventListener('abort', onAbort)\n resolve()\n }, ms)\n\n signal?.addEventListener('abort', onAbort, { once: true })\n })\n}\n\n/**\n * Build full URL from host and path\n */\nexport function buildUrl(host: string, path: string): string {\n const base = host.startsWith('https://') ? host : `https://${host}`\n return new URL(path, base).href\n}\n\n/**\n * Validate statement result is in SUCCEEDED state with manifest.\n * Returns the manifest for convenience.\n * @throws {DatabricksSqlError} If state is not SUCCEEDED or manifest is missing\n */\nexport function validateSucceededResult(\n statementResult: StatementResult\n): StatementManifest {\n if (statementResult.status.state !== 'SUCCEEDED')\n throw new DatabricksSqlError(\n `Cannot fetch from non-succeeded statement: ${statementResult.status.state}`,\n 'INVALID_STATE',\n statementResult.statement_id\n )\n\n if (!statementResult.manifest)\n throw new DatabricksSqlError(\n 'Statement result has no manifest',\n 'MISSING_MANIFEST',\n statementResult.statement_id\n )\n\n return statementResult.manifest\n}\n\nfunction isWebReadableStream(body: unknown): body is WebReadableStream {\n return typeof (body as WebReadableStream).getReader === 'function'\n}\n\nexport async function pipeUrlToOutput(\n url: string,\n output: NodeJS.WritableStream,\n signal?: AbortSignal\n): Promise<void> {\n // Uses Node 20+ global fetch with Web streams.\n if (signal?.aborted)\n throw new AbortError('Aborted while streaming')\n\n const response = await fetch(url, signal ? { signal } : undefined)\n if (!response.ok) {\n throw new Error(\n `Failed to fetch external link: ${response.status} ${response.statusText}`\n )\n }\n\n if (!response.body)\n return void output.end()\n\n const body = response.body\n const input = isWebReadableStream(body)\n ? Readable.fromWeb(body)\n : (body as NodeJS.ReadableStream)\n\n await pipeline(input, output)\n}\n","import type { AuthInfo } from './types.js'\nimport {\n HttpError,\n AuthenticationError,\n RateLimitError,\n AbortError,\n} from './errors.js'\nimport { buildUrl, delay } from './util.js'\n\nconst MAX_RETRIES = 3\nconst INITIAL_RETRY_DELAY_MS = 1000\n\ntype HttpMethod = 'GET' | 'POST' | 'DELETE'\n\ntype HttpRequestOptions = {\n method: HttpMethod\n path: string\n body?: unknown\n signal?: AbortSignal\n}\n\n/**\n * HTTP request wrapper with retry and error handling\n */\nexport async function httpRequest<T>(\n auth: AuthInfo,\n options: HttpRequestOptions\n): Promise<T> {\n const { method, path, body, signal } = options\n const url = buildUrl(auth.host, path)\n\n let lastError: Error | undefined\n let retryDelay = INITIAL_RETRY_DELAY_MS\n\n for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {\n if (signal?.aborted)\n throw new AbortError()\n\n try {\n // Build a minimal fetch init, skipping undefined values.\n const fetchInit = Object.fromEntries(\n Object.entries({\n method,\n headers: {\n Authorization: `Bearer ${auth.token}`,\n 'Content-Type': 'application/json',\n Accept: 'application/json',\n },\n body: body ? JSON.stringify(body) : undefined,\n signal,\n }).filter(([, v]) => v !== undefined)\n ) as RequestInit\n\n const response = await fetch(url, fetchInit)\n\n // Success\n if (response.ok)\n return (await response.json()) as T\n\n // Authentication error (no retry)\n if (response.status === 401)\n throw new AuthenticationError()\n\n // Rate limit\n if (response.status === 429) {\n const retryAfterHeader = response.headers.get('Retry-After')\n const retryAfter = retryAfterHeader\n ? 
parseInt(retryAfterHeader, 10)\n : undefined\n const error = new RateLimitError(\n isNaN(retryAfter as number) ? undefined : retryAfter\n )\n\n if (error.retryAfter && attempt < MAX_RETRIES) {\n await delay(error.retryAfter * 1000, signal)\n continue\n }\n\n throw error\n }\n\n // Server error (can retry)\n if (response.status >= 500) {\n const errorBody = await response.text().catch(() => '')\n lastError = new HttpError(response.status, response.statusText, errorBody)\n\n if (attempt < MAX_RETRIES) {\n // Exponential backoff for transient server errors.\n await delay(retryDelay, signal)\n retryDelay *= 2\n continue\n }\n }\n\n // Other client errors\n const errorBody = await response.text().catch(() => '')\n\n throw new HttpError(response.status, response.statusText, errorBody)\n\n } catch (err) {\n // Re-throw known errors\n if (\n err instanceof AbortError ||\n err instanceof AuthenticationError ||\n err instanceof HttpError\n )\n throw err\n\n // Network error\n if (err instanceof TypeError && err.message.includes('fetch')) {\n lastError = err\n if (attempt < MAX_RETRIES) {\n // Network errors are retried with backoff.\n await delay(retryDelay, signal)\n retryDelay *= 2\n continue\n }\n }\n\n throw err\n }\n }\n\n throw lastError ?? new Error('Request failed after retries')\n}\n","import type {\n AuthInfo,\n ExecuteStatementRequest,\n StatementResult,\n GetChunkResponse,\n QueryInfo,\n} from './types.js'\nimport { httpRequest } from './http.js'\n\n// Base path for Databricks SQL Statement Execution API.\nconst BASE_PATH = '/api/2.0/sql/statements'\n// Base path for Query History API.\nconst HISTORY_BASE_PATH = '/api/2.0/sql/history/queries'\n\n/**\n * Execute SQL statement\n * POST /api/2.0/sql/statements\n */\nexport async function postStatement(\n auth: AuthInfo,\n request: ExecuteStatementRequest,\n signal?: AbortSignal\n): Promise<StatementResult> {\n return httpRequest<StatementResult>(auth, {\n method: 'POST',\n path: BASE_PATH,\n body: request,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Get statement status and result\n * GET /api/2.0/sql/statements/{statement_id}\n */\nexport async function getStatement(\n auth: AuthInfo,\n statementId: string,\n signal?: AbortSignal\n): Promise<StatementResult> {\n return httpRequest<StatementResult>(auth, {\n method: 'GET',\n path: `${BASE_PATH}/${statementId}`,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Cancel statement execution\n * POST /api/2.0/sql/statements/{statement_id}/cancel\n */\nexport async function cancelStatement(\n auth: AuthInfo,\n statementId: string,\n signal?: AbortSignal\n): Promise<void> {\n await httpRequest<unknown>(auth, {\n method: 'POST',\n path: `${BASE_PATH}/${statementId}/cancel`,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Get result chunk by index\n * GET /api/2.0/sql/statements/{statement_id}/result/chunks/{chunk_index}\n */\nexport async function getChunk(\n auth: AuthInfo,\n statementId: string,\n chunkIndex: number,\n signal?: AbortSignal\n): Promise<GetChunkResponse> {\n return httpRequest<GetChunkResponse>(auth, {\n method: 'GET',\n path: `${BASE_PATH}/${statementId}/result/chunks/${chunkIndex}`,\n ...(signal ? 
{ signal } : {}),\n })\n}\n\n/**\n * Get query metrics from Query History API\n * GET /api/2.0/sql/history/queries/{query_id}?include_metrics=true\n */\nexport async function getQueryMetrics(\n auth: AuthInfo,\n queryId: string,\n signal?: AbortSignal\n): Promise<QueryInfo> {\n return httpRequest<QueryInfo>(auth, {\n method: 'GET',\n path: `${HISTORY_BASE_PATH}/${queryId}?include_metrics=true`,\n ...(signal ? { signal } : {}),\n })\n}\n","import type {\n AuthInfo,\n ExecuteStatementOptions,\n ExecuteStatementRequest,\n StatementResult,\n StatementState,\n QueryMetrics,\n} from '../types.js'\nimport { postStatement, getStatement, cancelStatement, getQueryMetrics } from '../databricks-api.js'\nimport { extractWarehouseId, throwIfAborted, delay } from '../util.js'\nimport {\n DatabricksSqlError,\n StatementCancelledError,\n AbortError,\n} from '../errors.js'\n\nconst TERMINAL_STATES = new Set<StatementState>([\n 'SUCCEEDED',\n 'FAILED',\n 'CANCELED',\n 'CLOSED',\n])\nconst POLL_INTERVAL_MS = 5000\n\nasync function fetchMetrics(\n auth: AuthInfo,\n statementId: string,\n signal?: AbortSignal\n): Promise<QueryMetrics | undefined> {\n const queryInfo = await getQueryMetrics(auth, statementId, signal)\n return queryInfo.metrics\n}\n\n/**\n * Execute SQL statement and poll until completion\n */\nexport async function executeStatement(\n query: string,\n auth: AuthInfo,\n options: ExecuteStatementOptions = {}\n): Promise<StatementResult> {\n const warehouseId = options.warehouse_id ?? extractWarehouseId(auth.httpPath)\n const { signal, onProgress, enableMetrics, logger } = options\n const waitTimeout = options.wait_timeout ?? (onProgress ? '0s' : '50s')\n let cancelIssued = false\n\n // Check if already aborted\n throwIfAborted(signal, 'executeStatement')\n\n // Helper to call onProgress with optional metrics\n const emitProgress = onProgress\n ? async () => result ? onProgress(\n result,\n enableMetrics ? await fetchMetrics(auth, result.statement_id, signal).catch(e => {\n logger?.error?.(`executeStatement Failed to fetch query metrics for statement ${result?.statement_id}: ${String(e)}`, { statementId: result?.statement_id })\n return undefined\n }) : undefined\n ) : undefined\n : undefined\n\n // 1. Build request (filter out undefined values)\n const request = Object.fromEntries(\n Object.entries({\n warehouse_id: warehouseId,\n statement: query,\n byte_limit: options.byte_limit,\n disposition: options.disposition,\n format: options.format,\n on_wait_timeout: options.on_wait_timeout ?? 'CONTINUE',\n wait_timeout: waitTimeout,\n row_limit: options.row_limit,\n catalog: options.catalog,\n schema: options.schema,\n parameters: options.parameters,\n }).filter(([, v]) => v !== undefined)\n ) as ExecuteStatementRequest\n\n logger?.info?.(`executeStatement Executing statement on warehouse ${warehouseId}...`)\n\n // 2. Submit statement execution request\n let result = await postStatement(auth, request, signal)\n const cancelStatementSafely = async () => {\n if (cancelIssued) return\n logger?.info?.('executeStatement Abort signal received during executeStatement.')\n cancelIssued = true\n await cancelStatement(auth, result.statement_id).catch((err) => {\n logger?.error?.('executeStatement Failed to cancel statement after abort.', err)\n })\n }\n\n if (signal?.aborted) {\n await cancelStatementSafely()\n throw new AbortError('Aborted during polling')\n }\n\n const onAbort = () => cancelStatementSafely().catch(() => { })\n\n try {\n signal?.addEventListener('abort', onAbort, { once: true })\n\n // 3. 
Poll until terminal state\n while (!TERMINAL_STATES.has(result.status.state)) {\n logger?.info?.(`executeStatement Statement ${result.statement_id} in state ${result.status.state}; polling for status...`)\n await delay(POLL_INTERVAL_MS, signal)\n result = await getStatement(auth, result.statement_id, signal)\n await emitProgress?.()\n }\n } catch (err) {\n if (err instanceof AbortError || signal?.aborted) {\n logger?.info?.('executeStatement Abort detected in executeStatement polling loop.')\n await cancelStatementSafely()\n throw new AbortError('Aborted during polling')\n }\n logger?.error?.(`executeStatement Error during executeStatement polling: ${String(err)}`)\n throw err\n } finally {\n logger?.info?.(`executeStatement Statement ${result.statement_id} reached final state: ${result.status.state}`)\n signal?.removeEventListener('abort', onAbort)\n }\n\n // 4. Final progress callback\n await emitProgress?.()\n\n // 5. Handle terminal states\n if (result.status.state === 'SUCCEEDED')\n return result\n\n if (result.status.state === 'CANCELED')\n throw new StatementCancelledError(result.statement_id)\n\n // FAILED or CLOSED\n throw new DatabricksSqlError(\n result.status.error?.message ?? 'Statement execution failed',\n result.status.error?.error_code,\n result.statement_id\n )\n}\n","import type { Readable } from 'node:stream'\nimport type {\n AuthInfo,\n FetchRowsOptions,\n RowArray,\n RowObject,\n StatementResult,\n} from '../types.js'\n\nimport { parser } from 'stream-json'\nimport { streamArray } from 'stream-json/streamers/StreamArray'\n\nimport { getChunk } from '../databricks-api.js'\nimport { createRowMapper } from '../createRowMapper.js'\nimport { AbortError, DatabricksSqlError } from '../errors.js'\nimport { validateSucceededResult } from '../util.js'\nimport { fetchStream } from './fetchStream.js'\n\n/**\n * Process each row from statement result with a callback.\n * Supports INLINE results and JSON_ARRAY external links.\n */\nexport async function fetchRow(\n statementResult: StatementResult,\n auth: AuthInfo,\n options: FetchRowsOptions = {}\n): Promise<void> {\n const { signal, onEachRow, format, logger } = options\n const manifest = validateSucceededResult(statementResult)\n const statementId = statementResult.statement_id\n const logContext = { statementId, manifest, requestedFormat: format }\n // Map JSON_ARRAY rows to JSON_OBJECT when requested.\n const mapRow = createRowMapper(manifest, format, {\n ...options.encodeBigInt ? { encodeBigInt: options.encodeBigInt } : {},\n ...options.encodeTimestamp ? { encodeTimestamp: options.encodeTimestamp } : {},\n })\n\n logger?.info?.(`fetchRow fetching rows for statement ${statementId}.`, {\n ...logContext,\n resultType: statementResult.result?.external_links ? 'EXTERNAL_LINKS' : 'INLINE',\n })\n\n if (statementResult.result?.external_links) {\n if (manifest.format !== 'JSON_ARRAY') {\n logger?.error?.(`fetchRow only supports JSON_ARRAY for external_links; got ${manifest.format}.`, logContext)\n throw new DatabricksSqlError(\n `fetchRow only supports JSON_ARRAY for external_links. Received: ${manifest.format}`,\n 'UNSUPPORTED_FORMAT',\n statementId\n )\n }\n\n logger?.info?.(`fetchRow streaming external links for statement ${statementId}.`, logContext)\n const stream = fetchStream(statementResult, auth, {\n ...signal ? { signal } : {},\n ...logger ? 
Embedded source: fetchRow

```
import type { Readable } from 'node:stream'
import type {
  AuthInfo,
  FetchRowsOptions,
  RowArray,
  RowObject,
  StatementResult,
} from '../types.js'

import { parser } from 'stream-json'
import { streamArray } from 'stream-json/streamers/StreamArray'

import { getChunk } from '../databricks-api.js'
import { createRowMapper } from '../createRowMapper.js'
import { AbortError, DatabricksSqlError } from '../errors.js'
import { validateSucceededResult } from '../util.js'
import { fetchStream } from './fetchStream.js'

/**
 * Process each row from statement result with a callback.
 * Supports INLINE results and JSON_ARRAY external links.
 */
export async function fetchRow(
  statementResult: StatementResult,
  auth: AuthInfo,
  options: FetchRowsOptions = {}
): Promise<void> {
  const { signal, onEachRow, format, logger } = options
  const manifest = validateSucceededResult(statementResult)
  const statementId = statementResult.statement_id
  const logContext = { statementId, manifest, requestedFormat: format }
  // Map JSON_ARRAY rows to JSON_OBJECT when requested.
  const mapRow = createRowMapper(manifest, format, {
    ...options.encodeBigInt ? { encodeBigInt: options.encodeBigInt } : {},
    ...options.encodeTimestamp ? { encodeTimestamp: options.encodeTimestamp } : {},
  })

  logger?.info?.(`fetchRow fetching rows for statement ${statementId}.`, {
    ...logContext,
    resultType: statementResult.result?.external_links ? 'EXTERNAL_LINKS' : 'INLINE',
  })

  if (statementResult.result?.external_links) {
    if (manifest.format !== 'JSON_ARRAY') {
      logger?.error?.(`fetchRow only supports JSON_ARRAY for external_links; got ${manifest.format}.`, logContext)
      throw new DatabricksSqlError(
        `fetchRow only supports JSON_ARRAY for external_links. Received: ${manifest.format}`,
        'UNSUPPORTED_FORMAT',
        statementId
      )
    }

    logger?.info?.(`fetchRow streaming external links for statement ${statementId}.`, logContext)
    const stream = fetchStream(statementResult, auth, {
      ...signal ? { signal } : {},
      ...logger ? { logger } : {},
    })
    await consumeJsonArrayStream(stream, mapRow, onEachRow, signal, logger, logContext)
    return
  }

  const totalChunks = manifest.total_chunk_count

  // Process first chunk (inline data_array)
  const dataArray = statementResult.result?.data_array
  if (dataArray) {
    logger?.info?.(`fetchRow processing inline rows for statement ${statementId}.`, {
      ...logContext,
      inlineRows: dataArray.length,
    })
    for (const row of dataArray) {
      if (signal?.aborted) throw new AbortError('Aborted')
      // Convert row to requested shape before callback.
      onEachRow?.(mapRow(row as RowArray))
    }
  }

  // Process additional chunks if any
  if (totalChunks > 1) {
    logger?.info?.(`fetchRow processing ${totalChunks} chunks for statement ${statementId}.`, logContext)
    for (let chunkIndex = 1; chunkIndex < totalChunks; chunkIndex++) {
      if (signal?.aborted) throw new AbortError('Aborted')

      const chunk = await getChunk(auth, statementId, chunkIndex, signal)

      // Additional chunks should also be data_array (INLINE)
      if (chunk.external_links)
        throw new DatabricksSqlError(
          'fetchRow only supports INLINE results. Chunk contains external_links.',
          'UNSUPPORTED_FORMAT',
          statementId
        )

      if (chunk.data_array) {
        for (const row of chunk.data_array) {
          if (signal?.aborted) throw new AbortError('Aborted')
          // Apply the same mapping for each chunked row.
          onEachRow?.(mapRow(row as RowArray))
        }
      }
    }
  }
}

async function consumeJsonArrayStream(
  stream: Readable,
  mapRow: (row: RowArray) => RowArray | RowObject,
  onEachRow: ((row: RowArray | RowObject) => void) | undefined,
  signal: AbortSignal | undefined,
  logger: FetchRowsOptions['logger'],
  logContext: Record<string, unknown>
): Promise<void> {
  // Stream JSON_ARRAY as individual rows to avoid buffering whole payloads.
  const jsonStream = stream.pipe(parser()).pipe(streamArray())

  for await (const item of jsonStream) {
    if (signal?.aborted) {
      logger?.info?.('fetchRow abort detected while streaming JSON_ARRAY rows.', {
        ...logContext,
        aborted: signal.aborted,
      })
      stream.destroy(new AbortError('Aborted'))
      throw new AbortError('Aborted')
    }

    const row = item.value
    if (!Array.isArray(row)) {
      throw new DatabricksSqlError(
        'Expected JSON_ARRAY rows to be arrays',
        'INVALID_FORMAT'
      )
    }

    onEachRow?.(mapRow(row))
  }
}
```
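A short sketch of the row-callback API above, continuing the `result` and `auth` values from the previous example; memory stays flat because rows are delivered one at a time instead of being buffered.

```
// Stream rows without materializing the whole result set.
await fetchRow(result, auth, {
  format: 'JSON_OBJECT', // map JSON_ARRAY rows onto schema column names
  onEachRow: (row) => console.log(row),
})
```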
Embedded source: createRowMapper

```
import { DatabricksSqlError } from './errors.js'
import type {
  ColumnInfo,
  FetchRowsOptions,
  RowArray,
  RowObject,
  RowMapperOptions,
  StatementManifest,
} from './types.js'

type RowMapper = (row: RowArray) => RowArray | RowObject

type TypeDescriptor = {
  typeName: string
  typeText: string
  precision?: number
  scale?: number
  fields?: StructField[]
  elementType?: TypeDescriptor
  keyType?: TypeDescriptor
  valueType?: TypeDescriptor
}

type StructField = {
  name: string
  type: TypeDescriptor
}

// Type buckets used for value conversion decisions.
const INTEGER_TYPES = new Set(['TINYINT', 'SMALLINT', 'INT'])
const BIGINT_TYPES = new Set(['BIGINT', 'LONG'])
const FLOAT_TYPES = new Set(['FLOAT', 'DOUBLE'])
const BOOLEAN_TYPES = new Set(['BOOLEAN'])
const TIMESTAMP_TYPES = new Set(['TIMESTAMP', 'TIMESTAMP_NTZ', 'TIMESTAMP_LTZ'])
const STRING_TYPES = new Set([
  'STRING',
  'DATE',
  'TIME',
])

/**
 * Create a row mapper that converts JSON_ARRAY rows into JSON_OBJECTs.
 * Datetime-like fields are preserved as strings to avoid locale/zone surprises.
 * DECIMAL values are converted to numbers to match the Databricks SDK behavior.
 */
export function createRowMapper(
  manifest: StatementManifest,
  format: FetchRowsOptions['format'],
  options: RowMapperOptions = {}
): RowMapper {
  if (format !== 'JSON_OBJECT')
    return (row) => row

  // Precompute per-column converters for fast row mapping.
  const columnConverters = manifest.schema.columns.map((column: ColumnInfo) => ({
    name: column.name,
    convert: createColumnConverter(column, options),
  }))

  return (row) => {
    const mapped: RowObject = {}
    for (let index = 0; index < columnConverters.length; index++) {
      const converter = columnConverters[index]
      if (!converter)
        continue

      const { name, convert } = converter
      if (name)
        mapped[name] = convert(row[index])
    }
    return mapped
  }
}

function createColumnConverter(
  column: ColumnInfo,
  options: RowMapperOptions
): (value: unknown) => unknown {
  const descriptor = parseColumnType(column)
  return (value) => convertValue(descriptor, value, options)
}

function parseColumnType(column: ColumnInfo): TypeDescriptor {
  if (column.type_name === 'STRUCT' || column.type_name === 'ARRAY' || column.type_name === 'MAP')
    return parseTypeDescriptor(column.type_text)

  if (column.type_name === 'DECIMAL')
    // Prefer precision/scale provided by the API when available.
    return createDecimalDescriptor({
      typeName: column.type_name,
      typeText: column.type_text,
    }, column.type_precision, column.type_scale)

  return {
    typeName: column.type_name,
    typeText: column.type_text,
  }
}

function parseTypeDescriptor(typeText: string): TypeDescriptor {
  const trimmed = typeText.trim()
  const typeName = getTypeName(trimmed)

  if (typeName === 'STRUCT')
    // STRUCT fields are parsed recursively from type_text.
    return {
      typeName,
      typeText: trimmed,
      fields: parseStructFields(trimmed),
    }

  if (typeName === 'ARRAY') {
    const elementTypeText = parseSingleTypeArgument(trimmed)
    const descriptor: TypeDescriptor = {
      typeName,
      typeText: trimmed,
    }
    if (elementTypeText)
      descriptor.elementType = parseTypeDescriptor(elementTypeText)
    return descriptor
  }

  if (typeName === 'MAP') {
    const [keyTypeText, valueTypeText] = parseTypeArguments(trimmed, 2)
    const descriptor: TypeDescriptor = {
      typeName,
      typeText: trimmed,
    }
    if (keyTypeText)
      descriptor.keyType = parseTypeDescriptor(keyTypeText)
    if (valueTypeText)
      descriptor.valueType = parseTypeDescriptor(valueTypeText)
    return descriptor
  }

  if (typeName === 'DECIMAL') {
    // DECIMAL(precision, scale) needs explicit parsing for integer conversion.
    const { precision, scale } = parseDecimalInfo(trimmed)
    return createDecimalDescriptor({ typeName, typeText: trimmed }, precision, scale)
  }

  return {
    typeName,
    typeText: trimmed,
  }
}

function getTypeName(typeText: string): string {
  return typeText.match(/^[A-Z_]+/)?.[0] ?? typeText
}

function parseDecimalInfo(typeText: string): { precision?: number; scale?: number } {
  const match = typeText.match(/DECIMAL\((\d+),\s*(\d+)\)/)
  if (!match)
    return {}

  return {
    precision: Number(match[1]),
    scale: Number(match[2]),
  }
}

function createDecimalDescriptor(
  base: Omit<TypeDescriptor, 'precision' | 'scale'>,
  precision?: number,
  scale?: number
): TypeDescriptor {
  const descriptor: TypeDescriptor = { ...base }
  if (precision !== undefined)
    descriptor.precision = precision
  if (scale !== undefined)
    descriptor.scale = scale
  return descriptor
}

function parseStructFields(typeText: string): StructField[] {
  const start = typeText.indexOf('<')
  const end = typeText.lastIndexOf('>')
  if (start === -1 || end === -1 || end <= start)
    return []

  const inner = typeText.slice(start + 1, end)
  // Split by commas only at the top level of nested type definitions.
  const parts = splitTopLevel(inner)
  const fields: StructField[] = []

  for (const part of parts) {
    const separatorIndex = part.indexOf(':')
    if (separatorIndex === -1)
      continue

    const name = part.slice(0, separatorIndex).trim()
    let fieldTypeText = part.slice(separatorIndex + 1).trim()
    fieldTypeText = stripNotNull(fieldTypeText)

    if (!name)
      continue

    fields.push({
      name,
      type: parseTypeDescriptor(fieldTypeText),
    })
  }

  return fields
}

function parseSingleTypeArgument(typeText: string): string | null {
  const [arg] = parseTypeArguments(typeText, 1)
  return arg ?? null
}

function parseTypeArguments(typeText: string, expectedCount: number): Array<string | undefined> {
  const start = typeText.indexOf('<')
  const end = typeText.lastIndexOf('>')
  if (start === -1 || end === -1 || end <= start)
    return []

  const inner = typeText.slice(start + 1, end)
  const parts = splitTopLevel(inner)
  if (parts.length < expectedCount)
    return parts

  return parts.slice(0, expectedCount).map((part) => stripNotNull(part.trim()))
}

function splitTopLevel(value: string): string[] {
  const result: string[] = []
  let current = ''
  let angleDepth = 0
  let parenDepth = 0

  for (const char of value) {
    if (char === '<') angleDepth++
    if (char === '>') angleDepth--
    if (char === '(') parenDepth++
    if (char === ')') parenDepth--

    if (char === ',' && angleDepth === 0 && parenDepth === 0) {
      result.push(current.trim())
      current = ''
      continue
    }

    current += char
  }

  if (current.trim().length > 0)
    result.push(current.trim())

  return result
}

function stripNotNull(typeText: string): string {
  let trimmed = typeText.trim()
  while (trimmed.endsWith('NOT NULL'))
    trimmed = trimmed.slice(0, -'NOT NULL'.length).trim()
  return trimmed
}

function convertValue(
  descriptor: TypeDescriptor,
  value: unknown,
  options: RowMapperOptions
): unknown {
  if (value === null || value === undefined)
    return value

  if (descriptor.typeName === 'STRUCT' && descriptor.fields)
    // STRUCT values are JSON strings in JSON_ARRAY format.
    return convertStructValue(descriptor.fields, value, options)

  if (descriptor.typeName === 'ARRAY' && descriptor.elementType)
    return convertArrayValue(descriptor.elementType, value, options)

  if (descriptor.typeName === 'MAP' && descriptor.keyType && descriptor.valueType)
    return convertMapValue(descriptor.keyType, descriptor.valueType, value, options)

  if (descriptor.typeName === 'DECIMAL')
    return convertNumber(value)

  if (INTEGER_TYPES.has(descriptor.typeName))
    return convertNumber(value)

  if (BIGINT_TYPES.has(descriptor.typeName))
    return convertInteger(value, options.encodeBigInt)

  if (FLOAT_TYPES.has(descriptor.typeName))
    return convertNumber(value)

  if (BOOLEAN_TYPES.has(descriptor.typeName))
    return convertBoolean(value)

  if (TIMESTAMP_TYPES.has(descriptor.typeName))
    return convertTimestamp(value, options.encodeTimestamp)

  if (STRING_TYPES.has(descriptor.typeName))
    return value

  return value
}

function convertStructValue(
  fields: StructField[],
  value: unknown,
  options: RowMapperOptions
): unknown {
  const raw = parseStructValue(value)
  if (!raw || typeof raw !== 'object' || Array.isArray(raw))
    return value

  // Apply nested field conversions based on the parsed STRUCT schema.
  const mapped: RowObject = {}
  for (const field of fields)
    mapped[field.name] = convertValue(field.type, (raw as RowObject)[field.name], options)

  return mapped
}

function convertArrayValue(
  elementType: TypeDescriptor,
  value: unknown,
  options: RowMapperOptions
): unknown {
  const raw = parseJsonValue(value)
  if (!Array.isArray(raw))
    return value

  return raw.map((entry) => convertValue(elementType, entry, options))
}

function convertMapValue(
  keyType: TypeDescriptor,
  valueType: TypeDescriptor,
  value: unknown,
  options: RowMapperOptions
): unknown {
  const raw = parseJsonValue(value)
  if (!raw || typeof raw !== 'object')
    return value

  if (Array.isArray(raw)) {
    const mapped: RowObject = {}
    for (const entry of raw) {
      if (!Array.isArray(entry) || entry.length < 2)
        continue
      const convertedKey = convertValue(keyType, entry[0], options)
      mapped[String(convertedKey)] = convertValue(valueType, entry[1], options)
    }
    return mapped
  }

  const mapped: RowObject = {}
  for (const [key, entryValue] of Object.entries(raw)) {
    const convertedKey = convertValue(keyType, key, options)
    mapped[String(convertedKey)] = convertValue(valueType, entryValue, options)
  }

  return mapped
}

function parseStructValue(value: unknown): RowObject | null {
  const parsed = parseJsonValue(value)
  if (parsed && typeof parsed === 'object' && !Array.isArray(parsed))
    return parsed as RowObject

  return parsed as RowObject | null
}

function parseJsonValue(value: unknown): unknown {
  if (typeof value === 'string') {
    try {
      return JSON.parse(value)
    } catch {
      throw new DatabricksSqlError('Failed to parse JSON value', 'INVALID_JSON')
    }
  }

  return value
}

function convertNumber(value: unknown): unknown {
  if (typeof value === 'number')
    return value

  if (typeof value === 'string') {
    const parsed = Number(value)
    return Number.isNaN(parsed) ? value : parsed
  }

  return value
}

function convertInteger(value: unknown, encodeBigInt?: (value: bigint) => unknown): unknown {
  if (typeof value === 'bigint')
    return encodeBigInt ? encodeBigInt(value) : value

  if (typeof value === 'number') {
    if (Number.isInteger(value)) {
      const bigintValue = BigInt(value)
      return encodeBigInt ? encodeBigInt(bigintValue) : bigintValue
    }
    return value
  }

  if (typeof value === 'string') {
    try {
      // Preserve integer semantics for BIGINT/DECIMAL(scale=0) by returning bigint.
      const bigintValue = BigInt(value)
      return encodeBigInt ? encodeBigInt(bigintValue) : bigintValue
    } catch {
      return value
    }
  }

  return value
}

function convertTimestamp(
  value: unknown,
  encodeTimestamp?: (value: string) => unknown
): unknown {
  if (typeof value !== 'string')
    return value

  return encodeTimestamp ? encodeTimestamp(value) : value
}

function convertBoolean(value: unknown): unknown {
  if (typeof value === 'boolean')
    return value

  if (typeof value === 'string') {
    if (value === 'true') return true
    if (value === 'false') return false
  }

  return value
}
```
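A sketch of the `encodeBigInt`/`encodeTimestamp` hooks in isolation. The manifest fragment is hypothetical (real manifests come back from `executeStatement`), and the cast sidesteps fields this excerpt does not show.

```
// Hypothetical manifest fragment, just enough for the mapper.
const manifest = {
  format: 'JSON_ARRAY',
  schema: {
    columns: [
      { name: 'id', type_name: 'BIGINT', type_text: 'BIGINT' },
      { name: 'created_at', type_name: 'TIMESTAMP', type_text: 'TIMESTAMP' },
    ],
  },
} as unknown as StatementManifest

const mapRow = createRowMapper(manifest, 'JSON_OBJECT', {
  encodeBigInt: (v) => v.toString(),   // keep 64-bit values JSON.stringify-safe
  encodeTimestamp: (v) => new Date(v), // opt in to Date objects
})

mapRow(['9007199254740993', '2024-01-01T00:00:00Z'])
// → { id: '9007199254740993', created_at: 2024-01-01T00:00:00.000Z }
```

Without the hooks, BIGINT/LONG columns come back as native `bigint` and TIMESTAMP* columns stay strings, which is the library's default.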
Embedded source: fetchStream

```
import type { MergeFormat } from '@bitofsky/merge-streams'
import type {
  AuthInfo,
  ExternalLinkInfo,
  FetchStreamOptions,
  StatementManifest,
  StatementResult,
} from '../types.js'

import { PassThrough, Readable } from 'node:stream'

import { mergeStreamsFromUrls } from '@bitofsky/merge-streams'

import { getChunk } from '../databricks-api.js'
import { AbortError, DatabricksSqlError } from '../errors.js'
import { pipeUrlToOutput, validateSucceededResult } from '../util.js'

/**
 * Create a readable stream from statement result.
 * Merges all external link chunks into a single binary stream,
 * preserving the original format (JSON_ARRAY, CSV, ARROW_STREAM).
 */
export function fetchStream(
  statementResult: StatementResult,
  auth: AuthInfo,
  options: FetchStreamOptions = {}
): Readable {
  const { signal, forceMerge, logger } = options
  const manifest = validateSucceededResult(statementResult)
  const format = manifest.format as MergeFormat
  const statementId = statementResult.statement_id
  const baseLog = { statementId, manifest, format, forceMerge }

  if (statementResult.result?.data_array) {
    logger?.error?.(
      `fetchStream only supports EXTERNAL_LINKS results for statement ${statementId}.`,
      { ...baseLog, hasDataArray: true }
    )
    throw new DatabricksSqlError(
      'fetchStream only supports EXTERNAL_LINKS results',
      'UNSUPPORTED_FORMAT',
      statementId
    )
  }

  logger?.info?.(`fetchStream creating stream for statement ${statementId}.`, {
    ...baseLog,
    hasExternalLinks: Boolean(statementResult.result?.external_links?.length),
  })

  // Create PassThrough as output (readable by consumer)
  const output = new PassThrough()

  // Handle AbortSignal
  if (signal) {
    const onAbort = () => {
      logger?.info?.(`fetchStream abort signal received while streaming statement ${statementId}.`, baseLog)
      output.destroy(new AbortError('Stream aborted'))
    }
    signal.addEventListener('abort', onAbort, { once: true })
    output.once('close', () => signal.removeEventListener('abort', onAbort))
  }

  // Prevent AbortError from becoming an uncaught exception when no error handler is attached.
  output.on('error', (err) => {
    if (err instanceof AbortError)
      return
    if (output.listenerCount('error') === 1)
      throw err
  })

  // Start async merge process
  // Errors are forwarded to the stream consumer via destroy.
  mergeChunksToStream(statementResult, auth, manifest, format, output, signal, forceMerge, logger)
    .catch((err) => {
      logger?.error?.(`fetchStream error while streaming statement ${statementId}.`, {
        ...baseLog,
        error: err,
      })
      output.destroy(err as Error)
    })

  return output
}

/**
 * Collect all external link URLs and merge them into output stream
 */
async function mergeChunksToStream(
  statementResult: StatementResult,
  auth: AuthInfo,
  manifest: StatementManifest,
  format: MergeFormat,
  output: PassThrough,
  signal?: AbortSignal,
  forceMerge?: boolean,
  logger?: FetchStreamOptions['logger']
): Promise<void> {
  const statementId = statementResult.statement_id
  const baseLog = { statementId, manifest, format, forceMerge }
  logger?.info?.(`fetchStream collecting external links for statement ${statementId}.`, baseLog)
  const urls = await collectExternalUrls(statementResult, auth, manifest, signal)

  // No external links - close the stream
  if (urls.length === 0) {
    logger?.info?.(`fetchStream no external links found for statement ${statementId}.`, baseLog)
    return void output.end()
  }

  // Single URL - pipe directly to output unless forcing merge
  if (urls.length === 1 && !forceMerge) {
    logger?.info?.(`fetchStream piping single external link for statement ${statementId}.`, {
      ...baseLog,
      urlCount: urls.length,
    })
    // Avoid merge-streams overhead for a single URL unless forced.
    return pipeUrlToOutput(urls[0]!, output, signal)
  }

  // Merge all URLs using merge-streams
  logger?.info?.(`fetchStream merging ${urls.length} external links for statement ${statementId}.`, {
    ...baseLog,
    urlCount: urls.length,
  })
  return mergeStreamsFromUrls(format, signal ? { urls, output, signal } : { urls, output })
}

async function collectExternalUrls(
  statementResult: StatementResult,
  auth: AuthInfo,
  manifest: StatementManifest,
  signal?: AbortSignal
): Promise<string[]> {
  const chunkUrls = new Map<number, string[]>()

  addChunkLinks(chunkUrls, statementResult.result?.external_links)

  if (!manifest.total_chunk_count)
    return flattenChunkUrls(chunkUrls)

  for (let i = 0; i < manifest.total_chunk_count; i++) {
    if (chunkUrls.has(i))
      continue
    if (signal?.aborted)
      throw new AbortError('Aborted while collecting URLs')

    // Chunk metadata contains external link URLs when results are chunked.
    const chunkData = await getChunk(auth, statementResult.statement_id, i, signal)
    addChunkLinks(chunkUrls, chunkData.external_links)
  }

  return flattenChunkUrls(chunkUrls)
}

function addChunkLinks(
  chunkUrls: Map<number, string[]>,
  externalLinks?: ExternalLinkInfo[]
): void {
  if (!externalLinks)
    return

  for (const link of externalLinks) {
    if (!isNonEmptyString(link.external_link))
      continue

    const existing = chunkUrls.get(link.chunk_index)
    if (existing) {
      existing.push(link.external_link)
    } else {
      chunkUrls.set(link.chunk_index, [link.external_link])
    }
  }
}

function flattenChunkUrls(chunkUrls: Map<number, string[]>): string[] {
  if (chunkUrls.size === 0)
    return []

  const sorted = [...chunkUrls.entries()].sort(([a], [b]) => a - b)
  const urls: string[] = []
  for (const [, links] of sorted) {
    urls.push(...links)
  }
  return urls
}

function isNonEmptyString(value: unknown): value is string {
  return typeof value === 'string' && value.length > 0
}
```
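A minimal sketch of consuming the merged stream, assuming the statement was executed with an `EXTERNAL_LINKS` disposition; the output filename is arbitrary and the payload is written byte-for-byte in whatever format the warehouse produced.

```
import { createWriteStream } from 'node:fs'
import { pipeline } from 'node:stream/promises'

// Persist the merged chunks as-is (JSON_ARRAY/CSV/ARROW_STREAM passthrough).
const stream = fetchStream(result, auth)
await pipeline(stream, createWriteStream('result.json'))
```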
Embedded source: fetchAll

```
import type {
  AuthInfo,
  FetchAllOptions,
  FetchRowsOptions,
  RowArray,
  RowObject,
  StatementResult,
} from '../types.js'

import { fetchRow } from './fetchRow.js'

/**
 * Fetch all rows from statement result as an array.
 * Only supports INLINE results or JSON_ARRAY external links.
 */
export async function fetchAll(
  statementResult: StatementResult,
  auth: AuthInfo,
  options: FetchAllOptions = {}
): Promise<Array<RowArray | RowObject>> {
  const rows: Array<RowArray | RowObject> = []
  const statementId = statementResult.statement_id
  const manifest = statementResult.manifest
  const logContext = { statementId, manifest, requestedFormat: options.format }
  const fetchOptions: FetchRowsOptions = {
    // Collect rows as they are streamed in.
    onEachRow: (row) => {
      rows.push(row)
    },
  }
  const { logger } = options

  logger?.info?.(`fetchAll fetching all rows for statement ${statementId}.`, logContext)

  if (options.signal)
    fetchOptions.signal = options.signal

  if (options.format)
    fetchOptions.format = options.format

  if (options.logger)
    fetchOptions.logger = options.logger

  if (options.encodeBigInt)
    fetchOptions.encodeBigInt = options.encodeBigInt

  if (options.encodeTimestamp)
    fetchOptions.encodeTimestamp = options.encodeTimestamp

  await fetchRow(statementResult, auth, fetchOptions)
  logger?.info?.(`fetchAll fetched ${rows.length} rows for statement ${statementId}.`, {
    ...logContext,
    rowCount: rows.length,
    resolvedFormat: options.format ?? manifest?.format,
  })
  return rows
}
```
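A usage sketch of the buffering wrapper above, continuing `result` and `auth` from the earlier examples. It is convenient for small result sets; for large ones, `fetchRow` keeps memory flat.

```
const rows = await fetchAll(result, auth, {
  format: 'JSON_OBJECT',
  encodeBigInt: (v) => v.toString(), // keep 64-bit values JSON-safe
})
console.log(`${rows.length} rows`)
```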
Embedded source: mergeExternalLinks

```
import type {
  AuthInfo,
  MergeExternalLinksOptions,
  StatementResult,
} from '../types.js'

import { validateSucceededResult } from '../util.js'
import { fetchStream } from './fetchStream.js'

/**
 * Merge external links from StatementResult into a single stream,
 * upload it via the provided callback, and return updated StatementResult.
 *
 * If the result is not external links (inline data or empty), returns the original as-is.
 */
export async function mergeExternalLinks(
  statementResult: StatementResult,
  auth: AuthInfo,
  options: MergeExternalLinksOptions
): Promise<StatementResult> {
  const { signal, mergeStreamToExternalLink, forceMerge, logger } = options
  const statementId = statementResult.statement_id
  const manifest = statementResult.manifest
  const externalLinks = statementResult.result?.external_links
  const totalChunks = manifest?.total_chunk_count ?? 0
  const logContext = { statementId, manifest, totalChunks, forceMerge }

  // If not external links, return original as-is
  if (!externalLinks) {
    logger?.info?.(`mergeExternalLinks no external links to merge for statement ${statementId}.`, logContext)
    return statementResult
  }

  if (!forceMerge) {
    const isSingleChunk = totalChunks <= 1

    // Skip merging when a single external link already exists unless forced.
    if (isSingleChunk) {
      logger?.info?.(`mergeExternalLinks skipping merge for single external link in statement ${statementId}.`, {
        ...logContext,
        totalChunks,
      })
      return statementResult
    }
  }

  // Get merged stream via fetchStream
  logger?.info?.(`mergeExternalLinks merging external links for statement ${statementId}.`, logContext)
  const stream = fetchStream(statementResult, auth, {
    ...signal ? { signal } : {},
    ...forceMerge !== undefined ? { forceMerge } : {},
    ...logger ? { logger } : {},
  })

  // Upload via callback
  logger?.info?.(`mergeExternalLinks uploading merged external link for statement ${statementId}.`, logContext)
  const uploadResult = await mergeStreamToExternalLink(stream)
  logger?.info?.(`mergeExternalLinks uploaded merged external link for statement ${statementId}.`, {
    ...logContext,
    byteCount: uploadResult.byte_count,
    expiration: uploadResult.expiration,
  })

  // Build updated StatementResult
  // Manifest must exist for external links; validate before constructing new result.
  const validatedManifest = validateSucceededResult(statementResult)
  const totalRowCount = validatedManifest.total_row_count ?? 0

  return {
    statement_id: statementResult.statement_id,
    status: statementResult.status,
    manifest: {
      ...validatedManifest,
      total_chunk_count: 1,
      total_byte_count: uploadResult.byte_count,
      chunks: [
        {
          chunk_index: 0,
          row_offset: 0,
          row_count: totalRowCount,
          byte_count: uploadResult.byte_count,
        },
      ],
    },
    result: {
      external_links: [
        {
          chunk_index: 0,
          row_offset: 0,
          row_count: totalRowCount,
          byte_count: uploadResult.byte_count,
          external_link: uploadResult.externalLink,
          expiration: uploadResult.expiration,
        },
      ],
    },
  }
}
```
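A sketch of the upload callback contract above. The storage side is entirely caller-defined: `uploadToMyStore` is a hypothetical helper standing in for whatever persists the stream and hands back a presigned URL; only the returned field names (`externalLink`, `byte_count`, `expiration`) are confirmed by the code.

```
const merged = await mergeExternalLinks(result, auth, {
  mergeStreamToExternalLink: async (stream) => {
    const { url, size, expiresAt } = await uploadToMyStore(stream) // hypothetical
    return { externalLink: url, byte_count: size, expiration: expiresAt }
  },
})
// merged.result.external_links now holds exactly one link (chunk_index 0),
// and the manifest reports a single chunk covering all rows.
```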
[source map "mappings" field omitted: machine-generated base64 VLQ data relating the dist output to the sources above; "names": ["errorBody", "mapped"]]