@bitofsky/databricks-sql 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +53 -6
- package/dist/index.cjs +97 -38
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +87 -3
- package/dist/index.d.ts +87 -3
- package/dist/index.js +95 -36
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/README.md
CHANGED
@@ -1,5 +1,11 @@
 # @bitofsky/databricks-sql
 
+[](https://www.npmjs.com/package/@bitofsky/databricks-sql)
+[](https://www.npmjs.com/package/@bitofsky/databricks-sql)
+[](https://github.com/bitofsky/databricks-sql/blob/main/LICENSE)
+[](https://nodejs.org)
+[](https://www.typescriptlang.org/)
+
 Databricks SQL client for Node.js that talks directly to the REST API and streams large results efficiently. No SDK lock-in, no warehouse-side streaming bottlenecks.
 
 ## Why This Exists
@@ -14,7 +20,8 @@ The goal is simple: stream big results with stable memory usage and without forc
 
 ## Highlights
 - Direct REST calls to Statement Execution API.
-…
+- Optimized polling with server-side wait (up to 50s) before falling back to client polling.
+- Query metrics support via Query History API (`enableMetrics` option).
 - Efficient external link handling: merge chunks into a single stream.
 - `mergeExternalLinks` supports streaming uploads and returns a new StatementResult with a presigned URL.
 - `fetchRow`/`fetchAll` support `JSON_OBJECT` (schema-based row mapping).
@@ -92,6 +99,35 @@ const merged = await mergeExternalLinks(result, auth, {
 console.log(merged.result?.external_links?.[0].external_link) // Presigned URL to merged CSV
 ```
 
+## Sample (Progress with Metrics)
+Track query progress with execution metrics:
+
+```ts
+import { executeStatement } from '@bitofsky/databricks-sql'
+
+const auth = {
+  token: process.env.DATABRICKS_TOKEN!,
+  host: process.env.DATABRICKS_HOST!,
+  httpPath: process.env.DATABRICKS_HTTP_PATH!,
+}
+
+const result = await executeStatement(
+  'SELECT * FROM samples.tpch.lineitem LIMIT 10000',
+  auth,
+  {
+    enableMetrics: true,
+    onProgress: (status, metrics) => {
+      console.log(`State: ${status.state}`)
+      if (metrics) { // metrics is optional, only present when enableMetrics: true
+        console.log(`  Execution time: ${metrics.execution_time_ms}ms`)
+        console.log(`  Rows produced: ${metrics.rows_produced_count}`)
+        console.log(`  Bytes read: ${metrics.read_bytes}`)
+      }
+    },
+  }
+)
+```
+
 ## Sample (Abort)
 Cancel a long-running query and stop polling/streaming:
 
@@ -141,7 +177,9 @@ function executeStatement(
 ): Promise<StatementResult>
 ```
 - Calls the Databricks Statement Execution API and polls until completion.
-…
+- Server waits up to 50s (`wait_timeout`) before client-side polling begins.
+- Use `options.onProgress` to receive status updates with optional metrics.
+- Set `enableMetrics: true` to fetch query metrics from Query History API on each poll.
 - Throws `DatabricksSqlError` on failure, `StatementCancelledError` on cancel, and `AbortError` on abort.
 
 ### fetchRow(statementResult, auth, options?)
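Taken together, the bullets above define a small error contract for `executeStatement`. A minimal sketch of exercising it, assuming the exports named in this README and env-based auth (illustrative only, not part of the package):

```ts
import {
  executeStatement,
  DatabricksSqlError,
  StatementCancelledError,
  AbortError,
} from '@bitofsky/databricks-sql'

const auth = {
  token: process.env.DATABRICKS_TOKEN!,
  host: process.env.DATABRICKS_HOST!,
  httpPath: process.env.DATABRICKS_HTTP_PATH!,
}

// Abort client-side after 2 minutes of wall-clock time.
const controller = new AbortController()
const timer = setTimeout(() => controller.abort(), 120_000)

try {
  const result = await executeStatement('SELECT 1', auth, { signal: controller.signal })
  console.log(result.status.state) // 'SUCCEEDED'
} catch (err) {
  // Check subclasses before the base class.
  if (err instanceof AbortError) console.error('aborted via our AbortSignal')
  else if (err instanceof StatementCancelledError) console.error('cancelled on the warehouse')
  else if (err instanceof DatabricksSqlError) console.error('failed:', err.message)
  else throw err
} finally {
  clearTimeout(timer)
}
```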
@@ -177,7 +215,9 @@ function fetchStream(
 ```
 - Merges `EXTERNAL_LINKS` into a single binary stream.
 - Preserves the original format (`JSON_ARRAY`, `CSV`, `ARROW_STREAM`).
+- Throws if the result is `INLINE`.
 - Ends as an empty stream when no external links exist.
+- `forceMerge: true` forces merge even when there is only a single external link.
 
 ### mergeExternalLinks(statementResult, auth, options)
 ```ts
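A sketch of draining that stream to disk, assuming a CSV `EXTERNAL_LINKS` result; the `auth` shape matches the README samples:

```ts
import { createWriteStream } from 'node:fs'
import { pipeline } from 'node:stream/promises'
import { executeStatement, fetchStream } from '@bitofsky/databricks-sql'

declare const auth: { token: string; host: string; httpPath: string }

const result = await executeStatement('SELECT * FROM samples.tpch.lineitem', auth, {
  disposition: 'EXTERNAL_LINKS',
  format: 'CSV',
})

// fetchStream returns a Node Readable; with exactly one external link the bytes
// are piped straight through unless forceMerge: true is set.
await pipeline(fetchStream(result, auth), createWriteStream('lineitem.csv'))
```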
@@ -190,22 +230,24 @@ function mergeExternalLinks(
 - Creates a merged stream from `EXTERNAL_LINKS`, uploads it via
   `options.mergeStreamToExternalLink`, then returns a `StatementResult`
   with a single external link.
-- Returns the original result unchanged when input is `INLINE`.
+- Returns the original result unchanged when input is `INLINE` or already a
+  single external link (unless `forceMerge: true`).
 
 ### Options (Summary)
 ```ts
 type ExecuteStatementOptions = {
-  onProgress?: (status: StatementStatus) => void
+  onProgress?: (status: StatementStatus, metrics?: QueryMetrics) => void
+  enableMetrics?: boolean // Fetch metrics from Query History API (default: false)
   signal?: AbortSignal
   disposition?: 'INLINE' | 'EXTERNAL_LINKS'
   format?: 'JSON_ARRAY' | 'ARROW_STREAM' | 'CSV'
-  wait_timeout?: string
+  wait_timeout?: string // Server wait time (default: '50s', max: '50s')
   row_limit?: number
   byte_limit?: number
   catalog?: string
   schema?: string
   parameters?: StatementParameter[]
-  on_wait_timeout?: 'CONTINUE' | 'CANCEL'
+  on_wait_timeout?: 'CONTINUE' | 'CANCEL' // Default: 'CONTINUE'
   warehouse_id?: string
 }
 
@@ -222,10 +264,12 @@ type FetchAllOptions = {
 
 type FetchStreamOptions = {
   signal?: AbortSignal
+  forceMerge?: boolean
 }
 
 type MergeExternalLinksOptions = {
   signal?: AbortSignal
+  forceMerge?: boolean
   mergeStreamToExternalLink: (stream: Readable) => Promise<{
     externalLink: string
     byte_count: number
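The `mergeStreamToExternalLink` callback in the options type above is the upload integration point. A minimal stand-in, assuming the return fields shown in this README plus an `expiration` timestamp (its exact type is assumed here); a real implementation would stream to object storage and return a presigned URL rather than a local path:

```ts
import { createWriteStream } from 'node:fs'
import { pipeline } from 'node:stream/promises'
import type { Readable } from 'node:stream'

async function mergeStreamToExternalLink(stream: Readable) {
  const path = '/tmp/merged-result.csv'
  let byte_count = 0
  // Count bytes while the same stream is being piped to disk.
  stream.on('data', (chunk: Buffer) => { byte_count += chunk.length })
  await pipeline(stream, createWriteStream(path))
  return {
    externalLink: `file://${path}`,
    byte_count,
    expiration: new Date(Date.now() + 3600_000).toISOString(),
  }
}
```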
@@ -237,6 +281,9 @@ type MergeExternalLinksOptions = {
 ## Notes
 - Databricks requires `INLINE` results to use `JSON_ARRAY` format. `INLINE + CSV` is rejected by the API.
 - `EXTERNAL_LINKS` are merged using `@bitofsky/merge-streams`.
+- Query metrics are fetched from `/api/2.0/sql/history/queries/{query_id}?include_metrics=true` when `enableMetrics: true`.
+- Metrics may not be immediately available; `is_final: true` indicates complete metrics.
+- Requires Node.js >= 20 for global `fetch` and Web streams.
 
 ## Development
 ```bash
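A rough sketch of the request behind `enableMetrics`, using the endpoint from the Notes above; the response shape beyond `metrics` and `is_final` is assumed:

```ts
async function getQueryMetrics(host: string, token: string, statementId: string) {
  const res = await fetch(
    `https://${host}/api/2.0/sql/history/queries/${statementId}?include_metrics=true`,
    { headers: { Authorization: `Bearer ${token}` } }
  )
  if (!res.ok) return undefined // the library also treats metric failures as "no metrics"
  const queryInfo = (await res.json()) as { metrics?: { is_final?: boolean } }
  return queryInfo.metrics // may be partial until is_final: true
}
```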
package/dist/index.cjs
CHANGED
@@ -84,6 +84,8 @@ var RateLimitError = class extends HttpError {
 };
 
 // src/util.ts
+var import_node_stream = require("stream");
+var import_promises = require("stream/promises");
 function extractWarehouseId(httpPath) {
   const match = httpPath.match(/\/sql\/\d+\.\d+\/warehouses\/([a-zA-Z0-9]+)/);
   if (!match?.[1])
@@ -133,6 +135,24 @@ function validateSucceededResult(statementResult) {
   );
   return statementResult.manifest;
 }
+function isWebReadableStream(body) {
+  return typeof body.getReader === "function";
+}
+async function pipeUrlToOutput(url, output, signal) {
+  if (signal?.aborted)
+    throw new AbortError("Aborted while streaming");
+  const response = await fetch(url, signal ? { signal } : void 0);
+  if (!response.ok) {
+    throw new Error(
+      `Failed to fetch external link: ${response.status} ${response.statusText}`
+    );
+  }
+  if (!response.body)
+    return void output.end();
+  const body = response.body;
+  const input = isWebReadableStream(body) ? import_node_stream.Readable.fromWeb(body) : body;
+  await (0, import_promises.pipeline)(input, output);
+}
 
 // src/http.ts
 var MAX_RETRIES = 3;
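`pipeUrlToOutput` above is the new single-link fast path used by `fetchStream` (see the later hunk): one presigned URL is fetched and piped straight through, skipping the merge machinery. The same fetch-to-Node-stream bridge in isolation, as a sketch (Node's `Readable.fromWeb` and `stream/promises` `pipeline` are real APIs; the helper name is ours):

```ts
import { Readable, type Writable } from 'node:stream'
import { pipeline } from 'node:stream/promises'
import type { ReadableStream as WebReadableStream } from 'node:stream/web'

async function pipeUrl(url: string, output: Writable, signal?: AbortSignal) {
  const response = await fetch(url, signal ? { signal } : undefined)
  if (!response.ok) throw new Error(`HTTP ${response.status} ${response.statusText}`)
  if (!response.body) return void output.end()
  // fetch() yields a WHATWG stream; wrap it so Node's pipeline can consume it.
  await pipeline(Readable.fromWeb(response.body as WebReadableStream), output)
}
```

The extra `isWebReadableStream` guard in the compiled code additionally tolerates runtimes where `response.body` is already a Node stream.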
@@ -205,6 +225,7 @@ async function httpRequest(auth, options) {
 
 // src/databricks-api.ts
 var BASE_PATH = "/api/2.0/sql/statements";
+var HISTORY_BASE_PATH = "/api/2.0/sql/history/queries";
 async function postStatement(auth, request, signal) {
   return httpRequest(auth, {
     method: "POST",
@@ -234,6 +255,13 @@ async function getChunk(auth, statementId, chunkIndex, signal) {
     ...signal ? { signal } : {}
   });
 }
+async function getQueryMetrics(auth, queryId, signal) {
+  return httpRequest(auth, {
+    method: "GET",
+    path: `${HISTORY_BASE_PATH}/${queryId}?include_metrics=true`,
+    ...signal ? { signal } : {}
+  });
+}
 
 // src/api/executeStatement.ts
 var TERMINAL_STATES = /* @__PURE__ */ new Set([
@@ -242,12 +270,20 @@ var TERMINAL_STATES = /* @__PURE__ */ new Set([
   "CANCELED",
   "CLOSED"
 ]);
-var POLL_INTERVAL_MS = 500;
-var MAX_POLL_INTERVAL_MS = 5e3;
+var POLL_INTERVAL_MS = 5e3;
+async function fetchMetrics(auth, statementId, signal) {
+  try {
+    const queryInfo = await getQueryMetrics(auth, statementId, signal);
+    return queryInfo.metrics;
+  } catch {
+    return void 0;
+  }
+}
 async function executeStatement(query, auth, options = {}) {
   const warehouseId = options.warehouse_id ?? extractWarehouseId(auth.httpPath);
-  const { signal, onProgress } = options;
+  const { signal, onProgress, enableMetrics } = options;
   throwIfAborted(signal, "executeStatement");
+  const emitProgress = onProgress ? async (statementId) => onProgress(result.status, enableMetrics ? await fetchMetrics(auth, statementId, signal) : void 0) : void 0;
   const request = Object.fromEntries(
     Object.entries({
       warehouse_id: warehouseId,
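Two details worth noting in the hunk above: `fetchMetrics` deliberately swallows errors so a flaky Query History lookup never fails the statement, and `emitProgress` closes over `result`, which is only declared with `let` a few lines further down; that is safe because the callback first runs after `postStatement` has assigned it. The capture pattern in isolation (illustrative only, not package code):

```ts
// The arrow function captures the binding, not a value, so declaring the `let`
// afterwards is fine as long as the first call happens after assignment.
const report = () => console.log(state)
let state = 'PENDING'
report() // logs 'PENDING'; `state` is read at call time, not definition time
```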
@@ -255,8 +291,8 @@ async function executeStatement(query, auth, options = {}) {
       byte_limit: options.byte_limit,
       disposition: options.disposition,
       format: options.format,
-      on_wait_timeout: options.on_wait_timeout,
-      wait_timeout: options.wait_timeout,
+      on_wait_timeout: options.on_wait_timeout ?? "CONTINUE",
+      wait_timeout: options.wait_timeout ?? "50s",
       row_limit: options.row_limit,
       catalog: options.catalog,
       schema: options.schema,
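With these defaults the initial POST now blocks server-side for up to 50 seconds before the client loop takes over. Both knobs remain overridable from the caller; a sketch, assuming the Statement Execution API semantics where `CANCEL` cancels the statement once `wait_timeout` elapses:

```ts
import { executeStatement } from '@bitofsky/databricks-sql'

declare const auth: { token: string; host: string; httpPath: string }

// Shorter synchronous wait; cancel instead of falling back to client polling.
const result = await executeStatement('SELECT 1', auth, {
  wait_timeout: '10s',
  on_wait_timeout: 'CANCEL',
})
```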
@@ -264,19 +300,17 @@ async function executeStatement(query, auth, options = {}) {
     }).filter(([, v]) => v !== void 0)
   );
   let result = await postStatement(auth, request, signal);
-  let pollInterval = POLL_INTERVAL_MS;
   while (!TERMINAL_STATES.has(result.status.state)) {
     if (signal?.aborted) {
       await cancelStatement(auth, result.statement_id).catch(() => {
       });
       throw new AbortError("Aborted during polling");
     }
-    onProgress?.(result.status);
-    await delay(pollInterval, signal);
-    pollInterval = Math.min(pollInterval * 1.5, MAX_POLL_INTERVAL_MS);
+    await emitProgress?.(result.statement_id);
+    await delay(POLL_INTERVAL_MS, signal);
     result = await getStatement(auth, result.statement_id, signal);
   }
-  onProgress?.(result.status);
+  await emitProgress?.(result.statement_id);
   if (result.status.state === "SUCCEEDED")
     return result;
   if (result.status.state === "CANCELED")
@@ -580,43 +614,58 @@ function convertBoolean(value) {
 }
 
 // src/api/fetchStream.ts
-var import_node_stream = require("stream");
+var import_node_stream2 = require("stream");
 var import_merge_streams = require("@bitofsky/merge-streams");
 function fetchStream(statementResult, auth, options = {}) {
-  const { signal } = options;
+  const { signal, forceMerge } = options;
   const manifest = validateSucceededResult(statementResult);
   const format = manifest.format;
-  const output = new import_node_stream.PassThrough();
+  if (statementResult.result?.data_array) {
+    throw new DatabricksSqlError(
+      "fetchStream only supports EXTERNAL_LINKS results",
+      "UNSUPPORTED_FORMAT",
+      statementResult.statement_id
+    );
+  }
+  const output = new import_node_stream2.PassThrough();
   if (signal) {
-    const onAbort = () => {
-      output.destroy(new AbortError("Stream aborted"));
-    };
+    const onAbort = () => output.destroy(new AbortError("Stream aborted"));
     signal.addEventListener("abort", onAbort, { once: true });
-    output.once("close", () => {
-      signal.removeEventListener("abort", onAbort);
-    });
+    output.once("close", () => signal.removeEventListener("abort", onAbort));
   }
-  mergeChunksToStream(statementResult, auth, manifest, format, output, signal).catch(
-    (err) => {
-      output.destroy(err);
-    }
-  );
+  mergeChunksToStream(statementResult, auth, manifest, format, output, signal, forceMerge).catch((err) => output.destroy(err));
   return output;
 }
-async function mergeChunksToStream(statementResult, auth, manifest, format, output, signal) {
-  const result = statementResult.result;
-  let urls = result?.external_links?.map((link) => link.external_link) ?? [];
-  if (urls.length === 0 && manifest.total_chunk_count > 0) {
-    for (let i = 0; i < manifest.total_chunk_count; i++) {
-      if (signal?.aborted) throw new AbortError("Aborted while collecting URLs");
-      const chunkData = await getChunk(auth, statementResult.statement_id, i, signal);
-      const chunkUrls = chunkData.external_links?.map((link) => link.external_link) ?? [];
-      urls.push(...chunkUrls);
-    }
-  }
+async function mergeChunksToStream(statementResult, auth, manifest, format, output, signal, forceMerge) {
+  const urls = await collectExternalUrls(statementResult, auth, manifest, signal);
   if (urls.length === 0)
     return void output.end();
-  await (0, import_merge_streams.mergeStreamsFromUrls)(format, signal ? { urls, output, signal } : { urls, output });
+  if (urls.length === 1 && !forceMerge)
+    return pipeUrlToOutput(urls[0], output, signal);
+  return (0, import_merge_streams.mergeStreamsFromUrls)(format, signal ? { urls, output, signal } : { urls, output });
+}
+async function collectExternalUrls(statementResult, auth, manifest, signal) {
+  const urls = extractExternalLinks(statementResult.result?.external_links);
+  if (urls.length > 0)
+    return urls;
+  if (!manifest.total_chunk_count)
+    return [];
+  const chunkUrls = [];
+  for (let i = 0; i < manifest.total_chunk_count; i++) {
+    if (signal?.aborted)
+      throw new AbortError("Aborted while collecting URLs");
+    const chunkData = await getChunk(auth, statementResult.statement_id, i, signal);
+    chunkUrls.push(...extractExternalLinks(chunkData.external_links));
+  }
+  return chunkUrls;
+}
+function extractExternalLinks(externalLinks) {
+  if (!externalLinks)
+    return [];
+  return externalLinks.map((link) => link.external_link).filter(isNonEmptyString);
+}
+function isNonEmptyString(value) {
+  return typeof value === "string" && value.length > 0;
 }
 
 // src/api/fetchRow.ts
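The rewritten `fetchStream` also fails fast on inline data instead of silently yielding nothing. A sketch of the new behavior from the caller's side, with `auth` assumed as in the README samples:

```ts
import { executeStatement, fetchStream } from '@bitofsky/databricks-sql'

declare const auth: { token: string; host: string; httpPath: string }

const inline = await executeStatement('SELECT 1', auth, { disposition: 'INLINE' })
try {
  fetchStream(inline, auth)
} catch (err) {
  // DatabricksSqlError with code 'UNSUPPORTED_FORMAT'; INLINE results carry
  // data_array rather than external links.
}
```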
@@ -701,10 +750,20 @@ async function fetchAll(statementResult, auth, options = {}) {
 
 // src/api/mergeExternalLinks.ts
 async function mergeExternalLinks(statementResult, auth, options) {
-  const { signal, mergeStreamToExternalLink } = options;
+  const { signal, mergeStreamToExternalLink, forceMerge } = options;
   if (!statementResult.result?.external_links)
     return statementResult;
-  const stream = fetchStream(statementResult, auth, signal ? { signal } : {});
+  if (!forceMerge) {
+    const totalChunks = statementResult.manifest?.total_chunk_count;
+    const externalLinks = statementResult.result.external_links;
+    const isSingleChunk = totalChunks === void 0 ? externalLinks.length <= 1 : totalChunks <= 1;
+    if (isSingleChunk && externalLinks.length <= 1)
+      return statementResult;
+  }
+  const stream = fetchStream(statementResult, auth, {
+    ...signal ? { signal } : {},
+    ...forceMerge !== void 0 ? { forceMerge } : {}
+  });
   const uploadResult = await mergeStreamToExternalLink(stream);
   const manifest = validateSucceededResult(statementResult);
   const totalRowCount = manifest.total_row_count ?? 0;
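The new short-circuit above means single-chunk, single-link results skip the download/upload round-trip entirely. The predicate, re-expressed as standalone TypeScript for clarity (same logic as the compiled lines above; the function name is ours):

```ts
function skipsMerge(totalChunks: number | undefined, linkCount: number, forceMerge?: boolean): boolean {
  if (forceMerge) return false
  const isSingleChunk = totalChunks === undefined ? linkCount <= 1 : totalChunks <= 1
  return isSingleChunk && linkCount <= 1
}

console.log(skipsMerge(1, 1))         // true  → input returned unchanged
console.log(skipsMerge(undefined, 3)) // false → chunks are merged and uploaded
console.log(skipsMerge(1, 1, true))   // false → forceMerge always merges
```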
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
-…
+…
aAAO,QAAQ,IAAI,WAAW,SAAS,CAAC;AACxC,YAAM,IAAI,WAAW,SAAS;AAAA,IAChC;AAEA,UAAM,MAAM,KAAK;AACjB,QAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,gBAAY,OAAO,GAAG,CAAC;AAAA,EACzB;AACF;;;AG9FA,eAAsB,SACpB,iBACA,MACA,UAA2B,CAAC,GACU;AACtC,QAAM,OAAoC,CAAC;AAC3C,QAAM,eAAiC;AAAA;AAAA,IAErC,WAAW,CAAC,QAAQ;AAClB,WAAK,KAAK,GAAG;AAAA,IACf;AAAA,EACF;AAEA,MAAI,QAAQ;AACV,iBAAa,SAAS,QAAQ;AAEhC,MAAI,QAAQ;AACV,iBAAa,SAAS,QAAQ;AAEhC,QAAM,SAAS,iBAAiB,MAAM,YAAY;AAClD,SAAO;AACT;;;ACrBA,eAAsB,mBACpB,iBACA,MACA,SAC0B;AAC1B,QAAM,EAAE,QAAQ,0BAA0B,IAAI;AAG9C,MAAI,CAAC,gBAAgB,QAAQ;AAC3B,WAAO;AAGT,QAAM,SAAS,YAAY,iBAAiB,MAAM,SAAS,EAAE,OAAO,IAAI,CAAC,CAAC;AAG1E,QAAM,eAAe,MAAM,0BAA0B,MAAM;AAI3D,QAAM,WAAW,wBAAwB,eAAe;AACxD,QAAM,gBAAgB,SAAS,mBAAmB;AAElD,SAAO;AAAA,IACL,cAAc,gBAAgB;AAAA,IAC9B,QAAQ,gBAAgB;AAAA,IACxB,UAAU;AAAA,MACR,GAAG;AAAA,MACH,mBAAmB;AAAA,MACnB,kBAAkB,aAAa;AAAA,MAC/B,QAAQ;AAAA,QACN;AAAA,UACE,aAAa;AAAA,UACb,YAAY;AAAA,UACZ,WAAW;AAAA,UACX,YAAY,aAAa;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,gBAAgB;AAAA,QACd;AAAA,UACE,aAAa;AAAA,UACb,YAAY;AAAA,UACZ,WAAW;AAAA,UACX,YAAY,aAAa;AAAA,UACzB,eAAe,aAAa;AAAA,UAC5B,YAAY,aAAa;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;","names":["errorBody","mapped"]}
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/errors.ts","../src/util.ts","../src/http.ts","../src/databricks-api.ts","../src/api/executeStatement.ts","../src/api/fetchRow.ts","../src/createRowMapper.ts","../src/api/fetchStream.ts","../src/api/fetchAll.ts","../src/api/mergeExternalLinks.ts"],"sourcesContent":["// Types\nexport type * from './types.js'\n\n// Errors\nexport * from './errors.js'\n\n// Core functions\nexport * from './api/index.js'\n","/** Base error for Databricks SQL operations */\nexport class DatabricksSqlError extends Error {\n readonly code: string\n readonly statementId: string | undefined\n\n constructor(message: string, code?: string, statementId?: string) {\n super(message)\n this.name = 'DatabricksSqlError'\n this.code = code ?? 'UNKNOWN_ERROR'\n this.statementId = statementId\n Error.captureStackTrace?.(this, DatabricksSqlError)\n }\n}\n\n/** Error when statement is cancelled */\nexport class StatementCancelledError extends DatabricksSqlError {\n constructor(statementId: string) {\n super(`Statement ${statementId} was cancelled`, 'CANCELLED', statementId)\n this.name = 'StatementCancelledError'\n }\n}\n\n/** Error when operation is aborted via AbortSignal */\nexport class AbortError extends DatabricksSqlError {\n constructor(message: string = 'Operation was aborted') {\n super(message, 'ABORTED')\n this.name = 'AbortError'\n }\n}\n\n/** HTTP error from API calls */\nexport class HttpError extends DatabricksSqlError {\n readonly status: number\n readonly statusText: string\n\n constructor(status: number, statusText: string, message?: string) {\n super(message ?? `HTTP ${status}: ${statusText}`, `HTTP_${status}`)\n this.name = 'HttpError'\n this.status = status\n this.statusText = statusText\n }\n}\n\n/** Authentication error (401) */\nexport class AuthenticationError extends HttpError {\n constructor() {\n super(401, 'Unauthorized', 'Authentication failed. 
Check your token.')\n this.name = 'AuthenticationError'\n }\n}\n\n/** Rate limit error (429) */\nexport class RateLimitError extends HttpError {\n readonly retryAfter: number | undefined\n\n constructor(retryAfter?: number) {\n super(429, 'Too Many Requests', 'Rate limit exceeded')\n this.name = 'RateLimitError'\n this.retryAfter = retryAfter\n }\n}\n","import { Readable } from 'node:stream'\nimport { pipeline } from 'node:stream/promises'\nimport type { ReadableStream as WebReadableStream } from 'node:stream/web'\nimport type { StatementResult, StatementManifest } from './types.js'\nimport { AbortError, DatabricksSqlError } from './errors.js'\n\n/**\n * Extract warehouse_id from httpPath\n * @example \"/sql/1.0/warehouses/abc123def456\" -> \"abc123def456\"\n */\nexport function extractWarehouseId(httpPath: string): string {\n const match = httpPath.match(/\\/sql\\/\\d+\\.\\d+\\/warehouses\\/([a-zA-Z0-9]+)/)\n if (!match?.[1])\n throw new Error(`Cannot extract warehouse_id from httpPath: ${httpPath}`)\n return match[1]\n}\n\n/**\n * Throw AbortError if signal is aborted\n */\nexport function throwIfAborted(signal: AbortSignal | undefined, context: string): void {\n if (signal?.aborted)\n throw new AbortError(`[${context}] Aborted`)\n}\n\n/**\n * Delay for specified milliseconds with AbortSignal support\n */\nexport async function delay(ms: number, signal?: AbortSignal): Promise<void> {\n return new Promise((resolve, reject) => {\n if (signal?.aborted)\n return reject(new AbortError('Aborted before delay'))\n\n let settled = false\n\n const onAbort = () => {\n if (settled) return\n settled = true\n clearTimeout(timer)\n reject(new AbortError('Aborted during delay'))\n }\n\n const timer = setTimeout(() => {\n if (settled) return\n settled = true\n signal?.removeEventListener('abort', onAbort)\n resolve()\n }, ms)\n\n signal?.addEventListener('abort', onAbort, { once: true })\n })\n}\n\n/**\n * Build full URL from host and path\n */\nexport function buildUrl(host: string, path: string): string {\n const base = host.startsWith('https://') ? host : `https://${host}`\n return new URL(path, base).href\n}\n\n/**\n * Validate statement result is in SUCCEEDED state with manifest.\n * Returns the manifest for convenience.\n * @throws {DatabricksSqlError} If state is not SUCCEEDED or manifest is missing\n */\nexport function validateSucceededResult(\n statementResult: StatementResult\n): StatementManifest {\n if (statementResult.status.state !== 'SUCCEEDED')\n throw new DatabricksSqlError(\n `Cannot fetch from non-succeeded statement: ${statementResult.status.state}`,\n 'INVALID_STATE',\n statementResult.statement_id\n )\n\n if (!statementResult.manifest)\n throw new DatabricksSqlError(\n 'Statement result has no manifest',\n 'MISSING_MANIFEST',\n statementResult.statement_id\n )\n\n return statementResult.manifest\n}\n\nfunction isWebReadableStream(body: unknown): body is WebReadableStream {\n return typeof (body as WebReadableStream).getReader === 'function'\n}\n\nexport async function pipeUrlToOutput(\n url: string,\n output: NodeJS.WritableStream,\n signal?: AbortSignal\n): Promise<void> {\n // Uses Node 20+ global fetch with Web streams.\n if (signal?.aborted)\n throw new AbortError('Aborted while streaming')\n\n const response = await fetch(url, signal ? 
{ signal } : undefined)\n if (!response.ok) {\n throw new Error(\n `Failed to fetch external link: ${response.status} ${response.statusText}`\n )\n }\n\n if (!response.body)\n return void output.end()\n\n const body = response.body\n const input = isWebReadableStream(body)\n ? Readable.fromWeb(body)\n : (body as NodeJS.ReadableStream)\n\n await pipeline(input, output)\n}\n","import type { AuthInfo } from './types.js'\nimport {\n HttpError,\n AuthenticationError,\n RateLimitError,\n AbortError,\n} from './errors.js'\nimport { buildUrl, delay } from './util.js'\n\nconst MAX_RETRIES = 3\nconst INITIAL_RETRY_DELAY_MS = 1000\n\ntype HttpMethod = 'GET' | 'POST' | 'DELETE'\n\ntype HttpRequestOptions = {\n method: HttpMethod\n path: string\n body?: unknown\n signal?: AbortSignal\n}\n\n/**\n * HTTP request wrapper with retry and error handling\n */\nexport async function httpRequest<T>(\n auth: AuthInfo,\n options: HttpRequestOptions\n): Promise<T> {\n const { method, path, body, signal } = options\n const url = buildUrl(auth.host, path)\n\n let lastError: Error | undefined\n let retryDelay = INITIAL_RETRY_DELAY_MS\n\n for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {\n if (signal?.aborted)\n throw new AbortError()\n\n try {\n // Build a minimal fetch init, skipping undefined values.\n const fetchInit = Object.fromEntries(\n Object.entries({\n method,\n headers: {\n Authorization: `Bearer ${auth.token}`,\n 'Content-Type': 'application/json',\n Accept: 'application/json',\n },\n body: body ? JSON.stringify(body) : undefined,\n signal,\n }).filter(([, v]) => v !== undefined)\n ) as RequestInit\n\n const response = await fetch(url, fetchInit)\n\n // Success\n if (response.ok)\n return (await response.json()) as T\n\n // Authentication error (no retry)\n if (response.status === 401)\n throw new AuthenticationError()\n\n // Rate limit\n if (response.status === 429) {\n const retryAfterHeader = response.headers.get('Retry-After')\n const retryAfter = retryAfterHeader\n ? parseInt(retryAfterHeader, 10)\n : undefined\n const error = new RateLimitError(\n isNaN(retryAfter as number) ? undefined : retryAfter\n )\n\n if (error.retryAfter && attempt < MAX_RETRIES) {\n await delay(error.retryAfter * 1000, signal)\n continue\n }\n\n throw error\n }\n\n // Server error (can retry)\n if (response.status >= 500) {\n const errorBody = await response.text().catch(() => '')\n lastError = new HttpError(response.status, response.statusText, errorBody)\n\n if (attempt < MAX_RETRIES) {\n // Exponential backoff for transient server errors.\n await delay(retryDelay, signal)\n retryDelay *= 2\n continue\n }\n }\n\n // Other client errors\n const errorBody = await response.text().catch(() => '')\n\n throw new HttpError(response.status, response.statusText, errorBody)\n\n } catch (err) {\n // Re-throw known errors\n if (\n err instanceof AbortError ||\n err instanceof AuthenticationError ||\n err instanceof HttpError\n )\n throw err\n\n // Network error\n if (err instanceof TypeError && err.message.includes('fetch')) {\n lastError = err\n if (attempt < MAX_RETRIES) {\n // Network errors are retried with backoff.\n await delay(retryDelay, signal)\n retryDelay *= 2\n continue\n }\n }\n\n throw err\n }\n }\n\n throw lastError ?? 
new Error('Request failed after retries')\n}\n","import type {\n AuthInfo,\n ExecuteStatementRequest,\n StatementResult,\n GetChunkResponse,\n QueryInfo,\n} from './types.js'\nimport { httpRequest } from './http.js'\n\n// Base path for Databricks SQL Statement Execution API.\nconst BASE_PATH = '/api/2.0/sql/statements'\n// Base path for Query History API.\nconst HISTORY_BASE_PATH = '/api/2.0/sql/history/queries'\n\n/**\n * Execute SQL statement\n * POST /api/2.0/sql/statements\n */\nexport async function postStatement(\n auth: AuthInfo,\n request: ExecuteStatementRequest,\n signal?: AbortSignal\n): Promise<StatementResult> {\n return httpRequest<StatementResult>(auth, {\n method: 'POST',\n path: BASE_PATH,\n body: request,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Get statement status and result\n * GET /api/2.0/sql/statements/{statement_id}\n */\nexport async function getStatement(\n auth: AuthInfo,\n statementId: string,\n signal?: AbortSignal\n): Promise<StatementResult> {\n return httpRequest<StatementResult>(auth, {\n method: 'GET',\n path: `${BASE_PATH}/${statementId}`,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Cancel statement execution\n * POST /api/2.0/sql/statements/{statement_id}/cancel\n */\nexport async function cancelStatement(\n auth: AuthInfo,\n statementId: string,\n signal?: AbortSignal\n): Promise<void> {\n await httpRequest<unknown>(auth, {\n method: 'POST',\n path: `${BASE_PATH}/${statementId}/cancel`,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Get result chunk by index\n * GET /api/2.0/sql/statements/{statement_id}/result/chunks/{chunk_index}\n */\nexport async function getChunk(\n auth: AuthInfo,\n statementId: string,\n chunkIndex: number,\n signal?: AbortSignal\n): Promise<GetChunkResponse> {\n return httpRequest<GetChunkResponse>(auth, {\n method: 'GET',\n path: `${BASE_PATH}/${statementId}/result/chunks/${chunkIndex}`,\n ...(signal ? { signal } : {}),\n })\n}\n\n/**\n * Get query metrics from Query History API\n * GET /api/2.0/sql/history/queries/{query_id}?include_metrics=true\n */\nexport async function getQueryMetrics(\n auth: AuthInfo,\n queryId: string,\n signal?: AbortSignal\n): Promise<QueryInfo> {\n return httpRequest<QueryInfo>(auth, {\n method: 'GET',\n path: `${HISTORY_BASE_PATH}/${queryId}?include_metrics=true`,\n ...(signal ? { signal } : {}),\n })\n}\n","import type {\n AuthInfo,\n ExecuteStatementOptions,\n ExecuteStatementRequest,\n StatementResult,\n StatementState,\n QueryMetrics,\n} from '../types.js'\nimport { postStatement, getStatement, cancelStatement, getQueryMetrics } from '../databricks-api.js'\nimport { extractWarehouseId, throwIfAborted, delay } from '../util.js'\nimport {\n DatabricksSqlError,\n StatementCancelledError,\n AbortError,\n} from '../errors.js'\n\nconst TERMINAL_STATES = new Set<StatementState>([\n 'SUCCEEDED',\n 'FAILED',\n 'CANCELED',\n 'CLOSED',\n])\nconst POLL_INTERVAL_MS = 5000\n\nasync function fetchMetrics(\n auth: AuthInfo,\n statementId: string,\n signal?: AbortSignal\n): Promise<QueryMetrics | undefined> {\n try {\n const queryInfo = await getQueryMetrics(auth, statementId, signal)\n return queryInfo.metrics\n } catch {\n // Ignore metrics fetch errors - non-critical\n return undefined\n }\n}\n\n/**\n * Execute SQL statement and poll until completion\n */\nexport async function executeStatement(\n query: string,\n auth: AuthInfo,\n options: ExecuteStatementOptions = {}\n): Promise<StatementResult> {\n const warehouseId = options.warehouse_id ?? 
extractWarehouseId(auth.httpPath)\n const { signal, onProgress, enableMetrics } = options\n\n // Check if already aborted\n throwIfAborted(signal, 'executeStatement')\n\n // Helper to call onProgress with optional metrics\n const emitProgress = onProgress\n ? async (statementId: string) => onProgress(result.status, enableMetrics ? await fetchMetrics(auth, statementId, signal) : undefined)\n : undefined\n\n // 1. Build request (filter out undefined values)\n const request = Object.fromEntries(\n Object.entries({\n warehouse_id: warehouseId,\n statement: query,\n byte_limit: options.byte_limit,\n disposition: options.disposition,\n format: options.format,\n on_wait_timeout: options.on_wait_timeout ?? 'CONTINUE',\n wait_timeout: options.wait_timeout ?? '50s',\n row_limit: options.row_limit,\n catalog: options.catalog,\n schema: options.schema,\n parameters: options.parameters,\n }).filter(([, v]) => v !== undefined)\n ) as ExecuteStatementRequest\n\n // 2. Submit statement execution request\n let result = await postStatement(auth, request, signal)\n\n // 3. Poll until terminal state\n while (!TERMINAL_STATES.has(result.status.state)) {\n if (signal?.aborted) {\n await cancelStatement(auth, result.statement_id).catch(() => { })\n throw new AbortError('Aborted during polling')\n }\n\n await emitProgress?.(result.statement_id)\n await delay(POLL_INTERVAL_MS, signal)\n result = await getStatement(auth, result.statement_id, signal)\n }\n\n // 4. Final progress callback\n await emitProgress?.(result.statement_id)\n\n // 5. Handle terminal states\n if (result.status.state === 'SUCCEEDED')\n return result\n\n if (result.status.state === 'CANCELED')\n throw new StatementCancelledError(result.statement_id)\n\n // FAILED or CLOSED\n throw new DatabricksSqlError(\n result.status.error?.message ?? 'Statement execution failed',\n result.status.error?.error_code,\n result.statement_id\n )\n}\n","import type { Readable } from 'node:stream'\nimport type {\n AuthInfo,\n FetchRowsOptions,\n RowArray,\n RowObject,\n StatementResult,\n} from '../types.js'\n\nimport { parser } from 'stream-json'\nimport { streamArray } from 'stream-json/streamers/StreamArray'\n\nimport { getChunk } from '../databricks-api.js'\nimport { createRowMapper } from '../createRowMapper.js'\nimport { AbortError, DatabricksSqlError } from '../errors.js'\nimport { validateSucceededResult } from '../util.js'\nimport { fetchStream } from './fetchStream.js'\n\n/**\n * Process each row from statement result with a callback.\n * Supports INLINE results and JSON_ARRAY external links.\n */\nexport async function fetchRow(\n statementResult: StatementResult,\n auth: AuthInfo,\n options: FetchRowsOptions = {}\n): Promise<void> {\n const { signal, onEachRow, format } = options\n const manifest = validateSucceededResult(statementResult)\n // Map JSON_ARRAY rows to JSON_OBJECT when requested.\n const mapRow = createRowMapper(manifest, format)\n\n if (statementResult.result?.external_links) {\n if (manifest.format !== 'JSON_ARRAY') {\n throw new DatabricksSqlError(\n `fetchRow only supports JSON_ARRAY for external_links. Received: ${manifest.format}`,\n 'UNSUPPORTED_FORMAT',\n statementResult.statement_id\n )\n }\n\n const stream = fetchStream(statementResult, auth, signal ? 
{ signal } : {})\n await consumeJsonArrayStream(stream, mapRow, onEachRow, signal)\n return\n }\n\n const totalChunks = manifest.total_chunk_count\n\n // Process first chunk (inline data_array)\n const dataArray = statementResult.result?.data_array\n if (dataArray) {\n for (const row of dataArray) {\n if (signal?.aborted) throw new AbortError('Aborted')\n // Convert row to requested shape before callback.\n onEachRow?.(mapRow(row as RowArray))\n }\n }\n\n // Process additional chunks if any\n if (totalChunks > 1) {\n const statementId = statementResult.statement_id\n for (let chunkIndex = 1; chunkIndex < totalChunks; chunkIndex++) {\n if (signal?.aborted) throw new AbortError('Aborted')\n\n const chunk = await getChunk(auth, statementId, chunkIndex, signal)\n\n // Additional chunks should also be data_array (INLINE)\n if (chunk.external_links)\n throw new DatabricksSqlError(\n 'fetchRow only supports INLINE results. Chunk contains external_links.',\n 'UNSUPPORTED_FORMAT',\n statementId\n )\n\n if (chunk.data_array) {\n for (const row of chunk.data_array) {\n if (signal?.aborted) throw new AbortError('Aborted')\n // Apply the same mapping for each chunked row.\n onEachRow?.(mapRow(row as RowArray))\n }\n }\n }\n }\n}\n\nasync function consumeJsonArrayStream(\n stream: Readable,\n mapRow: (row: RowArray) => RowArray | RowObject,\n onEachRow: ((row: RowArray | RowObject) => void) | undefined,\n signal: AbortSignal | undefined\n): Promise<void> {\n // Stream JSON_ARRAY as individual rows to avoid buffering whole payloads.\n const jsonStream = stream.pipe(parser()).pipe(streamArray())\n\n for await (const item of jsonStream) {\n if (signal?.aborted) {\n stream.destroy(new AbortError('Aborted'))\n throw new AbortError('Aborted')\n }\n\n const row = item.value\n if (!Array.isArray(row)) {\n throw new DatabricksSqlError(\n 'Expected JSON_ARRAY rows to be arrays',\n 'INVALID_FORMAT'\n )\n }\n\n onEachRow?.(mapRow(row))\n }\n}\n","import { DatabricksSqlError } from './errors.js'\nimport type {\n ColumnInfo,\n FetchRowsOptions,\n RowArray,\n RowObject,\n StatementManifest,\n} from './types.js'\n\ntype RowMapper = (row: RowArray) => RowArray | RowObject\n\ntype TypeDescriptor = {\n typeName: string\n typeText: string\n precision?: number\n scale?: number\n fields?: StructField[]\n elementType?: TypeDescriptor\n keyType?: TypeDescriptor\n valueType?: TypeDescriptor\n}\n\ntype StructField = {\n name: string\n type: TypeDescriptor\n}\n\n// Type buckets used for value conversion decisions.\nconst INTEGER_TYPES = new Set(['TINYINT', 'SMALLINT', 'INT'])\nconst BIGINT_TYPES = new Set(['BIGINT', 'LONG'])\nconst FLOAT_TYPES = new Set(['FLOAT', 'DOUBLE'])\nconst BOOLEAN_TYPES = new Set(['BOOLEAN'])\nconst STRING_TYPES = new Set([\n 'STRING',\n 'DATE',\n 'TIMESTAMP',\n 'TIMESTAMP_NTZ',\n 'TIMESTAMP_LTZ',\n 'TIME',\n])\n\n/**\n * Create a row mapper that converts JSON_ARRAY rows into JSON_OBJECTs.\n * Datetime-like fields are preserved as strings to avoid locale/zone surprises.\n * DECIMAL values are converted to numbers to match the Databricks SDK behavior.\n */\nexport function createRowMapper(\n manifest: StatementManifest,\n format: FetchRowsOptions['format']\n): RowMapper {\n if (format !== 'JSON_OBJECT')\n return (row) => row\n\n // Precompute per-column converters for fast row mapping.\n const columnConverters = manifest.schema.columns.map((column: ColumnInfo) => ({\n name: column.name,\n convert: createColumnConverter(column),\n }))\n\n return (row) => {\n const mapped: RowObject = {}\n for (let index = 
0; index < columnConverters.length; index++) {\n const converter = columnConverters[index]\n if (!converter)\n continue\n\n const { name, convert } = converter\n if (name)\n mapped[name] = convert(row[index])\n }\n return mapped\n }\n}\n\nfunction createColumnConverter(column: ColumnInfo): (value: unknown) => unknown {\n const descriptor = parseColumnType(column)\n return (value) => convertValue(descriptor, value)\n}\n\nfunction parseColumnType(column: ColumnInfo): TypeDescriptor {\n if (column.type_name === 'STRUCT' || column.type_name === 'ARRAY' || column.type_name === 'MAP')\n return parseTypeDescriptor(column.type_text)\n\n if (column.type_name === 'DECIMAL')\n // Prefer precision/scale provided by the API when available.\n return createDecimalDescriptor({\n typeName: column.type_name,\n typeText: column.type_text,\n }, column.type_precision, column.type_scale)\n\n return {\n typeName: column.type_name,\n typeText: column.type_text,\n }\n}\n\nfunction parseTypeDescriptor(typeText: string): TypeDescriptor {\n const trimmed = typeText.trim()\n const typeName = getTypeName(trimmed)\n\n if (typeName === 'STRUCT')\n // STRUCT fields are parsed recursively from type_text.\n return {\n typeName,\n typeText: trimmed,\n fields: parseStructFields(trimmed),\n }\n\n if (typeName === 'ARRAY') {\n const elementTypeText = parseSingleTypeArgument(trimmed)\n const descriptor: TypeDescriptor = {\n typeName,\n typeText: trimmed,\n }\n if (elementTypeText)\n descriptor.elementType = parseTypeDescriptor(elementTypeText)\n return descriptor\n }\n\n if (typeName === 'MAP') {\n const [keyTypeText, valueTypeText] = parseTypeArguments(trimmed, 2)\n const descriptor: TypeDescriptor = {\n typeName,\n typeText: trimmed,\n }\n if (keyTypeText)\n descriptor.keyType = parseTypeDescriptor(keyTypeText)\n if (valueTypeText)\n descriptor.valueType = parseTypeDescriptor(valueTypeText)\n return descriptor\n }\n\n if (typeName === 'DECIMAL') {\n // DECIMAL(precision, scale) needs explicit parsing for integer conversion.\n const { precision, scale } = parseDecimalInfo(trimmed)\n return createDecimalDescriptor({ typeName, typeText: trimmed }, precision, scale)\n }\n\n return {\n typeName,\n typeText: trimmed,\n }\n}\n\nfunction getTypeName(typeText: string): string {\n return typeText.match(/^[A-Z_]+/)?.[0] ?? 
typeText\n}\n\nfunction parseDecimalInfo(typeText: string): { precision?: number; scale?: number } {\n const match = typeText.match(/DECIMAL\\((\\d+),\\s*(\\d+)\\)/)\n if (!match)\n return {}\n\n return {\n precision: Number(match[1]),\n scale: Number(match[2]),\n }\n}\n\nfunction createDecimalDescriptor(\n base: Omit<TypeDescriptor, 'precision' | 'scale'>,\n precision?: number,\n scale?: number\n): TypeDescriptor {\n const descriptor: TypeDescriptor = { ...base }\n if (precision !== undefined)\n descriptor.precision = precision\n if (scale !== undefined)\n descriptor.scale = scale\n return descriptor\n}\n\nfunction parseStructFields(typeText: string): StructField[] {\n const start = typeText.indexOf('<')\n const end = typeText.lastIndexOf('>')\n if (start === -1 || end === -1 || end <= start)\n return []\n\n const inner = typeText.slice(start + 1, end)\n // Split by commas only at the top level of nested type definitions.\n const parts = splitTopLevel(inner)\n const fields: StructField[] = []\n\n for (const part of parts) {\n const separatorIndex = part.indexOf(':')\n if (separatorIndex === -1)\n continue\n\n const name = part.slice(0, separatorIndex).trim()\n let fieldTypeText = part.slice(separatorIndex + 1).trim()\n fieldTypeText = stripNotNull(fieldTypeText)\n\n if (!name)\n continue\n\n fields.push({\n name,\n type: parseTypeDescriptor(fieldTypeText),\n })\n }\n\n return fields\n}\n\nfunction parseSingleTypeArgument(typeText: string): string | null {\n const [arg] = parseTypeArguments(typeText, 1)\n return arg ?? null\n}\n\nfunction parseTypeArguments(typeText: string, expectedCount: number): Array<string | undefined> {\n const start = typeText.indexOf('<')\n const end = typeText.lastIndexOf('>')\n if (start === -1 || end === -1 || end <= start)\n return []\n\n const inner = typeText.slice(start + 1, end)\n const parts = splitTopLevel(inner)\n if (parts.length < expectedCount)\n return parts\n\n return parts.slice(0, expectedCount).map((part) => stripNotNull(part.trim()))\n}\n\nfunction splitTopLevel(value: string): string[] {\n const result: string[] = []\n let current = ''\n let angleDepth = 0\n let parenDepth = 0\n\n for (const char of value) {\n if (char === '<') angleDepth++\n if (char === '>') angleDepth--\n if (char === '(') parenDepth++\n if (char === ')') parenDepth--\n\n if (char === ',' && angleDepth === 0 && parenDepth === 0) {\n result.push(current.trim())\n current = ''\n continue\n }\n\n current += char\n }\n\n if (current.trim().length > 0)\n result.push(current.trim())\n\n return result\n}\n\nfunction stripNotNull(typeText: string): string {\n let trimmed = typeText.trim()\n while (trimmed.endsWith('NOT NULL'))\n trimmed = trimmed.slice(0, -'NOT NULL'.length).trim()\n return trimmed\n}\n\nfunction convertValue(descriptor: TypeDescriptor, value: unknown): unknown {\n if (value === null || value === undefined)\n return value\n\n if (descriptor.typeName === 'STRUCT' && descriptor.fields)\n // STRUCT values are JSON strings in JSON_ARRAY format.\n return convertStructValue(descriptor.fields, value)\n\n if (descriptor.typeName === 'ARRAY' && descriptor.elementType)\n return convertArrayValue(descriptor.elementType, value)\n\n if (descriptor.typeName === 'MAP' && descriptor.keyType && descriptor.valueType)\n return convertMapValue(descriptor.keyType, descriptor.valueType, value)\n\n if (descriptor.typeName === 'DECIMAL')\n return convertNumber(value)\n\n if (INTEGER_TYPES.has(descriptor.typeName))\n return convertNumber(value)\n\n if 
(BIGINT_TYPES.has(descriptor.typeName))\n return convertInteger(value)\n\n if (FLOAT_TYPES.has(descriptor.typeName))\n return convertNumber(value)\n\n if (BOOLEAN_TYPES.has(descriptor.typeName))\n return convertBoolean(value)\n\n if (STRING_TYPES.has(descriptor.typeName))\n return value\n\n return value\n}\n\nfunction convertStructValue(fields: StructField[], value: unknown): unknown {\n const raw = parseStructValue(value)\n if (!raw || typeof raw !== 'object' || Array.isArray(raw))\n return value\n\n // Apply nested field conversions based on the parsed STRUCT schema.\n const mapped: RowObject = {}\n for (const field of fields)\n mapped[field.name] = convertValue(field.type, (raw as RowObject)[field.name])\n\n return mapped\n}\n\nfunction convertArrayValue(elementType: TypeDescriptor, value: unknown): unknown {\n const raw = parseJsonValue(value)\n if (!Array.isArray(raw))\n return value\n\n return raw.map((entry) => convertValue(elementType, entry))\n}\n\nfunction convertMapValue(\n keyType: TypeDescriptor,\n valueType: TypeDescriptor,\n value: unknown\n): unknown {\n const raw = parseJsonValue(value)\n if (!raw || typeof raw !== 'object')\n return value\n\n if (Array.isArray(raw)) {\n const mapped: RowObject = {}\n for (const entry of raw) {\n if (!Array.isArray(entry) || entry.length < 2)\n continue\n const convertedKey = convertValue(keyType, entry[0])\n mapped[String(convertedKey)] = convertValue(valueType, entry[1])\n }\n return mapped\n }\n\n const mapped: RowObject = {}\n for (const [key, entryValue] of Object.entries(raw)) {\n const convertedKey = convertValue(keyType, key)\n mapped[String(convertedKey)] = convertValue(valueType, entryValue)\n }\n\n return mapped\n}\n\nfunction parseStructValue(value: unknown): RowObject | null {\n const parsed = parseJsonValue(value)\n if (parsed && typeof parsed === 'object' && !Array.isArray(parsed))\n return parsed as RowObject\n\n return parsed as RowObject | null\n}\n\nfunction parseJsonValue(value: unknown): unknown {\n if (typeof value === 'string') {\n try {\n return JSON.parse(value)\n } catch {\n throw new DatabricksSqlError('Failed to parse JSON value', 'INVALID_JSON')\n }\n }\n\n return value\n}\n\nfunction convertNumber(value: unknown): unknown {\n if (typeof value === 'number')\n return value\n\n if (typeof value === 'string') {\n const parsed = Number(value)\n return Number.isNaN(parsed) ? 
value : parsed\n }\n\n return value\n}\n\nfunction convertInteger(value: unknown): unknown {\n if (typeof value === 'bigint')\n return value\n\n if (typeof value === 'number') {\n if (Number.isInteger(value))\n return BigInt(value)\n return value\n }\n\n if (typeof value === 'string') {\n try {\n // Preserve integer semantics for BIGINT/DECIMAL(scale=0) by returning bigint.\n return BigInt(value)\n } catch {\n return value\n }\n }\n\n return value\n}\n\nfunction convertBoolean(value: unknown): unknown {\n if (typeof value === 'boolean')\n return value\n\n if (typeof value === 'string') {\n if (value === 'true') return true\n if (value === 'false') return false\n }\n\n return value\n}\n","import type { MergeFormat } from '@bitofsky/merge-streams'\nimport type {\n AuthInfo,\n ExternalLinkInfo,\n FetchStreamOptions,\n StatementManifest,\n StatementResult,\n} from '../types.js'\n\nimport { PassThrough, Readable } from 'node:stream'\n\nimport { mergeStreamsFromUrls } from '@bitofsky/merge-streams'\n\nimport { getChunk } from '../databricks-api.js'\nimport { AbortError, DatabricksSqlError } from '../errors.js'\nimport { pipeUrlToOutput, validateSucceededResult } from '../util.js'\n\n/**\n * Create a readable stream from statement result.\n * Merges all external link chunks into a single binary stream,\n * preserving the original format (JSON_ARRAY, CSV, ARROW_STREAM).\n */\nexport function fetchStream(\n statementResult: StatementResult,\n auth: AuthInfo,\n options: FetchStreamOptions = {}\n): Readable {\n const { signal, forceMerge } = options\n const manifest = validateSucceededResult(statementResult)\n const format = manifest.format as MergeFormat\n\n if (statementResult.result?.data_array) {\n throw new DatabricksSqlError(\n 'fetchStream only supports EXTERNAL_LINKS results',\n 'UNSUPPORTED_FORMAT',\n statementResult.statement_id\n )\n }\n\n // Create PassThrough as output (readable by consumer)\n const output = new PassThrough()\n\n // Handle AbortSignal\n if (signal) {\n const onAbort = () => output.destroy(new AbortError('Stream aborted'))\n signal.addEventListener('abort', onAbort, { once: true })\n output.once('close', () => signal.removeEventListener('abort', onAbort))\n }\n\n // Start async merge process\n // Errors are forwarded to the stream consumer via destroy.\n mergeChunksToStream(statementResult, auth, manifest, format, output, signal, forceMerge)\n .catch((err) => output.destroy(err as Error))\n\n return output\n}\n\n/**\n * Collect all external link URLs and merge them into output stream\n */\nasync function mergeChunksToStream(\n statementResult: StatementResult,\n auth: AuthInfo,\n manifest: StatementManifest,\n format: MergeFormat,\n output: PassThrough,\n signal?: AbortSignal,\n forceMerge?: boolean\n): Promise<void> {\n const urls = await collectExternalUrls(statementResult, auth, manifest, signal)\n\n // No external links - close the stream\n if (urls.length === 0)\n return void output.end()\n\n // Single URL - pipe directly to output unless forcing merge\n if (urls.length === 1 && !forceMerge)\n // Avoid merge-streams overhead for a single URL unless forced.\n return pipeUrlToOutput(urls[0]!, output, signal)\n\n // Merge all URLs using merge-streams\n return mergeStreamsFromUrls(format, signal ? 
{ urls, output, signal } : { urls, output })\n}\n\nasync function collectExternalUrls(\n statementResult: StatementResult,\n auth: AuthInfo,\n manifest: StatementManifest,\n signal?: AbortSignal\n): Promise<string[]> {\n const urls = extractExternalLinks(statementResult.result?.external_links)\n if (urls.length > 0)\n return urls\n\n if (!manifest.total_chunk_count)\n return []\n\n const chunkUrls: string[] = []\n for (let i = 0; i < manifest.total_chunk_count; i++) {\n if (signal?.aborted)\n throw new AbortError('Aborted while collecting URLs')\n\n // Chunk metadata contains external link URLs when results are chunked.\n const chunkData = await getChunk(auth, statementResult.statement_id, i, signal)\n chunkUrls.push(...extractExternalLinks(chunkData.external_links))\n }\n\n return chunkUrls\n}\n\nfunction extractExternalLinks(externalLinks?: ExternalLinkInfo[]): string[] {\n if (!externalLinks)\n return []\n\n return externalLinks\n .map((link) => link.external_link)\n .filter(isNonEmptyString)\n}\n\nfunction isNonEmptyString(value: unknown): value is string {\n return typeof value === 'string' && value.length > 0\n}\n","import type {\n AuthInfo,\n FetchAllOptions,\n FetchRowsOptions,\n RowArray,\n RowObject,\n StatementResult,\n} from '../types.js'\n\nimport { fetchRow } from './fetchRow.js'\n\n/**\n * Fetch all rows from statement result as an array.\n * Only supports INLINE results or JSON_ARRAY external links.\n */\nexport async function fetchAll(\n statementResult: StatementResult,\n auth: AuthInfo,\n options: FetchAllOptions = {}\n): Promise<Array<RowArray | RowObject>> {\n const rows: Array<RowArray | RowObject> = []\n const fetchOptions: FetchRowsOptions = {\n // Collect rows as they are streamed in.\n onEachRow: (row) => {\n rows.push(row)\n },\n }\n\n if (options.signal)\n fetchOptions.signal = options.signal\n\n if (options.format)\n fetchOptions.format = options.format\n\n await fetchRow(statementResult, auth, fetchOptions)\n return rows\n}\n","import type {\n AuthInfo,\n MergeExternalLinksOptions,\n StatementResult,\n} from '../types.js'\n\nimport { validateSucceededResult } from '../util.js'\nimport { fetchStream } from './fetchStream.js'\n\n/**\n * Merge external links from StatementResult into a single stream,\n * upload it via the provided callback, and return updated StatementResult.\n *\n * If the result is not external links (inline data or empty), returns the original as-is.\n */\nexport async function mergeExternalLinks(\n statementResult: StatementResult,\n auth: AuthInfo,\n options: MergeExternalLinksOptions\n): Promise<StatementResult> {\n const { signal, mergeStreamToExternalLink, forceMerge } = options\n\n // If not external links, return original as-is\n if (!statementResult.result?.external_links)\n return statementResult\n\n if (!forceMerge) {\n const totalChunks = statementResult.manifest?.total_chunk_count\n const externalLinks = statementResult.result.external_links\n const isSingleChunk = totalChunks === undefined ? externalLinks.length <= 1 : totalChunks <= 1\n\n // Skip merging when a single external link already exists unless forced.\n if (isSingleChunk && externalLinks.length <= 1)\n return statementResult\n }\n\n // Get merged stream via fetchStream\n const stream = fetchStream(statementResult, auth, {\n ...signal ? { signal } : {},\n ...forceMerge !== undefined ? 
{ forceMerge } : {},\n })\n\n // Upload via callback\n const uploadResult = await mergeStreamToExternalLink(stream)\n\n // Build updated StatementResult\n // Manifest must exist for external links; validate before constructing new result.\n const manifest = validateSucceededResult(statementResult)\n const totalRowCount = manifest.total_row_count ?? 0\n\n return {\n statement_id: statementResult.statement_id,\n status: statementResult.status,\n manifest: {\n ...manifest,\n total_chunk_count: 1,\n total_byte_count: uploadResult.byte_count,\n chunks: [\n {\n chunk_index: 0,\n row_offset: 0,\n row_count: totalRowCount,\n byte_count: uploadResult.byte_count,\n },\n ],\n },\n result: {\n external_links: [\n {\n chunk_index: 0,\n row_offset: 0,\n row_count: totalRowCount,\n byte_count: uploadResult.byte_count,\n external_link: uploadResult.externalLink,\n expiration: uploadResult.expiration,\n },\n ],\n },\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCO,IAAM,qBAAN,MAAM,4BAA2B,MAAM;AAAA,EACnC;AAAA,EACA;AAAA,EAET,YAAY,SAAiB,MAAe,aAAsB;AAChE,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,OAAO,QAAQ;AACpB,SAAK,cAAc;AACnB,UAAM,oBAAoB,MAAM,mBAAkB;AAAA,EACpD;AACF;AAGO,IAAM,0BAAN,cAAsC,mBAAmB;AAAA,EAC9D,YAAY,aAAqB;AAC/B,UAAM,aAAa,WAAW,kBAAkB,aAAa,WAAW;AACxE,SAAK,OAAO;AAAA,EACd;AACF;AAGO,IAAM,aAAN,cAAyB,mBAAmB;AAAA,EACjD,YAAY,UAAkB,yBAAyB;AACrD,UAAM,SAAS,SAAS;AACxB,SAAK,OAAO;AAAA,EACd;AACF;AAGO,IAAM,YAAN,cAAwB,mBAAmB;AAAA,EACvC;AAAA,EACA;AAAA,EAET,YAAY,QAAgB,YAAoB,SAAkB;AAChE,UAAM,WAAW,QAAQ,MAAM,KAAK,UAAU,IAAI,QAAQ,MAAM,EAAE;AAClE,SAAK,OAAO;AACZ,SAAK,SAAS;AACd,SAAK,aAAa;AAAA,EACpB;AACF;AAGO,IAAM,sBAAN,cAAkC,UAAU;AAAA,EACjD,cAAc;AACZ,UAAM,KAAK,gBAAgB,0CAA0C;AACrE,SAAK,OAAO;AAAA,EACd;AACF;AAGO,IAAM,iBAAN,cAA6B,UAAU;AAAA,EACnC;AAAA,EAET,YAAY,YAAqB;AAC/B,UAAM,KAAK,qBAAqB,qBAAqB;AACrD,SAAK,OAAO;AACZ,SAAK,aAAa;AAAA,EACpB;AACF;;;AC5DA,yBAAyB;AACzB,sBAAyB;AASlB,SAAS,mBAAmB,UAA0B;AAC3D,QAAM,QAAQ,SAAS,MAAM,6CAA6C;AAC1E,MAAI,CAAC,QAAQ,CAAC;AACZ,UAAM,IAAI,MAAM,8CAA8C,QAAQ,EAAE;AAC1E,SAAO,MAAM,CAAC;AAChB;AAKO,SAAS,eAAe,QAAiC,SAAuB;AACrF,MAAI,QAAQ;AACV,UAAM,IAAI,WAAW,IAAI,OAAO,WAAW;AAC/C;AAKA,eAAsB,MAAM,IAAY,QAAqC;AAC3E,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,QAAI,QAAQ;AACV,aAAO,OAAO,IAAI,WAAW,sBAAsB,CAAC;AAEtD,QAAI,UAAU;AAEd,UAAM,UAAU,MAAM;AACpB,UAAI,QAAS;AACb,gBAAU;AACV,mBAAa,KAAK;AAClB,aAAO,IAAI,WAAW,sBAAsB,CAAC;AAAA,IAC/C;AAEA,UAAM,QAAQ,WAAW,MAAM;AAC7B,UAAI,QAAS;AACb,gBAAU;AACV,cAAQ,oBAAoB,SAAS,OAAO;AAC5C,cAAQ;AAAA,IACV,GAAG,EAAE;AAEL,YAAQ,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,EAC3D,CAAC;AACH;AAKO,SAAS,SAAS,MAAc,MAAsB;AAC3D,QAAM,OAAO,KAAK,WAAW,UAAU,IAAI,OAAO,WAAW,IAAI;AACjE,SAAO,IAAI,IAAI,MAAM,IAAI,EAAE;AAC7B;AAOO,SAAS,wBACd,iBACmB;AACnB,MAAI,gBAAgB,OAAO,UAAU;AACnC,UAAM,IAAI;AAAA,MACR,8CAA8C,gBAAgB,OAAO,KAAK;AAAA,MAC1E;AAAA,MACA,gBAAgB;AAAA,IAClB;AAEF,MAAI,CAAC,gBAAgB;AACnB,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA,gBAAgB;AAAA,IAClB;AAEF,SAAO,gBAAgB;AACzB;AAEA,SAAS,oBAAoB,MAA0C;AACrE,SAAO,OAAQ,KAA2B,cAAc;AAC1D;AAEA,eAAsB,gBACpB,KACA,QACA,QACe;AAEf,MAAI,QAAQ;AACV,UAAM,IAAI,WAAW,yBAAyB;AAEhD,QAAM,WAAW,MAAM,MAAM,KAAK,SAAS,EAAE,OAAO,IAAI,MAAS;AACjE,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI;AAAA,MACR,kCAAkC,SAAS,MAAM,IAAI,SAAS,UAAU;AAAA,IAC1E;AAAA,EACF;AAEA,MAAI,CAAC,SAAS;AACZ,WAAO,KAAK,OAAO,IAAI;AAEzB,QAAM,OAAO,SAAS;AACtB,QAAM,QAAQ,oBAAoB,IAAI,IAClC,4BAAS,QAAQ,IAAI,IACpB;AAEL,YAAM,0BAAS,OAAO,MAAM;AAC9B;;;AC1GA,IAAM,cAAc;AACpB,IAAM,yBAAyB;AAc/B,eAAsB,YACpB,MACA,SACY;AACZ,QAAM,EAAE,QAAQ,MAAM,MAAM,OAAO,IAAI;AACvC,QAAM,MAAM,SAAS,KAAK,MAAM,IAAI;AAEpC,MAAI;AACJ,MAAI,aAAa
;AAEjB,WAAS,UAAU,GAAG,WAAW,aAAa,WAAW;AACvD,QAAI,QAAQ;AACV,YAAM,IAAI,WAAW;AAEvB,QAAI;AAEF,YAAM,YAAY,OAAO;AAAA,QACvB,OAAO,QAAQ;AAAA,UACb;AAAA,UACA,SAAS;AAAA,YACP,eAAe,UAAU,KAAK,KAAK;AAAA,YACnC,gBAAgB;AAAA,YAChB,QAAQ;AAAA,UACV;AAAA,UACA,MAAM,OAAO,KAAK,UAAU,IAAI,IAAI;AAAA,UACpC;AAAA,QACF,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS;AAAA,MACtC;AAEA,YAAM,WAAW,MAAM,MAAM,KAAK,SAAS;AAG3C,UAAI,SAAS;AACX,eAAQ,MAAM,SAAS,KAAK;AAG9B,UAAI,SAAS,WAAW;AACtB,cAAM,IAAI,oBAAoB;AAGhC,UAAI,SAAS,WAAW,KAAK;AAC3B,cAAM,mBAAmB,SAAS,QAAQ,IAAI,aAAa;AAC3D,cAAM,aAAa,mBACf,SAAS,kBAAkB,EAAE,IAC7B;AACJ,cAAM,QAAQ,IAAI;AAAA,UAChB,MAAM,UAAoB,IAAI,SAAY;AAAA,QAC5C;AAEA,YAAI,MAAM,cAAc,UAAU,aAAa;AAC7C,gBAAM,MAAM,MAAM,aAAa,KAAM,MAAM;AAC3C;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAGA,UAAI,SAAS,UAAU,KAAK;AAC1B,cAAMA,aAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACtD,oBAAY,IAAI,UAAU,SAAS,QAAQ,SAAS,YAAYA,UAAS;AAEzE,YAAI,UAAU,aAAa;AAEzB,gBAAM,MAAM,YAAY,MAAM;AAC9B,wBAAc;AACd;AAAA,QACF;AAAA,MACF;AAGA,YAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AAEtD,YAAM,IAAI,UAAU,SAAS,QAAQ,SAAS,YAAY,SAAS;AAAA,IAErE,SAAS,KAAK;AAEZ,UACE,eAAe,cACf,eAAe,uBACf,eAAe;AAEf,cAAM;AAGR,UAAI,eAAe,aAAa,IAAI,QAAQ,SAAS,OAAO,GAAG;AAC7D,oBAAY;AACZ,YAAI,UAAU,aAAa;AAEzB,gBAAM,MAAM,YAAY,MAAM;AAC9B,wBAAc;AACd;AAAA,QACF;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AAEA,QAAM,aAAa,IAAI,MAAM,8BAA8B;AAC7D;;;AClHA,IAAM,YAAY;AAElB,IAAM,oBAAoB;AAM1B,eAAsB,cACpB,MACA,SACA,QAC0B;AAC1B,SAAO,YAA6B,MAAM;AAAA,IACxC,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,IACN,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC7B,CAAC;AACH;AAMA,eAAsB,aACpB,MACA,aACA,QAC0B;AAC1B,SAAO,YAA6B,MAAM;AAAA,IACxC,QAAQ;AAAA,IACR,MAAM,GAAG,SAAS,IAAI,WAAW;AAAA,IACjC,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC7B,CAAC;AACH;AAMA,eAAsB,gBACpB,MACA,aACA,QACe;AACf,QAAM,YAAqB,MAAM;AAAA,IAC/B,QAAQ;AAAA,IACR,MAAM,GAAG,SAAS,IAAI,WAAW;AAAA,IACjC,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC7B,CAAC;AACH;AAMA,eAAsB,SACpB,MACA,aACA,YACA,QAC2B;AAC3B,SAAO,YAA8B,MAAM;AAAA,IACzC,QAAQ;AAAA,IACR,MAAM,GAAG,SAAS,IAAI,WAAW,kBAAkB,UAAU;AAAA,IAC7D,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC7B,CAAC;AACH;AAMA,eAAsB,gBACpB,MACA,SACA,QACoB;AACpB,SAAO,YAAuB,MAAM;AAAA,IAClC,QAAQ;AAAA,IACR,MAAM,GAAG,iBAAiB,IAAI,OAAO;AAAA,IACrC,GAAI,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,EAC7B,CAAC;AACH;;;AC9EA,IAAM,kBAAkB,oBAAI,IAAoB;AAAA,EAC9C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AACD,IAAM,mBAAmB;AAEzB,eAAe,aACb,MACA,aACA,QACmC;AACnC,MAAI;AACF,UAAM,YAAY,MAAM,gBAAgB,MAAM,aAAa,MAAM;AACjE,WAAO,UAAU;AAAA,EACnB,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,iBACpB,OACA,MACA,UAAmC,CAAC,GACV;AAC1B,QAAM,cAAc,QAAQ,gBAAgB,mBAAmB,KAAK,QAAQ;AAC5E,QAAM,EAAE,QAAQ,YAAY,cAAc,IAAI;AAG9C,iBAAe,QAAQ,kBAAkB;AAGzC,QAAM,eAAe,aACjB,OAAO,gBAAwB,WAAW,OAAO,QAAQ,gBAAgB,MAAM,aAAa,MAAM,aAAa,MAAM,IAAI,MAAS,IAClI;AAGJ,QAAM,UAAU,OAAO;AAAA,IACrB,OAAO,QAAQ;AAAA,MACb,cAAc;AAAA,MACd,WAAW;AAAA,MACX,YAAY,QAAQ;AAAA,MACpB,aAAa,QAAQ;AAAA,MACrB,QAAQ,QAAQ;AAAA,MAChB,iBAAiB,QAAQ,mBAAmB;AAAA,MAC5C,cAAc,QAAQ,gBAAgB;AAAA,MACtC,WAAW,QAAQ;AAAA,MACnB,SAAS,QAAQ;AAAA,MACjB,QAAQ,QAAQ;AAAA,MAChB,YAAY,QAAQ;AAAA,IACtB,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS;AAAA,EACtC;AAGA,MAAI,SAAS,MAAM,cAAc,MAAM,SAAS,MAAM;AAGtD,SAAO,CAAC,gBAAgB,IAAI,OAAO,OAAO,KAAK,GAAG;AAChD,QAAI,QAAQ,SAAS;AACnB,YAAM,gBAAgB,MAAM,OAAO,YAAY,EAAE,MAAM,MAAM;AAAA,MAAE,CAAC;AAChE,YAAM,IAAI,WAAW,wBAAwB;AAAA,IAC/C;AAEA,UAAM,eAAe,OAAO,YAAY;AACxC,UAAM,MAAM,kBAAkB,MAAM;AACpC,aAAS,MAAM,aAAa,MAAM,OAAO,cAAc,MAAM;AAAA,EAC/D;AAGA,QAAM,eAAe,OAAO,YAAY;AAGxC,MAAI,OAAO,OAAO,UAAU;AAC1B,WAAO;AAET,MAAI,OAAO,OAAO,UAAU;AAC1B,UAAM,IAAI,wBAAwB,OAAO,YAAY;AAGvD,QAAM,IAAI;AAAA,IACR,OAAO,OAAO,OAAO,WAAW;AAAA,IAChC,OAAO,OAAO,OAAO
;AAAA,IACrB,OAAO;AAAA,EACT;AACF;;;AChGA,yBAAuB;AACvB,yBAA4B;;;ACkB5B,IAAM,gBAAgB,oBAAI,IAAI,CAAC,WAAW,YAAY,KAAK,CAAC;AAC5D,IAAM,eAAe,oBAAI,IAAI,CAAC,UAAU,MAAM,CAAC;AAC/C,IAAM,cAAc,oBAAI,IAAI,CAAC,SAAS,QAAQ,CAAC;AAC/C,IAAM,gBAAgB,oBAAI,IAAI,CAAC,SAAS,CAAC;AACzC,IAAM,eAAe,oBAAI,IAAI;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAOM,SAAS,gBACd,UACA,QACW;AACX,MAAI,WAAW;AACb,WAAO,CAAC,QAAQ;AAGlB,QAAM,mBAAmB,SAAS,OAAO,QAAQ,IAAI,CAAC,YAAwB;AAAA,IAC5E,MAAM,OAAO;AAAA,IACb,SAAS,sBAAsB,MAAM;AAAA,EACvC,EAAE;AAEF,SAAO,CAAC,QAAQ;AACd,UAAM,SAAoB,CAAC;AAC3B,aAAS,QAAQ,GAAG,QAAQ,iBAAiB,QAAQ,SAAS;AAC5D,YAAM,YAAY,iBAAiB,KAAK;AACxC,UAAI,CAAC;AACH;AAEF,YAAM,EAAE,MAAM,QAAQ,IAAI;AAC1B,UAAI;AACF,eAAO,IAAI,IAAI,QAAQ,IAAI,KAAK,CAAC;AAAA,IACrC;AACA,WAAO;AAAA,EACT;AACF;AAEA,SAAS,sBAAsB,QAAiD;AAC9E,QAAM,aAAa,gBAAgB,MAAM;AACzC,SAAO,CAAC,UAAU,aAAa,YAAY,KAAK;AAClD;AAEA,SAAS,gBAAgB,QAAoC;AAC3D,MAAI,OAAO,cAAc,YAAY,OAAO,cAAc,WAAW,OAAO,cAAc;AACxF,WAAO,oBAAoB,OAAO,SAAS;AAE7C,MAAI,OAAO,cAAc;AAEvB,WAAO,wBAAwB;AAAA,MAC7B,UAAU,OAAO;AAAA,MACjB,UAAU,OAAO;AAAA,IACnB,GAAG,OAAO,gBAAgB,OAAO,UAAU;AAE7C,SAAO;AAAA,IACL,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,EACnB;AACF;AAEA,SAAS,oBAAoB,UAAkC;AAC7D,QAAM,UAAU,SAAS,KAAK;AAC9B,QAAM,WAAW,YAAY,OAAO;AAEpC,MAAI,aAAa;AAEf,WAAO;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV,QAAQ,kBAAkB,OAAO;AAAA,IACnC;AAEF,MAAI,aAAa,SAAS;AACxB,UAAM,kBAAkB,wBAAwB,OAAO;AACvD,UAAM,aAA6B;AAAA,MACjC;AAAA,MACA,UAAU;AAAA,IACZ;AACA,QAAI;AACF,iBAAW,cAAc,oBAAoB,eAAe;AAC9D,WAAO;AAAA,EACT;AAEA,MAAI,aAAa,OAAO;AACtB,UAAM,CAAC,aAAa,aAAa,IAAI,mBAAmB,SAAS,CAAC;AAClE,UAAM,aAA6B;AAAA,MACjC;AAAA,MACA,UAAU;AAAA,IACZ;AACA,QAAI;AACF,iBAAW,UAAU,oBAAoB,WAAW;AACtD,QAAI;AACF,iBAAW,YAAY,oBAAoB,aAAa;AAC1D,WAAO;AAAA,EACT;AAEA,MAAI,aAAa,WAAW;AAE1B,UAAM,EAAE,WAAW,MAAM,IAAI,iBAAiB,OAAO;AACrD,WAAO,wBAAwB,EAAE,UAAU,UAAU,QAAQ,GAAG,WAAW,KAAK;AAAA,EAClF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,UAAU;AAAA,EACZ;AACF;AAEA,SAAS,YAAY,UAA0B;AAC7C,SAAO,SAAS,MAAM,UAAU,IAAI,CAAC,KAAK;AAC5C;AAEA,SAAS,iBAAiB,UAA0D;AAClF,QAAM,QAAQ,SAAS,MAAM,2BAA2B;AACxD,MAAI,CAAC;AACH,WAAO,CAAC;AAEV,SAAO;AAAA,IACL,WAAW,OAAO,MAAM,CAAC,CAAC;AAAA,IAC1B,OAAO,OAAO,MAAM,CAAC,CAAC;AAAA,EACxB;AACF;AAEA,SAAS,wBACP,MACA,WACA,OACgB;AAChB,QAAM,aAA6B,EAAE,GAAG,KAAK;AAC7C,MAAI,cAAc;AAChB,eAAW,YAAY;AACzB,MAAI,UAAU;AACZ,eAAW,QAAQ;AACrB,SAAO;AACT;AAEA,SAAS,kBAAkB,UAAiC;AAC1D,QAAM,QAAQ,SAAS,QAAQ,GAAG;AAClC,QAAM,MAAM,SAAS,YAAY,GAAG;AACpC,MAAI,UAAU,MAAM,QAAQ,MAAM,OAAO;AACvC,WAAO,CAAC;AAEV,QAAM,QAAQ,SAAS,MAAM,QAAQ,GAAG,GAAG;AAE3C,QAAM,QAAQ,cAAc,KAAK;AACjC,QAAM,SAAwB,CAAC;AAE/B,aAAW,QAAQ,OAAO;AACxB,UAAM,iBAAiB,KAAK,QAAQ,GAAG;AACvC,QAAI,mBAAmB;AACrB;AAEF,UAAM,OAAO,KAAK,MAAM,GAAG,cAAc,EAAE,KAAK;AAChD,QAAI,gBAAgB,KAAK,MAAM,iBAAiB,CAAC,EAAE,KAAK;AACxD,oBAAgB,aAAa,aAAa;AAE1C,QAAI,CAAC;AACH;AAEF,WAAO,KAAK;AAAA,MACV;AAAA,MACA,MAAM,oBAAoB,aAAa;AAAA,IACzC,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAEA,SAAS,wBAAwB,UAAiC;AAChE,QAAM,CAAC,GAAG,IAAI,mBAAmB,UAAU,CAAC;AAC5C,SAAO,OAAO;AAChB;AAEA,SAAS,mBAAmB,UAAkB,eAAkD;AAC9F,QAAM,QAAQ,SAAS,QAAQ,GAAG;AAClC,QAAM,MAAM,SAAS,YAAY,GAAG;AACpC,MAAI,UAAU,MAAM,QAAQ,MAAM,OAAO;AACvC,WAAO,CAAC;AAEV,QAAM,QAAQ,SAAS,MAAM,QAAQ,GAAG,GAAG;AAC3C,QAAM,QAAQ,cAAc,KAAK;AACjC,MAAI,MAAM,SAAS;AACjB,WAAO;AAET,SAAO,MAAM,MAAM,GAAG,aAAa,EAAE,IAAI,CAAC,SAAS,aAAa,KAAK,KAAK,CAAC,CAAC;AAC9E;AAEA,SAAS,cAAc,OAAyB;AAC9C,QAAM,SAAmB,CAAC;AAC1B,MAAI,UAAU;AACd,MAAI,aAAa;AACjB,MAAI,aAAa;AAEjB,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,IAAK;AAClB,QAAI,SAAS,IAAK;AAClB,QAAI,SAAS,IAAK;AAClB,QAAI,SAAS,IAAK;AAElB,QAAI,SAAS,OAAO,eAAe,KAAK,eAAe,GAAG;AACxD,aAAO,KAAK,QAAQ,KAAK,CAAC;AAC1B,gBAAU;AACV;AAAA,IACF;AAEA,eAAW;AAAA,EACb;AAEA,MAAI,QAAQ,KAAK,
EAAE,SAAS;AAC1B,WAAO,KAAK,QAAQ,KAAK,CAAC;AAE5B,SAAO;AACT;AAEA,SAAS,aAAa,UAA0B;AAC9C,MAAI,UAAU,SAAS,KAAK;AAC5B,SAAO,QAAQ,SAAS,UAAU;AAChC,cAAU,QAAQ,MAAM,GAAG,CAAC,WAAW,MAAM,EAAE,KAAK;AACtD,SAAO;AACT;AAEA,SAAS,aAAa,YAA4B,OAAyB;AACzE,MAAI,UAAU,QAAQ,UAAU;AAC9B,WAAO;AAET,MAAI,WAAW,aAAa,YAAY,WAAW;AAEjD,WAAO,mBAAmB,WAAW,QAAQ,KAAK;AAEpD,MAAI,WAAW,aAAa,WAAW,WAAW;AAChD,WAAO,kBAAkB,WAAW,aAAa,KAAK;AAExD,MAAI,WAAW,aAAa,SAAS,WAAW,WAAW,WAAW;AACpE,WAAO,gBAAgB,WAAW,SAAS,WAAW,WAAW,KAAK;AAExE,MAAI,WAAW,aAAa;AAC1B,WAAO,cAAc,KAAK;AAE5B,MAAI,cAAc,IAAI,WAAW,QAAQ;AACvC,WAAO,cAAc,KAAK;AAE5B,MAAI,aAAa,IAAI,WAAW,QAAQ;AACtC,WAAO,eAAe,KAAK;AAE7B,MAAI,YAAY,IAAI,WAAW,QAAQ;AACrC,WAAO,cAAc,KAAK;AAE5B,MAAI,cAAc,IAAI,WAAW,QAAQ;AACvC,WAAO,eAAe,KAAK;AAE7B,MAAI,aAAa,IAAI,WAAW,QAAQ;AACtC,WAAO;AAET,SAAO;AACT;AAEA,SAAS,mBAAmB,QAAuB,OAAyB;AAC1E,QAAM,MAAM,iBAAiB,KAAK;AAClC,MAAI,CAAC,OAAO,OAAO,QAAQ,YAAY,MAAM,QAAQ,GAAG;AACtD,WAAO;AAGT,QAAM,SAAoB,CAAC;AAC3B,aAAW,SAAS;AAClB,WAAO,MAAM,IAAI,IAAI,aAAa,MAAM,MAAO,IAAkB,MAAM,IAAI,CAAC;AAE9E,SAAO;AACT;AAEA,SAAS,kBAAkB,aAA6B,OAAyB;AAC/E,QAAM,MAAM,eAAe,KAAK;AAChC,MAAI,CAAC,MAAM,QAAQ,GAAG;AACpB,WAAO;AAET,SAAO,IAAI,IAAI,CAAC,UAAU,aAAa,aAAa,KAAK,CAAC;AAC5D;AAEA,SAAS,gBACP,SACA,WACA,OACS;AACT,QAAM,MAAM,eAAe,KAAK;AAChC,MAAI,CAAC,OAAO,OAAO,QAAQ;AACzB,WAAO;AAET,MAAI,MAAM,QAAQ,GAAG,GAAG;AACtB,UAAMC,UAAoB,CAAC;AAC3B,eAAW,SAAS,KAAK;AACvB,UAAI,CAAC,MAAM,QAAQ,KAAK,KAAK,MAAM,SAAS;AAC1C;AACF,YAAM,eAAe,aAAa,SAAS,MAAM,CAAC,CAAC;AACnD,MAAAA,QAAO,OAAO,YAAY,CAAC,IAAI,aAAa,WAAW,MAAM,CAAC,CAAC;AAAA,IACjE;AACA,WAAOA;AAAA,EACT;AAEA,QAAM,SAAoB,CAAC;AAC3B,aAAW,CAAC,KAAK,UAAU,KAAK,OAAO,QAAQ,GAAG,GAAG;AACnD,UAAM,eAAe,aAAa,SAAS,GAAG;AAC9C,WAAO,OAAO,YAAY,CAAC,IAAI,aAAa,WAAW,UAAU;AAAA,EACnE;AAEA,SAAO;AACT;AAEA,SAAS,iBAAiB,OAAkC;AAC1D,QAAM,SAAS,eAAe,KAAK;AACnC,MAAI,UAAU,OAAO,WAAW,YAAY,CAAC,MAAM,QAAQ,MAAM;AAC/D,WAAO;AAET,SAAO;AACT;AAEA,SAAS,eAAe,OAAyB;AAC/C,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI;AACF,aAAO,KAAK,MAAM,KAAK;AAAA,IACzB,QAAQ;AACN,YAAM,IAAI,mBAAmB,8BAA8B,cAAc;AAAA,IAC3E;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,cAAc,OAAyB;AAC9C,MAAI,OAAO,UAAU;AACnB,WAAO;AAET,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,SAAS,OAAO,KAAK;AAC3B,WAAO,OAAO,MAAM,MAAM,IAAI,QAAQ;AAAA,EACxC;AAEA,SAAO;AACT;AAEA,SAAS,eAAe,OAAyB;AAC/C,MAAI,OAAO,UAAU;AACnB,WAAO;AAET,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI,OAAO,UAAU,KAAK;AACxB,aAAO,OAAO,KAAK;AACrB,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI;AAEF,aAAO,OAAO,KAAK;AAAA,IACrB,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,eAAe,OAAyB;AAC/C,MAAI,OAAO,UAAU;AACnB,WAAO;AAET,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI,UAAU,OAAQ,QAAO;AAC7B,QAAI,UAAU,QAAS,QAAO;AAAA,EAChC;AAEA,SAAO;AACT;;;AC7YA,IAAAC,sBAAsC;AAEtC,2BAAqC;AAW9B,SAAS,YACd,iBACA,MACA,UAA8B,CAAC,GACrB;AACV,QAAM,EAAE,QAAQ,WAAW,IAAI;AAC/B,QAAM,WAAW,wBAAwB,eAAe;AACxD,QAAM,SAAS,SAAS;AAExB,MAAI,gBAAgB,QAAQ,YAAY;AACtC,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA,gBAAgB;AAAA,IAClB;AAAA,EACF;AAGA,QAAM,SAAS,IAAI,gCAAY;AAG/B,MAAI,QAAQ;AACV,UAAM,UAAU,MAAM,OAAO,QAAQ,IAAI,WAAW,gBAAgB,CAAC;AACrE,WAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AACxD,WAAO,KAAK,SAAS,MAAM,OAAO,oBAAoB,SAAS,OAAO,CAAC;AAAA,EACzE;AAIA,sBAAoB,iBAAiB,MAAM,UAAU,QAAQ,QAAQ,QAAQ,UAAU,EACpF,MAAM,CAAC,QAAQ,OAAO,QAAQ,GAAY,CAAC;AAE9C,SAAO;AACT;AAKA,eAAe,oBACb,iBACA,MACA,UACA,QACA,QACA,QACA,YACe;AACf,QAAM,OAAO,MAAM,oBAAoB,iBAAiB,MAAM,UAAU,MAAM;AAG9E,MAAI,KAAK,WAAW;AAClB,WAAO,KAAK,OAAO,IAAI;AAGzB,MAAI,KAAK,WAAW,KAAK,CAAC;AAExB,WAAO,gBAAgB,KAAK,CAAC,GAAI,QAAQ,MAAM;AAGjD,aAAO,2CAAqB,QAAQ,SAAS,EAAE,MAAM,QAAQ,OAAO,IAAI,EAAE,MAAM,OAAO,CAAC;AAC1F;AAEA,eAAe,oBACb,iBACA,MACA,UACA,QACmB;AACnB,QAAM,OAAO,qBAAqB,gBAAgB,QAAQ,cAAc;AACxE,MAAI,KAAK,SAAS;AAChB
,WAAO;AAET,MAAI,CAAC,SAAS;AACZ,WAAO,CAAC;AAEV,QAAM,YAAsB,CAAC;AAC7B,WAAS,IAAI,GAAG,IAAI,SAAS,mBAAmB,KAAK;AACnD,QAAI,QAAQ;AACV,YAAM,IAAI,WAAW,+BAA+B;AAGtD,UAAM,YAAY,MAAM,SAAS,MAAM,gBAAgB,cAAc,GAAG,MAAM;AAC9E,cAAU,KAAK,GAAG,qBAAqB,UAAU,cAAc,CAAC;AAAA,EAClE;AAEA,SAAO;AACT;AAEA,SAAS,qBAAqB,eAA8C;AAC1E,MAAI,CAAC;AACH,WAAO,CAAC;AAEV,SAAO,cACJ,IAAI,CAAC,SAAS,KAAK,aAAa,EAChC,OAAO,gBAAgB;AAC5B;AAEA,SAAS,iBAAiB,OAAiC;AACzD,SAAO,OAAO,UAAU,YAAY,MAAM,SAAS;AACrD;;;AFnGA,eAAsB,SACpB,iBACA,MACA,UAA4B,CAAC,GACd;AACf,QAAM,EAAE,QAAQ,WAAW,OAAO,IAAI;AACtC,QAAM,WAAW,wBAAwB,eAAe;AAExD,QAAM,SAAS,gBAAgB,UAAU,MAAM;AAE/C,MAAI,gBAAgB,QAAQ,gBAAgB;AAC1C,QAAI,SAAS,WAAW,cAAc;AACpC,YAAM,IAAI;AAAA,QACR,mEAAmE,SAAS,MAAM;AAAA,QAClF;AAAA,QACA,gBAAgB;AAAA,MAClB;AAAA,IACF;AAEA,UAAM,SAAS,YAAY,iBAAiB,MAAM,SAAS,EAAE,OAAO,IAAI,CAAC,CAAC;AAC1E,UAAM,uBAAuB,QAAQ,QAAQ,WAAW,MAAM;AAC9D;AAAA,EACF;AAEA,QAAM,cAAc,SAAS;AAG7B,QAAM,YAAY,gBAAgB,QAAQ;AAC1C,MAAI,WAAW;AACb,eAAW,OAAO,WAAW;AAC3B,UAAI,QAAQ,QAAS,OAAM,IAAI,WAAW,SAAS;AAEnD,kBAAY,OAAO,GAAe,CAAC;AAAA,IACrC;AAAA,EACF;AAGA,MAAI,cAAc,GAAG;AACnB,UAAM,cAAc,gBAAgB;AACpC,aAAS,aAAa,GAAG,aAAa,aAAa,cAAc;AAC/D,UAAI,QAAQ,QAAS,OAAM,IAAI,WAAW,SAAS;AAEnD,YAAM,QAAQ,MAAM,SAAS,MAAM,aAAa,YAAY,MAAM;AAGlE,UAAI,MAAM;AACR,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAEF,UAAI,MAAM,YAAY;AACpB,mBAAW,OAAO,MAAM,YAAY;AAClC,cAAI,QAAQ,QAAS,OAAM,IAAI,WAAW,SAAS;AAEnD,sBAAY,OAAO,GAAe,CAAC;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,eAAe,uBACb,QACA,QACA,WACA,QACe;AAEf,QAAM,aAAa,OAAO,SAAK,2BAAO,CAAC,EAAE,SAAK,gCAAY,CAAC;AAE3D,mBAAiB,QAAQ,YAAY;AACnC,QAAI,QAAQ,SAAS;AACnB,aAAO,QAAQ,IAAI,WAAW,SAAS,CAAC;AACxC,YAAM,IAAI,WAAW,SAAS;AAAA,IAChC;AAEA,UAAM,MAAM,KAAK;AACjB,QAAI,CAAC,MAAM,QAAQ,GAAG,GAAG;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,gBAAY,OAAO,GAAG,CAAC;AAAA,EACzB;AACF;;;AG/FA,eAAsB,SACpB,iBACA,MACA,UAA2B,CAAC,GACU;AACtC,QAAM,OAAoC,CAAC;AAC3C,QAAM,eAAiC;AAAA;AAAA,IAErC,WAAW,CAAC,QAAQ;AAClB,WAAK,KAAK,GAAG;AAAA,IACf;AAAA,EACF;AAEA,MAAI,QAAQ;AACV,iBAAa,SAAS,QAAQ;AAEhC,MAAI,QAAQ;AACV,iBAAa,SAAS,QAAQ;AAEhC,QAAM,SAAS,iBAAiB,MAAM,YAAY;AAClD,SAAO;AACT;;;ACrBA,eAAsB,mBACpB,iBACA,MACA,SAC0B;AAC1B,QAAM,EAAE,QAAQ,2BAA2B,WAAW,IAAI;AAG1D,MAAI,CAAC,gBAAgB,QAAQ;AAC3B,WAAO;AAET,MAAI,CAAC,YAAY;AACf,UAAM,cAAc,gBAAgB,UAAU;AAC9C,UAAM,gBAAgB,gBAAgB,OAAO;AAC7C,UAAM,gBAAgB,gBAAgB,SAAY,cAAc,UAAU,IAAI,eAAe;AAG7F,QAAI,iBAAiB,cAAc,UAAU;AAC3C,aAAO;AAAA,EACX;AAGA,QAAM,SAAS,YAAY,iBAAiB,MAAM;AAAA,IAChD,GAAG,SAAS,EAAE,OAAO,IAAI,CAAC;AAAA,IAC1B,GAAG,eAAe,SAAY,EAAE,WAAW,IAAI,CAAC;AAAA,EAClD,CAAC;AAGD,QAAM,eAAe,MAAM,0BAA0B,MAAM;AAI3D,QAAM,WAAW,wBAAwB,eAAe;AACxD,QAAM,gBAAgB,SAAS,mBAAmB;AAElD,SAAO;AAAA,IACL,cAAc,gBAAgB;AAAA,IAC9B,QAAQ,gBAAgB;AAAA,IACxB,UAAU;AAAA,MACR,GAAG;AAAA,MACH,mBAAmB;AAAA,MACnB,kBAAkB,aAAa;AAAA,MAC/B,QAAQ;AAAA,QACN;AAAA,UACE,aAAa;AAAA,UACb,YAAY;AAAA,UACZ,WAAW;AAAA,UACX,YAAY,aAAa;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,gBAAgB;AAAA,QACd;AAAA,UACE,aAAa;AAAA,UACb,YAAY;AAAA,UACZ,WAAW;AAAA,UACX,YAAY,aAAa;AAAA,UACzB,eAAe,aAAa;AAAA,UAC5B,YAAY,aAAa;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;","names":["errorBody","mapped","import_node_stream"]}
|