@bitofsky/databricks-sql 1.0.0

This diff shows the content of a publicly available package version as released to a supported registry. It is provided for informational purposes and reflects the package as it appears in its public registry.
package/dist/index.cjs ADDED
@@ -0,0 +1,755 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/index.ts
+ var index_exports = {};
+ __export(index_exports, {
+   AbortError: () => AbortError,
+   AuthenticationError: () => AuthenticationError,
+   DatabricksSqlError: () => DatabricksSqlError,
+   HttpError: () => HttpError,
+   RateLimitError: () => RateLimitError,
+   StatementCancelledError: () => StatementCancelledError,
+   executeStatement: () => executeStatement,
+   fetchAll: () => fetchAll,
+   fetchRow: () => fetchRow,
+   fetchStream: () => fetchStream,
+   mergeExternalLinks: () => mergeExternalLinks
+ });
+ module.exports = __toCommonJS(index_exports);
+
+ // src/errors.ts
+ var DatabricksSqlError = class _DatabricksSqlError extends Error {
+   code;
+   statementId;
+   constructor(message, code, statementId) {
+     super(message);
+     this.name = "DatabricksSqlError";
+     this.code = code ?? "UNKNOWN_ERROR";
+     this.statementId = statementId;
+     Error.captureStackTrace?.(this, _DatabricksSqlError);
+   }
+ };
+ var StatementCancelledError = class extends DatabricksSqlError {
+   constructor(statementId) {
+     super(`Statement ${statementId} was cancelled`, "CANCELLED", statementId);
+     this.name = "StatementCancelledError";
+   }
+ };
+ var AbortError = class extends DatabricksSqlError {
+   constructor(message = "Operation was aborted") {
+     super(message, "ABORTED");
+     this.name = "AbortError";
+   }
+ };
+ var HttpError = class extends DatabricksSqlError {
+   status;
+   statusText;
+   constructor(status, statusText, message) {
+     super(message ?? `HTTP ${status}: ${statusText}`, `HTTP_${status}`);
+     this.name = "HttpError";
+     this.status = status;
+     this.statusText = statusText;
+   }
+ };
+ var AuthenticationError = class extends HttpError {
+   constructor() {
+     super(401, "Unauthorized", "Authentication failed. Check your token.");
+     this.name = "AuthenticationError";
+   }
+ };
+ var RateLimitError = class extends HttpError {
+   retryAfter;
+   constructor(retryAfter) {
+     super(429, "Too Many Requests", "Rate limit exceeded");
+     this.name = "RateLimitError";
+     this.retryAfter = retryAfter;
+   }
+ };
+
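A minimal error-handling sketch against the classes above (editorial, not part of the package; the auth shape { host, token, httpPath } is inferred from the compiled code, and the host, token, and query are placeholders):

    import {
      executeStatement,
      DatabricksSqlError,
      RateLimitError,
      StatementCancelledError,
    } from "@bitofsky/databricks-sql";

    const auth = {
      host: "dbc-example.cloud.databricks.com", // placeholder workspace host
      token: process.env.DATABRICKS_TOKEN ?? "",
      httpPath: "/sql/1.0/warehouses/abc123", // placeholder warehouse path
    };

    try {
      await executeStatement("SELECT 1", auth);
    } catch (err) {
      if (err instanceof RateLimitError) {
        console.error("Throttled; Retry-After:", err.retryAfter); // seconds, if the server sent one
      } else if (err instanceof StatementCancelledError) {
        console.error("Cancelled statement:", err.statementId);
      } else if (err instanceof DatabricksSqlError) {
        console.error(err.code, err.message); // e.g. "UNKNOWN_ERROR", "HTTP_500"
      } else {
        throw err;
      }
    }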
+ // src/util.ts
+ function extractWarehouseId(httpPath) {
+   const match = httpPath.match(/\/sql\/\d+\.\d+\/warehouses\/([a-zA-Z0-9]+)/);
+   if (!match?.[1])
+     throw new Error(`Cannot extract warehouse_id from httpPath: ${httpPath}`);
+   return match[1];
+ }
+ function throwIfAborted(signal, context) {
+   if (signal?.aborted)
+     throw new AbortError(`[${context}] Aborted`);
+ }
+ async function delay(ms, signal) {
+   return new Promise((resolve, reject) => {
+     if (signal?.aborted)
+       return reject(new AbortError("Aborted before delay"));
+     let settled = false;
+     const onAbort = () => {
+       if (settled) return;
+       settled = true;
+       clearTimeout(timer);
+       reject(new AbortError("Aborted during delay"));
+     };
+     const timer = setTimeout(() => {
+       if (settled) return;
+       settled = true;
+       signal?.removeEventListener("abort", onAbort);
+       resolve();
+     }, ms);
+     signal?.addEventListener("abort", onAbort, { once: true });
+   });
+ }
+ function buildUrl(host, path) {
+   const base = host.startsWith("https://") ? host : `https://${host}`;
+   return new URL(path, base).href;
+ }
+ function validateSucceededResult(statementResult) {
+   if (statementResult.status.state !== "SUCCEEDED")
+     throw new DatabricksSqlError(
+       `Cannot fetch from non-succeeded statement: ${statementResult.status.state}`,
+       "INVALID_STATE",
+       statementResult.statement_id
+     );
+   if (!statementResult.manifest)
+     throw new DatabricksSqlError(
+       "Statement result has no manifest",
+       "MISSING_MANIFEST",
+       statementResult.statement_id
+     );
+   return statementResult.manifest;
+ }
+
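For reference (editorial note): extractWarehouseId expects the warehouse HTTP path from the workspace connection details, matching /sql/<version>/warehouses/<id>; the ids below are placeholders:

    extractWarehouseId("/sql/1.0/warehouses/abc123def456"); // => "abc123def456"
    extractWarehouseId("/sql/1.0/endpoints/abc123");        // throws: Cannot extract warehouse_id ...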
+ // src/http.ts
+ var MAX_RETRIES = 3;
+ var INITIAL_RETRY_DELAY_MS = 1e3;
+ async function httpRequest(auth, options) {
+   const { method, path, body, signal } = options;
+   const url = buildUrl(auth.host, path);
+   let lastError;
+   let retryDelay = INITIAL_RETRY_DELAY_MS;
+   for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
+     if (signal?.aborted)
+       throw new AbortError();
+     try {
+       const fetchInit = Object.fromEntries(
+         Object.entries({
+           method,
+           headers: {
+             Authorization: `Bearer ${auth.token}`,
+             "Content-Type": "application/json",
+             Accept: "application/json"
+           },
+           body: body ? JSON.stringify(body) : void 0,
+           signal
+         }).filter(([, v]) => v !== void 0)
+       );
+       const response = await fetch(url, fetchInit);
+       if (response.ok)
+         return await response.json();
+       if (response.status === 401)
+         throw new AuthenticationError();
+       if (response.status === 429) {
+         const retryAfterHeader = response.headers.get("Retry-After");
+         const retryAfter = retryAfterHeader ? parseInt(retryAfterHeader, 10) : void 0;
+         const error = new RateLimitError(
+           isNaN(retryAfter) ? void 0 : retryAfter
+         );
+         if (error.retryAfter && attempt < MAX_RETRIES) {
+           await delay(error.retryAfter * 1e3, signal);
+           continue;
+         }
+         throw error;
+       }
+       if (response.status >= 500) {
+         const errorBody2 = await response.text().catch(() => "");
+         lastError = new HttpError(response.status, response.statusText, errorBody2);
+         if (attempt < MAX_RETRIES) {
+           await delay(retryDelay, signal);
+           retryDelay *= 2;
+           continue;
+         }
+       }
+       const errorBody = await response.text().catch(() => "");
+       throw new HttpError(response.status, response.statusText, errorBody);
+     } catch (err) {
+       if (err instanceof AbortError || err instanceof AuthenticationError || err instanceof HttpError)
+         throw err;
+       if (err instanceof TypeError && err.message.includes("fetch")) {
+         lastError = err;
+         if (attempt < MAX_RETRIES) {
+           await delay(retryDelay, signal);
+           retryDelay *= 2;
+           continue;
+         }
+       }
+       throw err;
+     }
+   }
+   throw lastError ?? new Error("Request failed after retries");
+ }
+
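Summarizing the retry policy above (editorial): network failures and 5xx responses are retried up to MAX_RETRIES = 3 times with doubling backoff from INITIAL_RETRY_DELAY_MS, 429 responses are retried only when the server supplies a Retry-After header, and 401 responses fail immediately. The worst-case 5xx schedule therefore looks like:

    attempt 0 fails -> wait 1000 ms
    attempt 1 fails -> wait 2000 ms
    attempt 2 fails -> wait 4000 ms
    attempt 3 fails -> HttpError is thrown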
+ // src/databricks-api.ts
+ var BASE_PATH = "/api/2.0/sql/statements";
+ async function postStatement(auth, request, signal) {
+   return httpRequest(auth, {
+     method: "POST",
+     path: BASE_PATH,
+     body: request,
+     ...signal ? { signal } : {}
+   });
+ }
+ async function getStatement(auth, statementId, signal) {
+   return httpRequest(auth, {
+     method: "GET",
+     path: `${BASE_PATH}/${statementId}`,
+     ...signal ? { signal } : {}
+   });
+ }
+ async function cancelStatement(auth, statementId, signal) {
+   await httpRequest(auth, {
+     method: "POST",
+     path: `${BASE_PATH}/${statementId}/cancel`,
+     ...signal ? { signal } : {}
+   });
+ }
+ async function getChunk(auth, statementId, chunkIndex, signal) {
+   return httpRequest(auth, {
+     method: "GET",
+     path: `${BASE_PATH}/${statementId}/result/chunks/${chunkIndex}`,
+     ...signal ? { signal } : {}
+   });
+ }
+
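The four helpers above map directly onto the Databricks SQL Statement Execution API endpoints (paths taken from the code):

    POST /api/2.0/sql/statements                                              submit a statement
    GET  /api/2.0/sql/statements/{statement_id}                               poll status and first result chunk
    POST /api/2.0/sql/statements/{statement_id}/cancel                        request cancellation
    GET  /api/2.0/sql/statements/{statement_id}/result/chunks/{chunk_index}   fetch a later result chunk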
+ // src/api/executeStatement.ts
+ var TERMINAL_STATES = /* @__PURE__ */ new Set([
+   "SUCCEEDED",
+   "FAILED",
+   "CANCELED",
+   "CLOSED"
+ ]);
+ var POLL_INTERVAL_MS = 500;
+ var MAX_POLL_INTERVAL_MS = 5e3;
+ async function executeStatement(query, auth, options = {}) {
+   const warehouseId = options.warehouse_id ?? extractWarehouseId(auth.httpPath);
+   const { signal, onProgress } = options;
+   throwIfAborted(signal, "executeStatement");
+   const request = Object.fromEntries(
+     Object.entries({
+       warehouse_id: warehouseId,
+       statement: query,
+       byte_limit: options.byte_limit,
+       disposition: options.disposition,
+       format: options.format,
+       on_wait_timeout: options.on_wait_timeout,
+       wait_timeout: options.wait_timeout,
+       row_limit: options.row_limit,
+       catalog: options.catalog,
+       schema: options.schema,
+       parameters: options.parameters
+     }).filter(([, v]) => v !== void 0)
+   );
+   let result = await postStatement(auth, request, signal);
+   let pollInterval = POLL_INTERVAL_MS;
+   while (!TERMINAL_STATES.has(result.status.state)) {
+     if (signal?.aborted) {
+       await cancelStatement(auth, result.statement_id).catch(() => {
+       });
+       throw new AbortError("Aborted during polling");
+     }
+     onProgress?.(result.status);
+     await delay(pollInterval, signal);
+     pollInterval = Math.min(pollInterval * 1.5, MAX_POLL_INTERVAL_MS);
+     result = await getStatement(auth, result.statement_id, signal);
+   }
+   onProgress?.(result.status);
+   if (result.status.state === "SUCCEEDED")
+     return result;
+   if (result.status.state === "CANCELED")
+     throw new StatementCancelledError(result.statement_id);
+   throw new DatabricksSqlError(
+     result.status.error?.message ?? "Statement execution failed",
+     result.status.error?.error_code,
+     result.statement_id
+   );
+ }
+
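A usage sketch for executeStatement (editorial; the option names mirror the request fields built above, the wait_timeout/on_wait_timeout values follow the public Statement Execution API, and the host, token, and query are placeholders):

    import { executeStatement } from "@bitofsky/databricks-sql";

    const auth = {
      host: "dbc-example.cloud.databricks.com",
      token: process.env.DATABRICKS_TOKEN ?? "",
      httpPath: "/sql/1.0/warehouses/abc123", // warehouse_id is extracted from here unless passed explicitly
    };

    const controller = new AbortController();
    const result = await executeStatement("SELECT 1 AS one", auth, {
      wait_timeout: "10s",
      on_wait_timeout: "CONTINUE", // keep polling client-side after the server-side wait elapses
      signal: controller.signal,
      onProgress: (status) => console.log("state:", status.state), // invoked on each poll
    });
    console.log(result.statement_id, result.status.state); // "... SUCCEEDED"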
+ // src/api/fetchRow.ts
+ var import_stream_json = require("stream-json");
+ var import_StreamArray = require("stream-json/streamers/StreamArray");
+
+ // src/createRowMapper.ts
+ var INTEGER_TYPES = /* @__PURE__ */ new Set(["TINYINT", "SMALLINT", "INT"]);
+ var BIGINT_TYPES = /* @__PURE__ */ new Set(["BIGINT", "LONG"]);
+ var FLOAT_TYPES = /* @__PURE__ */ new Set(["FLOAT", "DOUBLE"]);
+ var BOOLEAN_TYPES = /* @__PURE__ */ new Set(["BOOLEAN"]);
+ var STRING_TYPES = /* @__PURE__ */ new Set([
+   "STRING",
+   "DATE",
+   "TIMESTAMP",
+   "TIMESTAMP_NTZ",
+   "TIMESTAMP_LTZ",
+   "TIME"
+ ]);
+ function createRowMapper(manifest, format) {
+   if (format !== "JSON_OBJECT")
+     return (row) => row;
+   const columnConverters = manifest.schema.columns.map((column) => ({
+     name: column.name,
+     convert: createColumnConverter(column)
+   }));
+   return (row) => {
+     const mapped = {};
+     for (let index = 0; index < columnConverters.length; index++) {
+       const converter = columnConverters[index];
+       if (!converter)
+         continue;
+       const { name, convert } = converter;
+       if (name)
+         mapped[name] = convert(row[index]);
+     }
+     return mapped;
+   };
+ }
+ function createColumnConverter(column) {
+   const descriptor = parseColumnType(column);
+   return (value) => convertValue(descriptor, value);
+ }
+ function parseColumnType(column) {
+   if (column.type_name === "STRUCT" || column.type_name === "ARRAY" || column.type_name === "MAP")
+     return parseTypeDescriptor(column.type_text);
+   if (column.type_name === "DECIMAL")
+     return createDecimalDescriptor({
+       typeName: column.type_name,
+       typeText: column.type_text
+     }, column.type_precision, column.type_scale);
+   return {
+     typeName: column.type_name,
+     typeText: column.type_text
+   };
+ }
+ function parseTypeDescriptor(typeText) {
+   const trimmed = typeText.trim();
+   const typeName = getTypeName(trimmed);
+   if (typeName === "STRUCT")
+     return {
+       typeName,
+       typeText: trimmed,
+       fields: parseStructFields(trimmed)
+     };
+   if (typeName === "ARRAY") {
+     const elementTypeText = parseSingleTypeArgument(trimmed);
+     const descriptor = {
+       typeName,
+       typeText: trimmed
+     };
+     if (elementTypeText)
+       descriptor.elementType = parseTypeDescriptor(elementTypeText);
+     return descriptor;
+   }
+   if (typeName === "MAP") {
+     const [keyTypeText, valueTypeText] = parseTypeArguments(trimmed, 2);
+     const descriptor = {
+       typeName,
+       typeText: trimmed
+     };
+     if (keyTypeText)
+       descriptor.keyType = parseTypeDescriptor(keyTypeText);
+     if (valueTypeText)
+       descriptor.valueType = parseTypeDescriptor(valueTypeText);
+     return descriptor;
+   }
+   if (typeName === "DECIMAL") {
+     const { precision, scale } = parseDecimalInfo(trimmed);
+     return createDecimalDescriptor({ typeName, typeText: trimmed }, precision, scale);
+   }
+   return {
+     typeName,
+     typeText: trimmed
+   };
+ }
+ function getTypeName(typeText) {
+   return typeText.match(/^[A-Z_]+/)?.[0] ?? typeText;
+ }
+ function parseDecimalInfo(typeText) {
+   const match = typeText.match(/DECIMAL\((\d+),\s*(\d+)\)/);
+   if (!match)
+     return {};
+   return {
+     precision: Number(match[1]),
+     scale: Number(match[2])
+   };
+ }
+ function createDecimalDescriptor(base, precision, scale) {
+   const descriptor = { ...base };
+   if (precision !== void 0)
+     descriptor.precision = precision;
+   if (scale !== void 0)
+     descriptor.scale = scale;
+   return descriptor;
+ }
+ function parseStructFields(typeText) {
+   const start = typeText.indexOf("<");
+   const end = typeText.lastIndexOf(">");
+   if (start === -1 || end === -1 || end <= start)
+     return [];
+   const inner = typeText.slice(start + 1, end);
+   const parts = splitTopLevel(inner);
+   const fields = [];
+   for (const part of parts) {
+     const separatorIndex = part.indexOf(":");
+     if (separatorIndex === -1)
+       continue;
+     const name = part.slice(0, separatorIndex).trim();
+     let fieldTypeText = part.slice(separatorIndex + 1).trim();
+     fieldTypeText = stripNotNull(fieldTypeText);
+     if (!name)
+       continue;
+     fields.push({
+       name,
+       type: parseTypeDescriptor(fieldTypeText)
+     });
+   }
+   return fields;
+ }
+ function parseSingleTypeArgument(typeText) {
+   const [arg] = parseTypeArguments(typeText, 1);
+   return arg ?? null;
+ }
+ function parseTypeArguments(typeText, expectedCount) {
+   const start = typeText.indexOf("<");
+   const end = typeText.lastIndexOf(">");
+   if (start === -1 || end === -1 || end <= start)
+     return [];
+   const inner = typeText.slice(start + 1, end);
+   const parts = splitTopLevel(inner);
+   if (parts.length < expectedCount)
+     return parts;
+   return parts.slice(0, expectedCount).map((part) => stripNotNull(part.trim()));
+ }
+ function splitTopLevel(value) {
+   const result = [];
+   let current = "";
+   let angleDepth = 0;
+   let parenDepth = 0;
+   for (const char of value) {
+     if (char === "<") angleDepth++;
+     if (char === ">") angleDepth--;
+     if (char === "(") parenDepth++;
+     if (char === ")") parenDepth--;
+     if (char === "," && angleDepth === 0 && parenDepth === 0) {
+       result.push(current.trim());
+       current = "";
+       continue;
+     }
+     current += char;
+   }
+   if (current.trim().length > 0)
+     result.push(current.trim());
+   return result;
+ }
+ function stripNotNull(typeText) {
+   let trimmed = typeText.trim();
+   while (trimmed.endsWith("NOT NULL"))
+     trimmed = trimmed.slice(0, -"NOT NULL".length).trim();
+   return trimmed;
+ }
+ function convertValue(descriptor, value) {
+   if (value === null || value === void 0)
+     return value;
+   if (descriptor.typeName === "STRUCT" && descriptor.fields)
+     return convertStructValue(descriptor.fields, value);
+   if (descriptor.typeName === "ARRAY" && descriptor.elementType)
+     return convertArrayValue(descriptor.elementType, value);
+   if (descriptor.typeName === "MAP" && descriptor.keyType && descriptor.valueType)
+     return convertMapValue(descriptor.keyType, descriptor.valueType, value);
+   if (descriptor.typeName === "DECIMAL")
+     return convertNumber(value);
+   if (INTEGER_TYPES.has(descriptor.typeName))
+     return convertNumber(value);
+   if (BIGINT_TYPES.has(descriptor.typeName))
+     return convertInteger(value);
+   if (FLOAT_TYPES.has(descriptor.typeName))
+     return convertNumber(value);
+   if (BOOLEAN_TYPES.has(descriptor.typeName))
+     return convertBoolean(value);
+   if (STRING_TYPES.has(descriptor.typeName))
+     return value;
+   return value;
+ }
+ function convertStructValue(fields, value) {
+   const raw = parseStructValue(value);
+   if (!raw || typeof raw !== "object" || Array.isArray(raw))
+     return value;
+   const mapped = {};
+   for (const field of fields)
+     mapped[field.name] = convertValue(field.type, raw[field.name]);
+   return mapped;
+ }
+ function convertArrayValue(elementType, value) {
+   const raw = parseJsonValue(value);
+   if (!Array.isArray(raw))
+     return value;
+   return raw.map((entry) => convertValue(elementType, entry));
+ }
+ function convertMapValue(keyType, valueType, value) {
+   const raw = parseJsonValue(value);
+   if (!raw || typeof raw !== "object")
+     return value;
+   if (Array.isArray(raw)) {
+     const mapped2 = {};
+     for (const entry of raw) {
+       if (!Array.isArray(entry) || entry.length < 2)
+         continue;
+       const convertedKey = convertValue(keyType, entry[0]);
+       mapped2[String(convertedKey)] = convertValue(valueType, entry[1]);
+     }
+     return mapped2;
+   }
+   const mapped = {};
+   for (const [key, entryValue] of Object.entries(raw)) {
+     const convertedKey = convertValue(keyType, key);
+     mapped[String(convertedKey)] = convertValue(valueType, entryValue);
+   }
+   return mapped;
+ }
+ function parseStructValue(value) {
+   const parsed = parseJsonValue(value);
+   if (parsed && typeof parsed === "object" && !Array.isArray(parsed))
+     return parsed;
+   return parsed;
+ }
+ function parseJsonValue(value) {
+   if (typeof value === "string") {
+     try {
+       return JSON.parse(value);
+     } catch {
+       throw new DatabricksSqlError("Failed to parse JSON value", "INVALID_JSON");
+     }
+   }
+   return value;
+ }
+ function convertNumber(value) {
+   if (typeof value === "number")
+     return value;
+   if (typeof value === "string") {
+     const parsed = Number(value);
+     return Number.isNaN(parsed) ? value : parsed;
+   }
+   return value;
+ }
+ function convertInteger(value) {
+   if (typeof value === "bigint")
+     return value;
+   if (typeof value === "number") {
+     if (Number.isInteger(value))
+       return BigInt(value);
+     return value;
+   }
+   if (typeof value === "string") {
+     try {
+       return BigInt(value);
+     } catch {
+       return value;
+     }
+   }
+   return value;
+ }
+ function convertBoolean(value) {
+   if (typeof value === "boolean")
+     return value;
+   if (typeof value === "string") {
+     if (value === "true") return true;
+     if (value === "false") return false;
+   }
+   return value;
+ }
+
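What the mapper above produces, sketched (editorial): with format "JSON_OBJECT", positional string values from the wire format are keyed by column name and coerced according to the manifest schema, for example:

    schema.columns: [ { name: "id", type_name: "BIGINT" }, { name: "active", type_name: "BOOLEAN" }, { name: "price", type_name: "DECIMAL", type_text: "DECIMAL(10,2)" } ]
    wire row:       [ "42", "true", "19.99" ]
    mapped row:     { id: 42n, active: true, price: 19.99 }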
+ // src/api/fetchStream.ts
+ var import_node_stream = require("stream");
+ var import_merge_streams = require("@bitofsky/merge-streams");
+ function fetchStream(statementResult, auth, options = {}) {
+   const { signal } = options;
+   const manifest = validateSucceededResult(statementResult);
+   const format = manifest.format;
+   const output = new import_node_stream.PassThrough();
+   if (signal) {
+     const onAbort = () => {
+       output.destroy(new AbortError("Stream aborted"));
+     };
+     signal.addEventListener("abort", onAbort, { once: true });
+     output.once("close", () => {
+       signal.removeEventListener("abort", onAbort);
+     });
+   }
+   mergeChunksToStream(statementResult, auth, manifest, format, output, signal).catch(
+     (err) => {
+       output.destroy(err);
+     }
+   );
+   return output;
+ }
+ async function mergeChunksToStream(statementResult, auth, manifest, format, output, signal) {
+   const result = statementResult.result;
+   let urls = result?.external_links?.map((link) => link.external_link) ?? [];
+   if (urls.length === 0 && manifest.total_chunk_count > 0) {
+     for (let i = 0; i < manifest.total_chunk_count; i++) {
+       if (signal?.aborted) throw new AbortError("Aborted while collecting URLs");
+       const chunkData = await getChunk(auth, statementResult.statement_id, i, signal);
+       const chunkUrls = chunkData.external_links?.map((link) => link.external_link) ?? [];
+       urls.push(...chunkUrls);
+     }
+   }
+   if (urls.length === 0)
+     return void output.end();
+   await (0, import_merge_streams.mergeStreamsFromUrls)(format, signal ? { urls, output, signal } : { urls, output });
+ }
+
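A fetchStream sketch (editorial): the returned PassThrough is an ordinary Node Readable, so chunked results can be piped, e.g. to a file; the disposition and format values follow the public Statement Execution API, and the host, token, and query are placeholders:

    import { createWriteStream } from "node:fs";
    import { pipeline } from "node:stream/promises";
    import { executeStatement, fetchStream } from "@bitofsky/databricks-sql";

    const auth = { host: "dbc-example.cloud.databricks.com", token: process.env.DATABRICKS_TOKEN ?? "", httpPath: "/sql/1.0/warehouses/abc123" };

    const result = await executeStatement("SELECT * FROM some_large_table", auth, {
      disposition: "EXTERNAL_LINKS",
      format: "JSON_ARRAY",
    });
    await pipeline(fetchStream(result, auth), createWriteStream("rows.json"));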
+ // src/api/fetchRow.ts
+ async function fetchRow(statementResult, auth, options = {}) {
+   const { signal, onEachRow, format } = options;
+   const manifest = validateSucceededResult(statementResult);
+   const mapRow = createRowMapper(manifest, format);
+   if (statementResult.result?.external_links) {
+     if (manifest.format !== "JSON_ARRAY") {
+       throw new DatabricksSqlError(
+         `fetchRow only supports JSON_ARRAY for external_links. Received: ${manifest.format}`,
+         "UNSUPPORTED_FORMAT",
+         statementResult.statement_id
+       );
+     }
+     const stream = fetchStream(statementResult, auth, signal ? { signal } : {});
+     await consumeJsonArrayStream(stream, mapRow, onEachRow, signal);
+     return;
+   }
+   const totalChunks = manifest.total_chunk_count;
+   const dataArray = statementResult.result?.data_array;
+   if (dataArray) {
+     for (const row of dataArray) {
+       if (signal?.aborted) throw new AbortError("Aborted");
+       onEachRow?.(mapRow(row));
+     }
+   }
+   if (totalChunks > 1) {
+     const statementId = statementResult.statement_id;
+     for (let chunkIndex = 1; chunkIndex < totalChunks; chunkIndex++) {
+       if (signal?.aborted) throw new AbortError("Aborted");
+       const chunk = await getChunk(auth, statementId, chunkIndex, signal);
+       if (chunk.external_links)
+         throw new DatabricksSqlError(
+           "fetchRow only supports INLINE results. Chunk contains external_links.",
+           "UNSUPPORTED_FORMAT",
+           statementId
+         );
+       if (chunk.data_array) {
+         for (const row of chunk.data_array) {
+           if (signal?.aborted) throw new AbortError("Aborted");
+           onEachRow?.(mapRow(row));
+         }
+       }
+     }
+   }
+ }
+ async function consumeJsonArrayStream(stream, mapRow, onEachRow, signal) {
+   const jsonStream = stream.pipe((0, import_stream_json.parser)()).pipe((0, import_StreamArray.streamArray)());
+   for await (const item of jsonStream) {
+     if (signal?.aborted) {
+       stream.destroy(new AbortError("Aborted"));
+       throw new AbortError("Aborted");
+     }
+     const row = item.value;
+     if (!Array.isArray(row)) {
+       throw new DatabricksSqlError(
+         "Expected JSON_ARRAY rows to be arrays",
+         "INVALID_FORMAT"
+       );
+     }
+     onEachRow?.(mapRow(row));
+   }
+ }
+
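A fetchRow sketch (editorial; placeholders as above). Rows are delivered one at a time through onEachRow, and format: "JSON_OBJECT" opts into the named, typed mapping from createRowMapper:

    import { executeStatement, fetchRow } from "@bitofsky/databricks-sql";

    const auth = { host: "dbc-example.cloud.databricks.com", token: process.env.DATABRICKS_TOKEN ?? "", httpPath: "/sql/1.0/warehouses/abc123" };

    const result = await executeStatement("SELECT id, name FROM my_catalog.my_schema.users", auth);
    await fetchRow(result, auth, {
      format: "JSON_OBJECT", // omit to receive raw positional arrays
      onEachRow: (row) => console.log(row),
    });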
+ // src/api/fetchAll.ts
+ async function fetchAll(statementResult, auth, options = {}) {
+   const rows = [];
+   const fetchOptions = {
+     // Collect rows as they are streamed in.
+     onEachRow: (row) => {
+       rows.push(row);
+     }
+   };
+   if (options.signal)
+     fetchOptions.signal = options.signal;
+   if (options.format)
+     fetchOptions.format = options.format;
+   await fetchRow(statementResult, auth, fetchOptions);
+   return rows;
+ }
+
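fetchAll is a thin collector over fetchRow, so the same options apply (editorial sketch, placeholders as above):

    import { executeStatement, fetchAll } from "@bitofsky/databricks-sql";

    const auth = { host: "dbc-example.cloud.databricks.com", token: process.env.DATABRICKS_TOKEN ?? "", httpPath: "/sql/1.0/warehouses/abc123" };

    const result = await executeStatement("SELECT id, name FROM my_catalog.my_schema.users", auth);
    const rows = await fetchAll(result, auth, { format: "JSON_OBJECT" });
    console.log(rows.length, rows[0]);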
+ // src/api/mergeExternalLinks.ts
+ async function mergeExternalLinks(statementResult, auth, options) {
+   const { signal, mergeStreamToExternalLink } = options;
+   if (!statementResult.result?.external_links)
+     return statementResult;
+   const stream = fetchStream(statementResult, auth, signal ? { signal } : {});
+   const uploadResult = await mergeStreamToExternalLink(stream);
+   const manifest = validateSucceededResult(statementResult);
+   const totalRowCount = manifest.total_row_count ?? 0;
+   return {
+     statement_id: statementResult.statement_id,
+     status: statementResult.status,
+     manifest: {
+       ...manifest,
+       total_chunk_count: 1,
+       total_byte_count: uploadResult.byte_count,
+       chunks: [
+         {
+           chunk_index: 0,
+           row_offset: 0,
+           row_count: totalRowCount,
+           byte_count: uploadResult.byte_count
+         }
+       ]
+     },
+     result: {
+       external_links: [
+         {
+           chunk_index: 0,
+           row_offset: 0,
+           row_count: totalRowCount,
+           byte_count: uploadResult.byte_count,
+           external_link: uploadResult.externalLink,
+           expiration: uploadResult.expiration
+         }
+       ]
+     }
+   };
+ }
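A mergeExternalLinks sketch (editorial): the caller supplies mergeStreamToExternalLink, which consumes the merged stream, stores it somewhere durable, and resolves with the new link's metadata. The returned field names (externalLink, byte_count, expiration) come from the compiled code above, while uploadToMyStorage is a hypothetical helper:

    import { executeStatement, mergeExternalLinks } from "@bitofsky/databricks-sql";

    // Hypothetical uploader: persists the stream and returns a signed URL plus metadata.
    declare function uploadToMyStorage(stream: NodeJS.ReadableStream): Promise<{ url: string; size: number; expiresAt: string }>;

    const auth = { host: "dbc-example.cloud.databricks.com", token: process.env.DATABRICKS_TOKEN ?? "", httpPath: "/sql/1.0/warehouses/abc123" };

    const result = await executeStatement("SELECT * FROM some_large_table", auth, {
      disposition: "EXTERNAL_LINKS",
      format: "JSON_ARRAY",
    });
    const merged = await mergeExternalLinks(result, auth, {
      mergeStreamToExternalLink: async (stream) => {
        const { url, size, expiresAt } = await uploadToMyStorage(stream);
        return { externalLink: url, byte_count: size, expiration: expiresAt };
      },
    });
    // merged.result.external_links now holds a single consolidated link covering all rows.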
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+   AbortError,
+   AuthenticationError,
+   DatabricksSqlError,
+   HttpError,
+   RateLimitError,
+   StatementCancelledError,
+   executeStatement,
+   fetchAll,
+   fetchRow,
+   fetchStream,
+   mergeExternalLinks
+ });
+ //# sourceMappingURL=index.cjs.map