@bitofsky/databricks-sql 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,718 @@
1
// src/errors.ts
/**
 * Base error for every failure raised by this package.
 * Carries a machine-readable `code` and, when known, the `statementId`
 * the failure relates to.
 */
var DatabricksSqlError = class _DatabricksSqlError extends Error {
  code;
  statementId;
  /**
   * @param {string} message - Human-readable description.
   * @param {string} [code] - Error code; falls back to "UNKNOWN_ERROR".
   * @param {string} [statementId] - Related statement id, if any.
   */
  constructor(message, code, statementId) {
    super(message);
    this.name = "DatabricksSqlError";
    this.statementId = statementId;
    this.code = code ?? "UNKNOWN_ERROR";
    // V8-specific; optional call keeps non-V8 engines working.
    Error.captureStackTrace?.(this, _DatabricksSqlError);
  }
};
13
/** Raised when a statement reaches the CANCELED terminal state. */
var StatementCancelledError = class extends DatabricksSqlError {
  /** @param {string} statementId - Id of the cancelled statement. */
  constructor(statementId) {
    super(`Statement ${statementId} was cancelled`, "CANCELLED", statementId);
    this.name = "StatementCancelledError";
  }
};
19
/** Raised when an operation is aborted (typically via an AbortSignal). */
var AbortError = class extends DatabricksSqlError {
  /** @param {string} [message] - Defaults to "Operation was aborted". */
  constructor(message = "Operation was aborted") {
    super(message, "ABORTED");
    this.name = "AbortError";
  }
};
25
/**
 * HTTP-level failure. The error code is derived from the status
 * ("HTTP_<status>"); `status` and `statusText` are kept for callers.
 */
var HttpError = class extends DatabricksSqlError {
  status;
  statusText;
  /**
   * @param {number} status - HTTP status code.
   * @param {string} statusText - HTTP status text.
   * @param {string} [message] - Optional override; defaults to "HTTP <status>: <statusText>".
   */
  constructor(status, statusText, message) {
    super(message ?? `HTTP ${status}: ${statusText}`, `HTTP_${status}`);
    this.name = "HttpError";
    this.status = status;
    this.statusText = statusText;
  }
};
35
/** 401 Unauthorized — the supplied bearer token was rejected. */
var AuthenticationError = class extends HttpError {
  constructor() {
    super(401, "Unauthorized", "Authentication failed. Check your token.");
    this.name = "AuthenticationError";
  }
};
41
/**
 * 429 Too Many Requests. `retryAfter` (seconds) comes from the
 * Retry-After response header when it was present and numeric.
 */
var RateLimitError = class extends HttpError {
  retryAfter;
  /** @param {number} [retryAfter] - Suggested wait in seconds, if known. */
  constructor(retryAfter) {
    super(429, "Too Many Requests", "Rate limit exceeded");
    this.name = "RateLimitError";
    this.retryAfter = retryAfter;
  }
};
49
+
50
// src/util.ts
/**
 * Pull the warehouse id out of a Databricks SQL httpPath such as
 * "/sql/1.0/warehouses/<id>".
 * @throws {Error} when the path does not contain a warehouse segment.
 */
function extractWarehouseId(httpPath) {
  const id = httpPath.match(/\/sql\/\d+\.\d+\/warehouses\/([a-zA-Z0-9]+)/)?.[1];
  if (!id)
    throw new Error(`Cannot extract warehouse_id from httpPath: ${httpPath}`);
  return id;
}
57
/**
 * Guard helper: raise an AbortError tagged with `context` when the
 * optional signal has already been aborted; otherwise do nothing.
 */
function throwIfAborted(signal, context) {
  if (!signal?.aborted) return;
  throw new AbortError(`[${context}] Aborted`);
}
61
/**
 * Sleep for `ms` milliseconds, rejecting with AbortError if the optional
 * signal fires first (or was already aborted). The abort listener is
 * removed when the timer wins, so it cannot leak or double-settle.
 */
async function delay(ms, signal) {
  return new Promise((resolve, reject) => {
    if (signal?.aborted) {
      reject(new AbortError("Aborted before delay"));
      return;
    }
    // Guards against settling twice (timer vs. abort race).
    let done = false;
    const handleAbort = () => {
      if (done) return;
      done = true;
      clearTimeout(timer);
      reject(new AbortError("Aborted during delay"));
    };
    const timer = setTimeout(() => {
      if (done) return;
      done = true;
      signal?.removeEventListener("abort", handleAbort);
      resolve();
    }, ms);
    signal?.addEventListener("abort", handleAbort, { once: true });
  });
}
81
/**
 * Join a host and path into an absolute URL, prefixing "https://" when
 * the host has no scheme.
 * NOTE(review): a host given as "http://..." would be double-prefixed —
 * assumes Databricks hosts are always https; confirm with callers.
 */
function buildUrl(host, path) {
  const origin = host.startsWith("https://") ? host : `https://${host}`;
  return new URL(path, origin).href;
}
85
/**
 * Assert that a statement result is SUCCEEDED and carries a manifest.
 * @returns the result's manifest.
 * @throws {DatabricksSqlError} "INVALID_STATE" for any other state,
 *   "MISSING_MANIFEST" when no manifest is attached.
 */
function validateSucceededResult(statementResult) {
  const { status, manifest, statement_id: statementId } = statementResult;
  if (status.state !== "SUCCEEDED") {
    throw new DatabricksSqlError(
      `Cannot fetch from non-succeeded statement: ${status.state}`,
      "INVALID_STATE",
      statementId
    );
  }
  if (!manifest) {
    throw new DatabricksSqlError(
      "Statement result has no manifest",
      "MISSING_MANIFEST",
      statementId
    );
  }
  return manifest;
}
100
+
101
// src/http.ts
var MAX_RETRIES = 3;
var INITIAL_RETRY_DELAY_MS = 1e3;
/**
 * Perform an authenticated JSON request against the Databricks host.
 *
 * Retry policy (up to MAX_RETRIES extra attempts):
 *  - 429 with a numeric Retry-After: wait that many seconds, retry;
 *  - 5xx: exponential backoff starting at INITIAL_RETRY_DELAY_MS;
 *  - network-level fetch TypeError: same exponential backoff.
 * 401 and other 4xx fail immediately; an aborted signal throws AbortError.
 *
 * Fix vs. previous revision: on a 5xx with retries exhausted the body was
 * read a second time (a Response body can only be consumed once, so the
 * re-read always yielded "") and the captured HttpError was discarded —
 * now the body is read once and the captured error is thrown.
 *
 * @returns the parsed JSON response body.
 * @throws {AbortError|AuthenticationError|RateLimitError|HttpError}
 */
async function httpRequest(auth, options) {
  const { method, path, body, signal } = options;
  const url = buildUrl(auth.host, path);
  let lastError;
  let retryDelay = INITIAL_RETRY_DELAY_MS;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    if (signal?.aborted)
      throw new AbortError();
    try {
      // Drop undefined entries so fetch never sees body/signal keys it
      // should not have.
      const fetchInit = Object.fromEntries(
        Object.entries({
          method,
          headers: {
            Authorization: `Bearer ${auth.token}`,
            "Content-Type": "application/json",
            Accept: "application/json"
          },
          body: body ? JSON.stringify(body) : void 0,
          signal
        }).filter(([, v]) => v !== void 0)
      );
      const response = await fetch(url, fetchInit);
      if (response.ok)
        return await response.json();
      if (response.status === 401)
        throw new AuthenticationError();
      if (response.status === 429) {
        const retryAfterHeader = response.headers.get("Retry-After");
        const parsed = retryAfterHeader ? Number.parseInt(retryAfterHeader, 10) : NaN;
        const error = new RateLimitError(Number.isNaN(parsed) ? void 0 : parsed);
        if (error.retryAfter && attempt < MAX_RETRIES) {
          await delay(error.retryAfter * 1e3, signal);
          continue;
        }
        throw error;
      }
      // Read the body exactly once — it cannot be consumed twice.
      const errorBody = await response.text().catch(() => "");
      if (response.status >= 500) {
        lastError = new HttpError(response.status, response.statusText, errorBody);
        if (attempt < MAX_RETRIES) {
          await delay(retryDelay, signal);
          retryDelay *= 2;
          continue;
        }
        throw lastError;
      }
      throw new HttpError(response.status, response.statusText, errorBody);
    } catch (err) {
      // Our own error types are final — rethrow untouched.
      if (err instanceof AbortError || err instanceof AuthenticationError || err instanceof HttpError)
        throw err;
      // fetch signals network failures as TypeError; retry those.
      if (err instanceof TypeError && err.message.includes("fetch")) {
        lastError = err;
        if (attempt < MAX_RETRIES) {
          await delay(retryDelay, signal);
          retryDelay *= 2;
          continue;
        }
      }
      throw err;
    }
  }
  throw lastError ?? new Error("Request failed after retries");
}
169
+
170
// src/databricks-api.ts
var BASE_PATH = "/api/2.0/sql/statements";
/** POST /statements — submit a SQL statement for execution. */
async function postStatement(auth, request, signal) {
  const options = { method: "POST", path: BASE_PATH, body: request };
  if (signal) options.signal = signal;
  return httpRequest(auth, options);
}
180
/** GET /statements/:id — fetch a statement's current status/result. */
async function getStatement(auth, statementId, signal) {
  const options = { method: "GET", path: `${BASE_PATH}/${statementId}` };
  if (signal) options.signal = signal;
  return httpRequest(auth, options);
}
187
/** POST /statements/:id/cancel — request cancellation; response discarded. */
async function cancelStatement(auth, statementId, signal) {
  const options = { method: "POST", path: `${BASE_PATH}/${statementId}/cancel` };
  if (signal) options.signal = signal;
  await httpRequest(auth, options);
}
194
/** GET /statements/:id/result/chunks/:index — fetch one result chunk. */
async function getChunk(auth, statementId, chunkIndex, signal) {
  const options = {
    method: "GET",
    path: `${BASE_PATH}/${statementId}/result/chunks/${chunkIndex}`
  };
  if (signal) options.signal = signal;
  return httpRequest(auth, options);
}
201
+
202
// src/api/executeStatement.ts
var TERMINAL_STATES = /* @__PURE__ */ new Set([
  "SUCCEEDED",
  "FAILED",
  "CANCELED",
  "CLOSED"
]);
var POLL_INTERVAL_MS = 500;
var MAX_POLL_INTERVAL_MS = 5e3;
/**
 * Submit a SQL statement and poll until it reaches a terminal state.
 * Polling backs off by x1.5 per attempt, capped at MAX_POLL_INTERVAL_MS;
 * `onProgress` is invoked with each observed status (including the final
 * one). If the signal aborts mid-poll, a best-effort cancel is issued
 * before AbortError is thrown.
 *
 * @returns the SUCCEEDED statement result.
 * @throws {StatementCancelledError} when the statement ends CANCELED.
 * @throws {DatabricksSqlError} for FAILED/CLOSED terminal states.
 */
async function executeStatement(query, auth, options = {}) {
  const warehouseId = options.warehouse_id ?? extractWarehouseId(auth.httpPath);
  const { signal, onProgress } = options;
  throwIfAborted(signal, "executeStatement");
  // Only send the fields the caller actually set.
  const request = Object.fromEntries(
    Object.entries({
      warehouse_id: warehouseId,
      statement: query,
      byte_limit: options.byte_limit,
      disposition: options.disposition,
      format: options.format,
      on_wait_timeout: options.on_wait_timeout,
      wait_timeout: options.wait_timeout,
      row_limit: options.row_limit,
      catalog: options.catalog,
      schema: options.schema,
      parameters: options.parameters
    }).filter(([, v]) => v !== void 0)
  );
  let current = await postStatement(auth, request, signal);
  let interval = POLL_INTERVAL_MS;
  while (!TERMINAL_STATES.has(current.status.state)) {
    if (signal?.aborted) {
      // Best-effort cancel; failures are irrelevant since we are aborting.
      await cancelStatement(auth, current.statement_id).catch(() => {});
      throw new AbortError("Aborted during polling");
    }
    onProgress?.(current.status);
    await delay(interval, signal);
    interval = Math.min(interval * 1.5, MAX_POLL_INTERVAL_MS);
    current = await getStatement(auth, current.statement_id, signal);
  }
  onProgress?.(current.status);
  switch (current.status.state) {
    case "SUCCEEDED":
      return current;
    case "CANCELED":
      throw new StatementCancelledError(current.statement_id);
    default:
      // FAILED or CLOSED.
      throw new DatabricksSqlError(
        current.status.error?.message ?? "Statement execution failed",
        current.status.error?.error_code,
        current.statement_id
      );
  }
}
254
+
255
+ // src/api/fetchRow.ts
256
+ import { parser } from "stream-json";
257
+ import { streamArray } from "stream-json/streamers/StreamArray";
258
+
259
// src/createRowMapper.ts
// Column type_name groupings used by convertValue to pick a converter.
var INTEGER_TYPES = /* @__PURE__ */ new Set(["TINYINT", "SMALLINT", "INT"]);
var BIGINT_TYPES = /* @__PURE__ */ new Set(["BIGINT", "LONG"]);
var FLOAT_TYPES = /* @__PURE__ */ new Set(["FLOAT", "DOUBLE"]);
var BOOLEAN_TYPES = /* @__PURE__ */ new Set(["BOOLEAN"]);
// String-ish types (incl. temporal) are returned as their raw string form.
var STRING_TYPES = /* @__PURE__ */ new Set([
  "STRING",
  "DATE",
  "TIMESTAMP",
  "TIMESTAMP_NTZ",
  "TIMESTAMP_LTZ",
  "TIME"
]);
272
/**
 * Build a row-mapping function for the given manifest.
 * For "JSON_OBJECT" the mapper turns each positional row array into an
 * object keyed by column name, coercing values per column type; for any
 * other format rows pass through unchanged.
 */
function createRowMapper(manifest, format) {
  if (format !== "JSON_OBJECT")
    return (row) => row;
  const converters = manifest.schema.columns.map((column) => ({
    name: column.name,
    convert: createColumnConverter(column)
  }));
  return (row) => {
    const mapped = {};
    converters.forEach((converter, index) => {
      if (!converter) return;
      const { name, convert } = converter;
      if (name) mapped[name] = convert(row[index]);
    });
    return mapped;
  };
}
292
/** Build a value converter closed over the column's parsed type descriptor. */
function createColumnConverter(column) {
  const descriptor = parseColumnType(column);
  return (value) => convertValue(descriptor, value);
}
296
/**
 * Derive a type descriptor from a column's metadata. Complex types
 * (STRUCT/ARRAY/MAP) are parsed from type_text; DECIMAL carries its
 * precision/scale; everything else maps 1:1 onto {typeName, typeText}.
 */
function parseColumnType(column) {
  const { type_name: typeName, type_text: typeText } = column;
  if (typeName === "STRUCT" || typeName === "ARRAY" || typeName === "MAP")
    return parseTypeDescriptor(typeText);
  const base = { typeName, typeText };
  if (typeName === "DECIMAL")
    return createDecimalDescriptor(base, column.type_precision, column.type_scale);
  return base;
}
309
/**
 * Recursively parse a Databricks type_text (e.g. "ARRAY<STRUCT<a:INT>>")
 * into a descriptor tree consumed by convertValue. Unrecognized or
 * unparsable generics fall back to a bare {typeName, typeText} node.
 */
function parseTypeDescriptor(typeText) {
  const trimmed = typeText.trim();
  const typeName = getTypeName(trimmed);
  const descriptor = { typeName, typeText: trimmed };
  switch (typeName) {
    case "STRUCT":
      descriptor.fields = parseStructFields(trimmed);
      return descriptor;
    case "ARRAY": {
      const elementTypeText = parseSingleTypeArgument(trimmed);
      if (elementTypeText)
        descriptor.elementType = parseTypeDescriptor(elementTypeText);
      return descriptor;
    }
    case "MAP": {
      const [keyTypeText, valueTypeText] = parseTypeArguments(trimmed, 2);
      if (keyTypeText)
        descriptor.keyType = parseTypeDescriptor(keyTypeText);
      if (valueTypeText)
        descriptor.valueType = parseTypeDescriptor(valueTypeText);
      return descriptor;
    }
    case "DECIMAL": {
      const { precision, scale } = parseDecimalInfo(trimmed);
      return createDecimalDescriptor(descriptor, precision, scale);
    }
    default:
      return descriptor;
  }
}
349
/** Leading uppercase identifier of a type_text ("ARRAY<INT>" -> "ARRAY"); the full text when none. */
function getTypeName(typeText) {
  const match = /^[A-Z_]+/.exec(typeText);
  return match ? match[0] : typeText;
}
352
/** Extract { precision, scale } from "DECIMAL(p, s)"; {} when the pattern is absent. */
function parseDecimalInfo(typeText) {
  const match = /DECIMAL\((\d+),\s*(\d+)\)/.exec(typeText);
  if (!match)
    return {};
  return { precision: Number(match[1]), scale: Number(match[2]) };
}
361
/** Shallow-copy a descriptor, attaching precision/scale only when defined. */
function createDecimalDescriptor(base, precision, scale) {
  const result = { ...base };
  if (precision !== undefined)
    result.precision = precision;
  if (scale !== undefined)
    result.scale = scale;
  return result;
}
369
/**
 * Parse the "name:TYPE" field list inside "STRUCT<...>". Entries without
 * a ":" separator or an empty name are skipped; trailing "NOT NULL"
 * markers are stripped from each field type.
 */
function parseStructFields(typeText) {
  const open = typeText.indexOf("<");
  const close = typeText.lastIndexOf(">");
  if (open === -1 || close === -1 || close <= open)
    return [];
  const fields = [];
  for (const part of splitTopLevel(typeText.slice(open + 1, close))) {
    const sep = part.indexOf(":");
    if (sep === -1)
      continue;
    const name = part.slice(0, sep).trim();
    if (!name)
      continue;
    const fieldTypeText = stripNotNull(part.slice(sep + 1).trim());
    fields.push({ name, type: parseTypeDescriptor(fieldTypeText) });
  }
  return fields;
}
393
/** First generic argument of e.g. "ARRAY<T>", or null when there is none. */
function parseSingleTypeArgument(typeText) {
  return parseTypeArguments(typeText, 1)[0] ?? null;
}
397
/**
 * Split the generic arguments of "NAME<a, b, ...>" at the top level and
 * return the first `expectedCount` of them with "NOT NULL" stripped.
 * Returns [] when no <...> section exists.
 * NOTE(review): when fewer than `expectedCount` parts are found, the raw
 * parts are returned without stripNotNull — confirm this is intentional.
 */
function parseTypeArguments(typeText, expectedCount) {
  const open = typeText.indexOf("<");
  const close = typeText.lastIndexOf(">");
  if (open === -1 || close === -1 || close <= open)
    return [];
  const parts = splitTopLevel(typeText.slice(open + 1, close));
  if (parts.length < expectedCount)
    return parts;
  return parts.slice(0, expectedCount).map((part) => stripNotNull(part.trim()));
}
408
/**
 * Split a comma-separated list on commas that are not nested inside
 * <...> or (...) pairs. Each piece is trimmed; a trailing empty segment
 * is dropped.
 */
function splitTopLevel(value) {
  const pieces = [];
  let buffer = "";
  let angleDepth = 0;
  let parenDepth = 0;
  for (const ch of value) {
    switch (ch) {
      case "<": angleDepth++; break;
      case ">": angleDepth--; break;
      case "(": parenDepth++; break;
      case ")": parenDepth--; break;
      default: break;
    }
    if (ch === "," && angleDepth === 0 && parenDepth === 0) {
      pieces.push(buffer.trim());
      buffer = "";
    } else {
      buffer += ch;
    }
  }
  const tail = buffer.trim();
  if (tail.length > 0)
    pieces.push(tail);
  return pieces;
}
429
/** Remove any number of trailing "NOT NULL" markers from a type text. */
function stripNotNull(typeText) {
  const suffix = "NOT NULL";
  let result = typeText.trim();
  while (result.endsWith(suffix))
    result = result.slice(0, result.length - suffix.length).trim();
  return result;
}
435
/**
 * Coerce one raw cell value according to its type descriptor.
 * null/undefined pass through; STRUCT/ARRAY/MAP recurse into their
 * element descriptors; numeric and boolean string forms are parsed;
 * string-like and unrecognized types are returned unchanged.
 */
function convertValue(descriptor, value) {
  if (value === null || value === void 0)
    return value;
  const { typeName } = descriptor;
  if (typeName === "STRUCT" && descriptor.fields)
    return convertStructValue(descriptor.fields, value);
  if (typeName === "ARRAY" && descriptor.elementType)
    return convertArrayValue(descriptor.elementType, value);
  if (typeName === "MAP" && descriptor.keyType && descriptor.valueType)
    return convertMapValue(descriptor.keyType, descriptor.valueType, value);
  if (typeName === "DECIMAL" || INTEGER_TYPES.has(typeName) || FLOAT_TYPES.has(typeName))
    return convertNumber(value);
  if (BIGINT_TYPES.has(typeName))
    return convertInteger(value);
  if (BOOLEAN_TYPES.has(typeName))
    return convertBoolean(value);
  // STRING_TYPES and anything else pass through unchanged.
  return value;
}
458
/**
 * Convert a STRUCT cell: parse the raw value, then convert each declared
 * field. Non-object parses fall back to the original value.
 */
function convertStructValue(fields, value) {
  const raw = parseStructValue(value);
  if (!raw || typeof raw !== "object" || Array.isArray(raw))
    return value;
  const result = {};
  for (const { name, type } of fields)
    result[name] = convertValue(type, raw[name]);
  return result;
}
467
/** Convert an ARRAY cell: parse, then convert each element; non-arrays pass through. */
function convertArrayValue(elementType, value) {
  const raw = parseJsonValue(value);
  if (!Array.isArray(raw))
    return value;
  return raw.map((entry) => convertValue(elementType, entry));
}
473
/**
 * Convert a MAP cell. Accepts either an array of [key, value] pairs or a
 * plain object; keys and values are converted per their descriptors and
 * keys are stringified for the resulting object. Malformed pair entries
 * are skipped; non-object parses pass through.
 */
function convertMapValue(keyType, valueType, value) {
  const raw = parseJsonValue(value);
  if (!raw || typeof raw !== "object")
    return value;
  const result = {};
  if (Array.isArray(raw)) {
    for (const entry of raw) {
      if (!Array.isArray(entry) || entry.length < 2)
        continue;
      const key = convertValue(keyType, entry[0]);
      result[String(key)] = convertValue(valueType, entry[1]);
    }
    return result;
  }
  for (const [key, entryValue] of Object.entries(raw)) {
    const convertedKey = convertValue(keyType, key);
    result[String(convertedKey)] = convertValue(valueType, entryValue);
  }
  return result;
}
494
/**
 * Parse a STRUCT cell's raw value. The previous revision branched on the
 * parsed shape but returned `parsed` on both paths — the conditional was
 * dead code, so this is a plain passthrough to parseJsonValue; the shape
 * check lives in the caller (convertStructValue).
 */
function parseStructValue(value) {
  return parseJsonValue(value);
}
500
/**
 * JSON-parse string cells; non-strings pass through untouched.
 * @throws {DatabricksSqlError} code "INVALID_JSON" when the string is not valid JSON.
 */
function parseJsonValue(value) {
  if (typeof value !== "string")
    return value;
  try {
    return JSON.parse(value);
  } catch {
    throw new DatabricksSqlError("Failed to parse JSON value", "INVALID_JSON");
  }
}
510
/** Parse numeric strings to numbers; numbers and unparseable values are returned as-is. */
function convertNumber(value) {
  if (typeof value === "number")
    return value;
  if (typeof value !== "string")
    return value;
  const parsed = Number(value);
  return Number.isNaN(parsed) ? value : parsed;
}
519
/**
 * Convert integral values to BigInt where possible. Non-integer numbers
 * and unparseable strings are returned unchanged.
 */
function convertInteger(value) {
  switch (typeof value) {
    case "bigint":
      return value;
    case "number":
      return Number.isInteger(value) ? BigInt(value) : value;
    case "string":
      try {
        return BigInt(value);
      } catch {
        return value;
      }
    default:
      return value;
  }
}
536
/** Map the strings "true"/"false" to booleans; everything else passes through. */
function convertBoolean(value) {
  if (typeof value === "boolean")
    return value;
  if (value === "true")
    return true;
  if (value === "false")
    return false;
  return value;
}
545
+
546
+ // src/api/fetchStream.ts
547
+ import { PassThrough } from "stream";
548
+ import { mergeStreamsFromUrls } from "@bitofsky/merge-streams";
549
/**
 * Stream the raw result bytes of a succeeded statement through a
 * PassThrough. Chunk contents are merged in order; the stream is
 * destroyed with an AbortError when the signal fires, or with the
 * underlying error when merging fails.
 */
function fetchStream(statementResult, auth, options = {}) {
  const { signal } = options;
  const manifest = validateSucceededResult(statementResult);
  const output = new PassThrough();
  if (signal) {
    const onAbort = () => output.destroy(new AbortError("Stream aborted"));
    signal.addEventListener("abort", onAbort, { once: true });
    // Detach the listener once the stream closes so it cannot fire late.
    output.once("close", () => signal.removeEventListener("abort", onAbort));
  }
  mergeChunksToStream(statementResult, auth, manifest, manifest.format, output, signal)
    .catch((err) => output.destroy(err));
  return output;
}
570
/**
 * Collect the external-link URLs for the statement's chunks and pipe
 * their merged contents into `output`. When the statement result carries
 * no links up-front, each chunk is fetched to discover its URLs; zero
 * URLs simply ends the stream.
 * NOTE(review): when result.external_links is non-empty, the per-chunk
 * fetch loop is skipped — assumes the initial result lists every chunk's
 * link; confirm against the Databricks statement-execution API.
 */
async function mergeChunksToStream(statementResult, auth, manifest, format, output, signal) {
  const urls = statementResult.result?.external_links?.map((link) => link.external_link) ?? [];
  if (urls.length === 0 && manifest.total_chunk_count > 0) {
    for (let chunkIndex = 0; chunkIndex < manifest.total_chunk_count; chunkIndex++) {
      if (signal?.aborted)
        throw new AbortError("Aborted while collecting URLs");
      const chunkData = await getChunk(auth, statementResult.statement_id, chunkIndex, signal);
      for (const link of chunkData.external_links ?? [])
        urls.push(link.external_link);
    }
  }
  if (urls.length === 0) {
    output.end();
    return;
  }
  await mergeStreamsFromUrls(format, signal ? { urls, output, signal } : { urls, output });
}
585
+
586
// src/api/fetchRow.ts
/**
 * Iterate every row of a succeeded statement, invoking onEachRow per row.
 * EXTERNAL_LINKS results are streamed (JSON_ARRAY manifests only);
 * INLINE results are read from data_array, fetching chunks 1..N-1 on
 * demand. Rows are mapped via createRowMapper before delivery.
 * @throws {DatabricksSqlError} "UNSUPPORTED_FORMAT" for unsupported combinations.
 */
async function fetchRow(statementResult, auth, options = {}) {
  const { signal, onEachRow, format } = options;
  const manifest = validateSucceededResult(statementResult);
  const mapRow = createRowMapper(manifest, format);
  if (statementResult.result?.external_links) {
    if (manifest.format !== "JSON_ARRAY") {
      throw new DatabricksSqlError(
        `fetchRow only supports JSON_ARRAY for external_links. Received: ${manifest.format}`,
        "UNSUPPORTED_FORMAT",
        statementResult.statement_id
      );
    }
    const stream = fetchStream(statementResult, auth, signal ? { signal } : {});
    await consumeJsonArrayStream(stream, mapRow, onEachRow, signal);
    return;
  }
  // INLINE path: chunk 0 rows arrive inline on the statement result.
  for (const row of statementResult.result?.data_array ?? []) {
    if (signal?.aborted)
      throw new AbortError("Aborted");
    onEachRow?.(mapRow(row));
  }
  const totalChunks = manifest.total_chunk_count;
  if (totalChunks <= 1)
    return;
  const statementId = statementResult.statement_id;
  for (let chunkIndex = 1; chunkIndex < totalChunks; chunkIndex++) {
    if (signal?.aborted)
      throw new AbortError("Aborted");
    const chunk = await getChunk(auth, statementId, chunkIndex, signal);
    if (chunk.external_links) {
      throw new DatabricksSqlError(
        "fetchRow only supports INLINE results. Chunk contains external_links.",
        "UNSUPPORTED_FORMAT",
        statementId
      );
    }
    for (const row of chunk.data_array ?? []) {
      if (signal?.aborted)
        throw new AbortError("Aborted");
      onEachRow?.(mapRow(row));
    }
  }
}
631
/**
 * Parse a JSON_ARRAY byte stream row-by-row via stream-json and feed
 * each mapped row to onEachRow. Destroys the source stream on abort.
 * @throws {DatabricksSqlError} "INVALID_FORMAT" when a row is not an array.
 */
async function consumeJsonArrayStream(stream, mapRow, onEachRow, signal) {
  const rows = stream.pipe(parser()).pipe(streamArray());
  for await (const item of rows) {
    if (signal?.aborted) {
      stream.destroy(new AbortError("Aborted"));
      throw new AbortError("Aborted");
    }
    const row = item.value;
    if (!Array.isArray(row))
      throw new DatabricksSqlError("Expected JSON_ARRAY rows to be arrays", "INVALID_FORMAT");
    onEachRow?.(mapRow(row));
  }
}
648
+
649
// src/api/fetchAll.ts
/**
 * Convenience wrapper over fetchRow that buffers every row into an
 * array and returns it.
 */
async function fetchAll(statementResult, auth, options = {}) {
  const rows = [];
  const fetchOptions = {
    onEachRow: (row) => {
      rows.push(row);
    }
  };
  if (options.signal)
    fetchOptions.signal = options.signal;
  if (options.format)
    fetchOptions.format = options.format;
  await fetchRow(statementResult, auth, fetchOptions);
  return rows;
}
665
+
666
// src/api/mergeExternalLinks.ts
/**
 * Re-materialize an EXTERNAL_LINKS result as a single external link by
 * streaming every chunk through the caller-supplied uploader. Results
 * without external links are returned untouched. The returned manifest
 * describes the merged upload as one chunk covering all rows.
 */
async function mergeExternalLinks(statementResult, auth, options) {
  const { signal, mergeStreamToExternalLink } = options;
  if (!statementResult.result?.external_links)
    return statementResult;
  const stream = fetchStream(statementResult, auth, signal ? { signal } : {});
  const uploadResult = await mergeStreamToExternalLink(stream);
  const manifest = validateSucceededResult(statementResult);
  const totalRowCount = manifest.total_row_count ?? 0;
  // Shared shape for the single merged chunk.
  const chunkInfo = {
    chunk_index: 0,
    row_offset: 0,
    row_count: totalRowCount,
    byte_count: uploadResult.byte_count
  };
  return {
    statement_id: statementResult.statement_id,
    status: statementResult.status,
    manifest: {
      ...manifest,
      total_chunk_count: 1,
      total_byte_count: uploadResult.byte_count,
      chunks: [{ ...chunkInfo }]
    },
    result: {
      external_links: [
        {
          ...chunkInfo,
          external_link: uploadResult.externalLink,
          expiration: uploadResult.expiration
        }
      ]
    }
  };
}
705
+ export {
706
+ AbortError,
707
+ AuthenticationError,
708
+ DatabricksSqlError,
709
+ HttpError,
710
+ RateLimitError,
711
+ StatementCancelledError,
712
+ executeStatement,
713
+ fetchAll,
714
+ fetchRow,
715
+ fetchStream,
716
+ mergeExternalLinks
717
+ };
718
+ //# sourceMappingURL=index.js.map