@xata.io/client 0.0.0-next.v34f1d64a4f4c1ffd896bbb285ab38efd8315b259 → 0.0.0-next.v403cdd55cb26b69c074dbc07b44daa0c2a0a77b6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -22,6 +22,1789 @@ const TraceAttributes = {
22
22
  CLOUDFLARE_RAY_ID: "cf.ray"
23
23
  };
24
24
 
25
+ const lookup = [];
26
+ const revLookup = [];
27
+ const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
28
+ for (let i = 0, len = code.length; i < len; ++i) {
29
+ lookup[i] = code[i];
30
+ revLookup[code.charCodeAt(i)] = i;
31
+ }
32
+ revLookup["-".charCodeAt(0)] = 62;
33
+ revLookup["_".charCodeAt(0)] = 63;
34
+ function getLens(b64) {
35
+ const len = b64.length;
36
+ if (len % 4 > 0) {
37
+ throw new Error("Invalid string. Length must be a multiple of 4");
38
+ }
39
+ let validLen = b64.indexOf("=");
40
+ if (validLen === -1) validLen = len;
41
+ const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
42
+ return [validLen, placeHoldersLen];
43
+ }
44
+ function _byteLength(_b64, validLen, placeHoldersLen) {
45
+ return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
46
+ }
47
+ function toByteArray(b64) {
48
+ let tmp;
49
+ const lens = getLens(b64);
50
+ const validLen = lens[0];
51
+ const placeHoldersLen = lens[1];
52
+ const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
53
+ let curByte = 0;
54
+ const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
55
+ let i;
56
+ for (i = 0; i < len; i += 4) {
57
+ tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
58
+ arr[curByte++] = tmp >> 16 & 255;
59
+ arr[curByte++] = tmp >> 8 & 255;
60
+ arr[curByte++] = tmp & 255;
61
+ }
62
+ if (placeHoldersLen === 2) {
63
+ tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
64
+ arr[curByte++] = tmp & 255;
65
+ }
66
+ if (placeHoldersLen === 1) {
67
+ tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
68
+ arr[curByte++] = tmp >> 8 & 255;
69
+ arr[curByte++] = tmp & 255;
70
+ }
71
+ return arr;
72
+ }
73
+ function tripletToBase64(num) {
74
+ return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
75
+ }
76
+ function encodeChunk(uint8, start, end) {
77
+ let tmp;
78
+ const output = [];
79
+ for (let i = start; i < end; i += 3) {
80
+ tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
81
+ output.push(tripletToBase64(tmp));
82
+ }
83
+ return output.join("");
84
+ }
85
+ function fromByteArray(uint8) {
86
+ let tmp;
87
+ const len = uint8.length;
88
+ const extraBytes = len % 3;
89
+ const parts = [];
90
+ const maxChunkLength = 16383;
91
+ for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
92
+ parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
93
+ }
94
+ if (extraBytes === 1) {
95
+ tmp = uint8[len - 1];
96
+ parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
97
+ } else if (extraBytes === 2) {
98
+ tmp = (uint8[len - 2] << 8) + uint8[len - 1];
99
+ parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
100
+ }
101
+ return parts.join("");
102
+ }
103
+
104
+ const K_MAX_LENGTH = 2147483647;
105
+ const MAX_ARGUMENTS_LENGTH = 4096;
106
+ class Buffer extends Uint8Array {
107
+ /**
108
+ * Constructs a new `Buffer` instance.
109
+ *
110
+ * @param value
111
+ * @param encodingOrOffset
112
+ * @param length
113
+ */
114
+ constructor(value, encodingOrOffset, length) {
115
+ if (typeof value === "number") {
116
+ if (typeof encodingOrOffset === "string") {
117
+ throw new TypeError("The first argument must be of type string, received type number");
118
+ }
119
+ if (value < 0) {
120
+ throw new RangeError("The buffer size cannot be negative");
121
+ }
122
+ super(value < 0 ? 0 : Buffer._checked(value) | 0);
123
+ } else if (typeof value === "string") {
124
+ if (typeof encodingOrOffset !== "string") {
125
+ encodingOrOffset = "utf8";
126
+ }
127
+ if (!Buffer.isEncoding(encodingOrOffset)) {
128
+ throw new TypeError("Unknown encoding: " + encodingOrOffset);
129
+ }
130
+ const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
131
+ super(length2);
132
+ const written = this.write(value, 0, this.length, encodingOrOffset);
133
+ if (written !== length2) {
134
+ throw new TypeError(
135
+ "Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
136
+ );
137
+ }
138
+ } else if (ArrayBuffer.isView(value)) {
139
+ if (Buffer._isInstance(value, Uint8Array)) {
140
+ const copy = new Uint8Array(value);
141
+ const array = copy.buffer;
142
+ const byteOffset = copy.byteOffset;
143
+ const length2 = copy.byteLength;
144
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
145
+ throw new RangeError("offset is outside of buffer bounds");
146
+ }
147
+ if (array.byteLength < byteOffset + (length2 || 0)) {
148
+ throw new RangeError("length is outside of buffer bounds");
149
+ }
150
+ super(new Uint8Array(array, byteOffset, length2));
151
+ } else {
152
+ const array = value;
153
+ const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
154
+ super(new Uint8Array(length2));
155
+ for (let i = 0; i < length2; i++) {
156
+ this[i] = array[i] & 255;
157
+ }
158
+ }
159
+ } else if (value == null) {
160
+ throw new TypeError(
161
+ "The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
162
+ );
163
+ } else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
164
+ const array = value;
165
+ const byteOffset = encodingOrOffset;
166
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
167
+ throw new RangeError("offset is outside of buffer bounds");
168
+ }
169
+ if (array.byteLength < byteOffset + (length || 0)) {
170
+ throw new RangeError("length is outside of buffer bounds");
171
+ }
172
+ super(new Uint8Array(array, byteOffset, length));
173
+ } else if (Array.isArray(value)) {
174
+ const array = value;
175
+ const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
176
+ super(new Uint8Array(length2));
177
+ for (let i = 0; i < length2; i++) {
178
+ this[i] = array[i] & 255;
179
+ }
180
+ } else {
181
+ throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
182
+ }
183
+ }
184
+ /**
185
+ * Return JSON representation of the buffer.
186
+ */
187
+ toJSON() {
188
+ return {
189
+ type: "Buffer",
190
+ data: Array.prototype.slice.call(this)
191
+ };
192
+ }
193
+ /**
194
+ * Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
195
+ * parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
196
+ * only part of `string` will be written. However, partially encoded characters will not be written.
197
+ *
198
+ * @param string String to write to `buf`.
199
+ * @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
200
+ * @param length Maximum number of bytes to write. Default: `buf.length - offset`.
201
+ * @param encoding The character encoding of `string`. Default: `utf8`.
202
+ */
203
+ write(string, offset, length, encoding) {
204
+ if (typeof offset === "undefined") {
205
+ encoding = "utf8";
206
+ length = this.length;
207
+ offset = 0;
208
+ } else if (typeof length === "undefined" && typeof offset === "string") {
209
+ encoding = offset;
210
+ length = this.length;
211
+ offset = 0;
212
+ } else if (typeof offset === "number" && isFinite(offset)) {
213
+ offset = offset >>> 0;
214
+ if (typeof length === "number" && isFinite(length)) {
215
+ length = length >>> 0;
216
+ encoding ?? (encoding = "utf8");
217
+ } else if (typeof length === "string") {
218
+ encoding = length;
219
+ length = void 0;
220
+ }
221
+ } else {
222
+ throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
223
+ }
224
+ const remaining = this.length - offset;
225
+ if (typeof length === "undefined" || length > remaining) {
226
+ length = remaining;
227
+ }
228
+ if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
229
+ throw new RangeError("Attempt to write outside buffer bounds");
230
+ }
231
+ encoding || (encoding = "utf8");
232
+ switch (Buffer._getEncoding(encoding)) {
233
+ case "hex":
234
+ return Buffer._hexWrite(this, string, offset, length);
235
+ case "utf8":
236
+ return Buffer._utf8Write(this, string, offset, length);
237
+ case "ascii":
238
+ case "latin1":
239
+ case "binary":
240
+ return Buffer._asciiWrite(this, string, offset, length);
241
+ case "ucs2":
242
+ case "utf16le":
243
+ return Buffer._ucs2Write(this, string, offset, length);
244
+ case "base64":
245
+ return Buffer._base64Write(this, string, offset, length);
246
+ }
247
+ }
248
+ /**
249
+ * Decodes the buffer to a string according to the specified character encoding.
250
+ * Passing `start` and `end` will decode only a subset of the buffer.
251
+ *
252
+ * Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
253
+ * will be replaced with `U+FFFD`.
254
+ *
255
+ * @param encoding
256
+ * @param start
257
+ * @param end
258
+ */
259
+ toString(encoding, start, end) {
260
+ const length = this.length;
261
+ if (length === 0) {
262
+ return "";
263
+ }
264
+ if (arguments.length === 0) {
265
+ return Buffer._utf8Slice(this, 0, length);
266
+ }
267
+ if (typeof start === "undefined" || start < 0) {
268
+ start = 0;
269
+ }
270
+ if (start > this.length) {
271
+ return "";
272
+ }
273
+ if (typeof end === "undefined" || end > this.length) {
274
+ end = this.length;
275
+ }
276
+ if (end <= 0) {
277
+ return "";
278
+ }
279
+ end >>>= 0;
280
+ start >>>= 0;
281
+ if (end <= start) {
282
+ return "";
283
+ }
284
+ if (!encoding) {
285
+ encoding = "utf8";
286
+ }
287
+ switch (Buffer._getEncoding(encoding)) {
288
+ case "hex":
289
+ return Buffer._hexSlice(this, start, end);
290
+ case "utf8":
291
+ return Buffer._utf8Slice(this, start, end);
292
+ case "ascii":
293
+ return Buffer._asciiSlice(this, start, end);
294
+ case "latin1":
295
+ case "binary":
296
+ return Buffer._latin1Slice(this, start, end);
297
+ case "ucs2":
298
+ case "utf16le":
299
+ return Buffer._utf16leSlice(this, start, end);
300
+ case "base64":
301
+ return Buffer._base64Slice(this, start, end);
302
+ }
303
+ }
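
As a rough sketch of how the `write` and `toString` pair above behave, assuming an instance of this bundled `Buffer` class were reachable (it is internal to the compiled module, so this is illustrative only):

// Illustrative only: this Buffer class is bundled into index.mjs, not a public export.
const buf = Buffer.alloc(5);
buf.write("hello", 0, 5, "utf8");  // returns 5, the number of bytes written
buf.toString("hex");               // "68656c6c6f"
buf.toString("utf8", 0, 2);        // "he"
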
304
+ /**
305
+ * Returns true if this buffer is equal to the provided buffer, meaning they contain exactly the same data.
306
+ *
307
+ * @param otherBuffer
308
+ */
309
+ equals(otherBuffer) {
310
+ if (!Buffer.isBuffer(otherBuffer)) {
311
+ throw new TypeError("Argument must be a Buffer");
312
+ }
313
+ if (this === otherBuffer) {
314
+ return true;
315
+ }
316
+ return Buffer.compare(this, otherBuffer) === 0;
317
+ }
318
+ /**
319
+ * Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
320
+ * or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
321
+ * buffer.
322
+ *
323
+ * - `0` is returned if `otherBuffer` is the same as this buffer.
324
+ * - `1` is returned if `otherBuffer` should come before this buffer when sorted.
325
+ * - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
326
+ *
327
+ * @param otherBuffer The buffer to compare to.
328
+ * @param targetStart The offset within `otherBuffer` at which to begin comparison.
329
+ * @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
330
+ * @param sourceStart The offset within this buffer at which to begin comparison.
331
+ * @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
332
+ */
333
+ compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
334
+ if (Buffer._isInstance(otherBuffer, Uint8Array)) {
335
+ otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
336
+ }
337
+ if (!Buffer.isBuffer(otherBuffer)) {
338
+ throw new TypeError("Argument must be a Buffer or Uint8Array");
339
+ }
340
+ targetStart ?? (targetStart = 0);
341
+ targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
342
+ sourceStart ?? (sourceStart = 0);
343
+ sourceEnd ?? (sourceEnd = this.length);
344
+ if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
345
+ throw new RangeError("Out of range index");
346
+ }
347
+ if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
348
+ return 0;
349
+ }
350
+ if (sourceStart >= sourceEnd) {
351
+ return -1;
352
+ }
353
+ if (targetStart >= targetEnd) {
354
+ return 1;
355
+ }
356
+ targetStart >>>= 0;
357
+ targetEnd >>>= 0;
358
+ sourceStart >>>= 0;
359
+ sourceEnd >>>= 0;
360
+ if (this === otherBuffer) {
361
+ return 0;
362
+ }
363
+ let x = sourceEnd - sourceStart;
364
+ let y = targetEnd - targetStart;
365
+ const len = Math.min(x, y);
366
+ const thisCopy = this.slice(sourceStart, sourceEnd);
367
+ const targetCopy = otherBuffer.slice(targetStart, targetEnd);
368
+ for (let i = 0; i < len; ++i) {
369
+ if (thisCopy[i] !== targetCopy[i]) {
370
+ x = thisCopy[i];
371
+ y = targetCopy[i];
372
+ break;
373
+ }
374
+ }
375
+ if (x < y) return -1;
376
+ if (y < x) return 1;
377
+ return 0;
378
+ }
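
A small sketch of the `equals`/`compare` contract described above, under the same assumption that the bundled class is in scope:

// Sort order is decided by the first differing byte, as with Node's Buffer.
const a = Buffer.from([1, 2, 3]);
const b = Buffer.from([1, 2, 4]);
a.equals(b);   // false
a.compare(b);  // -1, because 3 < 4 at the first byte that differs
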
379
+ /**
380
+ * Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
381
+ * region overlaps with this buffer.
382
+ *
383
+ * @param targetBuffer The target buffer to copy into.
384
+ * @param targetStart The offset within `targetBuffer` at which to begin writing.
385
+ * @param sourceStart The offset within this buffer at which to begin copying.
386
+ * @param sourceEnd The offset within this buffer at which to end copying (exclusive).
387
+ */
388
+ copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
389
+ if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
390
+ if (!sourceStart) sourceStart = 0;
391
+ if (!targetStart) targetStart = 0;
392
+ if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
393
+ if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
394
+ if (!targetStart) targetStart = 0;
395
+ if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
396
+ if (sourceEnd === sourceStart) return 0;
397
+ if (targetBuffer.length === 0 || this.length === 0) return 0;
398
+ if (targetStart < 0) {
399
+ throw new RangeError("targetStart out of bounds");
400
+ }
401
+ if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
402
+ if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
403
+ if (sourceEnd > this.length) sourceEnd = this.length;
404
+ if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
405
+ sourceEnd = targetBuffer.length - targetStart + sourceStart;
406
+ }
407
+ const len = sourceEnd - sourceStart;
408
+ if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
409
+ this.copyWithin(targetStart, sourceStart, sourceEnd);
410
+ } else {
411
+ Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
412
+ }
413
+ return len;
414
+ }
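
A brief sketch of `copy`, illustrative only:

// copy() returns the number of bytes written into the target.
const src = Buffer.from("hello");
const dst = Buffer.alloc(5).fill(".");
src.copy(dst, 0, 0, 3);  // 3
dst.toString();          // "hel.."
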
415
+ /**
416
+ * Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
417
+ * and `end` indices. This is the same behavior as `buf.subarray()`.
418
+ *
419
+ * This method is not compatible with `Uint8Array.prototype.slice()`, which is inherited from the `Uint8Array` superclass and returns a copy. To copy
420
+ * the slice, use `Uint8Array.prototype.slice()`.
421
+ *
422
+ * @param start
423
+ * @param end
424
+ */
425
+ slice(start, end) {
426
+ if (!start) {
427
+ start = 0;
428
+ }
429
+ const len = this.length;
430
+ start = ~~start;
431
+ end = end === void 0 ? len : ~~end;
432
+ if (start < 0) {
433
+ start += len;
434
+ if (start < 0) {
435
+ start = 0;
436
+ }
437
+ } else if (start > len) {
438
+ start = len;
439
+ }
440
+ if (end < 0) {
441
+ end += len;
442
+ if (end < 0) {
443
+ end = 0;
444
+ }
445
+ } else if (end > len) {
446
+ end = len;
447
+ }
448
+ if (end < start) {
449
+ end = start;
450
+ }
451
+ const newBuf = this.subarray(start, end);
452
+ Object.setPrototypeOf(newBuf, Buffer.prototype);
453
+ return newBuf;
454
+ }
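
A sketch of the shared-memory behaviour the comment above describes, illustrative only:

// slice() is a view; writes through the view are visible in the original.
const src = Buffer.from([10, 20, 30, 40]);
const view = src.slice(1, 3);
view[0] = 99;
src[1];  // 99, because both reference the same memory
// For a detached copy, fall back to the Uint8Array method:
const copy = Uint8Array.prototype.slice.call(src, 1, 3);
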
455
+ /**
456
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
457
+ * of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
458
+ *
459
+ * @param value Number to write.
460
+ * @param offset Number of bytes to skip before starting to write.
461
+ * @param byteLength Number of bytes to write, between 0 and 6.
462
+ * @param noAssert
463
+ * @returns `offset` plus the number of bytes written.
464
+ */
465
+ writeUIntLE(value, offset, byteLength, noAssert) {
466
+ value = +value;
467
+ offset = offset >>> 0;
468
+ byteLength = byteLength >>> 0;
469
+ if (!noAssert) {
470
+ const maxBytes = Math.pow(2, 8 * byteLength) - 1;
471
+ Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
472
+ }
473
+ let mul = 1;
474
+ let i = 0;
475
+ this[offset] = value & 255;
476
+ while (++i < byteLength && (mul *= 256)) {
477
+ this[offset + i] = value / mul & 255;
478
+ }
479
+ return offset + byteLength;
480
+ }
481
+ /**
482
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
483
+ * accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
484
+ *
485
+ * @param value Number to write.
486
+ * @param offset Number of bytes to skip before starting to write.
487
+ * @param byteLength Number of bytes to write, between 0 and 6.
488
+ * @param noAssert
489
+ * @returns `offset` plus the number of bytes written.
490
+ */
491
+ writeUIntBE(value, offset, byteLength, noAssert) {
492
+ value = +value;
493
+ offset = offset >>> 0;
494
+ byteLength = byteLength >>> 0;
495
+ if (!noAssert) {
496
+ const maxBytes = Math.pow(2, 8 * byteLength) - 1;
497
+ Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
498
+ }
499
+ let i = byteLength - 1;
500
+ let mul = 1;
501
+ this[offset + i] = value & 255;
502
+ while (--i >= 0 && (mul *= 256)) {
503
+ this[offset + i] = value / mul & 255;
504
+ }
505
+ return offset + byteLength;
506
+ }
507
+ /**
508
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
509
+ * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
510
+ *
511
+ * @param value Number to write.
512
+ * @param offset Number of bytes to skip before starting to write.
513
+ * @param byteLength Number of bytes to write, between 0 and 6.
514
+ * @param noAssert
515
+ * @returns `offset` plus the number of bytes written.
516
+ */
517
+ writeIntLE(value, offset, byteLength, noAssert) {
518
+ value = +value;
519
+ offset = offset >>> 0;
520
+ if (!noAssert) {
521
+ const limit = Math.pow(2, 8 * byteLength - 1);
522
+ Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
523
+ }
524
+ let i = 0;
525
+ let mul = 1;
526
+ let sub = 0;
527
+ this[offset] = value & 255;
528
+ while (++i < byteLength && (mul *= 256)) {
529
+ if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
530
+ sub = 1;
531
+ }
532
+ this[offset + i] = (value / mul >> 0) - sub & 255;
533
+ }
534
+ return offset + byteLength;
535
+ }
536
+ /**
537
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
538
+ * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
539
+ *
540
+ * @param value Number to write.
541
+ * @param offset Number of bytes to skip before starting to write.
542
+ * @param byteLength Number of bytes to write, between 0 and 6.
543
+ * @param noAssert
544
+ * @returns `offset` plus the number of bytes written.
545
+ */
546
+ writeIntBE(value, offset, byteLength, noAssert) {
547
+ value = +value;
548
+ offset = offset >>> 0;
549
+ if (!noAssert) {
550
+ const limit = Math.pow(2, 8 * byteLength - 1);
551
+ Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
552
+ }
553
+ let i = byteLength - 1;
554
+ let mul = 1;
555
+ let sub = 0;
556
+ this[offset + i] = value & 255;
557
+ while (--i >= 0 && (mul *= 256)) {
558
+ if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
559
+ sub = 1;
560
+ }
561
+ this[offset + i] = (value / mul >> 0) - sub & 255;
562
+ }
563
+ return offset + byteLength;
564
+ }
565
+ /**
566
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
567
+ * unsigned, little-endian integer supporting up to 48 bits of accuracy.
568
+ *
569
+ * @param offset Number of bytes to skip before starting to read.
570
+ * @param byteLength Number of bytes to read, between 0 and 6.
571
+ * @param noAssert
572
+ */
573
+ readUIntLE(offset, byteLength, noAssert) {
574
+ offset = offset >>> 0;
575
+ byteLength = byteLength >>> 0;
576
+ if (!noAssert) {
577
+ Buffer._checkOffset(offset, byteLength, this.length);
578
+ }
579
+ let val = this[offset];
580
+ let mul = 1;
581
+ let i = 0;
582
+ while (++i < byteLength && (mul *= 256)) {
583
+ val += this[offset + i] * mul;
584
+ }
585
+ return val;
586
+ }
587
+ /**
588
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
589
+ * unsigned, big-endian integer supporting up to 48 bits of accuracy.
590
+ *
591
+ * @param offset Number of bytes to skip before starting to read.
592
+ * @param byteLength Number of bytes to read, between 0 and 6.
593
+ * @param noAssert
594
+ */
595
+ readUIntBE(offset, byteLength, noAssert) {
596
+ offset = offset >>> 0;
597
+ byteLength = byteLength >>> 0;
598
+ if (!noAssert) {
599
+ Buffer._checkOffset(offset, byteLength, this.length);
600
+ }
601
+ let val = this[offset + --byteLength];
602
+ let mul = 1;
603
+ while (byteLength > 0 && (mul *= 256)) {
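The helpers above implement a standard base64 codec (`-` and `_` are also mapped, so URL-safe input decodes as well). A minimal sketch of how they relate, noting that both functions are internal to this bundle and not exported:

// Internal helpers, shown here only to illustrate the round trip.
const bytes = toByteArray("aGVsbG8=");       // Uint8Array [104, 101, 108, 108, 111]
const text = String.fromCharCode(...bytes);  // "hello"
const again = fromByteArray(bytes);          // "aGVsbG8="
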
604
+ val += this[offset + --byteLength] * mul;
605
+ }
606
+ return val;
607
+ }
608
+ /**
609
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
610
+ * little-endian, two's complement signed value supporting up to 48 bits of accuracy.
611
+ *
612
+ * @param offset Number of bytes to skip before starting to read.
613
+ * @param byteLength Number of bytes to read, between 0 and 6.
614
+ * @param noAssert
615
+ */
616
+ readIntLE(offset, byteLength, noAssert) {
617
+ offset = offset >>> 0;
618
+ byteLength = byteLength >>> 0;
619
+ if (!noAssert) {
620
+ Buffer._checkOffset(offset, byteLength, this.length);
621
+ }
622
+ let val = this[offset];
623
+ let mul = 1;
624
+ let i = 0;
625
+ while (++i < byteLength && (mul *= 256)) {
626
+ val += this[offset + i] * mul;
627
+ }
628
+ mul *= 128;
629
+ if (val >= mul) {
630
+ val -= Math.pow(2, 8 * byteLength);
631
+ }
632
+ return val;
633
+ }
634
+ /**
635
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
636
+ * big-endian, two's complement signed value supporting up to 48 bits of accuracy.
637
+ *
638
+ * @param offset Number of bytes to skip before starting to read.
639
+ * @param byteLength Number of bytes to read, between 0 and 6.
640
+ * @param noAssert
641
+ */
642
+ readIntBE(offset, byteLength, noAssert) {
643
+ offset = offset >>> 0;
644
+ byteLength = byteLength >>> 0;
645
+ if (!noAssert) {
646
+ Buffer._checkOffset(offset, byteLength, this.length);
647
+ }
648
+ let i = byteLength;
649
+ let mul = 1;
650
+ let val = this[offset + --i];
651
+ while (i > 0 && (mul *= 256)) {
652
+ val += this[offset + --i] * mul;
653
+ }
654
+ mul *= 128;
655
+ if (val >= mul) {
656
+ val -= Math.pow(2, 8 * byteLength);
657
+ }
658
+ return val;
659
+ }
660
+ /**
661
+ * Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
662
+ *
663
+ * @param offset Number of bytes to skip before starting to read.
664
+ * @param noAssert
665
+ */
666
+ readUInt8(offset, noAssert) {
667
+ offset = offset >>> 0;
668
+ if (!noAssert) {
669
+ Buffer._checkOffset(offset, 1, this.length);
670
+ }
671
+ return this[offset];
672
+ }
673
+ /**
674
+ * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
675
+ *
676
+ * @param offset Number of bytes to skip before starting to read.
677
+ * @param noAssert
678
+ */
679
+ readUInt16LE(offset, noAssert) {
680
+ offset = offset >>> 0;
681
+ if (!noAssert) {
682
+ Buffer._checkOffset(offset, 2, this.length);
683
+ }
684
+ return this[offset] | this[offset + 1] << 8;
685
+ }
686
+ /**
687
+ * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
688
+ *
689
+ * @param offset Number of bytes to skip before starting to read.
690
+ * @param noAssert
691
+ */
692
+ readUInt16BE(offset, noAssert) {
693
+ offset = offset >>> 0;
694
+ if (!noAssert) {
695
+ Buffer._checkOffset(offset, 2, this.length);
696
+ }
697
+ return this[offset] << 8 | this[offset + 1];
698
+ }
699
+ /**
700
+ * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
701
+ *
702
+ * @param offset Number of bytes to skip before starting to read.
703
+ * @param noAssert
704
+ */
705
+ readUInt32LE(offset, noAssert) {
706
+ offset = offset >>> 0;
707
+ if (!noAssert) {
708
+ Buffer._checkOffset(offset, 4, this.length);
709
+ }
710
+ return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
711
+ }
712
+ /**
713
+ * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
714
+ *
715
+ * @param offset Number of bytes to skip before starting to read.
716
+ * @param noAssert
717
+ */
718
+ readUInt32BE(offset, noAssert) {
719
+ offset = offset >>> 0;
720
+ if (!noAssert) {
721
+ Buffer._checkOffset(offset, 4, this.length);
722
+ }
723
+ return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
724
+ }
725
+ /**
726
+ * Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
727
+ * as two's complement signed values.
728
+ *
729
+ * @param offset Number of bytes to skip before starting to read.
730
+ * @param noAssert
731
+ */
732
+ readInt8(offset, noAssert) {
733
+ offset = offset >>> 0;
734
+ if (!noAssert) {
735
+ Buffer._checkOffset(offset, 1, this.length);
736
+ }
737
+ if (!(this[offset] & 128)) {
738
+ return this[offset];
739
+ }
740
+ return (255 - this[offset] + 1) * -1;
741
+ }
742
+ /**
743
+ * Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
744
+ * are interpreted as two's complement signed values.
745
+ *
746
+ * @param offset Number of bytes to skip before starting to read.
747
+ * @param noAssert
748
+ */
749
+ readInt16LE(offset, noAssert) {
750
+ offset = offset >>> 0;
751
+ if (!noAssert) {
752
+ Buffer._checkOffset(offset, 2, this.length);
753
+ }
754
+ const val = this[offset] | this[offset + 1] << 8;
755
+ return val & 32768 ? val | 4294901760 : val;
756
+ }
757
+ /**
758
+ * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
759
+ * are interpreted as two's complement signed values.
760
+ *
761
+ * @param offset Number of bytes to skip before starting to read.
762
+ * @param noAssert
763
+ */
764
+ readInt16BE(offset, noAssert) {
765
+ offset = offset >>> 0;
766
+ if (!noAssert) {
767
+ Buffer._checkOffset(offset, 2, this.length);
768
+ }
769
+ const val = this[offset + 1] | this[offset] << 8;
770
+ return val & 32768 ? val | 4294901760 : val;
771
+ }
772
+ /**
773
+ * Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
774
+ * are interpreted as two's complement signed values.
775
+ *
776
+ * @param offset Number of bytes to skip before starting to read.
777
+ * @param noAssert
778
+ */
779
+ readInt32LE(offset, noAssert) {
780
+ offset = offset >>> 0;
781
+ if (!noAssert) {
782
+ Buffer._checkOffset(offset, 4, this.length);
783
+ }
784
+ return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
785
+ }
786
+ /**
787
+ * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
788
+ * are interpreted as two's complement signed values.
789
+ *
790
+ * @param offset Number of bytes to skip before starting to read.
791
+ * @param noAssert
792
+ */
793
+ readInt32BE(offset, noAssert) {
794
+ offset = offset >>> 0;
795
+ if (!noAssert) {
796
+ Buffer._checkOffset(offset, 4, this.length);
797
+ }
798
+ return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
799
+ }
800
+ /**
801
+ * Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
802
+ * Throws a `RangeError` if `buf.length` is not a multiple of 2.
803
+ */
804
+ swap16() {
805
+ const len = this.length;
806
+ if (len % 2 !== 0) {
807
+ throw new RangeError("Buffer size must be a multiple of 16-bits");
808
+ }
809
+ for (let i = 0; i < len; i += 2) {
810
+ this._swap(this, i, i + 1);
811
+ }
812
+ return this;
813
+ }
814
+ /**
815
+ * Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
816
+ * Throws a `RangeError` if `buf.length` is not a multiple of 4.
817
+ */
818
+ swap32() {
819
+ const len = this.length;
820
+ if (len % 4 !== 0) {
821
+ throw new RangeError("Buffer size must be a multiple of 32-bits");
822
+ }
823
+ for (let i = 0; i < len; i += 4) {
824
+ this._swap(this, i, i + 3);
825
+ this._swap(this, i + 1, i + 2);
826
+ }
827
+ return this;
828
+ }
829
+ /**
830
+ * Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
831
+ * Throws a `RangeError` if `buf.length` is not a multiple of 8.
832
+ */
833
+ swap64() {
834
+ const len = this.length;
835
+ if (len % 8 !== 0) {
836
+ throw new RangeError("Buffer size must be a multiple of 64-bits");
837
+ }
838
+ for (let i = 0; i < len; i += 8) {
839
+ this._swap(this, i, i + 7);
840
+ this._swap(this, i + 1, i + 6);
841
+ this._swap(this, i + 2, i + 5);
842
+ this._swap(this, i + 3, i + 4);
843
+ }
844
+ return this;
845
+ }
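
A quick sketch of the in-place byte swaps, illustrative and under the same assumption as above:

// Each swap reverses byte order inside fixed-size groups, mutating the buffer.
const b = Buffer.from([0x01, 0x02, 0x03, 0x04]);
b.swap16();  // now <02 01 04 03>
b.swap32();  // now <03 04 01 02>
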
846
+ /**
847
+ * Swaps two octets.
848
+ *
849
+ * @param b
850
+ * @param n
851
+ * @param m
852
+ */
853
+ _swap(b, n, m) {
854
+ const i = b[n];
855
+ b[n] = b[m];
856
+ b[m] = i;
857
+ }
858
+ /**
859
+ * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
860
+ * Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
861
+ *
862
+ * @param value Number to write.
863
+ * @param offset Number of bytes to skip before starting to write.
864
+ * @param noAssert
865
+ * @returns `offset` plus the number of bytes written.
866
+ */
867
+ writeUInt8(value, offset, noAssert) {
868
+ value = +value;
869
+ offset = offset >>> 0;
870
+ if (!noAssert) {
871
+ Buffer._checkInt(this, value, offset, 1, 255, 0);
872
+ }
873
+ this[offset] = value & 255;
874
+ return offset + 1;
875
+ }
876
+ /**
877
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
878
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
879
+ *
880
+ * @param value Number to write.
881
+ * @param offset Number of bytes to skip before starting to write.
882
+ * @param noAssert
883
+ * @returns `offset` plus the number of bytes written.
884
+ */
885
+ writeUInt16LE(value, offset, noAssert) {
886
+ value = +value;
887
+ offset = offset >>> 0;
888
+ if (!noAssert) {
889
+ Buffer._checkInt(this, value, offset, 2, 65535, 0);
890
+ }
891
+ this[offset] = value & 255;
892
+ this[offset + 1] = value >>> 8;
893
+ return offset + 2;
894
+ }
895
+ /**
896
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
897
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
898
+ *
899
+ * @param value Number to write.
900
+ * @param offset Number of bytes to skip before starting to write.
901
+ * @param noAssert
902
+ * @returns `offset` plus the number of bytes written.
903
+ */
904
+ writeUInt16BE(value, offset, noAssert) {
905
+ value = +value;
906
+ offset = offset >>> 0;
907
+ if (!noAssert) {
908
+ Buffer._checkInt(this, value, offset, 2, 65535, 0);
909
+ }
910
+ this[offset] = value >>> 8;
911
+ this[offset + 1] = value & 255;
912
+ return offset + 2;
913
+ }
914
+ /**
915
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
916
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
917
+ *
918
+ * @param value Number to write.
919
+ * @param offset Number of bytes to skip before starting to write.
920
+ * @param noAssert
921
+ * @returns `offset` plus the number of bytes written.
922
+ */
923
+ writeUInt32LE(value, offset, noAssert) {
924
+ value = +value;
925
+ offset = offset >>> 0;
926
+ if (!noAssert) {
927
+ Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
928
+ }
929
+ this[offset + 3] = value >>> 24;
930
+ this[offset + 2] = value >>> 16;
931
+ this[offset + 1] = value >>> 8;
932
+ this[offset] = value & 255;
933
+ return offset + 4;
934
+ }
935
+ /**
936
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
937
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
938
+ *
939
+ * @param value Number to write.
940
+ * @param offset Number of bytes to skip before starting to write.
941
+ * @param noAssert
942
+ * @returns `offset` plus the number of bytes written.
943
+ */
944
+ writeUInt32BE(value, offset, noAssert) {
945
+ value = +value;
946
+ offset = offset >>> 0;
947
+ if (!noAssert) {
948
+ Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
949
+ }
950
+ this[offset] = value >>> 24;
951
+ this[offset + 1] = value >>> 16;
952
+ this[offset + 2] = value >>> 8;
953
+ this[offset + 3] = value & 255;
954
+ return offset + 4;
955
+ }
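
A short endianness sketch using the 32-bit read/write methods defined above, illustrative only:

// The same four bytes decode differently depending on byte order.
const buf = Buffer.alloc(4);
buf.writeUInt32BE(0x01020304, 0);  // bytes are now [0x01, 0x02, 0x03, 0x04]
buf.readUInt32BE(0);               // 16909060  (0x01020304)
buf.readUInt32LE(0);               // 67305985  (0x04030201)
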
956
+ /**
957
+ * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
958
+ * Behavior is undefined when `value` is anything other than a signed 8-bit integer.
959
+ *
960
+ * @param value Number to write.
961
+ * @param offset Number of bytes to skip before starting to write.
962
+ * @param noAssert
963
+ * @returns `offset` plus the number of bytes written.
964
+ */
965
+ writeInt8(value, offset, noAssert) {
966
+ value = +value;
967
+ offset = offset >>> 0;
968
+ if (!noAssert) {
969
+ Buffer._checkInt(this, value, offset, 1, 127, -128);
970
+ }
971
+ if (value < 0) {
972
+ value = 255 + value + 1;
973
+ }
974
+ this[offset] = value & 255;
975
+ return offset + 1;
976
+ }
977
+ /**
978
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
979
+ * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
980
+ *
981
+ * @param value Number to write.
982
+ * @param offset Number of bytes to skip before starting to write.
983
+ * @param noAssert
984
+ * @returns `offset` plus the number of bytes written.
985
+ */
986
+ writeInt16LE(value, offset, noAssert) {
987
+ value = +value;
988
+ offset = offset >>> 0;
989
+ if (!noAssert) {
990
+ Buffer._checkInt(this, value, offset, 2, 32767, -32768);
991
+ }
992
+ this[offset] = value & 255;
993
+ this[offset + 1] = value >>> 8;
994
+ return offset + 2;
995
+ }
996
+ /**
997
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
998
+ * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
999
+ *
1000
+ * @param value Number to write.
1001
+ * @param offset Number of bytes to skip before starting to write.
1002
+ * @param noAssert
1003
+ * @returns `offset` plus the number of bytes written.
1004
+ */
1005
+ writeInt16BE(value, offset, noAssert) {
1006
+ value = +value;
1007
+ offset = offset >>> 0;
1008
+ if (!noAssert) {
1009
+ Buffer._checkInt(this, value, offset, 2, 32767, -32768);
1010
+ }
1011
+ this[offset] = value >>> 8;
1012
+ this[offset + 1] = value & 255;
1013
+ return offset + 2;
1014
+ }
1015
+ /**
1016
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
1017
+ * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
1018
+ *
1019
+ * @param value Number to write.
1020
+ * @param offset Number of bytes to skip before starting to write.
1021
+ * @param noAssert
1022
+ * @returns `offset` plus the number of bytes written.
1023
+ */
1024
+ writeInt32LE(value, offset, noAssert) {
1025
+ value = +value;
1026
+ offset = offset >>> 0;
1027
+ if (!noAssert) {
1028
+ Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
1029
+ }
1030
+ this[offset] = value & 255;
1031
+ this[offset + 1] = value >>> 8;
1032
+ this[offset + 2] = value >>> 16;
1033
+ this[offset + 3] = value >>> 24;
1034
+ return offset + 4;
1035
+ }
1036
+ /**
1037
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
1038
+ * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
1039
+ *
1040
+ * @param value Number to write.
1041
+ * @param offset Number of bytes to skip before starting to write.
1042
+ * @param noAssert
1043
+ * @returns `offset` plus the number of bytes written.
1044
+ */
1045
+ writeInt32BE(value, offset, noAssert) {
1046
+ value = +value;
1047
+ offset = offset >>> 0;
1048
+ if (!noAssert) {
1049
+ Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
1050
+ }
1051
+ if (value < 0) {
1052
+ value = 4294967295 + value + 1;
1053
+ }
1054
+ this[offset] = value >>> 24;
1055
+ this[offset + 1] = value >>> 16;
1056
+ this[offset + 2] = value >>> 8;
1057
+ this[offset + 3] = value & 255;
1058
+ return offset + 4;
1059
+ }
1060
+ /**
1061
+ * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
1062
+ * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
1063
+ * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
1064
+ *
1065
+ * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
1066
+ * character that fit into `buf` are written.
1067
+ *
1068
+ * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
1069
+ *
1070
+ * @param value
1071
+ * @param encoding
1072
+ */
1073
+ fill(value, offset, end, encoding) {
1074
+ if (typeof value === "string") {
1075
+ if (typeof offset === "string") {
1076
+ encoding = offset;
1077
+ offset = 0;
1078
+ end = this.length;
1079
+ } else if (typeof end === "string") {
1080
+ encoding = end;
1081
+ end = this.length;
1082
+ }
1083
+ if (encoding !== void 0 && typeof encoding !== "string") {
1084
+ throw new TypeError("encoding must be a string");
1085
+ }
1086
+ if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
1087
+ throw new TypeError("Unknown encoding: " + encoding);
1088
+ }
1089
+ if (value.length === 1) {
1090
+ const code = value.charCodeAt(0);
1091
+ if (encoding === "utf8" && code < 128) {
1092
+ value = code;
1093
+ }
1094
+ }
1095
+ } else if (typeof value === "number") {
1096
+ value = value & 255;
1097
+ } else if (typeof value === "boolean") {
1098
+ value = Number(value);
1099
+ }
1100
+ offset ?? (offset = 0);
1101
+ end ?? (end = this.length);
1102
+ if (offset < 0 || this.length < offset || this.length < end) {
1103
+ throw new RangeError("Out of range index");
1104
+ }
1105
+ if (end <= offset) {
1106
+ return this;
1107
+ }
1108
+ offset = offset >>> 0;
1109
+ end = end === void 0 ? this.length : end >>> 0;
1110
+ value || (value = 0);
1111
+ let i;
1112
+ if (typeof value === "number") {
1113
+ for (i = offset; i < end; ++i) {
1114
+ this[i] = value;
1115
+ }
1116
+ } else {
1117
+ const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
1118
+ const len = bytes.length;
1119
+ if (len === 0) {
1120
+ throw new TypeError('The value "' + value + '" is invalid for argument "value"');
1121
+ }
1122
+ for (i = 0; i < end - offset; ++i) {
1123
+ this[i + offset] = bytes[i % len];
1124
+ }
1125
+ }
1126
+ return this;
1127
+ }
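
A sketch of the `fill` cases described in the comment above, illustrative only:

// Numbers fill byte-by-byte; strings are encoded and repeated across the range.
Buffer.alloc(4).fill(1);           // <01 01 01 01>
Buffer.alloc(6).fill("ab");        // the bytes of "ababab"
Buffer.alloc(4).fill(0x61, 1, 3);  // <00 61 61 00>
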
1128
+ /**
1129
+ * Returns the index of the specified value.
1130
+ *
1131
+ * If `value` is:
1132
+ * - a string, `value` is interpreted according to the character encoding in `encoding`.
1133
+ * - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
1134
+ * - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
1135
+ *
1136
+ * Any other types will throw a `TypeError`.
1137
+ *
1138
+ * @param value What to search for.
1139
+ * @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
1140
+ * @param encoding If `value` is a string, this is the encoding used to search.
1141
+ * @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
1142
+ */
1143
+ indexOf(value, byteOffset, encoding) {
1144
+ return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
1145
+ }
1146
+ /**
1147
+ * Gets the last index of the specified value.
1148
+ *
1149
+ * @see indexOf()
1150
+ * @param value
1151
+ * @param byteOffset
1152
+ * @param encoding
1153
+ */
1154
+ lastIndexOf(value, byteOffset, encoding) {
1155
+ return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
1156
+ }
1157
+ _bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
1158
+ if (buffer.length === 0) {
1159
+ return -1;
1160
+ }
1161
+ if (typeof byteOffset === "string") {
1162
+ encoding = byteOffset;
1163
+ byteOffset = 0;
1164
+ } else if (typeof byteOffset === "undefined") {
1165
+ byteOffset = 0;
1166
+ } else if (byteOffset > 2147483647) {
1167
+ byteOffset = 2147483647;
1168
+ } else if (byteOffset < -2147483648) {
1169
+ byteOffset = -2147483648;
1170
+ }
1171
+ byteOffset = +byteOffset;
1172
+ if (byteOffset !== byteOffset) {
1173
+ byteOffset = dir ? 0 : buffer.length - 1;
1174
+ }
1175
+ if (byteOffset < 0) {
1176
+ byteOffset = buffer.length + byteOffset;
1177
+ }
1178
+ if (byteOffset >= buffer.length) {
1179
+ if (dir) {
1180
+ return -1;
1181
+ } else {
1182
+ byteOffset = buffer.length - 1;
1183
+ }
1184
+ } else if (byteOffset < 0) {
1185
+ if (dir) {
1186
+ byteOffset = 0;
1187
+ } else {
1188
+ return -1;
1189
+ }
1190
+ }
1191
+ if (typeof val === "string") {
1192
+ val = Buffer.from(val, encoding);
1193
+ }
1194
+ if (Buffer.isBuffer(val)) {
1195
+ if (val.length === 0) {
1196
+ return -1;
1197
+ }
1198
+ return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
1199
+ } else if (typeof val === "number") {
1200
+ val = val & 255;
1201
+ if (typeof Uint8Array.prototype.indexOf === "function") {
1202
+ if (dir) {
1203
+ return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
1204
+ } else {
1205
+ return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
1206
+ }
1207
+ }
1208
+ return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
1209
+ }
1210
+ throw new TypeError("val must be string, number or Buffer");
1211
+ }
1212
+ /**
1213
+ * Equivalent to `buf.indexOf() !== -1`.
1214
+ *
1215
+ * @param value
1216
+ * @param byteOffset
1217
+ * @param encoding
1218
+ */
1219
+ includes(value, byteOffset, encoding) {
1220
+ return this.indexOf(value, byteOffset, encoding) !== -1;
1221
+ }
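
A sketch of the three accepted search types (string, number, buffer), under the same assumption:

// indexOf() returns the byte index of the first match, or -1.
const haystack = Buffer.from("this is a buffer");
haystack.indexOf("is");                     // 2
haystack.indexOf(Buffer.from("a buffer"));  // 8
haystack.indexOf(97);                       // 8, the byte value of "a"
haystack.includes("missing");               // false
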
1222
+ /**
1223
+ * Creates a new buffer from the given parameters.
1224
+ *
1225
+ * @param data
1226
+ * @param encoding
1227
+ */
1228
+ static from(a, b, c) {
1229
+ return new Buffer(a, b, c);
1230
+ }
1231
+ /**
1232
+ * Returns true if `obj` is a Buffer.
1233
+ *
1234
+ * @param obj
1235
+ */
1236
+ static isBuffer(obj) {
1237
+ return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
1238
+ }
1239
+ /**
1240
+ * Returns true if `encoding` is a supported encoding.
1241
+ *
1242
+ * @param encoding
1243
+ */
1244
+ static isEncoding(encoding) {
1245
+ switch (encoding.toLowerCase()) {
1246
+ case "hex":
1247
+ case "utf8":
1248
+ case "ascii":
1249
+ case "binary":
1250
+ case "latin1":
1251
+ case "ucs2":
1252
+ case "utf16le":
1253
+ case "base64":
1254
+ return true;
1255
+ default:
1256
+ return false;
1257
+ }
1258
+ }
1259
+ /**
1260
+ * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
1261
+ * returns the number of characters in the string.
1262
+ *
1263
+ * @param string The string to test.
1264
+ * @param encoding The encoding to use for calculation. Default is `utf8`.
1265
+ */
1266
+ static byteLength(string, encoding) {
1267
+ if (Buffer.isBuffer(string)) {
1268
+ return string.length;
1269
+ }
1270
+ if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
1271
+ return string.byteLength;
1272
+ }
1273
+ if (typeof string !== "string") {
1274
+ throw new TypeError(
1275
+ 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
1276
+ );
1277
+ }
1278
+ const len = string.length;
1279
+ const mustMatch = arguments.length > 2 && arguments[2] === true;
1280
+ if (!mustMatch && len === 0) {
1281
+ return 0;
1282
+ }
1283
+ switch (encoding?.toLowerCase()) {
1284
+ case "ascii":
1285
+ case "latin1":
1286
+ case "binary":
1287
+ return len;
1288
+ case "utf8":
1289
+ return Buffer._utf8ToBytes(string).length;
1290
+ case "hex":
1291
+ return len >>> 1;
1292
+ case "ucs2":
1293
+ case "utf16le":
1294
+ return len * 2;
1295
+ case "base64":
1296
+ return Buffer._base64ToBytes(string).length;
1297
+ default:
1298
+ return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
1299
+ }
1300
+ }
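
A sketch showing why `byteLength` differs from `string.length`, illustrative only:

// Byte length depends on the encoding, not on the character count.
Buffer.byteLength("hello");               // 5
Buffer.byteLength("½ + ¼ = ¾");           // 12 bytes in utf8 for 9 characters
Buffer.byteLength("aGVsbG8=", "base64");  // 5
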
1301
+ /**
1302
+ * Returns a Buffer which is the result of concatenating all the buffers in the list together.
1303
+ *
1304
+ * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
1305
+ * - If the list has exactly one item, a new buffer containing a copy of that item is still created.
1306
+ * - If the list has more than one item, then a new buffer is created.
1307
+ *
1308
+ * It is faster to provide the `totalLength` if it is known. If it is not provided, it will be calculated at
1309
+ * a small computational expense.
1310
+ *
1311
+ * @param list An array of Buffer objects to concatenate.
1312
+ * @param totalLength Total length of the buffers when concatenated.
1313
+ */
1314
+ static concat(list, totalLength) {
1315
+ if (!Array.isArray(list)) {
1316
+ throw new TypeError('"list" argument must be an Array of Buffers');
1317
+ }
1318
+ if (list.length === 0) {
1319
+ return Buffer.alloc(0);
1320
+ }
1321
+ let i;
1322
+ if (totalLength === void 0) {
1323
+ totalLength = 0;
1324
+ for (i = 0; i < list.length; ++i) {
1325
+ totalLength += list[i].length;
1326
+ }
1327
+ }
1328
+ const buffer = Buffer.allocUnsafe(totalLength);
1329
+ let pos = 0;
1330
+ for (i = 0; i < list.length; ++i) {
1331
+ let buf = list[i];
1332
+ if (Buffer._isInstance(buf, Uint8Array)) {
1333
+ if (pos + buf.length > buffer.length) {
1334
+ if (!Buffer.isBuffer(buf)) {
1335
+ buf = Buffer.from(buf);
1336
+ }
1337
+ buf.copy(buffer, pos);
1338
+ } else {
1339
+ Uint8Array.prototype.set.call(buffer, buf, pos);
1340
+ }
1341
+ } else if (!Buffer.isBuffer(buf)) {
1342
+ throw new TypeError('"list" argument must be an Array of Buffers');
1343
+ } else {
1344
+ buf.copy(buffer, pos);
1345
+ }
1346
+ pos += buf.length;
1347
+ }
1348
+ return buffer;
1349
+ }
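
A sketch of `concat`, illustrative only:

// The result is a newly allocated buffer holding every input's bytes in order.
const joined = Buffer.concat([Buffer.from("foo"), Buffer.from("bar")]);
joined.toString();  // "foobar"
joined.length;      // 6
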
1350
+ /**
1351
+ * The same as `buf1.compare(buf2)`.
1352
+ */
1353
+ static compare(buf1, buf2) {
1354
+ if (Buffer._isInstance(buf1, Uint8Array)) {
1355
+ buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
1356
+ }
1357
+ if (Buffer._isInstance(buf2, Uint8Array)) {
1358
+ buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
1359
+ }
1360
+ if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
1361
+ throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
1362
+ }
1363
+ if (buf1 === buf2) {
1364
+ return 0;
1365
+ }
1366
+ let x = buf1.length;
1367
+ let y = buf2.length;
1368
+ for (let i = 0, len = Math.min(x, y); i < len; ++i) {
1369
+ if (buf1[i] !== buf2[i]) {
1370
+ x = buf1[i];
1371
+ y = buf2[i];
1372
+ break;
1373
+ }
1374
+ }
1375
+ if (x < y) {
1376
+ return -1;
1377
+ }
1378
+ if (y < x) {
1379
+ return 1;
1380
+ }
1381
+ return 0;
1382
+ }
1383
+ /**
1384
+ * Allocates a new buffer of `size` octets.
1385
+ *
1386
+ * @param size The number of octets to allocate.
1387
+ * @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
1388
+ * @param encoding The encoding used for the call to `buf.fill()` while initializing.
1389
+ */
1390
+ static alloc(size, fill, encoding) {
1391
+ if (typeof size !== "number") {
1392
+ throw new TypeError('"size" argument must be of type number');
1393
+ } else if (size < 0) {
1394
+ throw new RangeError('The value "' + size + '" is invalid for option "size"');
1395
+ }
1396
+ if (size <= 0) {
1397
+ return new Buffer(size);
1398
+ }
1399
+ if (fill !== void 0) {
1400
+ return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
1401
+ }
1402
+ return new Buffer(size);
1403
+ }
1404
+ /**
1405
+ * Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
1406
+ *
1407
+ * @param size
1408
+ */
1409
+ static allocUnsafe(size) {
1410
+ if (typeof size !== "number") {
1411
+ throw new TypeError('"size" argument must be of type number');
1412
+ } else if (size < 0) {
1413
+ throw new RangeError('The value "' + size + '" is invalid for option "size"');
1414
+ }
1415
+ return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
1416
+ }
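
A sketch of the two allocators, illustrative only:

// alloc() zero-fills or applies the given fill value; allocUnsafe() makes no
// initialization promise, although this polyfill is still backed by a zeroed Uint8Array.
Buffer.alloc(3);        // <00 00 00>
Buffer.alloc(3, 7);     // <07 07 07>
Buffer.alloc(4, "a");   // <61 61 61 61>
Buffer.allocUnsafe(3);  // treat contents as uninitialized and fill before use
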
1417
+ /**
1418
+ * Returns true if the given `obj` is an instance of `type`.
1419
+ *
1420
+ * @param obj
1421
+ * @param type
1422
+ */
1423
+ static _isInstance(obj, type) {
1424
+ return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
1425
+ }
1426
+ static _checked(length) {
1427
+ if (length >= K_MAX_LENGTH) {
1428
+ throw new RangeError(
1429
+ "Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
1430
+ );
1431
+ }
1432
+ return length | 0;
1433
+ }
1434
+ static _blitBuffer(src, dst, offset, length) {
1435
+ let i;
1436
+ for (i = 0; i < length; ++i) {
1437
+ if (i + offset >= dst.length || i >= src.length) {
1438
+ break;
1439
+ }
1440
+ dst[i + offset] = src[i];
1441
+ }
1442
+ return i;
1443
+ }
1444
+ static _utf8Write(buf, string, offset, length) {
1445
+ return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
1446
+ }
1447
+ static _asciiWrite(buf, string, offset, length) {
1448
+ return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
1449
+ }
1450
+ static _base64Write(buf, string, offset, length) {
1451
+ return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
1452
+ }
1453
+ static _ucs2Write(buf, string, offset, length) {
1454
+ return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
1455
+ }
1456
+ static _hexWrite(buf, string, offset, length) {
1457
+ offset = Number(offset) || 0;
1458
+ const remaining = buf.length - offset;
1459
+ if (!length) {
1460
+ length = remaining;
1461
+ } else {
1462
+ length = Number(length);
1463
+ if (length > remaining) {
1464
+ length = remaining;
1465
+ }
1466
+ }
1467
+ const strLen = string.length;
1468
+ if (length > strLen / 2) {
1469
+ length = strLen / 2;
1470
+ }
1471
+ let i;
1472
+ for (i = 0; i < length; ++i) {
1473
+ const parsed = parseInt(string.substr(i * 2, 2), 16);
1474
+ if (parsed !== parsed) {
1475
+ return i;
1476
+ }
1477
+ buf[offset + i] = parsed;
1478
+ }
1479
+ return i;
1480
+ }
1481
+ static _utf8ToBytes(string, units) {
1482
+ units = units || Infinity;
1483
+ const length = string.length;
1484
+ const bytes = [];
1485
+ let codePoint;
1486
+ let leadSurrogate = null;
1487
+ for (let i = 0; i < length; ++i) {
1488
+ codePoint = string.charCodeAt(i);
1489
+ if (codePoint > 55295 && codePoint < 57344) {
1490
+ if (!leadSurrogate) {
1491
+ if (codePoint > 56319) {
1492
+ if ((units -= 3) > -1) {
1493
+ bytes.push(239, 191, 189);
1494
+ }
1495
+ continue;
1496
+ } else if (i + 1 === length) {
1497
+ if ((units -= 3) > -1) {
1498
+ bytes.push(239, 191, 189);
1499
+ }
1500
+ continue;
1501
+ }
1502
+ leadSurrogate = codePoint;
1503
+ continue;
1504
+ }
1505
+ if (codePoint < 56320) {
1506
+ if ((units -= 3) > -1) {
1507
+ bytes.push(239, 191, 189);
1508
+ }
1509
+ leadSurrogate = codePoint;
1510
+ continue;
1511
+ }
1512
+ codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
1513
+ } else if (leadSurrogate) {
1514
+ if ((units -= 3) > -1) {
1515
+ bytes.push(239, 191, 189);
1516
+ }
1517
+ }
1518
+ leadSurrogate = null;
1519
+ if (codePoint < 128) {
1520
+ if ((units -= 1) < 0) {
1521
+ break;
1522
+ }
1523
+ bytes.push(codePoint);
1524
+ } else if (codePoint < 2048) {
1525
+ if ((units -= 2) < 0) {
1526
+ break;
1527
+ }
1528
+ bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
1529
+ } else if (codePoint < 65536) {
1530
+ if ((units -= 3) < 0) {
1531
+ break;
1532
+ }
1533
+ bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
1534
+ } else if (codePoint < 1114112) {
1535
+ if ((units -= 4) < 0) {
1536
+ break;
1537
+ }
1538
+ bytes.push(
1539
+ codePoint >> 18 | 240,
1540
+ codePoint >> 12 & 63 | 128,
1541
+ codePoint >> 6 & 63 | 128,
1542
+ codePoint & 63 | 128
1543
+ );
1544
+ } else {
1545
+ throw new Error("Invalid code point");
1546
+ }
1547
+ }
1548
+ return bytes;
1549
+ }
1550
+ static _base64ToBytes(str) {
1551
+ return toByteArray(base64clean(str));
1552
+ }
1553
+ static _asciiToBytes(str) {
1554
+ const byteArray = [];
1555
+ for (let i = 0; i < str.length; ++i) {
1556
+ byteArray.push(str.charCodeAt(i) & 255);
1557
+ }
1558
+ return byteArray;
1559
+ }
1560
+ static _utf16leToBytes(str, units) {
1561
+ let c, hi, lo;
1562
+ const byteArray = [];
1563
+ for (let i = 0; i < str.length; ++i) {
1564
+ if ((units -= 2) < 0) break;
1565
+ c = str.charCodeAt(i);
1566
+ hi = c >> 8;
1567
+ lo = c % 256;
1568
+ byteArray.push(lo);
1569
+ byteArray.push(hi);
1570
+ }
1571
+ return byteArray;
1572
+ }
1573
+ static _hexSlice(buf, start, end) {
1574
+ const len = buf.length;
1575
+ if (!start || start < 0) {
1576
+ start = 0;
1577
+ }
1578
+ if (!end || end < 0 || end > len) {
1579
+ end = len;
1580
+ }
1581
+ let out = "";
1582
+ for (let i = start; i < end; ++i) {
1583
+ out += hexSliceLookupTable[buf[i]];
1584
+ }
1585
+ return out;
1586
+ }
1587
+ static _base64Slice(buf, start, end) {
1588
+ if (start === 0 && end === buf.length) {
1589
+ return fromByteArray(buf);
1590
+ } else {
1591
+ return fromByteArray(buf.slice(start, end));
1592
+ }
1593
+ }
1594
+ static _utf8Slice(buf, start, end) {
1595
+ end = Math.min(buf.length, end);
1596
+ const res = [];
1597
+ let i = start;
1598
+ while (i < end) {
1599
+ const firstByte = buf[i];
1600
+ let codePoint = null;
1601
+ let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
1602
+ if (i + bytesPerSequence <= end) {
1603
+ let secondByte, thirdByte, fourthByte, tempCodePoint;
1604
+ switch (bytesPerSequence) {
1605
+ case 1:
1606
+ if (firstByte < 128) {
1607
+ codePoint = firstByte;
1608
+ }
1609
+ break;
1610
+ case 2:
1611
+ secondByte = buf[i + 1];
1612
+ if ((secondByte & 192) === 128) {
1613
+ tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
1614
+ if (tempCodePoint > 127) {
1615
+ codePoint = tempCodePoint;
1616
+ }
1617
+ }
1618
+ break;
1619
+ case 3:
1620
+ secondByte = buf[i + 1];
1621
+ thirdByte = buf[i + 2];
1622
+ if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
1623
+ tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
1624
+ if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
1625
+ codePoint = tempCodePoint;
1626
+ }
1627
+ }
1628
+ break;
1629
+ case 4:
1630
+ secondByte = buf[i + 1];
1631
+ thirdByte = buf[i + 2];
1632
+ fourthByte = buf[i + 3];
1633
+ if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
1634
+ tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
1635
+ if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
1636
+ codePoint = tempCodePoint;
1637
+ }
1638
+ }
1639
+ }
1640
+ }
1641
+ if (codePoint === null) {
1642
+ codePoint = 65533;
1643
+ bytesPerSequence = 1;
1644
+ } else if (codePoint > 65535) {
1645
+ codePoint -= 65536;
1646
+ res.push(codePoint >>> 10 & 1023 | 55296);
1647
+ codePoint = 56320 | codePoint & 1023;
1648
+ }
1649
+ res.push(codePoint);
1650
+ i += bytesPerSequence;
1651
+ }
1652
+ return Buffer._decodeCodePointsArray(res);
1653
+ }
1654
+ static _decodeCodePointsArray(codePoints) {
1655
+ const len = codePoints.length;
1656
+ if (len <= MAX_ARGUMENTS_LENGTH) {
1657
+ return String.fromCharCode.apply(String, codePoints);
1658
+ }
1659
+ let res = "";
1660
+ let i = 0;
1661
+ while (i < len) {
1662
+ res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
1663
+ }
1664
+ return res;
1665
+ }
1666
+ static _asciiSlice(buf, start, end) {
1667
+ let ret = "";
1668
+ end = Math.min(buf.length, end);
1669
+ for (let i = start; i < end; ++i) {
1670
+ ret += String.fromCharCode(buf[i] & 127);
1671
+ }
1672
+ return ret;
1673
+ }
1674
+ static _latin1Slice(buf, start, end) {
1675
+ let ret = "";
1676
+ end = Math.min(buf.length, end);
1677
+ for (let i = start; i < end; ++i) {
1678
+ ret += String.fromCharCode(buf[i]);
1679
+ }
1680
+ return ret;
1681
+ }
1682
+ static _utf16leSlice(buf, start, end) {
1683
+ const bytes = buf.slice(start, end);
1684
+ let res = "";
1685
+ for (let i = 0; i < bytes.length - 1; i += 2) {
1686
+ res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
1687
+ }
1688
+ return res;
1689
+ }
1690
+ static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
1691
+ let indexSize = 1;
1692
+ let arrLength = arr.length;
1693
+ let valLength = val.length;
1694
+ if (encoding !== void 0) {
1695
+ encoding = Buffer._getEncoding(encoding);
1696
+ if (encoding === "ucs2" || encoding === "utf16le") {
1697
+ if (arr.length < 2 || val.length < 2) {
1698
+ return -1;
1699
+ }
1700
+ indexSize = 2;
1701
+ arrLength /= 2;
1702
+ valLength /= 2;
1703
+ byteOffset /= 2;
1704
+ }
1705
+ }
1706
+ function read(buf, i2) {
1707
+ if (indexSize === 1) {
1708
+ return buf[i2];
1709
+ } else {
1710
+ return buf.readUInt16BE(i2 * indexSize);
1711
+ }
1712
+ }
1713
+ let i;
1714
+ if (dir) {
1715
+ let foundIndex = -1;
1716
+ for (i = byteOffset; i < arrLength; i++) {
1717
+ if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
1718
+ if (foundIndex === -1) foundIndex = i;
1719
+ if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
1720
+ } else {
1721
+ if (foundIndex !== -1) i -= i - foundIndex;
1722
+ foundIndex = -1;
1723
+ }
1724
+ }
1725
+ } else {
1726
+ if (byteOffset + valLength > arrLength) {
1727
+ byteOffset = arrLength - valLength;
1728
+ }
1729
+ for (i = byteOffset; i >= 0; i--) {
1730
+ let found = true;
1731
+ for (let j = 0; j < valLength; j++) {
1732
+ if (read(arr, i + j) !== read(val, j)) {
1733
+ found = false;
1734
+ break;
1735
+ }
1736
+ }
1737
+ if (found) {
1738
+ return i;
1739
+ }
1740
+ }
1741
+ }
1742
+ return -1;
1743
+ }
1744
+ static _checkOffset(offset, ext, length) {
1745
+ if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
1746
+ if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
1747
+ }
1748
+ static _checkInt(buf, value, offset, ext, max, min) {
1749
+ if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
1750
+ if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
1751
+ if (offset + ext > buf.length) throw new RangeError("Index out of range");
1752
+ }
1753
+ static _getEncoding(encoding) {
1754
+ let toLowerCase = false;
1755
+ let originalEncoding = "";
1756
+ for (; ; ) {
1757
+ switch (encoding) {
1758
+ case "hex":
1759
+ return "hex";
1760
+ case "utf8":
1761
+ return "utf8";
1762
+ case "ascii":
1763
+ return "ascii";
1764
+ case "binary":
1765
+ return "binary";
1766
+ case "latin1":
1767
+ return "latin1";
1768
+ case "ucs2":
1769
+ return "ucs2";
1770
+ case "utf16le":
1771
+ return "utf16le";
1772
+ case "base64":
1773
+ return "base64";
1774
+ default: {
1775
+ if (toLowerCase) {
1776
+ throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
1777
+ }
1778
+ toLowerCase = true;
1779
+ originalEncoding = encoding;
1780
+ encoding = encoding.toLowerCase();
1781
+ }
1782
+ }
1783
+ }
1784
+ }
1785
+ }
1786
+ const hexSliceLookupTable = function() {
1787
+ const alphabet = "0123456789abcdef";
1788
+ const table = new Array(256);
1789
+ for (let i = 0; i < 16; ++i) {
1790
+ const i16 = i * 16;
1791
+ for (let j = 0; j < 16; ++j) {
1792
+ table[i16 + j] = alphabet[i] + alphabet[j];
1793
+ }
1794
+ }
1795
+ return table;
1796
+ }();
1797
+ const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
1798
+ function base64clean(str) {
1799
+ str = str.split("=")[0];
1800
+ str = str.trim().replace(INVALID_BASE64_RE, "");
1801
+ if (str.length < 2) return "";
1802
+ while (str.length % 4 !== 0) {
1803
+ str = str + "=";
1804
+ }
1805
+ return str;
1806
+ }
1807
+
25
1808
  function notEmpty(value) {
26
1809
  return value !== null && value !== void 0;
27
1810
  }
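
Note: the hunk above inlines a Buffer-style polyfill (base64 lookup tables, UTF-8 encode/decode, hex slicing) into the bundle. The heart of _utf8ToBytes is the standard four-range UTF-8 branching; the standalone sketch below is not part of the package and only shows that branching for a single, already de-surrogated code point.

// Standalone sketch of the four-range branching used by _utf8ToBytes above.
// Assumes a valid code point below 0x110000; the original additionally budgets
// "units" and emits U+FFFD (239, 191, 189) for lone surrogates.
function codePointToUtf8Bytes(codePoint) {
  if (codePoint < 128) return [codePoint];
  if (codePoint < 2048) return [codePoint >> 6 | 192, codePoint & 63 | 128];
  if (codePoint < 65536) {
    return [codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128];
  }
  return [codePoint >> 18 | 240, codePoint >> 12 & 63 | 128, codePoint >> 6 & 63 | 128, codePoint & 63 | 128];
}
console.log(codePointToUtf8Bytes("€".codePointAt(0))); // [ 226, 130, 172 ], the UTF-8 bytes for U+20AC
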
@@ -116,155 +1899,15 @@ function promiseMap(inputValues, mapper) {
116
1899
  return inputValues.reduce(reducer, Promise.resolve([]));
117
1900
  }
118
1901
 
119
- function getEnvironment() {
120
- try {
121
- if (isDefined(process) && isDefined(process.env)) {
122
- return {
123
- apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
124
- databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
125
- branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
126
- deployPreview: process.env.XATA_PREVIEW,
127
- deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
128
- vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
129
- vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
130
- };
131
- }
132
- } catch (err) {
133
- }
134
- try {
135
- if (isObject(Deno) && isObject(Deno.env)) {
136
- return {
137
- apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
138
- databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
139
- branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
140
- deployPreview: Deno.env.get("XATA_PREVIEW"),
141
- deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
142
- vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
143
- vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
144
- };
145
- }
146
- } catch (err) {
147
- }
148
- return {
149
- apiKey: getGlobalApiKey(),
150
- databaseURL: getGlobalDatabaseURL(),
151
- branch: getGlobalBranch(),
152
- deployPreview: void 0,
153
- deployPreviewBranch: void 0,
154
- vercelGitCommitRef: void 0,
155
- vercelGitRepoOwner: void 0
156
- };
157
- }
158
- function getEnableBrowserVariable() {
159
- try {
160
- if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
161
- return process.env.XATA_ENABLE_BROWSER === "true";
162
- }
163
- } catch (err) {
164
- }
165
- try {
166
- if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
167
- return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
168
- }
169
- } catch (err) {
170
- }
171
- try {
172
- return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
173
- } catch (err) {
174
- return void 0;
175
- }
176
- }
177
- function getGlobalApiKey() {
178
- try {
179
- return XATA_API_KEY;
180
- } catch (err) {
181
- return void 0;
182
- }
183
- }
184
- function getGlobalDatabaseURL() {
185
- try {
186
- return XATA_DATABASE_URL;
187
- } catch (err) {
188
- return void 0;
189
- }
190
- }
191
- function getGlobalBranch() {
192
- try {
193
- return XATA_BRANCH;
194
- } catch (err) {
195
- return void 0;
196
- }
197
- }
198
- function getDatabaseURL() {
199
- try {
200
- const { databaseURL } = getEnvironment();
201
- return databaseURL;
202
- } catch (err) {
203
- return void 0;
204
- }
205
- }
206
- function getAPIKey() {
207
- try {
208
- const { apiKey } = getEnvironment();
209
- return apiKey;
210
- } catch (err) {
211
- return void 0;
212
- }
213
- }
214
- function getBranch() {
215
- try {
216
- const { branch } = getEnvironment();
217
- return branch;
218
- } catch (err) {
219
- return void 0;
220
- }
221
- }
222
- function buildPreviewBranchName({ org, branch }) {
223
- return `preview-${org}-${branch}`;
224
- }
225
- function getPreviewBranch() {
226
- try {
227
- const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
228
- if (deployPreviewBranch)
229
- return deployPreviewBranch;
230
- switch (deployPreview) {
231
- case "vercel": {
232
- if (!vercelGitCommitRef || !vercelGitRepoOwner) {
233
- console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
234
- return void 0;
235
- }
236
- return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
237
- }
238
- }
239
- return void 0;
240
- } catch (err) {
241
- return void 0;
242
- }
243
- }
244
-
245
- var __accessCheck$6 = (obj, member, msg) => {
246
- if (!member.has(obj))
247
- throw TypeError("Cannot " + msg);
248
- };
249
- var __privateGet$5 = (obj, member, getter) => {
250
- __accessCheck$6(obj, member, "read from private field");
251
- return getter ? getter.call(obj) : member.get(obj);
252
- };
253
- var __privateAdd$6 = (obj, member, value) => {
254
- if (member.has(obj))
255
- throw TypeError("Cannot add the same private member more than once");
256
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
257
- };
258
- var __privateSet$4 = (obj, member, value, setter) => {
259
- __accessCheck$6(obj, member, "write to private field");
260
- setter ? setter.call(obj, value) : member.set(obj, value);
261
- return value;
262
- };
263
- var __privateMethod$4 = (obj, member, method) => {
264
- __accessCheck$6(obj, member, "access private method");
265
- return method;
1902
+ var __typeError$6 = (msg) => {
1903
+ throw TypeError(msg);
266
1904
  };
267
- var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
1905
+ var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
1906
+ var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
1907
+ var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1908
+ var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
1909
+ var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
1910
+ var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
268
1911
  const REQUEST_TIMEOUT = 5 * 60 * 1e3;
269
1912
  function getFetchImplementation(userFetch) {
270
1913
  const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
@@ -277,10 +1920,10 @@ function getFetchImplementation(userFetch) {
277
1920
  }
278
1921
  class ApiRequestPool {
279
1922
  constructor(concurrency = 10) {
280
- __privateAdd$6(this, _enqueue);
281
- __privateAdd$6(this, _fetch, void 0);
282
- __privateAdd$6(this, _queue, void 0);
283
- __privateAdd$6(this, _concurrency, void 0);
1923
+ __privateAdd$6(this, _ApiRequestPool_instances);
1924
+ __privateAdd$6(this, _fetch);
1925
+ __privateAdd$6(this, _queue);
1926
+ __privateAdd$6(this, _concurrency);
284
1927
  __privateSet$4(this, _queue, []);
285
1928
  __privateSet$4(this, _concurrency, concurrency);
286
1929
  this.running = 0;
@@ -315,7 +1958,7 @@ class ApiRequestPool {
315
1958
  }
316
1959
  return response;
317
1960
  };
318
- return __privateMethod$4(this, _enqueue, enqueue_fn).call(this, async () => {
1961
+ return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
319
1962
  return await runRequest();
320
1963
  });
321
1964
  }
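
Note: across the hunks above, the private-member helpers change shape: __accessCheck$6 becomes an expression that delegates to a shared __typeError$6, __privateAdd$6 no longer takes an explicit void 0 initial value, and the per-method _enqueue WeakSet is replaced by a single _ApiRequestPool_instances brand. This looks like the newer lowering that bundlers such as esbuild emit for native #private class members. Purely as an illustration (not the package's actual source), a class written like the sketch below compiles to that pattern.

// Illustrative only: the source-level shape such lowered helpers correspond to.
class PoolSketch {
  #queue = [];
  #enqueue(task) {
    this.#queue.push(task);
    return this.#queue.length;
  }
  request(task) {
    return this.#enqueue(task);
  }
}
console.log(new PoolSketch().request(() => {})); // 1
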
@@ -323,7 +1966,7 @@ class ApiRequestPool {
323
1966
  _fetch = new WeakMap();
324
1967
  _queue = new WeakMap();
325
1968
  _concurrency = new WeakMap();
326
- _enqueue = new WeakSet();
1969
+ _ApiRequestPool_instances = new WeakSet();
327
1970
  enqueue_fn = function(task) {
328
1971
  const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
329
1972
  this.started--;
@@ -526,7 +2169,7 @@ function defaultOnOpen(response) {
526
2169
  }
527
2170
  }
528
2171
 
529
- const VERSION = "0.29.3";
2172
+ const VERSION = "0.30.0";
530
2173
 
531
2174
  class ErrorWithCause extends Error {
532
2175
  constructor(message, options) {
@@ -606,35 +2249,30 @@ function parseProviderString(provider = "production") {
606
2249
  return provider;
607
2250
  }
608
2251
  const [main, workspaces] = provider.split(",");
609
- if (!main || !workspaces)
610
- return null;
2252
+ if (!main || !workspaces) return null;
611
2253
  return { main, workspaces };
612
2254
  }
613
2255
  function buildProviderString(provider) {
614
- if (isHostProviderAlias(provider))
615
- return provider;
2256
+ if (isHostProviderAlias(provider)) return provider;
616
2257
  return `${provider.main},${provider.workspaces}`;
617
2258
  }
618
2259
  function parseWorkspacesUrlParts(url) {
619
- if (!isString(url))
620
- return null;
2260
+ if (!isString(url)) return null;
621
2261
  const matches = {
622
2262
  production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
623
2263
  staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
624
2264
  dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
625
- local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:([^:]+):?(.*)?/)
2265
+ local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
626
2266
  };
627
2267
  const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
628
- if (!isHostProviderAlias(host) || !match)
629
- return null;
2268
+ if (!isHostProviderAlias(host) || !match) return null;
630
2269
  return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
631
2270
  }
632
2271
 
633
2272
  const pool = new ApiRequestPool();
634
2273
  const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
635
2274
  const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
636
- if (value === void 0 || value === null)
637
- return acc;
2275
+ if (value === void 0 || value === null) return acc;
638
2276
  return { ...acc, [key]: value };
639
2277
  }, {});
640
2278
  const query = new URLSearchParams(cleanQueryParams).toString();
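
Note: the "local" pattern in parseWorkspacesUrlParts above now skips the port with a non-capturing group and requires an explicit /db/<database> path, where the previous pattern captured whatever followed "localhost:" as the database. A quick standalone check against the new regex (the URL below is a made-up example):

// Hypothetical local URL matched against the new "local" regex from the hunk above.
const localRe = /(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/;
const m = "https://my-workspace.us-east-1.localhost:6001/db/mydb:main".match(localRe);
console.log(m.slice(1, 5)); // [ 'my-workspace', 'us-east-1', 'mydb', 'main' ]
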
@@ -682,8 +2320,7 @@ function hostHeader(url) {
682
2320
  return groups?.host ? { Host: groups.host } : {};
683
2321
  }
684
2322
  async function parseBody(body, headers) {
685
- if (!isDefined(body))
686
- return void 0;
2323
+ if (!isDefined(body)) return void 0;
687
2324
  if (isBlob(body) || typeof body.text === "function") {
688
2325
  return body;
689
2326
  }
@@ -762,8 +2399,7 @@ async function fetch$1({
762
2399
  [TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
763
2400
  });
764
2401
  const message = response.headers?.get("x-xata-message");
765
- if (message)
766
- console.warn(message);
2402
+ if (message) console.warn(message);
767
2403
  if (response.status === 204) {
768
2404
  return {};
769
2405
  }
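
Note: two small behaviours visible in the hunk above: any x-xata-message response header is forwarded to console.warn, and a 204 response short-circuits to an empty object. A standalone illustration of the latter (this is not fetch$1 itself):

// Standalone illustration: No Content responses surface to callers as {}.
const surfaceNoContent = async (response) =>
  response.status === 204 ? {} : await response.json();
surfaceNoContent(new Response(null, { status: 204 })).then(console.log); // {}
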
@@ -847,7 +2483,72 @@ function parseUrl(url) {
847
2483
 
848
2484
  const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
849
2485
 
850
- const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/apply", method: "post", ...variables, signal });
2486
+ const getTasks = (variables, signal) => dataPlaneFetch({
2487
+ url: "/tasks",
2488
+ method: "get",
2489
+ ...variables,
2490
+ signal
2491
+ });
2492
+ const getTaskStatus = (variables, signal) => dataPlaneFetch({
2493
+ url: "/tasks/{taskId}",
2494
+ method: "get",
2495
+ ...variables,
2496
+ signal
2497
+ });
2498
+ const listClusterBranches = (variables, signal) => dataPlaneFetch({
2499
+ url: "/cluster/{clusterId}/branches",
2500
+ method: "get",
2501
+ ...variables,
2502
+ signal
2503
+ });
2504
+ const listClusterExtensions = (variables, signal) => dataPlaneFetch({
2505
+ url: "/cluster/{clusterId}/extensions",
2506
+ method: "get",
2507
+ ...variables,
2508
+ signal
2509
+ });
2510
+ const installClusterExtension = (variables, signal) => dataPlaneFetch({
2511
+ url: "/cluster/{clusterId}/extensions",
2512
+ method: "post",
2513
+ ...variables,
2514
+ signal
2515
+ });
2516
+ const dropClusterExtension = (variables, signal) => dataPlaneFetch({
2517
+ url: "/cluster/{clusterId}/extensions",
2518
+ method: "delete",
2519
+ ...variables,
2520
+ signal
2521
+ });
2522
+ const getClusterMetrics = (variables, signal) => dataPlaneFetch({
2523
+ url: "/cluster/{clusterId}/metrics",
2524
+ method: "get",
2525
+ ...variables,
2526
+ signal
2527
+ });
2528
+ const applyMigration = (variables, signal) => dataPlaneFetch({
2529
+ url: "/db/{dbBranchName}/migrations/apply",
2530
+ method: "post",
2531
+ ...variables,
2532
+ signal
2533
+ });
2534
+ const startMigration = (variables, signal) => dataPlaneFetch({
2535
+ url: "/db/{dbBranchName}/migrations/start",
2536
+ method: "post",
2537
+ ...variables,
2538
+ signal
2539
+ });
2540
+ const completeMigration = (variables, signal) => dataPlaneFetch({
2541
+ url: "/db/{dbBranchName}/migrations/complete",
2542
+ method: "post",
2543
+ ...variables,
2544
+ signal
2545
+ });
2546
+ const rollbackMigration = (variables, signal) => dataPlaneFetch({
2547
+ url: "/db/{dbBranchName}/migrations/rollback",
2548
+ method: "post",
2549
+ ...variables,
2550
+ signal
2551
+ });
851
2552
  const adaptTable = (variables, signal) => dataPlaneFetch({
852
2553
  url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
853
2554
  method: "post",
@@ -860,9 +2561,30 @@ const adaptAllTables = (variables, signal) => dataPlaneFetch({
860
2561
  ...variables,
861
2562
  signal
862
2563
  });
863
- const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/status", method: "get", ...variables, signal });
864
- const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/jobs/{jobId}", method: "get", ...variables, signal });
865
- const getMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/history", method: "get", ...variables, signal });
2564
+ const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
2565
+ url: "/db/{dbBranchName}/migrations/status",
2566
+ method: "get",
2567
+ ...variables,
2568
+ signal
2569
+ });
2570
+ const getMigrationJobs = (variables, signal) => dataPlaneFetch({
2571
+ url: "/db/{dbBranchName}/migrations/jobs",
2572
+ method: "get",
2573
+ ...variables,
2574
+ signal
2575
+ });
2576
+ const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
2577
+ url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
2578
+ method: "get",
2579
+ ...variables,
2580
+ signal
2581
+ });
2582
+ const getMigrationHistory = (variables, signal) => dataPlaneFetch({
2583
+ url: "/db/{dbBranchName}/migrations/history",
2584
+ method: "get",
2585
+ ...variables,
2586
+ signal
2587
+ });
866
2588
  const getBranchList = (variables, signal) => dataPlaneFetch({
867
2589
  url: "/dbs/{dbName}",
868
2590
  method: "get",
@@ -876,6 +2598,7 @@ const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
876
2598
  signal
877
2599
  });
878
2600
  const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
2601
+ const createBranchAsync = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/async", method: "put", ...variables, signal });
879
2602
  const getBranchDetails = (variables, signal) => dataPlaneFetch({
880
2603
  url: "/db/{dbBranchName}",
881
2604
  method: "get",
@@ -889,68 +2612,166 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
889
2612
  ...variables,
890
2613
  signal
891
2614
  });
892
- const getSchema = (variables, signal) => dataPlaneFetch({
893
- url: "/db/{dbBranchName}/schema",
894
- method: "get",
2615
+ const getSchema = (variables, signal) => dataPlaneFetch({
2616
+ url: "/db/{dbBranchName}/schema",
2617
+ method: "get",
2618
+ ...variables,
2619
+ signal
2620
+ });
2621
+ const getSchemas = (variables, signal) => dataPlaneFetch({
2622
+ url: "/db/{dbBranchName}/schemas",
2623
+ method: "get",
2624
+ ...variables,
2625
+ signal
2626
+ });
2627
+ const copyBranch = (variables, signal) => dataPlaneFetch({
2628
+ url: "/db/{dbBranchName}/copy",
2629
+ method: "post",
2630
+ ...variables,
2631
+ signal
2632
+ });
2633
+ const getBranchMoveStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/move", method: "get", ...variables, signal });
2634
+ const moveBranch = (variables, signal) => dataPlaneFetch({
2635
+ url: "/db/{dbBranchName}/move",
2636
+ method: "put",
2637
+ ...variables,
2638
+ signal
2639
+ });
2640
+ const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
2641
+ url: "/db/{dbBranchName}/metadata",
2642
+ method: "put",
2643
+ ...variables,
2644
+ signal
2645
+ });
2646
+ const getBranchMetadata = (variables, signal) => dataPlaneFetch({
2647
+ url: "/db/{dbBranchName}/metadata",
2648
+ method: "get",
2649
+ ...variables,
2650
+ signal
2651
+ });
2652
+ const getBranchStats = (variables, signal) => dataPlaneFetch({
2653
+ url: "/db/{dbBranchName}/stats",
2654
+ method: "get",
2655
+ ...variables,
2656
+ signal
2657
+ });
2658
+ const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
2659
+ const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
2660
+ const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
2661
+ url: "/dbs/{dbName}/gitBranches",
2662
+ method: "delete",
2663
+ ...variables,
2664
+ signal
2665
+ });
2666
+ const resolveBranch = (variables, signal) => dataPlaneFetch({
2667
+ url: "/dbs/{dbName}/resolveBranch",
2668
+ method: "get",
2669
+ ...variables,
2670
+ signal
2671
+ });
2672
+ const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
2673
+ url: "/db/{dbBranchName}/migrations",
2674
+ method: "get",
2675
+ ...variables,
2676
+ signal
2677
+ });
2678
+ const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
2679
+ url: "/db/{dbBranchName}/migrations/plan",
2680
+ method: "post",
2681
+ ...variables,
2682
+ signal
2683
+ });
2684
+ const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
2685
+ url: "/db/{dbBranchName}/migrations/execute",
2686
+ method: "post",
2687
+ ...variables,
2688
+ signal
2689
+ });
2690
+ const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
2691
+ url: "/dbs/{dbName}/migrations/query",
2692
+ method: "post",
2693
+ ...variables,
2694
+ signal
2695
+ });
2696
+ const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
2697
+ const getMigrationRequest = (variables, signal) => dataPlaneFetch({
2698
+ url: "/dbs/{dbName}/migrations/{mrNumber}",
2699
+ method: "get",
2700
+ ...variables,
2701
+ signal
2702
+ });
2703
+ const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
2704
+ url: "/dbs/{dbName}/migrations/{mrNumber}",
2705
+ method: "patch",
2706
+ ...variables,
2707
+ signal
2708
+ });
2709
+ const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
2710
+ url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
2711
+ method: "post",
2712
+ ...variables,
2713
+ signal
2714
+ });
2715
+ const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
2716
+ url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
2717
+ method: "post",
2718
+ ...variables,
2719
+ signal
2720
+ });
2721
+ const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
2722
+ url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2723
+ method: "get",
2724
+ ...variables,
2725
+ signal
2726
+ });
2727
+ const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
2728
+ url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2729
+ method: "post",
2730
+ ...variables,
2731
+ signal
2732
+ });
2733
+ const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
2734
+ url: "/db/{dbBranchName}/schema/history",
2735
+ method: "post",
895
2736
  ...variables,
896
2737
  signal
897
2738
  });
898
- const copyBranch = (variables, signal) => dataPlaneFetch({
899
- url: "/db/{dbBranchName}/copy",
2739
+ const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
2740
+ url: "/db/{dbBranchName}/schema/compare",
900
2741
  method: "post",
901
2742
  ...variables,
902
2743
  signal
903
2744
  });
904
- const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
905
- url: "/db/{dbBranchName}/metadata",
906
- method: "put",
2745
+ const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
2746
+ url: "/db/{dbBranchName}/schema/compare/{branchName}",
2747
+ method: "post",
907
2748
  ...variables,
908
2749
  signal
909
2750
  });
910
- const getBranchMetadata = (variables, signal) => dataPlaneFetch({
911
- url: "/db/{dbBranchName}/metadata",
912
- method: "get",
2751
+ const updateBranchSchema = (variables, signal) => dataPlaneFetch({
2752
+ url: "/db/{dbBranchName}/schema/update",
2753
+ method: "post",
913
2754
  ...variables,
914
2755
  signal
915
2756
  });
916
- const getBranchStats = (variables, signal) => dataPlaneFetch({
917
- url: "/db/{dbBranchName}/stats",
918
- method: "get",
2757
+ const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
2758
+ url: "/db/{dbBranchName}/schema/preview",
2759
+ method: "post",
919
2760
  ...variables,
920
2761
  signal
921
2762
  });
922
- const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
923
- const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
924
- const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "delete", ...variables, signal });
925
- const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
926
- const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
927
- const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
928
- const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
929
- const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
930
- const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
931
- const getMigrationRequest = (variables, signal) => dataPlaneFetch({
932
- url: "/dbs/{dbName}/migrations/{mrNumber}",
933
- method: "get",
2763
+ const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
2764
+ url: "/db/{dbBranchName}/schema/apply",
2765
+ method: "post",
934
2766
  ...variables,
935
2767
  signal
936
2768
  });
937
- const updateMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}", method: "patch", ...variables, signal });
938
- const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/commits", method: "post", ...variables, signal });
939
- const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
940
- const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
941
- const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
942
- url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2769
+ const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
2770
+ url: "/db/{dbBranchName}/schema/push",
943
2771
  method: "post",
944
2772
  ...variables,
945
2773
  signal
946
2774
  });
947
- const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
948
- const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
949
- const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
950
- const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
951
- const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
952
- const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
953
- const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
954
2775
  const createTable = (variables, signal) => dataPlaneFetch({
955
2776
  url: "/db/{dbBranchName}/tables/{tableName}",
956
2777
  method: "put",
@@ -963,14 +2784,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
963
2784
  ...variables,
964
2785
  signal
965
2786
  });
966
- const updateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}", method: "patch", ...variables, signal });
2787
+ const updateTable = (variables, signal) => dataPlaneFetch({
2788
+ url: "/db/{dbBranchName}/tables/{tableName}",
2789
+ method: "patch",
2790
+ ...variables,
2791
+ signal
2792
+ });
967
2793
  const getTableSchema = (variables, signal) => dataPlaneFetch({
968
2794
  url: "/db/{dbBranchName}/tables/{tableName}/schema",
969
2795
  method: "get",
970
2796
  ...variables,
971
2797
  signal
972
2798
  });
973
- const setTableSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/schema", method: "put", ...variables, signal });
2799
+ const setTableSchema = (variables, signal) => dataPlaneFetch({
2800
+ url: "/db/{dbBranchName}/tables/{tableName}/schema",
2801
+ method: "put",
2802
+ ...variables,
2803
+ signal
2804
+ });
974
2805
  const getTableColumns = (variables, signal) => dataPlaneFetch({
975
2806
  url: "/db/{dbBranchName}/tables/{tableName}/columns",
976
2807
  method: "get",
@@ -978,7 +2809,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
978
2809
  signal
979
2810
  });
980
2811
  const addTableColumn = (variables, signal) => dataPlaneFetch(
981
- { url: "/db/{dbBranchName}/tables/{tableName}/columns", method: "post", ...variables, signal }
2812
+ {
2813
+ url: "/db/{dbBranchName}/tables/{tableName}/columns",
2814
+ method: "post",
2815
+ ...variables,
2816
+ signal
2817
+ }
982
2818
  );
983
2819
  const getColumn = (variables, signal) => dataPlaneFetch({
984
2820
  url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
@@ -986,15 +2822,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
986
2822
  ...variables,
987
2823
  signal
988
2824
  });
989
- const updateColumn = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}", method: "patch", ...variables, signal });
2825
+ const updateColumn = (variables, signal) => dataPlaneFetch({
2826
+ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
2827
+ method: "patch",
2828
+ ...variables,
2829
+ signal
2830
+ });
990
2831
  const deleteColumn = (variables, signal) => dataPlaneFetch({
991
2832
  url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
992
2833
  method: "delete",
993
2834
  ...variables,
994
2835
  signal
995
2836
  });
996
- const branchTransaction = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/transaction", method: "post", ...variables, signal });
997
- const insertRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data", method: "post", ...variables, signal });
2837
+ const branchTransaction = (variables, signal) => dataPlaneFetch({
2838
+ url: "/db/{dbBranchName}/transaction",
2839
+ method: "post",
2840
+ ...variables,
2841
+ signal
2842
+ });
2843
+ const insertRecord = (variables, signal) => dataPlaneFetch({
2844
+ url: "/db/{dbBranchName}/tables/{tableName}/data",
2845
+ method: "post",
2846
+ ...variables,
2847
+ signal
2848
+ });
998
2849
  const getFileItem = (variables, signal) => dataPlaneFetch({
999
2850
  url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
1000
2851
  method: "get",
@@ -1037,11 +2888,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
1037
2888
  ...variables,
1038
2889
  signal
1039
2890
  });
1040
- const insertRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "put", ...variables, signal });
1041
- const updateRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "patch", ...variables, signal });
1042
- const upsertRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "post", ...variables, signal });
1043
- const deleteRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "delete", ...variables, signal });
1044
- const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/bulk", method: "post", ...variables, signal });
2891
+ const insertRecordWithID = (variables, signal) => dataPlaneFetch({
2892
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2893
+ method: "put",
2894
+ ...variables,
2895
+ signal
2896
+ });
2897
+ const updateRecordWithID = (variables, signal) => dataPlaneFetch({
2898
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2899
+ method: "patch",
2900
+ ...variables,
2901
+ signal
2902
+ });
2903
+ const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
2904
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2905
+ method: "post",
2906
+ ...variables,
2907
+ signal
2908
+ });
2909
+ const deleteRecord = (variables, signal) => dataPlaneFetch({
2910
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2911
+ method: "delete",
2912
+ ...variables,
2913
+ signal
2914
+ });
2915
+ const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
2916
+ url: "/db/{dbBranchName}/tables/{tableName}/bulk",
2917
+ method: "post",
2918
+ ...variables,
2919
+ signal
2920
+ });
1045
2921
  const queryTable = (variables, signal) => dataPlaneFetch({
1046
2922
  url: "/db/{dbBranchName}/tables/{tableName}/query",
1047
2923
  method: "post",
@@ -1060,16 +2936,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
1060
2936
  ...variables,
1061
2937
  signal
1062
2938
  });
1063
- const vectorSearchTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch", method: "post", ...variables, signal });
2939
+ const vectorSearchTable = (variables, signal) => dataPlaneFetch({
2940
+ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
2941
+ method: "post",
2942
+ ...variables,
2943
+ signal
2944
+ });
1064
2945
  const askTable = (variables, signal) => dataPlaneFetch({
1065
2946
  url: "/db/{dbBranchName}/tables/{tableName}/ask",
1066
2947
  method: "post",
1067
2948
  ...variables,
1068
2949
  signal
1069
2950
  });
1070
- const askTableSession = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}", method: "post", ...variables, signal });
1071
- const summarizeTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/summarize", method: "post", ...variables, signal });
1072
- const aggregateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/aggregate", method: "post", ...variables, signal });
2951
+ const askTableSession = (variables, signal) => dataPlaneFetch({
2952
+ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
2953
+ method: "post",
2954
+ ...variables,
2955
+ signal
2956
+ });
2957
+ const summarizeTable = (variables, signal) => dataPlaneFetch({
2958
+ url: "/db/{dbBranchName}/tables/{tableName}/summarize",
2959
+ method: "post",
2960
+ ...variables,
2961
+ signal
2962
+ });
2963
+ const aggregateTable = (variables, signal) => dataPlaneFetch({
2964
+ url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
2965
+ method: "post",
2966
+ ...variables,
2967
+ signal
2968
+ });
1073
2969
  const fileAccess = (variables, signal) => dataPlaneFetch({
1074
2970
  url: "/file/{fileId}",
1075
2971
  method: "get",
@@ -1088,15 +2984,34 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
1088
2984
  ...variables,
1089
2985
  signal
1090
2986
  });
2987
+ const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
2988
+ url: "/db/{dbBranchName}/sql/batch",
2989
+ method: "post",
2990
+ ...variables,
2991
+ signal
2992
+ });
1091
2993
  const operationsByTag$2 = {
2994
+ tasks: { getTasks, getTaskStatus },
2995
+ cluster: {
2996
+ listClusterBranches,
2997
+ listClusterExtensions,
2998
+ installClusterExtension,
2999
+ dropClusterExtension,
3000
+ getClusterMetrics
3001
+ },
1092
3002
  migrations: {
1093
3003
  applyMigration,
3004
+ startMigration,
3005
+ completeMigration,
3006
+ rollbackMigration,
1094
3007
  adaptTable,
1095
3008
  adaptAllTables,
1096
3009
  getBranchMigrationJobStatus,
3010
+ getMigrationJobs,
1097
3011
  getMigrationJobStatus,
1098
3012
  getMigrationHistory,
1099
3013
  getSchema,
3014
+ getSchemas,
1100
3015
  getBranchMigrationHistory,
1101
3016
  getBranchMigrationPlan,
1102
3017
  executeBranchMigrationPlan,
@@ -1110,10 +3025,13 @@ const operationsByTag$2 = {
1110
3025
  },
1111
3026
  branch: {
1112
3027
  getBranchList,
3028
+ createBranchAsync,
1113
3029
  getBranchDetails,
1114
3030
  createBranch,
1115
3031
  deleteBranch,
1116
3032
  copyBranch,
3033
+ getBranchMoveStatus,
3034
+ moveBranch,
1117
3035
  updateBranchMetadata,
1118
3036
  getBranchMetadata,
1119
3037
  getBranchStats,
@@ -1155,7 +3073,16 @@ const operationsByTag$2 = {
1155
3073
  deleteRecord,
1156
3074
  bulkInsertTableRecords
1157
3075
  },
1158
- files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess, fileUpload },
3076
+ files: {
3077
+ getFileItem,
3078
+ putFileItem,
3079
+ deleteFileItem,
3080
+ getFile,
3081
+ putFile,
3082
+ deleteFile,
3083
+ fileAccess,
3084
+ fileUpload
3085
+ },
1159
3086
  searchAndFilter: {
1160
3087
  queryTable,
1161
3088
  searchBranch,
@@ -1166,7 +3093,7 @@ const operationsByTag$2 = {
1166
3093
  summarizeTable,
1167
3094
  aggregateTable
1168
3095
  },
1169
- sql: { sqlQuery }
3096
+ sql: { sqlQuery, sqlBatchQuery }
1170
3097
  };
1171
3098
 
1172
3099
  const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
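
Note: operationsByTag$2 above now registers the new tasks and cluster groups and lists sqlBatchQuery next to sqlQuery. The groups are plain namespacing, so each entry is simply the wrapper constant defined earlier in this file; the checks below follow directly from the object literal and are illustrative only.

console.log(operationsByTag$2.sql.sqlBatchQuery === sqlBatchQuery); // true
console.log(operationsByTag$2.tasks.getTaskStatus === getTaskStatus); // true
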
@@ -1233,7 +3160,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
1233
3160
  ...variables,
1234
3161
  signal
1235
3162
  });
1236
- const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({ url: "/user/oauth/tokens/{token}", method: "patch", ...variables, signal });
3163
+ const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
3164
+ url: "/user/oauth/tokens/{token}",
3165
+ method: "patch",
3166
+ ...variables,
3167
+ signal
3168
+ });
1237
3169
  const getWorkspacesList = (variables, signal) => controlPlaneFetch({
1238
3170
  url: "/workspaces",
1239
3171
  method: "get",
@@ -1264,49 +3196,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
1264
3196
  ...variables,
1265
3197
  signal
1266
3198
  });
1267
- const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "get", ...variables, signal });
1268
- const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "patch", ...variables, signal });
1269
- const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members", method: "get", ...variables, signal });
1270
- const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members/{userId}", method: "put", ...variables, signal });
3199
+ const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
3200
+ url: "/workspaces/{workspaceId}/settings",
3201
+ method: "get",
3202
+ ...variables,
3203
+ signal
3204
+ });
3205
+ const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
3206
+ url: "/workspaces/{workspaceId}/settings",
3207
+ method: "patch",
3208
+ ...variables,
3209
+ signal
3210
+ });
3211
+ const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
3212
+ url: "/workspaces/{workspaceId}/members",
3213
+ method: "get",
3214
+ ...variables,
3215
+ signal
3216
+ });
3217
+ const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
3218
+ url: "/workspaces/{workspaceId}/members/{userId}",
3219
+ method: "put",
3220
+ ...variables,
3221
+ signal
3222
+ });
1271
3223
  const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
1272
3224
  url: "/workspaces/{workspaceId}/members/{userId}",
1273
3225
  method: "delete",
1274
3226
  ...variables,
1275
3227
  signal
1276
3228
  });
1277
- const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites", method: "post", ...variables, signal });
1278
- const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "patch", ...variables, signal });
1279
- const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "delete", ...variables, signal });
1280
- const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept", method: "post", ...variables, signal });
1281
- const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend", method: "post", ...variables, signal });
1282
- const listClusters = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "get", ...variables, signal });
1283
- const createCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "post", ...variables, signal });
3229
+ const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
3230
+ url: "/workspaces/{workspaceId}/invites",
3231
+ method: "post",
3232
+ ...variables,
3233
+ signal
3234
+ });
3235
+ const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3236
+ url: "/workspaces/{workspaceId}/invites/{inviteId}",
3237
+ method: "patch",
3238
+ ...variables,
3239
+ signal
3240
+ });
3241
+ const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3242
+ url: "/workspaces/{workspaceId}/invites/{inviteId}",
3243
+ method: "delete",
3244
+ ...variables,
3245
+ signal
3246
+ });
3247
+ const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3248
+ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
3249
+ method: "post",
3250
+ ...variables,
3251
+ signal
3252
+ });
3253
+ const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3254
+ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
3255
+ method: "post",
3256
+ ...variables,
3257
+ signal
3258
+ });
3259
+ const listClusters = (variables, signal) => controlPlaneFetch({
3260
+ url: "/workspaces/{workspaceId}/clusters",
3261
+ method: "get",
3262
+ ...variables,
3263
+ signal
3264
+ });
3265
+ const createCluster = (variables, signal) => controlPlaneFetch({
3266
+ url: "/workspaces/{workspaceId}/clusters",
3267
+ method: "post",
3268
+ ...variables,
3269
+ signal
3270
+ });
1284
3271
  const getCluster = (variables, signal) => controlPlaneFetch({
1285
3272
  url: "/workspaces/{workspaceId}/clusters/{clusterId}",
1286
3273
  method: "get",
1287
3274
  ...variables,
1288
3275
  signal
1289
3276
  });
1290
- const updateCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters/{clusterId}", method: "patch", ...variables, signal });
3277
+ const updateCluster = (variables, signal) => controlPlaneFetch({
3278
+ url: "/workspaces/{workspaceId}/clusters/{clusterId}",
3279
+ method: "patch",
3280
+ ...variables,
3281
+ signal
3282
+ });
3283
+ const deleteCluster = (variables, signal) => controlPlaneFetch({
3284
+ url: "/workspaces/{workspaceId}/clusters/{clusterId}",
3285
+ method: "delete",
3286
+ ...variables,
3287
+ signal
3288
+ });
1291
3289
  const getDatabaseList = (variables, signal) => controlPlaneFetch({
1292
3290
  url: "/workspaces/{workspaceId}/dbs",
1293
3291
  method: "get",
1294
3292
  ...variables,
1295
3293
  signal
1296
3294
  });
1297
- const createDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "put", ...variables, signal });
3295
+ const createDatabase = (variables, signal) => controlPlaneFetch({
3296
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3297
+ method: "put",
3298
+ ...variables,
3299
+ signal
3300
+ });
1298
3301
  const deleteDatabase = (variables, signal) => controlPlaneFetch({
1299
3302
  url: "/workspaces/{workspaceId}/dbs/{dbName}",
1300
3303
  method: "delete",
1301
3304
  ...variables,
1302
3305
  signal
1303
3306
  });
1304
- const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "get", ...variables, signal });
1305
- const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "patch", ...variables, signal });
1306
- const renameDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename", method: "post", ...variables, signal });
1307
- const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "get", ...variables, signal });
1308
- const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "put", ...variables, signal });
1309
- const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "delete", ...variables, signal });
3307
+ const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
3308
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3309
+ method: "get",
3310
+ ...variables,
3311
+ signal
3312
+ });
3313
+ const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
3314
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3315
+ method: "patch",
3316
+ ...variables,
3317
+ signal
3318
+ });
3319
+ const renameDatabase = (variables, signal) => controlPlaneFetch({
3320
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
3321
+ method: "post",
3322
+ ...variables,
3323
+ signal
3324
+ });
3325
+ const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3326
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3327
+ method: "get",
3328
+ ...variables,
3329
+ signal
3330
+ });
3331
+ const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3332
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3333
+ method: "put",
3334
+ ...variables,
3335
+ signal
3336
+ });
3337
+ const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3338
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3339
+ method: "delete",
3340
+ ...variables,
3341
+ signal
3342
+ });
1310
3343
  const listRegions = (variables, signal) => controlPlaneFetch({
1311
3344
  url: "/workspaces/{workspaceId}/regions",
1312
3345
  method: "get",
@@ -1344,7 +3377,13 @@ const operationsByTag$1 = {
1344
3377
  acceptWorkspaceMemberInvite,
1345
3378
  resendWorkspaceMemberInvite
1346
3379
  },
1347
- xbcontrolOther: { listClusters, createCluster, getCluster, updateCluster },
3380
+ xbcontrolOther: {
3381
+ listClusters,
3382
+ createCluster,
3383
+ getCluster,
3384
+ updateCluster,
3385
+ deleteCluster
3386
+ },
1348
3387
  databases: {
1349
3388
  getDatabaseList,
1350
3389
  createDatabase,
@@ -1364,7 +3403,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
1364
3403
  const buildApiClient = () => class {
1365
3404
  constructor(options = {}) {
1366
3405
  const provider = options.host ?? "production";
1367
- const apiKey = options.apiKey ?? getAPIKey();
3406
+ const apiKey = options.apiKey;
1368
3407
  const trace = options.trace ?? defaultTrace;
1369
3408
  const clientID = generateUUID();
1370
3409
  if (!apiKey) {
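
Note: combined with the removal of getEnvironment/getAPIKey earlier in this diff, the change above means the class returned by buildApiClient no longer falls back to ambient environment variables in this module; only options.apiKey is read. A hedged usage sketch (XATA_API_KEY is the variable name the removed code used; supplying it is now the caller's job):

// Hedged sketch: the key must be supplied explicitly to the built client.
const ApiClient = buildApiClient();
const api = new ApiClient({ apiKey: process.env.XATA_API_KEY });
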
@@ -1431,8 +3470,7 @@ function buildTransformString(transformations) {
1431
3470
  ).join(",");
1432
3471
  }
1433
3472
  function transformImage(url, ...transformations) {
1434
- if (!isDefined(url))
1435
- return void 0;
3473
+ if (!isDefined(url)) return void 0;
1436
3474
  const newTransformations = buildTransformString(transformations);
1437
3475
  const { hostname, pathname, search } = new URL(url);
1438
3476
  const pathParts = pathname.split("/");
@@ -1545,8 +3583,7 @@ class XataFile {
1545
3583
  }
1546
3584
  }
1547
3585
  const parseInputFileEntry = async (entry) => {
1548
- if (!isDefined(entry))
1549
- return null;
3586
+ if (!isDefined(entry)) return null;
1550
3587
  const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
1551
3588
  return compactObject({
1552
3589
  id,
@@ -1561,24 +3598,19 @@ const parseInputFileEntry = async (entry) => {
1561
3598
  };
1562
3599
 
1563
3600
  function cleanFilter(filter) {
1564
- if (!isDefined(filter))
1565
- return void 0;
1566
- if (!isObject(filter))
1567
- return filter;
3601
+ if (!isDefined(filter)) return void 0;
3602
+ if (!isObject(filter)) return filter;
1568
3603
  const values = Object.fromEntries(
1569
3604
  Object.entries(filter).reduce((acc, [key, value]) => {
1570
- if (!isDefined(value))
1571
- return acc;
3605
+ if (!isDefined(value)) return acc;
1572
3606
  if (Array.isArray(value)) {
1573
3607
  const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
1574
- if (clean.length === 0)
1575
- return acc;
3608
+ if (clean.length === 0) return acc;
1576
3609
  return [...acc, [key, clean]];
1577
3610
  }
1578
3611
  if (isObject(value)) {
1579
3612
  const clean = cleanFilter(value);
1580
- if (!isDefined(clean))
1581
- return acc;
3613
+ if (!isDefined(clean)) return acc;
1582
3614
  return [...acc, [key, clean]];
1583
3615
  }
1584
3616
  return [...acc, [key, value]];
@@ -1588,10 +3620,8 @@ function cleanFilter(filter) {
1588
3620
  }
1589
3621
 
1590
3622
  function stringifyJson(value) {
1591
- if (!isDefined(value))
1592
- return value;
1593
- if (isString(value))
1594
- return value;
3623
+ if (!isDefined(value)) return value;
3624
+ if (isString(value)) return value;
1595
3625
  try {
1596
3626
  return JSON.stringify(value);
1597
3627
  } catch (e) {
@@ -1606,28 +3636,17 @@ function parseJson(value) {
1606
3636
  }
1607
3637
  }
1608
3638
 
1609
- var __accessCheck$5 = (obj, member, msg) => {
1610
- if (!member.has(obj))
1611
- throw TypeError("Cannot " + msg);
1612
- };
1613
- var __privateGet$4 = (obj, member, getter) => {
1614
- __accessCheck$5(obj, member, "read from private field");
1615
- return getter ? getter.call(obj) : member.get(obj);
1616
- };
1617
- var __privateAdd$5 = (obj, member, value) => {
1618
- if (member.has(obj))
1619
- throw TypeError("Cannot add the same private member more than once");
1620
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1621
- };
1622
- var __privateSet$3 = (obj, member, value, setter) => {
1623
- __accessCheck$5(obj, member, "write to private field");
1624
- setter ? setter.call(obj, value) : member.set(obj, value);
1625
- return value;
3639
+ var __typeError$5 = (msg) => {
3640
+ throw TypeError(msg);
1626
3641
  };
3642
+ var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
3643
+ var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
3644
+ var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3645
+ var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
1627
3646
  var _query, _page;
1628
3647
  class Page {
1629
3648
  constructor(query, meta, records = []) {
1630
- __privateAdd$5(this, _query, void 0);
3649
+ __privateAdd$5(this, _query);
1631
3650
  __privateSet$3(this, _query, query);
1632
3651
  this.meta = meta;
1633
3652
  this.records = new PageRecordArray(this, records);
@@ -1714,7 +3733,7 @@ class RecordArray extends Array {
1714
3733
  const _PageRecordArray = class _PageRecordArray extends Array {
1715
3734
  constructor(...args) {
1716
3735
  super(..._PageRecordArray.parseConstructorParams(...args));
1717
- __privateAdd$5(this, _page, void 0);
3736
+ __privateAdd$5(this, _page);
1718
3737
  __privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
1719
3738
  }
1720
3739
  static parseConstructorParams(...args) {
@@ -1785,34 +3804,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1785
3804
  _page = new WeakMap();
1786
3805
  let PageRecordArray = _PageRecordArray;
1787
3806
 
1788
- var __accessCheck$4 = (obj, member, msg) => {
1789
- if (!member.has(obj))
1790
- throw TypeError("Cannot " + msg);
1791
- };
1792
- var __privateGet$3 = (obj, member, getter) => {
1793
- __accessCheck$4(obj, member, "read from private field");
1794
- return getter ? getter.call(obj) : member.get(obj);
1795
- };
1796
- var __privateAdd$4 = (obj, member, value) => {
1797
- if (member.has(obj))
1798
- throw TypeError("Cannot add the same private member more than once");
1799
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1800
- };
1801
- var __privateSet$2 = (obj, member, value, setter) => {
1802
- __accessCheck$4(obj, member, "write to private field");
1803
- setter ? setter.call(obj, value) : member.set(obj, value);
1804
- return value;
1805
- };
1806
- var __privateMethod$3 = (obj, member, method) => {
1807
- __accessCheck$4(obj, member, "access private method");
1808
- return method;
3807
+ var __typeError$4 = (msg) => {
3808
+ throw TypeError(msg);
1809
3809
  };
1810
- var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
3810
+ var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
3811
+ var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
3812
+ var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3813
+ var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
3814
+ var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
3815
+ var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
1811
3816
  const _Query = class _Query {
1812
3817
  constructor(repository, table, data, rawParent) {
1813
- __privateAdd$4(this, _cleanFilterConstraint);
1814
- __privateAdd$4(this, _table$1, void 0);
1815
- __privateAdd$4(this, _repository, void 0);
3818
+ __privateAdd$4(this, _Query_instances);
3819
+ __privateAdd$4(this, _table$1);
3820
+ __privateAdd$4(this, _repository);
1816
3821
  __privateAdd$4(this, _data, { filter: {} });
1817
3822
  // Implements pagination
1818
3823
  this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
@@ -1890,12 +3895,12 @@ const _Query = class _Query {
1890
3895
  filter(a, b) {
1891
3896
  if (arguments.length === 1) {
1892
3897
  const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
1893
- [column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
3898
+ [column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
1894
3899
  }));
1895
3900
  const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
1896
3901
  return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
1897
3902
  } else {
1898
- const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
3903
+ const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
1899
3904
  const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
1900
3905
  return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
1901
3906
  }
@@ -1974,8 +3979,7 @@ const _Query = class _Query {
1974
3979
  }
1975
3980
  async getFirstOrThrow(options = {}) {
1976
3981
  const records = await this.getMany({ ...options, pagination: { size: 1 } });
1977
- if (records[0] === void 0)
1978
- throw new Error("No results found.");
3982
+ if (records[0] === void 0) throw new Error("No results found.");
1979
3983
  return records[0];
1980
3984
  }
1981
3985
  async summarize(params = {}) {
@@ -2030,7 +4034,7 @@ const _Query = class _Query {
2030
4034
  _table$1 = new WeakMap();
2031
4035
  _repository = new WeakMap();
2032
4036
  _data = new WeakMap();
2033
- _cleanFilterConstraint = new WeakSet();
4037
+ _Query_instances = new WeakSet();
2034
4038
  cleanFilterConstraint_fn = function(column, value) {
2035
4039
  const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
2036
4040
  if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
@@ -2091,8 +4095,7 @@ function isSortFilterString(value) {
2091
4095
  }
2092
4096
  function isSortFilterBase(filter) {
2093
4097
  return isObject(filter) && Object.entries(filter).every(([key, value]) => {
2094
- if (key === "*")
2095
- return value === "random";
4098
+ if (key === "*") return value === "random";
2096
4099
  return value === "asc" || value === "desc";
2097
4100
  });
2098
4101
  }
@@ -2113,29 +4116,15 @@ function buildSortFilter(filter) {
2113
4116
  }
2114
4117
  }
2115
4118
 
2116
- var __accessCheck$3 = (obj, member, msg) => {
2117
- if (!member.has(obj))
2118
- throw TypeError("Cannot " + msg);
4119
+ var __typeError$3 = (msg) => {
4120
+ throw TypeError(msg);
2119
4121
  };
2120
- var __privateGet$2 = (obj, member, getter) => {
2121
- __accessCheck$3(obj, member, "read from private field");
2122
- return getter ? getter.call(obj) : member.get(obj);
2123
- };
2124
- var __privateAdd$3 = (obj, member, value) => {
2125
- if (member.has(obj))
2126
- throw TypeError("Cannot add the same private member more than once");
2127
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2128
- };
2129
- var __privateSet$1 = (obj, member, value, setter) => {
2130
- __accessCheck$3(obj, member, "write to private field");
2131
- setter ? setter.call(obj, value) : member.set(obj, value);
2132
- return value;
2133
- };
2134
- var __privateMethod$2 = (obj, member, method) => {
2135
- __accessCheck$3(obj, member, "access private method");
2136
- return method;
2137
- };
2138
- var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
4122
+ var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
4123
+ var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
4124
+ var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
4125
+ var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
4126
+ var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
4127
+ var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
2139
4128
  const BULK_OPERATION_MAX_SIZE = 1e3;
2140
4129
  class Repository extends Query {
2141
4130
  }
@@ -2146,21 +4135,12 @@ class RestRepository extends Query {
2146
4135
  { name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
2147
4136
  {}
2148
4137
  );
2149
- __privateAdd$3(this, _insertRecordWithoutId);
2150
- __privateAdd$3(this, _insertRecordWithId);
2151
- __privateAdd$3(this, _insertRecords);
2152
- __privateAdd$3(this, _updateRecordWithID);
2153
- __privateAdd$3(this, _updateRecords);
2154
- __privateAdd$3(this, _upsertRecordWithID);
2155
- __privateAdd$3(this, _deleteRecord);
2156
- __privateAdd$3(this, _deleteRecords);
2157
- __privateAdd$3(this, _getSchemaTables);
2158
- __privateAdd$3(this, _transformObjectToApi);
2159
- __privateAdd$3(this, _table, void 0);
2160
- __privateAdd$3(this, _getFetchProps, void 0);
2161
- __privateAdd$3(this, _db, void 0);
2162
- __privateAdd$3(this, _schemaTables, void 0);
2163
- __privateAdd$3(this, _trace, void 0);
4138
+ __privateAdd$3(this, _RestRepository_instances);
4139
+ __privateAdd$3(this, _table);
4140
+ __privateAdd$3(this, _getFetchProps);
4141
+ __privateAdd$3(this, _db);
4142
+ __privateAdd$3(this, _schemaTables);
4143
+ __privateAdd$3(this, _trace);
2164
4144
  __privateSet$1(this, _table, options.table);
2165
4145
  __privateSet$1(this, _db, options.db);
2166
4146
  __privateSet$1(this, _schemaTables, options.schemaTables);
@@ -2179,31 +4159,28 @@ class RestRepository extends Query {
2179
4159
  return __privateGet$2(this, _trace).call(this, "create", async () => {
2180
4160
  const ifVersion = parseIfVersion(b, c, d);
2181
4161
  if (Array.isArray(a)) {
2182
- if (a.length === 0)
2183
- return [];
2184
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
4162
+ if (a.length === 0) return [];
4163
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
2185
4164
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2186
4165
  const result = await this.read(ids, columns);
2187
4166
  return result;
2188
4167
  }
2189
4168
  if (isString(a) && isObject(b)) {
2190
- if (a === "")
2191
- throw new Error("The id can't be empty");
4169
+ if (a === "") throw new Error("The id can't be empty");
2192
4170
  const columns = isValidSelectableColumns(c) ? c : void 0;
2193
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
4171
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
2194
4172
  }
2195
4173
  if (isObject(a) && isString(a.xata_id)) {
2196
- if (a.xata_id === "")
2197
- throw new Error("The id can't be empty");
4174
+ if (a.xata_id === "") throw new Error("The id can't be empty");
2198
4175
  const columns = isValidSelectableColumns(b) ? b : void 0;
2199
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
4176
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
2200
4177
  createOnly: true,
2201
4178
  ifVersion
2202
4179
  });
2203
4180
  }
2204
4181
  if (isObject(a)) {
2205
4182
  const columns = isValidSelectableColumns(b) ? b : void 0;
2206
- return __privateMethod$2(this, _insertRecordWithoutId, insertRecordWithoutId_fn).call(this, a, columns);
4183
+ return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
2207
4184
  }
2208
4185
  throw new Error("Invalid arguments for create method");
2209
4186
  });
@@ -2212,8 +4189,7 @@ class RestRepository extends Query {
2212
4189
  return __privateGet$2(this, _trace).call(this, "read", async () => {
2213
4190
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2214
4191
  if (Array.isArray(a)) {
2215
- if (a.length === 0)
2216
- return [];
4192
+ if (a.length === 0) return [];
2217
4193
  const ids = a.map((item) => extractId(item));
2218
4194
  const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
2219
4195
  const dictionary = finalObjects.reduce((acc, object) => {
@@ -2236,7 +4212,7 @@ class RestRepository extends Query {
2236
4212
  queryParams: { columns },
2237
4213
  ...__privateGet$2(this, _getFetchProps).call(this)
2238
4214
  });
2239
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4215
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2240
4216
  return initObject(
2241
4217
  __privateGet$2(this, _db),
2242
4218
  schemaTables,
@@ -2277,11 +4253,10 @@ class RestRepository extends Query {
2277
4253
  return __privateGet$2(this, _trace).call(this, "update", async () => {
2278
4254
  const ifVersion = parseIfVersion(b, c, d);
2279
4255
  if (Array.isArray(a)) {
2280
- if (a.length === 0)
2281
- return [];
4256
+ if (a.length === 0) return [];
2282
4257
  const existing = await this.read(a, ["xata_id"]);
2283
4258
  const updates = a.filter((_item, index) => existing[index] !== null);
2284
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, updates, {
4259
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
2285
4260
  ifVersion,
2286
4261
  upsert: false
2287
4262
  });
@@ -2292,15 +4267,14 @@ class RestRepository extends Query {
2292
4267
  try {
2293
4268
  if (isString(a) && isObject(b)) {
2294
4269
  const columns = isValidSelectableColumns(c) ? c : void 0;
2295
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
4270
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2296
4271
  }
2297
4272
  if (isObject(a) && isString(a.xata_id)) {
2298
4273
  const columns = isValidSelectableColumns(b) ? b : void 0;
2299
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
4274
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
2300
4275
  }
2301
4276
  } catch (error) {
2302
- if (error.status === 422)
2303
- return null;
4277
+ if (error.status === 422) return null;
2304
4278
  throw error;
2305
4279
  }
2306
4280
  throw new Error("Invalid arguments for update method");
@@ -2329,9 +4303,8 @@ class RestRepository extends Query {
2329
4303
  return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
2330
4304
  const ifVersion = parseIfVersion(b, c, d);
2331
4305
  if (Array.isArray(a)) {
2332
- if (a.length === 0)
2333
- return [];
2334
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
4306
+ if (a.length === 0) return [];
4307
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
2335
4308
  ifVersion,
2336
4309
  upsert: true
2337
4310
  });
@@ -2340,16 +4313,14 @@ class RestRepository extends Query {
2340
4313
  return result;
2341
4314
  }
2342
4315
  if (isString(a) && isObject(b)) {
2343
- if (a === "")
2344
- throw new Error("The id can't be empty");
4316
+ if (a === "") throw new Error("The id can't be empty");
2345
4317
  const columns = isValidSelectableColumns(c) ? c : void 0;
2346
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
4318
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2347
4319
  }
2348
4320
  if (isObject(a) && isString(a.xata_id)) {
2349
- if (a.xata_id === "")
2350
- throw new Error("The id can't be empty");
4321
+ if (a.xata_id === "") throw new Error("The id can't be empty");
2351
4322
  const columns = isValidSelectableColumns(c) ? c : void 0;
2352
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
4323
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
2353
4324
  }
2354
4325
  if (!isDefined(a) && isObject(b)) {
2355
4326
  return await this.create(b, c);
@@ -2364,24 +4335,21 @@ class RestRepository extends Query {
2364
4335
  return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
2365
4336
  const ifVersion = parseIfVersion(b, c, d);
2366
4337
  if (Array.isArray(a)) {
2367
- if (a.length === 0)
2368
- return [];
2369
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
4338
+ if (a.length === 0) return [];
4339
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
2370
4340
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2371
4341
  const result = await this.read(ids, columns);
2372
4342
  return result;
2373
4343
  }
2374
4344
  if (isString(a) && isObject(b)) {
2375
- if (a === "")
2376
- throw new Error("The id can't be empty");
4345
+ if (a === "") throw new Error("The id can't be empty");
2377
4346
  const columns = isValidSelectableColumns(c) ? c : void 0;
2378
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
4347
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
2379
4348
  }
2380
4349
  if (isObject(a) && isString(a.xata_id)) {
2381
- if (a.xata_id === "")
2382
- throw new Error("The id can't be empty");
4350
+ if (a.xata_id === "") throw new Error("The id can't be empty");
2383
4351
  const columns = isValidSelectableColumns(c) ? c : void 0;
2384
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
4352
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
2385
4353
  createOnly: false,
2386
4354
  ifVersion
2387
4355
  });
@@ -2398,25 +4366,22 @@ class RestRepository extends Query {
2398
4366
  async delete(a, b) {
2399
4367
  return __privateGet$2(this, _trace).call(this, "delete", async () => {
2400
4368
  if (Array.isArray(a)) {
2401
- if (a.length === 0)
2402
- return [];
4369
+ if (a.length === 0) return [];
2403
4370
  const ids = a.map((o) => {
2404
- if (isString(o))
2405
- return o;
2406
- if (isString(o.xata_id))
2407
- return o.xata_id;
4371
+ if (isString(o)) return o;
4372
+ if (isString(o.xata_id)) return o.xata_id;
2408
4373
  throw new Error("Invalid arguments for delete method");
2409
4374
  });
2410
4375
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2411
4376
  const result = await this.read(a, columns);
2412
- await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
4377
+ await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
2413
4378
  return result;
2414
4379
  }
2415
4380
  if (isString(a)) {
2416
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a, b);
4381
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
2417
4382
  }
2418
4383
  if (isObject(a) && isString(a.xata_id)) {
2419
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a.xata_id, b);
4384
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
2420
4385
  }
2421
4386
  throw new Error("Invalid arguments for delete method");
2422
4387
  });
@@ -2460,7 +4425,7 @@ class RestRepository extends Query {
2460
4425
  },
2461
4426
  ...__privateGet$2(this, _getFetchProps).call(this)
2462
4427
  });
2463
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4428
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2464
4429
  return {
2465
4430
  records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
2466
4431
  totalCount
@@ -2485,7 +4450,7 @@ class RestRepository extends Query {
2485
4450
  },
2486
4451
  ...__privateGet$2(this, _getFetchProps).call(this)
2487
4452
  });
2488
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4453
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2489
4454
  return {
2490
4455
  records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
2491
4456
  totalCount
@@ -2527,7 +4492,7 @@ class RestRepository extends Query {
2527
4492
  fetchOptions: data.fetchOptions,
2528
4493
  ...__privateGet$2(this, _getFetchProps).call(this)
2529
4494
  });
2530
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4495
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2531
4496
  const records = objects.map(
2532
4497
  (record) => initObject(
2533
4498
  __privateGet$2(this, _db),
@@ -2561,7 +4526,7 @@ class RestRepository extends Query {
2561
4526
  },
2562
4527
  ...__privateGet$2(this, _getFetchProps).call(this)
2563
4528
  });
2564
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4529
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2565
4530
  return {
2566
4531
  ...result,
2567
4532
  summaries: result.summaries.map(
@@ -2609,9 +4574,9 @@ _getFetchProps = new WeakMap();
2609
4574
  _db = new WeakMap();
2610
4575
  _schemaTables = new WeakMap();
2611
4576
  _trace = new WeakMap();
2612
- _insertRecordWithoutId = new WeakSet();
4577
+ _RestRepository_instances = new WeakSet();
2613
4578
  insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2614
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4579
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2615
4580
  const response = await insertRecord({
2616
4581
  pathParams: {
2617
4582
  workspace: "{workspaceId}",
@@ -2623,14 +4588,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2623
4588
  body: record,
2624
4589
  ...__privateGet$2(this, _getFetchProps).call(this)
2625
4590
  });
2626
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4591
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2627
4592
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2628
4593
  };
2629
- _insertRecordWithId = new WeakSet();
2630
4594
  insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
2631
- if (!recordId)
2632
- return null;
2633
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4595
+ if (!recordId) return null;
4596
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2634
4597
  const response = await insertRecordWithID({
2635
4598
  pathParams: {
2636
4599
  workspace: "{workspaceId}",
@@ -2643,13 +4606,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
2643
4606
  queryParams: { createOnly, columns, ifVersion },
2644
4607
  ...__privateGet$2(this, _getFetchProps).call(this)
2645
4608
  });
2646
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4609
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2647
4610
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2648
4611
  };
2649
- _insertRecords = new WeakSet();
2650
4612
  insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2651
4613
  const operations = await promiseMap(objects, async (object) => {
2652
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4614
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2653
4615
  return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
2654
4616
  });
2655
4617
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -2674,11 +4636,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2674
4636
  }
2675
4637
  return ids;
2676
4638
  };
2677
- _updateRecordWithID = new WeakSet();
2678
4639
  updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2679
- if (!recordId)
2680
- return null;
2681
- const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4640
+ if (!recordId) return null;
4641
+ const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2682
4642
  try {
2683
4643
  const response = await updateRecordWithID({
2684
4644
  pathParams: {
@@ -2692,7 +4652,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2692
4652
  body: record,
2693
4653
  ...__privateGet$2(this, _getFetchProps).call(this)
2694
4654
  });
2695
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4655
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2696
4656
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2697
4657
  } catch (e) {
2698
4658
  if (isObject(e) && e.status === 404) {
@@ -2701,10 +4661,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2701
4661
  throw e;
2702
4662
  }
2703
4663
  };
2704
- _updateRecords = new WeakSet();
2705
4664
  updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2706
4665
  const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
2707
- const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4666
+ const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2708
4667
  return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
2709
4668
  });
2710
4669
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -2729,10 +4688,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2729
4688
  }
2730
4689
  return ids;
2731
4690
  };
2732
- _upsertRecordWithID = new WeakSet();
2733
4691
  upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2734
- if (!recordId)
2735
- return null;
4692
+ if (!recordId) return null;
2736
4693
  const response = await upsertRecordWithID({
2737
4694
  pathParams: {
2738
4695
  workspace: "{workspaceId}",
@@ -2745,13 +4702,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2745
4702
  body: object,
2746
4703
  ...__privateGet$2(this, _getFetchProps).call(this)
2747
4704
  });
2748
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4705
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2749
4706
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2750
4707
  };
2751
- _deleteRecord = new WeakSet();
2752
4708
  deleteRecord_fn = async function(recordId, columns = ["*"]) {
2753
- if (!recordId)
2754
- return null;
4709
+ if (!recordId) return null;
2755
4710
  try {
2756
4711
  const response = await deleteRecord({
2757
4712
  pathParams: {
@@ -2764,7 +4719,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
2764
4719
  queryParams: { columns },
2765
4720
  ...__privateGet$2(this, _getFetchProps).call(this)
2766
4721
  });
2767
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4722
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2768
4723
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2769
4724
  } catch (e) {
2770
4725
  if (isObject(e) && e.status === 404) {
@@ -2773,7 +4728,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
2773
4728
  throw e;
2774
4729
  }
2775
4730
  };
2776
- _deleteRecords = new WeakSet();
2777
4731
  deleteRecords_fn = async function(recordIds) {
2778
4732
  const chunkedOperations = chunk(
2779
4733
  compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
@@ -2791,10 +4745,8 @@ deleteRecords_fn = async function(recordIds) {
2791
4745
  });
2792
4746
  }
2793
4747
  };
2794
- _getSchemaTables = new WeakSet();
2795
4748
  getSchemaTables_fn = async function() {
2796
- if (__privateGet$2(this, _schemaTables))
2797
- return __privateGet$2(this, _schemaTables);
4749
+ if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
2798
4750
  const { schema } = await getBranchDetails({
2799
4751
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
2800
4752
  ...__privateGet$2(this, _getFetchProps).call(this)
@@ -2802,16 +4754,13 @@ getSchemaTables_fn = async function() {
2802
4754
  __privateSet$1(this, _schemaTables, schema.tables);
2803
4755
  return schema.tables;
2804
4756
  };
2805
- _transformObjectToApi = new WeakSet();
2806
4757
  transformObjectToApi_fn = async function(object) {
2807
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4758
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2808
4759
  const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
2809
- if (!schema)
2810
- throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
4760
+ if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
2811
4761
  const result = {};
2812
4762
  for (const [key, value] of Object.entries(object)) {
2813
- if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
2814
- continue;
4763
+ if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
2815
4764
  const type = schema.columns.find((column) => column.name === key)?.type;
2816
4765
  switch (type) {
2817
4766
  case "link": {
@@ -2841,11 +4790,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
2841
4790
  const data = {};
2842
4791
  Object.assign(data, { ...object });
2843
4792
  const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
2844
- if (!columns)
2845
- console.error(`Table ${table} not found in schema`);
4793
+ if (!columns) console.error(`Table ${table} not found in schema`);
2846
4794
  for (const column of columns ?? []) {
2847
- if (!isValidColumn(selectedColumns, column))
2848
- continue;
4795
+ if (!isValidColumn(selectedColumns, column)) continue;
2849
4796
  const value = data[column.name];
2850
4797
  switch (column.type) {
2851
4798
  case "datetime": {
@@ -2931,15 +4878,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
2931
4878
  return record;
2932
4879
  };
2933
4880
  function extractId(value) {
2934
- if (isString(value))
2935
- return value;
2936
- if (isObject(value) && isString(value.xata_id))
2937
- return value.xata_id;
4881
+ if (isString(value)) return value;
4882
+ if (isObject(value) && isString(value.xata_id)) return value.xata_id;
2938
4883
  return void 0;
2939
4884
  }
2940
4885
  function isValidColumn(columns, column) {
2941
- if (columns.includes("*"))
2942
- return true;
4886
+ if (columns.includes("*")) return true;
2943
4887
  return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
2944
4888
  }
2945
4889
  function parseIfVersion(...args) {
@@ -2979,19 +4923,12 @@ const includesAll = (value) => ({ $includesAll: value });
2979
4923
  const includesNone = (value) => ({ $includesNone: value });
2980
4924
  const includesAny = (value) => ({ $includesAny: value });
2981
4925
 
2982
- var __accessCheck$2 = (obj, member, msg) => {
2983
- if (!member.has(obj))
2984
- throw TypeError("Cannot " + msg);
2985
- };
2986
- var __privateGet$1 = (obj, member, getter) => {
2987
- __accessCheck$2(obj, member, "read from private field");
2988
- return getter ? getter.call(obj) : member.get(obj);
2989
- };
2990
- var __privateAdd$2 = (obj, member, value) => {
2991
- if (member.has(obj))
2992
- throw TypeError("Cannot add the same private member more than once");
2993
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
4926
+ var __typeError$2 = (msg) => {
4927
+ throw TypeError(msg);
2994
4928
  };
4929
+ var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
4930
+ var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
4931
+ var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2995
4932
  var _tables;
2996
4933
  class SchemaPlugin extends XataPlugin {
2997
4934
  constructor() {
@@ -3003,8 +4940,7 @@ class SchemaPlugin extends XataPlugin {
3003
4940
  {},
3004
4941
  {
3005
4942
  get: (_target, table) => {
3006
- if (!isString(table))
3007
- throw new Error("Invalid table name");
4943
+ if (!isString(table)) throw new Error("Invalid table name");
3008
4944
  if (__privateGet$1(this, _tables)[table] === void 0) {
3009
4945
  __privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
3010
4946
  }
@@ -3095,30 +5031,23 @@ function getContentType(file) {
3095
5031
  return "application/octet-stream";
3096
5032
  }
3097
5033
 
3098
- var __accessCheck$1 = (obj, member, msg) => {
3099
- if (!member.has(obj))
3100
- throw TypeError("Cannot " + msg);
3101
- };
3102
- var __privateAdd$1 = (obj, member, value) => {
3103
- if (member.has(obj))
3104
- throw TypeError("Cannot add the same private member more than once");
3105
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5034
+ var __typeError$1 = (msg) => {
5035
+ throw TypeError(msg);
3106
5036
  };
3107
- var __privateMethod$1 = (obj, member, method) => {
3108
- __accessCheck$1(obj, member, "access private method");
3109
- return method;
3110
- };
3111
- var _search, search_fn;
5037
+ var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
5038
+ var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5039
+ var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
5040
+ var _SearchPlugin_instances, search_fn;
3112
5041
  class SearchPlugin extends XataPlugin {
3113
5042
  constructor(db) {
3114
5043
  super();
3115
5044
  this.db = db;
3116
- __privateAdd$1(this, _search);
5045
+ __privateAdd$1(this, _SearchPlugin_instances);
3117
5046
  }
3118
5047
  build(pluginOptions) {
3119
5048
  return {
3120
5049
  all: async (query, options = {}) => {
3121
- const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
5050
+ const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
3122
5051
  return {
3123
5052
  totalCount,
3124
5053
  records: records.map((record) => {
@@ -3128,7 +5057,7 @@ class SearchPlugin extends XataPlugin {
3128
5057
  };
3129
5058
  },
3130
5059
  byTable: async (query, options = {}) => {
3131
- const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
5060
+ const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
3132
5061
  const records = rawRecords.reduce((acc, record) => {
3133
5062
  const table = record.xata_table;
3134
5063
  const items = acc[table] ?? [];
@@ -3140,7 +5069,7 @@ class SearchPlugin extends XataPlugin {
3140
5069
  };
3141
5070
  }
3142
5071
  }
3143
- _search = new WeakSet();
5072
+ _SearchPlugin_instances = new WeakSet();
3144
5073
  search_fn = async function(query, options, pluginOptions) {
3145
5074
  const { tables, fuzziness, highlight, prefix, page } = options ?? {};
3146
5075
  const { records, totalCount } = await searchBranch({
@@ -3176,8 +5105,7 @@ function arrayString(val) {
3176
5105
  return result;
3177
5106
  }
3178
5107
  function prepareValue(value) {
3179
- if (!isDefined(value))
3180
- return null;
5108
+ if (!isDefined(value)) return null;
3181
5109
  if (value instanceof Date) {
3182
5110
  return value.toISOString();
3183
5111
  }
@@ -3217,19 +5145,28 @@ class SQLPlugin extends XataPlugin {
3217
5145
  throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
3218
5146
  }
3219
5147
  const { statement, params, consistency, responseType } = prepareParams(query, parameters);
3220
- const {
3221
- records,
3222
- rows,
3223
- warning,
3224
- columns = []
3225
- } = await sqlQuery({
5148
+ const { warning, columns, ...response } = await sqlQuery({
3226
5149
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
3227
5150
  body: { statement, params, consistency, responseType },
3228
5151
  ...pluginOptions
3229
5152
  });
5153
+ const records = "records" in response ? response.records : void 0;
5154
+ const rows = "rows" in response ? response.rows : void 0;
3230
5155
  return { records, rows, warning, columns };
3231
5156
  };
3232
5157
  sqlFunction.connectionString = buildConnectionString(pluginOptions);
5158
+ sqlFunction.batch = async (query) => {
5159
+ const { results } = await sqlBatchQuery({
5160
+ pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
5161
+ body: {
5162
+ statements: query.statements.map(({ statement, params }) => ({ statement, params })),
5163
+ consistency: query.consistency,
5164
+ responseType: query.responseType
5165
+ },
5166
+ ...pluginOptions
5167
+ });
5168
+ return { results };
5169
+ };
3233
5170
  return sqlFunction;
3234
5171
  }
3235
5172
  }
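
Editor's note: the SQL plugin gains a batch entry point in the hunk above. Alongside the tagged-template sql function, sql.batch now forwards an array of statements to the new sqlBatchQuery endpoint in a single request and returns its results array, while the single-statement path destructures records/rows defensively ("records" in response / "rows" in response) instead of assuming both keys exist. A rough usage sketch against an already-built client (table and column names are made up; consistency/responseType values mirror the fields the diff passes through):

// sketch: issuing two statements in one round trip via the new batch API
const { results } = await xata.sql.batch({
  statements: [
    { statement: 'SELECT * FROM "users" WHERE id = $1', params: ['rec_123'] },
    { statement: 'SELECT count(*) AS total FROM "users"', params: [] }
  ],
  consistency: 'strong',
  responseType: 'json'
});
// results is expected to hold one entry per submitted statement,
// in the same order as the statements array.
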
@@ -3256,8 +5193,7 @@ function buildDomain(host, region) {
3256
5193
  function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
3257
5194
  const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
3258
5195
  const parts = parseWorkspacesUrlParts(url);
3259
- if (!parts)
3260
- throw new Error("Invalid workspaces URL");
5196
+ if (!parts) throw new Error("Invalid workspaces URL");
3261
5197
  const { workspace: workspaceSlug, region, database, host } = parts;
3262
5198
  const domain = buildDomain(host, region);
3263
5199
  const workspace = workspaceSlug.split("-").pop();
@@ -3282,39 +5218,24 @@ class TransactionPlugin extends XataPlugin {
3282
5218
  }
3283
5219
  }
3284
5220
 
3285
- var __accessCheck = (obj, member, msg) => {
3286
- if (!member.has(obj))
3287
- throw TypeError("Cannot " + msg);
3288
- };
3289
- var __privateGet = (obj, member, getter) => {
3290
- __accessCheck(obj, member, "read from private field");
3291
- return getter ? getter.call(obj) : member.get(obj);
3292
- };
3293
- var __privateAdd = (obj, member, value) => {
3294
- if (member.has(obj))
3295
- throw TypeError("Cannot add the same private member more than once");
3296
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3297
- };
3298
- var __privateSet = (obj, member, value, setter) => {
3299
- __accessCheck(obj, member, "write to private field");
3300
- setter ? setter.call(obj, value) : member.set(obj, value);
3301
- return value;
3302
- };
3303
- var __privateMethod = (obj, member, method) => {
3304
- __accessCheck(obj, member, "access private method");
3305
- return method;
5221
+ var __typeError = (msg) => {
5222
+ throw TypeError(msg);
3306
5223
  };
5224
+ var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
5225
+ var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
5226
+ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5227
+ var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
5228
+ var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
3307
5229
  const buildClient = (plugins) => {
3308
- var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
5230
+ var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
3309
5231
  return _a = class {
3310
5232
  constructor(options = {}, tables) {
3311
- __privateAdd(this, _parseOptions);
3312
- __privateAdd(this, _getFetchProps);
3313
- __privateAdd(this, _options, void 0);
3314
- const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
5233
+ __privateAdd(this, _instances);
5234
+ __privateAdd(this, _options);
5235
+ const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
3315
5236
  __privateSet(this, _options, safeOptions);
3316
5237
  const pluginOptions = {
3317
- ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
5238
+ ...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
3318
5239
  host: safeOptions.host,
3319
5240
  tables,
3320
5241
  branch: safeOptions.branch
@@ -3331,8 +5252,7 @@ const buildClient = (plugins) => {
3331
5252
  this.sql = sql;
3332
5253
  this.files = files;
3333
5254
  for (const [key, namespace] of Object.entries(plugins ?? {})) {
3334
- if (namespace === void 0)
3335
- continue;
5255
+ if (namespace === void 0) continue;
3336
5256
  this[key] = namespace.build(pluginOptions);
3337
5257
  }
3338
5258
  }
@@ -3341,8 +5261,8 @@ const buildClient = (plugins) => {
3341
5261
  const branch = __privateGet(this, _options).branch;
3342
5262
  return { databaseURL, branch };
3343
5263
  }
3344
- }, _options = new WeakMap(), _parseOptions = new WeakSet(), parseOptions_fn = function(options) {
3345
- const enableBrowser = options?.enableBrowser ?? getEnableBrowserVariable() ?? false;
5264
+ }, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
5265
+ const enableBrowser = options?.enableBrowser ?? false;
3346
5266
  const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
3347
5267
  if (isBrowser && !enableBrowser) {
3348
5268
  throw new Error(
@@ -3350,8 +5270,9 @@ const buildClient = (plugins) => {
3350
5270
  );
3351
5271
  }
3352
5272
  const fetch = getFetchImplementation(options?.fetch);
3353
- const databaseURL = options?.databaseURL || getDatabaseURL();
3354
- const apiKey = options?.apiKey || getAPIKey();
5273
+ const databaseURL = options?.databaseURL;
5274
+ const apiKey = options?.apiKey;
5275
+ const branch = options?.branch;
3355
5276
  const trace = options?.trace ?? defaultTrace;
3356
5277
  const clientName = options?.clientName;
3357
5278
  const host = options?.host ?? "production";
@@ -3362,25 +5283,8 @@ const buildClient = (plugins) => {
3362
5283
  if (!databaseURL) {
3363
5284
  throw new Error("Option databaseURL is required");
3364
5285
  }
3365
- const envBranch = getBranch();
3366
- const previewBranch = getPreviewBranch();
3367
- const branch = options?.branch || previewBranch || envBranch || "main";
3368
- if (!!previewBranch && branch !== previewBranch) {
3369
- console.warn(
3370
- `Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
3371
- );
3372
- } else if (!!envBranch && branch !== envBranch) {
3373
- console.warn(
3374
- `Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
3375
- );
3376
- } else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
3377
- console.warn(
3378
- `Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
3379
- );
3380
- } else if (!previewBranch && !envBranch && options?.branch === void 0) {
3381
- console.warn(
3382
- `No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
3383
- );
5286
+ if (!branch) {
5287
+ throw new Error("Option branch is required");
3384
5288
  }
3385
5289
  return {
3386
5290
  fetch,
@@ -3394,7 +5298,7 @@ const buildClient = (plugins) => {
3394
5298
  clientName,
3395
5299
  xataAgentExtra
3396
5300
  };
3397
- }, _getFetchProps = new WeakSet(), getFetchProps_fn = function({
5301
+ }, getFetchProps_fn = function({
3398
5302
  fetch,
3399
5303
  apiKey,
3400
5304
  databaseURL,
@@ -3435,26 +5339,19 @@ class Serializer {
3435
5339
  }
3436
5340
  toJSON(data) {
3437
5341
  function visit(obj) {
3438
- if (Array.isArray(obj))
3439
- return obj.map(visit);
5342
+ if (Array.isArray(obj)) return obj.map(visit);
3440
5343
  const type = typeof obj;
3441
- if (type === "undefined")
3442
- return { [META]: "undefined" };
3443
- if (type === "bigint")
3444
- return { [META]: "bigint", [VALUE]: obj.toString() };
3445
- if (obj === null || type !== "object")
3446
- return obj;
5344
+ if (type === "undefined") return { [META]: "undefined" };
5345
+ if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
5346
+ if (obj === null || type !== "object") return obj;
3447
5347
  const constructor = obj.constructor;
3448
5348
  const o = { [META]: constructor.name };
3449
5349
  for (const [key, value] of Object.entries(obj)) {
3450
5350
  o[key] = visit(value);
3451
5351
  }
3452
- if (constructor === Date)
3453
- o[VALUE] = obj.toISOString();
3454
- if (constructor === Map)
3455
- o[VALUE] = Object.fromEntries(obj);
3456
- if (constructor === Set)
3457
- o[VALUE] = [...obj];
5352
+ if (constructor === Date) o[VALUE] = obj.toISOString();
5353
+ if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
5354
+ if (constructor === Set) o[VALUE] = [...obj];
3458
5355
  return o;
3459
5356
  }
3460
5357
  return JSON.stringify(visit(data));
@@ -3467,16 +5364,11 @@ class Serializer {
3467
5364
  if (constructor) {
3468
5365
  return Object.assign(Object.create(constructor.prototype), rest);
3469
5366
  }
3470
- if (clazz === "Date")
3471
- return new Date(val);
3472
- if (clazz === "Set")
3473
- return new Set(val);
3474
- if (clazz === "Map")
3475
- return new Map(Object.entries(val));
3476
- if (clazz === "bigint")
3477
- return BigInt(val);
3478
- if (clazz === "undefined")
3479
- return void 0;
5367
+ if (clazz === "Date") return new Date(val);
5368
+ if (clazz === "Set") return new Set(val);
5369
+ if (clazz === "Map") return new Map(Object.entries(val));
5370
+ if (clazz === "bigint") return BigInt(val);
5371
+ if (clazz === "undefined") return void 0;
3480
5372
  return rest;
3481
5373
  }
3482
5374
  return value;
@@ -3491,6 +5383,47 @@ const deserialize = (json) => {
3491
5383
  return defaultSerializer.fromJSON(json);
3492
5384
  };
3493
5385
 
5386
+ function parseEnvironment(environment) {
5387
+ try {
5388
+ if (typeof environment === "function") {
5389
+ return new Proxy(
5390
+ {},
5391
+ {
5392
+ get(target) {
5393
+ return environment(target);
5394
+ }
5395
+ }
5396
+ );
5397
+ }
5398
+ if (isObject(environment)) {
5399
+ return environment;
5400
+ }
5401
+ } catch (error) {
5402
+ }
5403
+ return {};
5404
+ }
5405
+ function buildPreviewBranchName({ org, branch }) {
5406
+ return `preview-${org}-${branch}`;
5407
+ }
5408
+ function getDeployPreviewBranch(environment) {
5409
+ try {
5410
+ const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
5411
+ if (deployPreviewBranch) return deployPreviewBranch;
5412
+ switch (deployPreview) {
5413
+ case "vercel": {
5414
+ if (!vercelGitCommitRef || !vercelGitRepoOwner) {
5415
+ console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
5416
+ return void 0;
5417
+ }
5418
+ return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
5419
+ }
5420
+ }
5421
+ return void 0;
5422
+ } catch (err) {
5423
+ return void 0;
5424
+ }
5425
+ }
5426
+
3494
5427
  class XataError extends Error {
3495
5428
  constructor(message, status) {
3496
5429
  super(message);
@@ -3498,5 +5431,5 @@ class XataError extends Error {
3498
5431
  }
3499
5432
  }
3500
5433
 
3501
- export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
5434
+ export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, completeMigration, contains, copyBranch, createBranch, createBranchAsync, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, dropClusterExtension, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAuthorizationCode, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchMoveStatus, getBranchSchemaHistory, getBranchStats, getCluster, getClusterMetrics, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDeployPreviewBranch, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationJobs, getMigrationRequest, getMigrationRequestIsMerged, getRecord, getSchema, getSchemas, getTableColumns, getTableSchema, getTaskStatus, getTasks, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, installClusterExtension, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusterBranches, listClusterExtensions, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, moveBranch, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, rollbackMigration, searchBranch, searchTable, serialize, setTableSchema, sqlBatchQuery, sqlQuery, startMigration, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
3502
5435
  //# sourceMappingURL=index.mjs.map