@xata.io/client 0.0.0-next.vdf3a7b8c70c130a3e9c73decc8494a3f8c8febcb → 0.0.0-next.ve13b45f1a3157fc535f97a53d7dac2a33dac3b84

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
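The bulk of the hunk below vendors a base64 codec (`toByteArray`, `fromByteArray`) and a `Buffer` class built on `Uint8Array` into `dist/index.mjs`, so the client no longer relies on a runtime-provided `Buffer`. As a minimal sketch of the API surface the polyfill reproduces, the calls below use Node's built-in `Buffer` purely for illustration (an assumption; the vendored class is bundled internally rather than exported):

    // Sketch only: Node's built-in Buffer stands in for the vendored polyfill,
    // which mirrors the same from()/toString()/equals()/readUInt16BE() surface.
    const original = Buffer.from("hello world", "utf8");
    const encoded = original.toString("base64");        // "aGVsbG8gd29ybGQ="
    const decoded = Buffer.from(encoded, "base64");
    console.log(decoded.equals(original));               // true
    console.log(decoded.readUInt16BE(0).toString(16));   // "6865" -> bytes of "he"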
package/dist/index.mjs CHANGED
@@ -22,6 +22,1789 @@ const TraceAttributes = {
22
22
  CLOUDFLARE_RAY_ID: "cf.ray"
23
23
  };
24
24
 
25
+ const lookup = [];
26
+ const revLookup = [];
27
+ const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
28
+ for (let i = 0, len = code.length; i < len; ++i) {
29
+ lookup[i] = code[i];
30
+ revLookup[code.charCodeAt(i)] = i;
31
+ }
32
+ revLookup["-".charCodeAt(0)] = 62;
33
+ revLookup["_".charCodeAt(0)] = 63;
34
+ function getLens(b64) {
35
+ const len = b64.length;
36
+ if (len % 4 > 0) {
37
+ throw new Error("Invalid string. Length must be a multiple of 4");
38
+ }
39
+ let validLen = b64.indexOf("=");
40
+ if (validLen === -1) validLen = len;
41
+ const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
42
+ return [validLen, placeHoldersLen];
43
+ }
44
+ function _byteLength(_b64, validLen, placeHoldersLen) {
45
+ return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
46
+ }
47
+ function toByteArray(b64) {
48
+ let tmp;
49
+ const lens = getLens(b64);
50
+ const validLen = lens[0];
51
+ const placeHoldersLen = lens[1];
52
+ const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
53
+ let curByte = 0;
54
+ const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
55
+ let i;
56
+ for (i = 0; i < len; i += 4) {
57
+ tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
58
+ arr[curByte++] = tmp >> 16 & 255;
59
+ arr[curByte++] = tmp >> 8 & 255;
60
+ arr[curByte++] = tmp & 255;
61
+ }
62
+ if (placeHoldersLen === 2) {
63
+ tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
64
+ arr[curByte++] = tmp & 255;
65
+ }
66
+ if (placeHoldersLen === 1) {
67
+ tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
68
+ arr[curByte++] = tmp >> 8 & 255;
69
+ arr[curByte++] = tmp & 255;
70
+ }
71
+ return arr;
72
+ }
73
+ function tripletToBase64(num) {
74
+ return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
75
+ }
76
+ function encodeChunk(uint8, start, end) {
77
+ let tmp;
78
+ const output = [];
79
+ for (let i = start; i < end; i += 3) {
80
+ tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
81
+ output.push(tripletToBase64(tmp));
82
+ }
83
+ return output.join("");
84
+ }
85
+ function fromByteArray(uint8) {
86
+ let tmp;
87
+ const len = uint8.length;
88
+ const extraBytes = len % 3;
89
+ const parts = [];
90
+ const maxChunkLength = 16383;
91
+ for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
92
+ parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
93
+ }
94
+ if (extraBytes === 1) {
95
+ tmp = uint8[len - 1];
96
+ parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
97
+ } else if (extraBytes === 2) {
98
+ tmp = (uint8[len - 2] << 8) + uint8[len - 1];
99
+ parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
100
+ }
101
+ return parts.join("");
102
+ }
103
+
104
+ const K_MAX_LENGTH = 2147483647;
105
+ const MAX_ARGUMENTS_LENGTH = 4096;
106
+ class Buffer extends Uint8Array {
107
+ /**
108
+ * Constructs a new `Buffer` instance.
109
+ *
110
+ * @param value
111
+ * @param encodingOrOffset
112
+ * @param length
113
+ */
114
+ constructor(value, encodingOrOffset, length) {
115
+ if (typeof value === "number") {
116
+ if (typeof encodingOrOffset === "string") {
117
+ throw new TypeError("The first argument must be of type string, received type number");
118
+ }
119
+ if (value < 0) {
120
+ throw new RangeError("The buffer size cannot be negative");
121
+ }
122
+ super(value < 0 ? 0 : Buffer._checked(value) | 0);
123
+ } else if (typeof value === "string") {
124
+ if (typeof encodingOrOffset !== "string") {
125
+ encodingOrOffset = "utf8";
126
+ }
127
+ if (!Buffer.isEncoding(encodingOrOffset)) {
128
+ throw new TypeError("Unknown encoding: " + encodingOrOffset);
129
+ }
130
+ const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
131
+ super(length2);
132
+ const written = this.write(value, 0, this.length, encodingOrOffset);
133
+ if (written !== length2) {
134
+ throw new TypeError(
135
+ "Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
136
+ );
137
+ }
138
+ } else if (ArrayBuffer.isView(value)) {
139
+ if (Buffer._isInstance(value, Uint8Array)) {
140
+ const copy = new Uint8Array(value);
141
+ const array = copy.buffer;
142
+ const byteOffset = copy.byteOffset;
143
+ const length2 = copy.byteLength;
144
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
145
+ throw new RangeError("offset is outside of buffer bounds");
146
+ }
147
+ if (array.byteLength < byteOffset + (length2 || 0)) {
148
+ throw new RangeError("length is outside of buffer bounds");
149
+ }
150
+ super(new Uint8Array(array, byteOffset, length2));
151
+ } else {
152
+ const array = value;
153
+ const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
154
+ super(new Uint8Array(length2));
155
+ for (let i = 0; i < length2; i++) {
156
+ this[i] = array[i] & 255;
157
+ }
158
+ }
159
+ } else if (value == null) {
160
+ throw new TypeError(
161
+ "The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
162
+ );
163
+ } else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
164
+ const array = value;
165
+ const byteOffset = encodingOrOffset;
166
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
167
+ throw new RangeError("offset is outside of buffer bounds");
168
+ }
169
+ if (array.byteLength < byteOffset + (length || 0)) {
170
+ throw new RangeError("length is outside of buffer bounds");
171
+ }
172
+ super(new Uint8Array(array, byteOffset, length));
173
+ } else if (Array.isArray(value)) {
174
+ const array = value;
175
+ const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
176
+ super(new Uint8Array(length2));
177
+ for (let i = 0; i < length2; i++) {
178
+ this[i] = array[i] & 255;
179
+ }
180
+ } else {
181
+ throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
182
+ }
183
+ }
184
+ /**
185
+ * Return JSON representation of the buffer.
186
+ */
187
+ toJSON() {
188
+ return {
189
+ type: "Buffer",
190
+ data: Array.prototype.slice.call(this)
191
+ };
192
+ }
193
+ /**
194
+ * Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
195
+ * parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
196
+ * only part of `string` will be written. However, partially encoded characters will not be written.
197
+ *
198
+ * @param string String to write to `buf`.
199
+ * @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
200
+ * @param length Maximum number of bytes to write. Default: `buf.length - offset`.
201
+ * @param encoding The character encoding of `string`. Default: `utf8`.
202
+ */
203
+ write(string, offset, length, encoding) {
204
+ if (typeof offset === "undefined") {
205
+ encoding = "utf8";
206
+ length = this.length;
207
+ offset = 0;
208
+ } else if (typeof length === "undefined" && typeof offset === "string") {
209
+ encoding = offset;
210
+ length = this.length;
211
+ offset = 0;
212
+ } else if (typeof offset === "number" && isFinite(offset)) {
213
+ offset = offset >>> 0;
214
+ if (typeof length === "number" && isFinite(length)) {
215
+ length = length >>> 0;
216
+ encoding ?? (encoding = "utf8");
217
+ } else if (typeof length === "string") {
218
+ encoding = length;
219
+ length = void 0;
220
+ }
221
+ } else {
222
+ throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
223
+ }
224
+ const remaining = this.length - offset;
225
+ if (typeof length === "undefined" || length > remaining) {
226
+ length = remaining;
227
+ }
228
+ if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
229
+ throw new RangeError("Attempt to write outside buffer bounds");
230
+ }
231
+ encoding || (encoding = "utf8");
232
+ switch (Buffer._getEncoding(encoding)) {
233
+ case "hex":
234
+ return Buffer._hexWrite(this, string, offset, length);
235
+ case "utf8":
236
+ return Buffer._utf8Write(this, string, offset, length);
237
+ case "ascii":
238
+ case "latin1":
239
+ case "binary":
240
+ return Buffer._asciiWrite(this, string, offset, length);
241
+ case "ucs2":
242
+ case "utf16le":
243
+ return Buffer._ucs2Write(this, string, offset, length);
244
+ case "base64":
245
+ return Buffer._base64Write(this, string, offset, length);
246
+ }
247
+ }
248
+ /**
249
+ * Decodes the buffer to a string according to the specified character encoding.
250
+ * Passing `start` and `end` will decode only a subset of the buffer.
251
+ *
252
+ * Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
253
+ * will be replaced with `U+FFFD`.
254
+ *
255
+ * @param encoding
256
+ * @param start
257
+ * @param end
258
+ */
259
+ toString(encoding, start, end) {
260
+ const length = this.length;
261
+ if (length === 0) {
262
+ return "";
263
+ }
264
+ if (arguments.length === 0) {
265
+ return Buffer._utf8Slice(this, 0, length);
266
+ }
267
+ if (typeof start === "undefined" || start < 0) {
268
+ start = 0;
269
+ }
270
+ if (start > this.length) {
271
+ return "";
272
+ }
273
+ if (typeof end === "undefined" || end > this.length) {
274
+ end = this.length;
275
+ }
276
+ if (end <= 0) {
277
+ return "";
278
+ }
279
+ end >>>= 0;
280
+ start >>>= 0;
281
+ if (end <= start) {
282
+ return "";
283
+ }
284
+ if (!encoding) {
285
+ encoding = "utf8";
286
+ }
287
+ switch (Buffer._getEncoding(encoding)) {
288
+ case "hex":
289
+ return Buffer._hexSlice(this, start, end);
290
+ case "utf8":
291
+ return Buffer._utf8Slice(this, start, end);
292
+ case "ascii":
293
+ return Buffer._asciiSlice(this, start, end);
294
+ case "latin1":
295
+ case "binary":
296
+ return Buffer._latin1Slice(this, start, end);
297
+ case "ucs2":
298
+ case "utf16le":
299
+ return Buffer._utf16leSlice(this, start, end);
300
+ case "base64":
301
+ return Buffer._base64Slice(this, start, end);
302
+ }
303
+ }
304
+ /**
305
+ * Returns true if this buffer is equal to the provided buffer, meaning the two contain exactly the same bytes.
306
+ *
307
+ * @param otherBuffer
308
+ */
309
+ equals(otherBuffer) {
310
+ if (!Buffer.isBuffer(otherBuffer)) {
311
+ throw new TypeError("Argument must be a Buffer");
312
+ }
313
+ if (this === otherBuffer) {
314
+ return true;
315
+ }
316
+ return Buffer.compare(this, otherBuffer) === 0;
317
+ }
318
+ /**
319
+ * Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
320
+ * or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
321
+ * buffer.
322
+ *
323
+ * - `0` is returned if `otherBuffer` is the same as this buffer.
324
+ * - `1` is returned if `otherBuffer` should come before this buffer when sorted.
325
+ * - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
326
+ *
327
+ * @param otherBuffer The buffer to compare to.
328
+ * @param targetStart The offset within `otherBuffer` at which to begin comparison.
329
+ * @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
330
+ * @param sourceStart The offset within this buffer at which to begin comparison.
331
+ * @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
332
+ */
333
+ compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
334
+ if (Buffer._isInstance(otherBuffer, Uint8Array)) {
335
+ otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
336
+ }
337
+ if (!Buffer.isBuffer(otherBuffer)) {
338
+ throw new TypeError("Argument must be a Buffer or Uint8Array");
339
+ }
340
+ targetStart ?? (targetStart = 0);
341
+ targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
342
+ sourceStart ?? (sourceStart = 0);
343
+ sourceEnd ?? (sourceEnd = this.length);
344
+ if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
345
+ throw new RangeError("Out of range index");
346
+ }
347
+ if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
348
+ return 0;
349
+ }
350
+ if (sourceStart >= sourceEnd) {
351
+ return -1;
352
+ }
353
+ if (targetStart >= targetEnd) {
354
+ return 1;
355
+ }
356
+ targetStart >>>= 0;
357
+ targetEnd >>>= 0;
358
+ sourceStart >>>= 0;
359
+ sourceEnd >>>= 0;
360
+ if (this === otherBuffer) {
361
+ return 0;
362
+ }
363
+ let x = sourceEnd - sourceStart;
364
+ let y = targetEnd - targetStart;
365
+ const len = Math.min(x, y);
366
+ const thisCopy = this.slice(sourceStart, sourceEnd);
367
+ const targetCopy = otherBuffer.slice(targetStart, targetEnd);
368
+ for (let i = 0; i < len; ++i) {
369
+ if (thisCopy[i] !== targetCopy[i]) {
370
+ x = thisCopy[i];
371
+ y = targetCopy[i];
372
+ break;
373
+ }
374
+ }
375
+ if (x < y) return -1;
376
+ if (y < x) return 1;
377
+ return 0;
378
+ }
379
+ /**
380
+ * Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
381
+ * region overlaps with this buffer.
382
+ *
383
+ * @param targetBuffer The target buffer to copy into.
384
+ * @param targetStart The offset within `targetBuffer` at which to begin writing.
385
+ * @param sourceStart The offset within this buffer at which to begin copying.
386
+ * @param sourceEnd The offset within this buffer at which to end copying (exclusive).
387
+ */
388
+ copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
389
+ if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
390
+ if (!sourceStart) sourceStart = 0;
391
+ if (!targetStart) targetStart = 0;
392
+ if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
393
+ if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
394
+ if (!targetStart) targetStart = 0;
395
+ if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
396
+ if (sourceEnd === sourceStart) return 0;
397
+ if (targetBuffer.length === 0 || this.length === 0) return 0;
398
+ if (targetStart < 0) {
399
+ throw new RangeError("targetStart out of bounds");
400
+ }
401
+ if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
402
+ if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
403
+ if (sourceEnd > this.length) sourceEnd = this.length;
404
+ if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
405
+ sourceEnd = targetBuffer.length - targetStart + sourceStart;
406
+ }
407
+ const len = sourceEnd - sourceStart;
408
+ if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
409
+ this.copyWithin(targetStart, sourceStart, sourceEnd);
410
+ } else {
411
+ Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
412
+ }
413
+ return len;
414
+ }
415
+ /**
416
+ * Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
417
+ * and `end` indices. This is the same behavior as `buf.subarray()`.
418
+ *
419
+ * This method is not compatible with `Uint8Array.prototype.slice()`, the version inherited from Buffer's superclass. To copy
420
+ * the slice, use `Uint8Array.prototype.slice()`.
421
+ *
422
+ * @param start
423
+ * @param end
424
+ */
425
+ slice(start, end) {
426
+ if (!start) {
427
+ start = 0;
428
+ }
429
+ const len = this.length;
430
+ start = ~~start;
431
+ end = end === void 0 ? len : ~~end;
432
+ if (start < 0) {
433
+ start += len;
434
+ if (start < 0) {
435
+ start = 0;
436
+ }
437
+ } else if (start > len) {
438
+ start = len;
439
+ }
440
+ if (end < 0) {
441
+ end += len;
442
+ if (end < 0) {
443
+ end = 0;
444
+ }
445
+ } else if (end > len) {
446
+ end = len;
447
+ }
448
+ if (end < start) {
449
+ end = start;
450
+ }
451
+ const newBuf = this.subarray(start, end);
452
+ Object.setPrototypeOf(newBuf, Buffer.prototype);
453
+ return newBuf;
454
+ }
455
+ /**
456
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
457
+ * of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
458
+ *
459
+ * @param value Number to write.
460
+ * @param offset Number of bytes to skip before starting to write.
461
+ * @param byteLength Number of bytes to write, between 0 and 6.
462
+ * @param noAssert
463
+ * @returns `offset` plus the number of bytes written.
464
+ */
465
+ writeUIntLE(value, offset, byteLength, noAssert) {
466
+ value = +value;
467
+ offset = offset >>> 0;
468
+ byteLength = byteLength >>> 0;
469
+ if (!noAssert) {
470
+ const maxBytes = Math.pow(2, 8 * byteLength) - 1;
471
+ Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
472
+ }
473
+ let mul = 1;
474
+ let i = 0;
475
+ this[offset] = value & 255;
476
+ while (++i < byteLength && (mul *= 256)) {
477
+ this[offset + i] = value / mul & 255;
478
+ }
479
+ return offset + byteLength;
480
+ }
481
+ /**
482
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
483
+ * accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
484
+ *
485
+ * @param value Number to write.
486
+ * @param offset Number of bytes to skip before starting to write.
487
+ * @param byteLength Number of bytes to write, between 0 and 6.
488
+ * @param noAssert
489
+ * @returns `offset` plus the number of bytes written.
490
+ */
491
+ writeUIntBE(value, offset, byteLength, noAssert) {
492
+ value = +value;
493
+ offset = offset >>> 0;
494
+ byteLength = byteLength >>> 0;
495
+ if (!noAssert) {
496
+ const maxBytes = Math.pow(2, 8 * byteLength) - 1;
497
+ Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
498
+ }
499
+ let i = byteLength - 1;
500
+ let mul = 1;
501
+ this[offset + i] = value & 255;
502
+ while (--i >= 0 && (mul *= 256)) {
503
+ this[offset + i] = value / mul & 255;
504
+ }
505
+ return offset + byteLength;
506
+ }
507
+ /**
508
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
509
+ * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
510
+ *
511
+ * @param value Number to write.
512
+ * @param offset Number of bytes to skip before starting to write.
513
+ * @param byteLength Number of bytes to write, between 0 and 6.
514
+ * @param noAssert
515
+ * @returns `offset` plus the number of bytes written.
516
+ */
517
+ writeIntLE(value, offset, byteLength, noAssert) {
518
+ value = +value;
519
+ offset = offset >>> 0;
520
+ if (!noAssert) {
521
+ const limit = Math.pow(2, 8 * byteLength - 1);
522
+ Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
523
+ }
524
+ let i = 0;
525
+ let mul = 1;
526
+ let sub = 0;
527
+ this[offset] = value & 255;
528
+ while (++i < byteLength && (mul *= 256)) {
529
+ if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
530
+ sub = 1;
531
+ }
532
+ this[offset + i] = (value / mul >> 0) - sub & 255;
533
+ }
534
+ return offset + byteLength;
535
+ }
536
+ /**
537
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
538
+ * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
539
+ *
540
+ * @param value Number to write.
541
+ * @param offset Number of bytes to skip before starting to write.
542
+ * @param byteLength Number of bytes to write, between 0 and 6.
543
+ * @param noAssert
544
+ * @returns `offset` plus the number of bytes written.
545
+ */
546
+ writeIntBE(value, offset, byteLength, noAssert) {
547
+ value = +value;
548
+ offset = offset >>> 0;
549
+ if (!noAssert) {
550
+ const limit = Math.pow(2, 8 * byteLength - 1);
551
+ Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
552
+ }
553
+ let i = byteLength - 1;
554
+ let mul = 1;
555
+ let sub = 0;
556
+ this[offset + i] = value & 255;
557
+ while (--i >= 0 && (mul *= 256)) {
558
+ if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
559
+ sub = 1;
560
+ }
561
+ this[offset + i] = (value / mul >> 0) - sub & 255;
562
+ }
563
+ return offset + byteLength;
564
+ }
565
+ /**
566
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
567
+ * unsigned, little-endian integer supporting up to 48 bits of accuracy.
568
+ *
569
+ * @param offset Number of bytes to skip before starting to read.
570
+ * @param byteLength Number of bytes to read, between 0 and 6.
571
+ * @param noAssert
572
+ */
573
+ readUIntLE(offset, byteLength, noAssert) {
574
+ offset = offset >>> 0;
575
+ byteLength = byteLength >>> 0;
576
+ if (!noAssert) {
577
+ Buffer._checkOffset(offset, byteLength, this.length);
578
+ }
579
+ let val = this[offset];
580
+ let mul = 1;
581
+ let i = 0;
582
+ while (++i < byteLength && (mul *= 256)) {
583
+ val += this[offset + i] * mul;
584
+ }
585
+ return val;
586
+ }
587
+ /**
588
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
589
+ * unsigned, big-endian integer supporting up to 48 bits of accuracy.
590
+ *
591
+ * @param offset Number of bytes to skip before starting to read.
592
+ * @param byteLength Number of bytes to read, between 0 and 6.
593
+ * @param noAssert
594
+ */
595
+ readUIntBE(offset, byteLength, noAssert) {
596
+ offset = offset >>> 0;
597
+ byteLength = byteLength >>> 0;
598
+ if (!noAssert) {
599
+ Buffer._checkOffset(offset, byteLength, this.length);
600
+ }
601
+ let val = this[offset + --byteLength];
602
+ let mul = 1;
603
+ while (byteLength > 0 && (mul *= 256)) {
604
+ val += this[offset + --byteLength] * mul;
605
+ }
606
+ return val;
607
+ }
608
+ /**
609
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
610
+ * little-endian, two's complement signed value supporting up to 48 bits of accuracy.
611
+ *
612
+ * @param offset Number of bytes to skip before starting to read.
613
+ * @param byteLength Number of bytes to read, between 0 and 6.
614
+ * @param noAssert
615
+ */
616
+ readIntLE(offset, byteLength, noAssert) {
617
+ offset = offset >>> 0;
618
+ byteLength = byteLength >>> 0;
619
+ if (!noAssert) {
620
+ Buffer._checkOffset(offset, byteLength, this.length);
621
+ }
622
+ let val = this[offset];
623
+ let mul = 1;
624
+ let i = 0;
625
+ while (++i < byteLength && (mul *= 256)) {
626
+ val += this[offset + i] * mul;
627
+ }
628
+ mul *= 128;
629
+ if (val >= mul) {
630
+ val -= Math.pow(2, 8 * byteLength);
631
+ }
632
+ return val;
633
+ }
634
+ /**
635
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
636
+ * big-endian, two's complement signed value supporting up to 48 bits of accuracy.
637
+ *
638
+ * @param offset Number of bytes to skip before starting to read.
639
+ * @param byteLength Number of bytes to read, between 0 and 6.
640
+ * @param noAssert
641
+ */
642
+ readIntBE(offset, byteLength, noAssert) {
643
+ offset = offset >>> 0;
644
+ byteLength = byteLength >>> 0;
645
+ if (!noAssert) {
646
+ Buffer._checkOffset(offset, byteLength, this.length);
647
+ }
648
+ let i = byteLength;
649
+ let mul = 1;
650
+ let val = this[offset + --i];
651
+ while (i > 0 && (mul *= 256)) {
652
+ val += this[offset + --i] * mul;
653
+ }
654
+ mul *= 128;
655
+ if (val >= mul) {
656
+ val -= Math.pow(2, 8 * byteLength);
657
+ }
658
+ return val;
659
+ }
660
+ /**
661
+ * Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
662
+ *
663
+ * @param offset Number of bytes to skip before starting to read.
664
+ * @param noAssert
665
+ */
666
+ readUInt8(offset, noAssert) {
667
+ offset = offset >>> 0;
668
+ if (!noAssert) {
669
+ Buffer._checkOffset(offset, 1, this.length);
670
+ }
671
+ return this[offset];
672
+ }
673
+ /**
674
+ * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
675
+ *
676
+ * @param offset Number of bytes to skip before starting to read.
677
+ * @param noAssert
678
+ */
679
+ readUInt16LE(offset, noAssert) {
680
+ offset = offset >>> 0;
681
+ if (!noAssert) {
682
+ Buffer._checkOffset(offset, 2, this.length);
683
+ }
684
+ return this[offset] | this[offset + 1] << 8;
685
+ }
686
+ /**
687
+ * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
688
+ *
689
+ * @param offset Number of bytes to skip before starting to read.
690
+ * @param noAssert
691
+ */
692
+ readUInt16BE(offset, noAssert) {
693
+ offset = offset >>> 0;
694
+ if (!noAssert) {
695
+ Buffer._checkOffset(offset, 2, this.length);
696
+ }
697
+ return this[offset] << 8 | this[offset + 1];
698
+ }
699
+ /**
700
+ * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
701
+ *
702
+ * @param offset Number of bytes to skip before starting to read.
703
+ * @param noAssert
704
+ */
705
+ readUInt32LE(offset, noAssert) {
706
+ offset = offset >>> 0;
707
+ if (!noAssert) {
708
+ Buffer._checkOffset(offset, 4, this.length);
709
+ }
710
+ return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
711
+ }
712
+ /**
713
+ * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
714
+ *
715
+ * @param offset Number of bytes to skip before starting to read.
716
+ * @param noAssert
717
+ */
718
+ readUInt32BE(offset, noAssert) {
719
+ offset = offset >>> 0;
720
+ if (!noAssert) {
721
+ Buffer._checkOffset(offset, 4, this.length);
722
+ }
723
+ return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
724
+ }
725
+ /**
726
+ * Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
727
+ * as two's complement signed values.
728
+ *
729
+ * @param offset Number of bytes to skip before starting to read.
730
+ * @param noAssert
731
+ */
732
+ readInt8(offset, noAssert) {
733
+ offset = offset >>> 0;
734
+ if (!noAssert) {
735
+ Buffer._checkOffset(offset, 1, this.length);
736
+ }
737
+ if (!(this[offset] & 128)) {
738
+ return this[offset];
739
+ }
740
+ return (255 - this[offset] + 1) * -1;
741
+ }
742
+ /**
743
+ * Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
744
+ * are interpreted as two's complement signed values.
745
+ *
746
+ * @param offset Number of bytes to skip before starting to read.
747
+ * @param noAssert
748
+ */
749
+ readInt16LE(offset, noAssert) {
750
+ offset = offset >>> 0;
751
+ if (!noAssert) {
752
+ Buffer._checkOffset(offset, 2, this.length);
753
+ }
754
+ const val = this[offset] | this[offset + 1] << 8;
755
+ return val & 32768 ? val | 4294901760 : val;
756
+ }
757
+ /**
758
+ * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
759
+ * are interpreted as two's complement signed values.
760
+ *
761
+ * @param offset Number of bytes to skip before starting to read.
762
+ * @param noAssert
763
+ */
764
+ readInt16BE(offset, noAssert) {
765
+ offset = offset >>> 0;
766
+ if (!noAssert) {
767
+ Buffer._checkOffset(offset, 2, this.length);
768
+ }
769
+ const val = this[offset + 1] | this[offset] << 8;
770
+ return val & 32768 ? val | 4294901760 : val;
771
+ }
772
+ /**
773
+ * Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
774
+ * are interpreted as two's complement signed values.
775
+ *
776
+ * @param offset Number of bytes to skip before starting to read.
777
+ * @param noAssert
778
+ */
779
+ readInt32LE(offset, noAssert) {
780
+ offset = offset >>> 0;
781
+ if (!noAssert) {
782
+ Buffer._checkOffset(offset, 4, this.length);
783
+ }
784
+ return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
785
+ }
786
+ /**
787
+ * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
788
+ * are interpreted as two's complement signed values.
789
+ *
790
+ * @param offset Number of bytes to skip before starting to read.
791
+ * @param noAssert
792
+ */
793
+ readInt32BE(offset, noAssert) {
794
+ offset = offset >>> 0;
795
+ if (!noAssert) {
796
+ Buffer._checkOffset(offset, 4, this.length);
797
+ }
798
+ return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
799
+ }
800
+ /**
801
+ * Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
802
+ * Throws a `RangeError` if `buf.length` is not a multiple of 2.
803
+ */
804
+ swap16() {
805
+ const len = this.length;
806
+ if (len % 2 !== 0) {
807
+ throw new RangeError("Buffer size must be a multiple of 16-bits");
808
+ }
809
+ for (let i = 0; i < len; i += 2) {
810
+ this._swap(this, i, i + 1);
811
+ }
812
+ return this;
813
+ }
814
+ /**
815
+ * Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
816
+ * Throws a `RangeError` if `buf.length` is not a multiple of 4.
817
+ */
818
+ swap32() {
819
+ const len = this.length;
820
+ if (len % 4 !== 0) {
821
+ throw new RangeError("Buffer size must be a multiple of 32-bits");
822
+ }
823
+ for (let i = 0; i < len; i += 4) {
824
+ this._swap(this, i, i + 3);
825
+ this._swap(this, i + 1, i + 2);
826
+ }
827
+ return this;
828
+ }
829
+ /**
830
+ * Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
831
+ * Throws a `RangeError` if `buf.length` is not a multiple of 8.
832
+ */
833
+ swap64() {
834
+ const len = this.length;
835
+ if (len % 8 !== 0) {
836
+ throw new RangeError("Buffer size must be a multiple of 64-bits");
837
+ }
838
+ for (let i = 0; i < len; i += 8) {
839
+ this._swap(this, i, i + 7);
840
+ this._swap(this, i + 1, i + 6);
841
+ this._swap(this, i + 2, i + 5);
842
+ this._swap(this, i + 3, i + 4);
843
+ }
844
+ return this;
845
+ }
846
+ /**
847
+ * Swaps two octets.
848
+ *
849
+ * @param b
850
+ * @param n
851
+ * @param m
852
+ */
853
+ _swap(b, n, m) {
854
+ const i = b[n];
855
+ b[n] = b[m];
856
+ b[m] = i;
857
+ }
858
+ /**
859
+ * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
860
+ * Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
861
+ *
862
+ * @param value Number to write.
863
+ * @param offset Number of bytes to skip before starting to write.
864
+ * @param noAssert
865
+ * @returns `offset` plus the number of bytes written.
866
+ */
867
+ writeUInt8(value, offset, noAssert) {
868
+ value = +value;
869
+ offset = offset >>> 0;
870
+ if (!noAssert) {
871
+ Buffer._checkInt(this, value, offset, 1, 255, 0);
872
+ }
873
+ this[offset] = value & 255;
874
+ return offset + 1;
875
+ }
876
+ /**
877
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
878
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
879
+ *
880
+ * @param value Number to write.
881
+ * @param offset Number of bytes to skip before starting to write.
882
+ * @param noAssert
883
+ * @returns `offset` plus the number of bytes written.
884
+ */
885
+ writeUInt16LE(value, offset, noAssert) {
886
+ value = +value;
887
+ offset = offset >>> 0;
888
+ if (!noAssert) {
889
+ Buffer._checkInt(this, value, offset, 2, 65535, 0);
890
+ }
891
+ this[offset] = value & 255;
892
+ this[offset + 1] = value >>> 8;
893
+ return offset + 2;
894
+ }
895
+ /**
896
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
897
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
898
+ *
899
+ * @param value Number to write.
900
+ * @param offset Number of bytes to skip before starting to write.
901
+ * @param noAssert
902
+ * @returns `offset` plus the number of bytes written.
903
+ */
904
+ writeUInt16BE(value, offset, noAssert) {
905
+ value = +value;
906
+ offset = offset >>> 0;
907
+ if (!noAssert) {
908
+ Buffer._checkInt(this, value, offset, 2, 65535, 0);
909
+ }
910
+ this[offset] = value >>> 8;
911
+ this[offset + 1] = value & 255;
912
+ return offset + 2;
913
+ }
914
+ /**
915
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
916
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
917
+ *
918
+ * @param value Number to write.
919
+ * @param offset Number of bytes to skip before starting to write.
920
+ * @param noAssert
921
+ * @returns `offset` plus the number of bytes written.
922
+ */
923
+ writeUInt32LE(value, offset, noAssert) {
924
+ value = +value;
925
+ offset = offset >>> 0;
926
+ if (!noAssert) {
927
+ Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
928
+ }
929
+ this[offset + 3] = value >>> 24;
930
+ this[offset + 2] = value >>> 16;
931
+ this[offset + 1] = value >>> 8;
932
+ this[offset] = value & 255;
933
+ return offset + 4;
934
+ }
935
+ /**
936
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
937
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
938
+ *
939
+ * @param value Number to write.
940
+ * @param offset Number of bytes to skip before starting to write.
941
+ * @param noAssert
942
+ * @returns `offset` plus the number of bytes written.
943
+ */
944
+ writeUInt32BE(value, offset, noAssert) {
945
+ value = +value;
946
+ offset = offset >>> 0;
947
+ if (!noAssert) {
948
+ Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
949
+ }
950
+ this[offset] = value >>> 24;
951
+ this[offset + 1] = value >>> 16;
952
+ this[offset + 2] = value >>> 8;
953
+ this[offset + 3] = value & 255;
954
+ return offset + 4;
955
+ }
956
+ /**
957
+ * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
958
+ * Behavior is undefined when `value` is anything other than a signed 8-bit integer.
959
+ *
960
+ * @param value Number to write.
961
+ * @param offset Number of bytes to skip before starting to write.
962
+ * @param noAssert
963
+ * @returns `offset` plus the number of bytes written.
964
+ */
965
+ writeInt8(value, offset, noAssert) {
966
+ value = +value;
967
+ offset = offset >>> 0;
968
+ if (!noAssert) {
969
+ Buffer._checkInt(this, value, offset, 1, 127, -128);
970
+ }
971
+ if (value < 0) {
972
+ value = 255 + value + 1;
973
+ }
974
+ this[offset] = value & 255;
975
+ return offset + 1;
976
+ }
977
+ /**
978
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
979
+ * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
980
+ *
981
+ * @param value Number to write.
982
+ * @param offset Number of bytes to skip before starting to write.
983
+ * @param noAssert
984
+ * @returns `offset` plus the number of bytes written.
985
+ */
986
+ writeInt16LE(value, offset, noAssert) {
987
+ value = +value;
988
+ offset = offset >>> 0;
989
+ if (!noAssert) {
990
+ Buffer._checkInt(this, value, offset, 2, 32767, -32768);
991
+ }
992
+ this[offset] = value & 255;
993
+ this[offset + 1] = value >>> 8;
994
+ return offset + 2;
995
+ }
996
+ /**
997
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
998
+ * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
999
+ *
1000
+ * @param value Number to write.
1001
+ * @param offset Number of bytes to skip before starting to write.
1002
+ * @param noAssert
1003
+ * @returns `offset` plus the number of bytes written.
1004
+ */
1005
+ writeInt16BE(value, offset, noAssert) {
1006
+ value = +value;
1007
+ offset = offset >>> 0;
1008
+ if (!noAssert) {
1009
+ Buffer._checkInt(this, value, offset, 2, 32767, -32768);
1010
+ }
1011
+ this[offset] = value >>> 8;
1012
+ this[offset + 1] = value & 255;
1013
+ return offset + 2;
1014
+ }
1015
+ /**
1016
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
1017
+ * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
1018
+ *
1019
+ * @param value Number to write.
1020
+ * @param offset Number of bytes to skip before starting to write.
1021
+ * @param noAssert
1022
+ * @returns `offset` plus the number of bytes written.
1023
+ */
1024
+ writeInt32LE(value, offset, noAssert) {
1025
+ value = +value;
1026
+ offset = offset >>> 0;
1027
+ if (!noAssert) {
1028
+ Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
1029
+ }
1030
+ this[offset] = value & 255;
1031
+ this[offset + 1] = value >>> 8;
1032
+ this[offset + 2] = value >>> 16;
1033
+ this[offset + 3] = value >>> 24;
1034
+ return offset + 4;
1035
+ }
1036
+ /**
1037
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
1038
+ * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
1039
+ *
1040
+ * @param value Number to write.
1041
+ * @param offset Number of bytes to skip before starting to write.
1042
+ * @param noAssert
1043
+ * @returns `offset` plus the number of bytes written.
1044
+ */
1045
+ writeInt32BE(value, offset, noAssert) {
1046
+ value = +value;
1047
+ offset = offset >>> 0;
1048
+ if (!noAssert) {
1049
+ Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
1050
+ }
1051
+ if (value < 0) {
1052
+ value = 4294967295 + value + 1;
1053
+ }
1054
+ this[offset] = value >>> 24;
1055
+ this[offset + 1] = value >>> 16;
1056
+ this[offset + 2] = value >>> 8;
1057
+ this[offset + 3] = value & 255;
1058
+ return offset + 4;
1059
+ }
1060
+ /**
1061
+ * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
1062
+ * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
1063
+ * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
1064
+ *
1065
+ * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
1066
+ * character that fit into `buf` are written.
1067
+ *
1068
+ * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
1069
+ *
1070
+ * @param value
1071
+ * @param encoding
1072
+ */
1073
+ fill(value, offset, end, encoding) {
1074
+ if (typeof value === "string") {
1075
+ if (typeof offset === "string") {
1076
+ encoding = offset;
1077
+ offset = 0;
1078
+ end = this.length;
1079
+ } else if (typeof end === "string") {
1080
+ encoding = end;
1081
+ end = this.length;
1082
+ }
1083
+ if (encoding !== void 0 && typeof encoding !== "string") {
1084
+ throw new TypeError("encoding must be a string");
1085
+ }
1086
+ if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
1087
+ throw new TypeError("Unknown encoding: " + encoding);
1088
+ }
1089
+ if (value.length === 1) {
1090
+ const code = value.charCodeAt(0);
1091
+ if (encoding === "utf8" && code < 128) {
1092
+ value = code;
1093
+ }
1094
+ }
1095
+ } else if (typeof value === "number") {
1096
+ value = value & 255;
1097
+ } else if (typeof value === "boolean") {
1098
+ value = Number(value);
1099
+ }
1100
+ offset ?? (offset = 0);
1101
+ end ?? (end = this.length);
1102
+ if (offset < 0 || this.length < offset || this.length < end) {
1103
+ throw new RangeError("Out of range index");
1104
+ }
1105
+ if (end <= offset) {
1106
+ return this;
1107
+ }
1108
+ offset = offset >>> 0;
1109
+ end = end === void 0 ? this.length : end >>> 0;
1110
+ value || (value = 0);
1111
+ let i;
1112
+ if (typeof value === "number") {
1113
+ for (i = offset; i < end; ++i) {
1114
+ this[i] = value;
1115
+ }
1116
+ } else {
1117
+ const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
1118
+ const len = bytes.length;
1119
+ if (len === 0) {
1120
+ throw new TypeError('The value "' + value + '" is invalid for argument "value"');
1121
+ }
1122
+ for (i = 0; i < end - offset; ++i) {
1123
+ this[i + offset] = bytes[i % len];
1124
+ }
1125
+ }
1126
+ return this;
1127
+ }
1128
+ /**
1129
+ * Returns the index of the specified value.
1130
+ *
1131
+ * If `value` is:
1132
+ * - a string, `value` is interpreted according to the character encoding in `encoding`.
1133
+ * - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
1134
+ * - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
1135
+ *
1136
+ * Any other types will throw a `TypeError`.
1137
+ *
1138
+ * @param value What to search for.
1139
+ * @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
1140
+ * @param encoding If `value` is a string, this is the encoding used to search.
1141
+ * @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
1142
+ */
1143
+ indexOf(value, byteOffset, encoding) {
1144
+ return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
1145
+ }
1146
+ /**
1147
+ * Gets the last index of the specified value.
1148
+ *
1149
+ * @see indexOf()
1150
+ * @param value
1151
+ * @param byteOffset
1152
+ * @param encoding
1153
+ */
1154
+ lastIndexOf(value, byteOffset, encoding) {
1155
+ return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
1156
+ }
1157
+ _bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
1158
+ if (buffer.length === 0) {
1159
+ return -1;
1160
+ }
1161
+ if (typeof byteOffset === "string") {
1162
+ encoding = byteOffset;
1163
+ byteOffset = 0;
1164
+ } else if (typeof byteOffset === "undefined") {
1165
+ byteOffset = 0;
1166
+ } else if (byteOffset > 2147483647) {
1167
+ byteOffset = 2147483647;
1168
+ } else if (byteOffset < -2147483648) {
1169
+ byteOffset = -2147483648;
1170
+ }
1171
+ byteOffset = +byteOffset;
1172
+ if (byteOffset !== byteOffset) {
1173
+ byteOffset = dir ? 0 : buffer.length - 1;
1174
+ }
1175
+ if (byteOffset < 0) {
1176
+ byteOffset = buffer.length + byteOffset;
1177
+ }
1178
+ if (byteOffset >= buffer.length) {
1179
+ if (dir) {
1180
+ return -1;
1181
+ } else {
1182
+ byteOffset = buffer.length - 1;
1183
+ }
1184
+ } else if (byteOffset < 0) {
1185
+ if (dir) {
1186
+ byteOffset = 0;
1187
+ } else {
1188
+ return -1;
1189
+ }
1190
+ }
1191
+ if (typeof val === "string") {
1192
+ val = Buffer.from(val, encoding);
1193
+ }
1194
+ if (Buffer.isBuffer(val)) {
1195
+ if (val.length === 0) {
1196
+ return -1;
1197
+ }
1198
+ return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
1199
+ } else if (typeof val === "number") {
1200
+ val = val & 255;
1201
+ if (typeof Uint8Array.prototype.indexOf === "function") {
1202
+ if (dir) {
1203
+ return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
1204
+ } else {
1205
+ return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
1206
+ }
1207
+ }
1208
+ return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
1209
+ }
1210
+ throw new TypeError("val must be string, number or Buffer");
1211
+ }
1212
+ /**
1213
+ * Equivalent to `buf.indexOf() !== -1`.
1214
+ *
1215
+ * @param value
1216
+ * @param byteOffset
1217
+ * @param encoding
1218
+ */
1219
+ includes(value, byteOffset, encoding) {
1220
+ return this.indexOf(value, byteOffset, encoding) !== -1;
1221
+ }
1222
+ /**
1223
+ * Creates a new buffer from the given parameters.
1224
+ *
1225
+ * @param data
1226
+ * @param encoding
1227
+ */
1228
+ static from(a, b, c) {
1229
+ return new Buffer(a, b, c);
1230
+ }
1231
+ /**
1232
+ * Returns true if `obj` is a Buffer.
1233
+ *
1234
+ * @param obj
1235
+ */
1236
+ static isBuffer(obj) {
1237
+ return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
1238
+ }
1239
+ /**
1240
+ * Returns true if `encoding` is a supported encoding.
1241
+ *
1242
+ * @param encoding
1243
+ */
1244
+ static isEncoding(encoding) {
1245
+ switch (encoding.toLowerCase()) {
1246
+ case "hex":
1247
+ case "utf8":
1248
+ case "ascii":
1249
+ case "binary":
1250
+ case "latin1":
1251
+ case "ucs2":
1252
+ case "utf16le":
1253
+ case "base64":
1254
+ return true;
1255
+ default:
1256
+ return false;
1257
+ }
1258
+ }
1259
+ /**
1260
+ * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
1261
+ * returns the number of characters in the string.
1262
+ *
1263
+ * @param string The string to test.
1264
+ * @param encoding The encoding to use for calculation. Default is `utf8`.
1265
+ */
1266
+ static byteLength(string, encoding) {
1267
+ if (Buffer.isBuffer(string)) {
1268
+ return string.length;
1269
+ }
1270
+ if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
1271
+ return string.byteLength;
1272
+ }
1273
+ if (typeof string !== "string") {
1274
+ throw new TypeError(
1275
+ 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
1276
+ );
1277
+ }
1278
+ const len = string.length;
1279
+ const mustMatch = arguments.length > 2 && arguments[2] === true;
1280
+ if (!mustMatch && len === 0) {
1281
+ return 0;
1282
+ }
1283
+ switch (encoding?.toLowerCase()) {
1284
+ case "ascii":
1285
+ case "latin1":
1286
+ case "binary":
1287
+ return len;
1288
+ case "utf8":
1289
+ return Buffer._utf8ToBytes(string).length;
1290
+ case "hex":
1291
+ return len >>> 1;
1292
+ case "ucs2":
1293
+ case "utf16le":
1294
+ return len * 2;
1295
+ case "base64":
1296
+ return Buffer._base64ToBytes(string).length;
1297
+ default:
1298
+ return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
1299
+ }
1300
+ }
1301
+ /**
1302
+ * Returns a Buffer which is the result of concatenating all the buffers in the list together.
1303
+ *
1304
+ * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
1305
+ * - If the list has exactly one item, then the first item is returned.
1306
+ * - If the list has more than one item, then a new buffer is created.
1307
+ *
1308
+ * It is faster to provide the `totalLength` if it is known. However, if it is not provided, it will be
1309
+ * calculated at a small computational expense.
1310
+ *
1311
+ * @param list An array of Buffer objects to concatenate.
1312
+ * @param totalLength Total length of the buffers when concatenated.
1313
+ */
1314
+ static concat(list, totalLength) {
1315
+ if (!Array.isArray(list)) {
1316
+ throw new TypeError('"list" argument must be an Array of Buffers');
1317
+ }
1318
+ if (list.length === 0) {
1319
+ return Buffer.alloc(0);
1320
+ }
1321
+ let i;
1322
+ if (totalLength === void 0) {
1323
+ totalLength = 0;
1324
+ for (i = 0; i < list.length; ++i) {
1325
+ totalLength += list[i].length;
1326
+ }
1327
+ }
1328
+ const buffer = Buffer.allocUnsafe(totalLength);
1329
+ let pos = 0;
1330
+ for (i = 0; i < list.length; ++i) {
1331
+ let buf = list[i];
1332
+ if (Buffer._isInstance(buf, Uint8Array)) {
1333
+ if (pos + buf.length > buffer.length) {
1334
+ if (!Buffer.isBuffer(buf)) {
1335
+ buf = Buffer.from(buf);
1336
+ }
1337
+ buf.copy(buffer, pos);
1338
+ } else {
1339
+ Uint8Array.prototype.set.call(buffer, buf, pos);
1340
+ }
1341
+ } else if (!Buffer.isBuffer(buf)) {
1342
+ throw new TypeError('"list" argument must be an Array of Buffers');
1343
+ } else {
1344
+ buf.copy(buffer, pos);
1345
+ }
1346
+ pos += buf.length;
1347
+ }
1348
+ return buffer;
1349
+ }
1350
+ /**
1351
+ * The same as `buf1.compare(buf2)`.
1352
+ */
1353
+ static compare(buf1, buf2) {
1354
+ if (Buffer._isInstance(buf1, Uint8Array)) {
1355
+ buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
1356
+ }
1357
+ if (Buffer._isInstance(buf2, Uint8Array)) {
1358
+ buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
1359
+ }
1360
+ if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
1361
+ throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
1362
+ }
1363
+ if (buf1 === buf2) {
1364
+ return 0;
1365
+ }
1366
+ let x = buf1.length;
1367
+ let y = buf2.length;
1368
+ for (let i = 0, len = Math.min(x, y); i < len; ++i) {
1369
+ if (buf1[i] !== buf2[i]) {
1370
+ x = buf1[i];
1371
+ y = buf2[i];
1372
+ break;
1373
+ }
1374
+ }
1375
+ if (x < y) {
1376
+ return -1;
1377
+ }
1378
+ if (y < x) {
1379
+ return 1;
1380
+ }
1381
+ return 0;
1382
+ }
1383
+ /**
1384
+ * Allocates a new buffer of `size` octets.
1385
+ *
1386
+ * @param size The number of octets to allocate.
1387
+ * @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
1388
+ * @param encoding The encoding used for the call to `buf.fill()` while initializing.
1389
+ */
1390
+ static alloc(size, fill, encoding) {
1391
+ if (typeof size !== "number") {
1392
+ throw new TypeError('"size" argument must be of type number');
1393
+ } else if (size < 0) {
1394
+ throw new RangeError('The value "' + size + '" is invalid for option "size"');
1395
+ }
1396
+ if (size <= 0) {
1397
+ return new Buffer(size);
1398
+ }
1399
+ if (fill !== void 0) {
1400
+ return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
1401
+ }
1402
+ return new Buffer(size);
1403
+ }
1404
+ /**
1405
+ * Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
1406
+ *
1407
+ * @param size
1408
+ */
1409
+ static allocUnsafe(size) {
1410
+ if (typeof size !== "number") {
1411
+ throw new TypeError('"size" argument must be of type number');
1412
+ } else if (size < 0) {
1413
+ throw new RangeError('The value "' + size + '" is invalid for option "size"');
1414
+ }
1415
+ return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
1416
+ }
1417
+ /**
1418
+ * Returns true if the given `obj` is an instance of `type`.
1419
+ *
1420
+ * @param obj
1421
+ * @param type
1422
+ */
1423
+ static _isInstance(obj, type) {
1424
+ return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
1425
+ }
1426
+ static _checked(length) {
1427
+ if (length >= K_MAX_LENGTH) {
1428
+ throw new RangeError(
1429
+ "Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
1430
+ );
1431
+ }
1432
+ return length | 0;
1433
+ }
1434
+ static _blitBuffer(src, dst, offset, length) {
1435
+ let i;
1436
+ for (i = 0; i < length; ++i) {
1437
+ if (i + offset >= dst.length || i >= src.length) {
1438
+ break;
1439
+ }
1440
+ dst[i + offset] = src[i];
1441
+ }
1442
+ return i;
1443
+ }
1444
+ static _utf8Write(buf, string, offset, length) {
1445
+ return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
1446
+ }
1447
+ static _asciiWrite(buf, string, offset, length) {
1448
+ return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
1449
+ }
1450
+ static _base64Write(buf, string, offset, length) {
1451
+ return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
1452
+ }
1453
+ static _ucs2Write(buf, string, offset, length) {
1454
+ return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
1455
+ }
1456
+ static _hexWrite(buf, string, offset, length) {
1457
+ offset = Number(offset) || 0;
1458
+ const remaining = buf.length - offset;
1459
+ if (!length) {
1460
+ length = remaining;
1461
+ } else {
1462
+ length = Number(length);
1463
+ if (length > remaining) {
1464
+ length = remaining;
1465
+ }
1466
+ }
1467
+ const strLen = string.length;
1468
+ if (length > strLen / 2) {
1469
+ length = strLen / 2;
1470
+ }
1471
+ let i;
1472
+ for (i = 0; i < length; ++i) {
1473
+ const parsed = parseInt(string.substr(i * 2, 2), 16);
1474
+ if (parsed !== parsed) {
1475
+ return i;
1476
+ }
1477
+ buf[offset + i] = parsed;
1478
+ }
1479
+ return i;
1480
+ }
1481
+ static _utf8ToBytes(string, units) {
1482
+ units = units || Infinity;
1483
+ const length = string.length;
1484
+ const bytes = [];
1485
+ let codePoint;
1486
+ let leadSurrogate = null;
1487
+ for (let i = 0; i < length; ++i) {
1488
+ codePoint = string.charCodeAt(i);
1489
+ if (codePoint > 55295 && codePoint < 57344) {
1490
+ if (!leadSurrogate) {
1491
+ if (codePoint > 56319) {
1492
+ if ((units -= 3) > -1) {
1493
+ bytes.push(239, 191, 189);
1494
+ }
1495
+ continue;
1496
+ } else if (i + 1 === length) {
1497
+ if ((units -= 3) > -1) {
1498
+ bytes.push(239, 191, 189);
1499
+ }
1500
+ continue;
1501
+ }
1502
+ leadSurrogate = codePoint;
1503
+ continue;
1504
+ }
1505
+ if (codePoint < 56320) {
1506
+ if ((units -= 3) > -1) {
1507
+ bytes.push(239, 191, 189);
1508
+ }
1509
+ leadSurrogate = codePoint;
1510
+ continue;
1511
+ }
1512
+ codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
1513
+ } else if (leadSurrogate) {
1514
+ if ((units -= 3) > -1) {
1515
+ bytes.push(239, 191, 189);
1516
+ }
1517
+ }
1518
+ leadSurrogate = null;
1519
+ if (codePoint < 128) {
1520
+ if ((units -= 1) < 0) {
1521
+ break;
1522
+ }
1523
+ bytes.push(codePoint);
1524
+ } else if (codePoint < 2048) {
1525
+ if ((units -= 2) < 0) {
1526
+ break;
1527
+ }
1528
+ bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
1529
+ } else if (codePoint < 65536) {
1530
+ if ((units -= 3) < 0) {
1531
+ break;
1532
+ }
1533
+ bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
1534
+ } else if (codePoint < 1114112) {
1535
+ if ((units -= 4) < 0) {
1536
+ break;
1537
+ }
1538
+ bytes.push(
1539
+ codePoint >> 18 | 240,
1540
+ codePoint >> 12 & 63 | 128,
1541
+ codePoint >> 6 & 63 | 128,
1542
+ codePoint & 63 | 128
1543
+ );
1544
+ } else {
1545
+ throw new Error("Invalid code point");
1546
+ }
1547
+ }
1548
+ return bytes;
1549
+ }
1550
+ static _base64ToBytes(str) {
1551
+ return toByteArray(base64clean(str));
1552
+ }
1553
+ static _asciiToBytes(str) {
1554
+ const byteArray = [];
1555
+ for (let i = 0; i < str.length; ++i) {
1556
+ byteArray.push(str.charCodeAt(i) & 255);
1557
+ }
1558
+ return byteArray;
1559
+ }
1560
+ static _utf16leToBytes(str, units) {
1561
+ let c, hi, lo;
1562
+ const byteArray = [];
1563
+ for (let i = 0; i < str.length; ++i) {
1564
+ if ((units -= 2) < 0) break;
1565
+ c = str.charCodeAt(i);
1566
+ hi = c >> 8;
1567
+ lo = c % 256;
1568
+ byteArray.push(lo);
1569
+ byteArray.push(hi);
1570
+ }
1571
+ return byteArray;
1572
+ }
1573
+ static _hexSlice(buf, start, end) {
1574
+ const len = buf.length;
1575
+ if (!start || start < 0) {
1576
+ start = 0;
1577
+ }
1578
+ if (!end || end < 0 || end > len) {
1579
+ end = len;
1580
+ }
1581
+ let out = "";
1582
+ for (let i = start; i < end; ++i) {
1583
+ out += hexSliceLookupTable[buf[i]];
1584
+ }
1585
+ return out;
1586
+ }
1587
+ static _base64Slice(buf, start, end) {
1588
+ if (start === 0 && end === buf.length) {
1589
+ return fromByteArray(buf);
1590
+ } else {
1591
+ return fromByteArray(buf.slice(start, end));
1592
+ }
1593
+ }
1594
+ static _utf8Slice(buf, start, end) {
1595
+ end = Math.min(buf.length, end);
1596
+ const res = [];
1597
+ let i = start;
1598
+ while (i < end) {
1599
+ const firstByte = buf[i];
1600
+ let codePoint = null;
1601
+ let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
1602
+ if (i + bytesPerSequence <= end) {
1603
+ let secondByte, thirdByte, fourthByte, tempCodePoint;
1604
+ switch (bytesPerSequence) {
1605
+ case 1:
1606
+ if (firstByte < 128) {
1607
+ codePoint = firstByte;
1608
+ }
1609
+ break;
1610
+ case 2:
1611
+ secondByte = buf[i + 1];
1612
+ if ((secondByte & 192) === 128) {
1613
+ tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
1614
+ if (tempCodePoint > 127) {
1615
+ codePoint = tempCodePoint;
1616
+ }
1617
+ }
1618
+ break;
1619
+ case 3:
1620
+ secondByte = buf[i + 1];
1621
+ thirdByte = buf[i + 2];
1622
+ if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
1623
+ tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
1624
+ if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
1625
+ codePoint = tempCodePoint;
1626
+ }
1627
+ }
1628
+ break;
1629
+ case 4:
1630
+ secondByte = buf[i + 1];
1631
+ thirdByte = buf[i + 2];
1632
+ fourthByte = buf[i + 3];
1633
+ if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
1634
+ tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
1635
+ if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
1636
+ codePoint = tempCodePoint;
1637
+ }
1638
+ }
1639
+ }
1640
+ }
1641
+ if (codePoint === null) {
1642
+ codePoint = 65533;
1643
+ bytesPerSequence = 1;
1644
+ } else if (codePoint > 65535) {
1645
+ codePoint -= 65536;
1646
+ res.push(codePoint >>> 10 & 1023 | 55296);
1647
+ codePoint = 56320 | codePoint & 1023;
1648
+ }
1649
+ res.push(codePoint);
1650
+ i += bytesPerSequence;
1651
+ }
1652
+ return Buffer._decodeCodePointsArray(res);
1653
+ }
1654
+ static _decodeCodePointsArray(codePoints) {
1655
+ const len = codePoints.length;
1656
+ if (len <= MAX_ARGUMENTS_LENGTH) {
1657
+ return String.fromCharCode.apply(String, codePoints);
1658
+ }
1659
+ let res = "";
1660
+ let i = 0;
1661
+ while (i < len) {
1662
+ res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
1663
+ }
1664
+ return res;
1665
+ }
1666
+ static _asciiSlice(buf, start, end) {
1667
+ let ret = "";
1668
+ end = Math.min(buf.length, end);
1669
+ for (let i = start; i < end; ++i) {
1670
+ ret += String.fromCharCode(buf[i] & 127);
1671
+ }
1672
+ return ret;
1673
+ }
1674
+ static _latin1Slice(buf, start, end) {
1675
+ let ret = "";
1676
+ end = Math.min(buf.length, end);
1677
+ for (let i = start; i < end; ++i) {
1678
+ ret += String.fromCharCode(buf[i]);
1679
+ }
1680
+ return ret;
1681
+ }
1682
+ static _utf16leSlice(buf, start, end) {
1683
+ const bytes = buf.slice(start, end);
1684
+ let res = "";
1685
+ for (let i = 0; i < bytes.length - 1; i += 2) {
1686
+ res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
1687
+ }
1688
+ return res;
1689
+ }
1690
+ static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
1691
+ let indexSize = 1;
1692
+ let arrLength = arr.length;
1693
+ let valLength = val.length;
1694
+ if (encoding !== void 0) {
1695
+ encoding = Buffer._getEncoding(encoding);
1696
+ if (encoding === "ucs2" || encoding === "utf16le") {
1697
+ if (arr.length < 2 || val.length < 2) {
1698
+ return -1;
1699
+ }
1700
+ indexSize = 2;
1701
+ arrLength /= 2;
1702
+ valLength /= 2;
1703
+ byteOffset /= 2;
1704
+ }
1705
+ }
1706
+ function read(buf, i2) {
1707
+ if (indexSize === 1) {
1708
+ return buf[i2];
1709
+ } else {
1710
+ return buf.readUInt16BE(i2 * indexSize);
1711
+ }
1712
+ }
1713
+ let i;
1714
+ if (dir) {
1715
+ let foundIndex = -1;
1716
+ for (i = byteOffset; i < arrLength; i++) {
1717
+ if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
1718
+ if (foundIndex === -1) foundIndex = i;
1719
+ if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
1720
+ } else {
1721
+ if (foundIndex !== -1) i -= i - foundIndex;
1722
+ foundIndex = -1;
1723
+ }
1724
+ }
1725
+ } else {
1726
+ if (byteOffset + valLength > arrLength) {
1727
+ byteOffset = arrLength - valLength;
1728
+ }
1729
+ for (i = byteOffset; i >= 0; i--) {
1730
+ let found = true;
1731
+ for (let j = 0; j < valLength; j++) {
1732
+ if (read(arr, i + j) !== read(val, j)) {
1733
+ found = false;
1734
+ break;
1735
+ }
1736
+ }
1737
+ if (found) {
1738
+ return i;
1739
+ }
1740
+ }
1741
+ }
1742
+ return -1;
1743
+ }
1744
+ static _checkOffset(offset, ext, length) {
1745
+ if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
1746
+ if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
1747
+ }
1748
+ static _checkInt(buf, value, offset, ext, max, min) {
1749
+ if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
1750
+ if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
1751
+ if (offset + ext > buf.length) throw new RangeError("Index out of range");
1752
+ }
1753
+ static _getEncoding(encoding) {
1754
+ let toLowerCase = false;
1755
+ let originalEncoding = "";
1756
+ for (; ; ) {
1757
+ switch (encoding) {
1758
+ case "hex":
1759
+ return "hex";
1760
+ case "utf8":
1761
+ return "utf8";
1762
+ case "ascii":
1763
+ return "ascii";
1764
+ case "binary":
1765
+ return "binary";
1766
+ case "latin1":
1767
+ return "latin1";
1768
+ case "ucs2":
1769
+ return "ucs2";
1770
+ case "utf16le":
1771
+ return "utf16le";
1772
+ case "base64":
1773
+ return "base64";
1774
+ default: {
1775
+ if (toLowerCase) {
1776
+ throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
1777
+ }
1778
+ toLowerCase = true;
1779
+ originalEncoding = encoding;
1780
+ encoding = encoding.toLowerCase();
1781
+ }
1782
+ }
1783
+ }
1784
+ }
1785
+ }
1786
+ const hexSliceLookupTable = function() {
1787
+ const alphabet = "0123456789abcdef";
1788
+ const table = new Array(256);
1789
+ for (let i = 0; i < 16; ++i) {
1790
+ const i16 = i * 16;
1791
+ for (let j = 0; j < 16; ++j) {
1792
+ table[i16 + j] = alphabet[i] + alphabet[j];
1793
+ }
1794
+ }
1795
+ return table;
1796
+ }();
1797
+ const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
1798
+ function base64clean(str) {
1799
+ str = str.split("=")[0];
1800
+ str = str.trim().replace(INVALID_BASE64_RE, "");
1801
+ if (str.length < 2) return "";
1802
+ while (str.length % 4 !== 0) {
1803
+ str = str + "=";
1804
+ }
1805
+ return str;
1806
+ }
1807
+
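The added block above inlines a browser-friendly base64/Buffer implementation. In `_utf8ToBytes`, unpaired surrogates are replaced by the three UTF-8 bytes of U+FFFD (239, 191, 189), and each code point is otherwise encoded with 1–4 bytes depending on its range. As a minimal sketch of just the byte-length rule used there (illustrative only, not part of the package):

// Minimal sketch of the byte-length rules applied by _utf8ToBytes above.
// Lone surrogates (0xD800-0xDFFF) are treated as U+FFFD, which encodes to 3 bytes.
function utf8ByteLength(codePoint) {
  if (codePoint >= 0xd800 && codePoint <= 0xdfff) return 3; // replacement character
  if (codePoint < 0x80) return 1;      // ASCII
  if (codePoint < 0x800) return 2;     // 2-byte sequence
  if (codePoint < 0x10000) return 3;   // 3-byte sequence
  return 4;                            // astral plane, up to U+10FFFF
}

console.log(utf8ByteLength("€".codePointAt(0))); // 3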
25
1808
  function notEmpty(value) {
26
1809
  return value !== null && value !== void 0;
27
1810
  }
@@ -116,155 +1899,15 @@ function promiseMap(inputValues, mapper) {
116
1899
  return inputValues.reduce(reducer, Promise.resolve([]));
117
1900
  }
118
1901
 
119
- function getEnvironment() {
120
- try {
121
- if (isDefined(process) && isDefined(process.env)) {
122
- return {
123
- apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
124
- databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
125
- branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
126
- deployPreview: process.env.XATA_PREVIEW,
127
- deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
128
- vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
129
- vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
130
- };
131
- }
132
- } catch (err) {
133
- }
134
- try {
135
- if (isObject(Deno) && isObject(Deno.env)) {
136
- return {
137
- apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
138
- databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
139
- branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
140
- deployPreview: Deno.env.get("XATA_PREVIEW"),
141
- deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
142
- vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
143
- vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
144
- };
145
- }
146
- } catch (err) {
147
- }
148
- return {
149
- apiKey: getGlobalApiKey(),
150
- databaseURL: getGlobalDatabaseURL(),
151
- branch: getGlobalBranch(),
152
- deployPreview: void 0,
153
- deployPreviewBranch: void 0,
154
- vercelGitCommitRef: void 0,
155
- vercelGitRepoOwner: void 0
156
- };
157
- }
158
- function getEnableBrowserVariable() {
159
- try {
160
- if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
161
- return process.env.XATA_ENABLE_BROWSER === "true";
162
- }
163
- } catch (err) {
164
- }
165
- try {
166
- if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
167
- return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
168
- }
169
- } catch (err) {
170
- }
171
- try {
172
- return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
173
- } catch (err) {
174
- return void 0;
175
- }
176
- }
177
- function getGlobalApiKey() {
178
- try {
179
- return XATA_API_KEY;
180
- } catch (err) {
181
- return void 0;
182
- }
183
- }
184
- function getGlobalDatabaseURL() {
185
- try {
186
- return XATA_DATABASE_URL;
187
- } catch (err) {
188
- return void 0;
189
- }
190
- }
191
- function getGlobalBranch() {
192
- try {
193
- return XATA_BRANCH;
194
- } catch (err) {
195
- return void 0;
196
- }
197
- }
198
- function getDatabaseURL() {
199
- try {
200
- const { databaseURL } = getEnvironment();
201
- return databaseURL;
202
- } catch (err) {
203
- return void 0;
204
- }
205
- }
206
- function getAPIKey() {
207
- try {
208
- const { apiKey } = getEnvironment();
209
- return apiKey;
210
- } catch (err) {
211
- return void 0;
212
- }
213
- }
214
- function getBranch() {
215
- try {
216
- const { branch } = getEnvironment();
217
- return branch;
218
- } catch (err) {
219
- return void 0;
220
- }
221
- }
222
- function buildPreviewBranchName({ org, branch }) {
223
- return `preview-${org}-${branch}`;
224
- }
225
- function getPreviewBranch() {
226
- try {
227
- const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
228
- if (deployPreviewBranch)
229
- return deployPreviewBranch;
230
- switch (deployPreview) {
231
- case "vercel": {
232
- if (!vercelGitCommitRef || !vercelGitRepoOwner) {
233
- console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
234
- return void 0;
235
- }
236
- return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
237
- }
238
- }
239
- return void 0;
240
- } catch (err) {
241
- return void 0;
242
- }
243
- }
244
-
245
- var __accessCheck$6 = (obj, member, msg) => {
246
- if (!member.has(obj))
247
- throw TypeError("Cannot " + msg);
248
- };
249
- var __privateGet$5 = (obj, member, getter) => {
250
- __accessCheck$6(obj, member, "read from private field");
251
- return getter ? getter.call(obj) : member.get(obj);
252
- };
253
- var __privateAdd$6 = (obj, member, value) => {
254
- if (member.has(obj))
255
- throw TypeError("Cannot add the same private member more than once");
256
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
257
- };
258
- var __privateSet$4 = (obj, member, value, setter) => {
259
- __accessCheck$6(obj, member, "write to private field");
260
- setter ? setter.call(obj, value) : member.set(obj, value);
261
- return value;
262
- };
263
- var __privateMethod$4 = (obj, member, method) => {
264
- __accessCheck$6(obj, member, "access private method");
265
- return method;
1902
+ var __typeError$6 = (msg) => {
1903
+ throw TypeError(msg);
266
1904
  };
267
- var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
1905
+ var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
1906
+ var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
1907
+ var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1908
+ var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
1909
+ var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
1910
+ var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
268
1911
  const REQUEST_TIMEOUT = 5 * 60 * 1e3;
269
1912
  function getFetchImplementation(userFetch) {
270
1913
  const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
@@ -277,10 +1920,10 @@ function getFetchImplementation(userFetch) {
277
1920
  }
278
1921
  class ApiRequestPool {
279
1922
  constructor(concurrency = 10) {
280
- __privateAdd$6(this, _enqueue);
281
- __privateAdd$6(this, _fetch, void 0);
282
- __privateAdd$6(this, _queue, void 0);
283
- __privateAdd$6(this, _concurrency, void 0);
1923
+ __privateAdd$6(this, _ApiRequestPool_instances);
1924
+ __privateAdd$6(this, _fetch);
1925
+ __privateAdd$6(this, _queue);
1926
+ __privateAdd$6(this, _concurrency);
284
1927
  __privateSet$4(this, _queue, []);
285
1928
  __privateSet$4(this, _concurrency, concurrency);
286
1929
  this.running = 0;
@@ -315,7 +1958,7 @@ class ApiRequestPool {
315
1958
  }
316
1959
  return response;
317
1960
  };
318
- return __privateMethod$4(this, _enqueue, enqueue_fn).call(this, async () => {
1961
+ return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
319
1962
  return await runRequest();
320
1963
  });
321
1964
  }
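The `__typeError$6` / `__accessCheck$6` / `_ApiRequestPool_instances` helpers above look like the newer esbuild-style lowering of class private members: one WeakSet per class "brands" its instances, private fields live in WeakMaps, and every private access is guarded by a membership check. A hand-written equivalent of that pattern (names are illustrative, not from the package):

// Illustrative hand-written equivalent of the compiled private-member pattern above.
const _instances = new WeakSet(); // brands objects created by Counter
const _count = new WeakMap();     // backing storage for the "private field"

function bump(self) {             // stands in for a private method
  _count.set(self, _count.get(self) + 1);
}

class Counter {
  constructor() {
    _instances.add(this);         // like __privateAdd(this, _Counter_instances)
    _count.set(this, 0);          // like __privateAdd + __privateSet for #count
  }
  increment() {
    if (!_instances.has(this)) throw TypeError("Cannot access private method");
    bump(this);
    return _count.get(this);
  }
}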
@@ -323,7 +1966,7 @@ class ApiRequestPool {
323
1966
  _fetch = new WeakMap();
324
1967
  _queue = new WeakMap();
325
1968
  _concurrency = new WeakMap();
326
- _enqueue = new WeakSet();
1969
+ _ApiRequestPool_instances = new WeakSet();
327
1970
  enqueue_fn = function(task) {
328
1971
  const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
329
1972
  this.started--;
@@ -526,7 +2169,7 @@ function defaultOnOpen(response) {
526
2169
  }
527
2170
  }
528
2171
 
529
- const VERSION = "0.29.2";
2172
+ const VERSION = "0.29.5";
530
2173
 
531
2174
  class ErrorWithCause extends Error {
532
2175
  constructor(message, options) {
@@ -606,35 +2249,30 @@ function parseProviderString(provider = "production") {
606
2249
  return provider;
607
2250
  }
608
2251
  const [main, workspaces] = provider.split(",");
609
- if (!main || !workspaces)
610
- return null;
2252
+ if (!main || !workspaces) return null;
611
2253
  return { main, workspaces };
612
2254
  }
613
2255
  function buildProviderString(provider) {
614
- if (isHostProviderAlias(provider))
615
- return provider;
2256
+ if (isHostProviderAlias(provider)) return provider;
616
2257
  return `${provider.main},${provider.workspaces}`;
617
2258
  }
618
2259
  function parseWorkspacesUrlParts(url) {
619
- if (!isString(url))
620
- return null;
2260
+ if (!isString(url)) return null;
621
2261
  const matches = {
622
2262
  production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
623
2263
  staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
624
2264
  dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
625
- local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:([^:]+):?(.*)?/)
2265
+ local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
626
2266
  };
627
2267
  const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
628
- if (!isHostProviderAlias(host) || !match)
629
- return null;
2268
+ if (!isHostProviderAlias(host) || !match) return null;
630
2269
  return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
631
2270
  }
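The `local` pattern above now expects a `/db/{database}` path after the port rather than capturing the port as the database segment, bringing it in line with the hosted URL shapes. As a rough illustration of the URL form the updated regex is written to parse (workspace, region, database and branch values below are made up):

// Illustrative only; the names are hypothetical.
const parts = parseWorkspacesUrlParts("https://my-workspace.us-east-1.localhost:6543/db/my-db:main");
// parts ≈ { workspace: "my-workspace", region: "us-east-1", database: "my-db", branch: "main", host: "local" }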
632
2271
 
633
2272
  const pool = new ApiRequestPool();
634
2273
  const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
635
2274
  const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
636
- if (value === void 0 || value === null)
637
- return acc;
2275
+ if (value === void 0 || value === null) return acc;
638
2276
  return { ...acc, [key]: value };
639
2277
  }, {});
640
2278
  const query = new URLSearchParams(cleanQueryParams).toString();
@@ -682,8 +2320,7 @@ function hostHeader(url) {
682
2320
  return groups?.host ? { Host: groups.host } : {};
683
2321
  }
684
2322
  async function parseBody(body, headers) {
685
- if (!isDefined(body))
686
- return void 0;
2323
+ if (!isDefined(body)) return void 0;
687
2324
  if (isBlob(body) || typeof body.text === "function") {
688
2325
  return body;
689
2326
  }
@@ -762,8 +2399,7 @@ async function fetch$1({
762
2399
  [TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
763
2400
  });
764
2401
  const message = response.headers?.get("x-xata-message");
765
- if (message)
766
- console.warn(message);
2402
+ if (message) console.warn(message);
767
2403
  if (response.status === 204) {
768
2404
  return {};
769
2405
  }
@@ -847,16 +2483,96 @@ function parseUrl(url) {
847
2483
 
848
2484
  const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
849
2485
 
850
- const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/apply", method: "post", ...variables, signal });
2486
+ const listClusterBranches = (variables, signal) => dataPlaneFetch({
2487
+ url: "/cluster/{clusterId}/branches",
2488
+ method: "get",
2489
+ ...variables,
2490
+ signal
2491
+ });
2492
+ const listClusterExtensions = (variables, signal) => dataPlaneFetch({
2493
+ url: "/cluster/{clusterId}/extensions",
2494
+ method: "get",
2495
+ ...variables,
2496
+ signal
2497
+ });
2498
+ const installClusterExtension = (variables, signal) => dataPlaneFetch({
2499
+ url: "/cluster/{clusterId}/extensions",
2500
+ method: "post",
2501
+ ...variables,
2502
+ signal
2503
+ });
2504
+ const dropClusterExtension = (variables, signal) => dataPlaneFetch({
2505
+ url: "/cluster/{clusterId}/extensions",
2506
+ method: "delete",
2507
+ ...variables,
2508
+ signal
2509
+ });
2510
+ const getClusterMetrics = (variables, signal) => dataPlaneFetch({
2511
+ url: "/cluster/{clusterId}/metrics",
2512
+ method: "get",
2513
+ ...variables,
2514
+ signal
2515
+ });
2516
+ const applyMigration = (variables, signal) => dataPlaneFetch({
2517
+ url: "/db/{dbBranchName}/migrations/apply",
2518
+ method: "post",
2519
+ ...variables,
2520
+ signal
2521
+ });
2522
+ const startMigration = (variables, signal) => dataPlaneFetch({
2523
+ url: "/db/{dbBranchName}/migrations/start",
2524
+ method: "post",
2525
+ ...variables,
2526
+ signal
2527
+ });
2528
+ const completeMigration = (variables, signal) => dataPlaneFetch({
2529
+ url: "/db/{dbBranchName}/migrations/complete",
2530
+ method: "post",
2531
+ ...variables,
2532
+ signal
2533
+ });
2534
+ const rollbackMigration = (variables, signal) => dataPlaneFetch({
2535
+ url: "/db/{dbBranchName}/migrations/rollback",
2536
+ method: "post",
2537
+ ...variables,
2538
+ signal
2539
+ });
851
2540
  const adaptTable = (variables, signal) => dataPlaneFetch({
852
2541
  url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
853
2542
  method: "post",
854
2543
  ...variables,
855
2544
  signal
856
2545
  });
857
- const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/status", method: "get", ...variables, signal });
858
- const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/jobs/{jobId}", method: "get", ...variables, signal });
859
- const getMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/history", method: "get", ...variables, signal });
2546
+ const adaptAllTables = (variables, signal) => dataPlaneFetch({
2547
+ url: "/db/{dbBranchName}/migrations/adapt",
2548
+ method: "post",
2549
+ ...variables,
2550
+ signal
2551
+ });
2552
+ const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
2553
+ url: "/db/{dbBranchName}/migrations/status",
2554
+ method: "get",
2555
+ ...variables,
2556
+ signal
2557
+ });
2558
+ const getMigrationJobs = (variables, signal) => dataPlaneFetch({
2559
+ url: "/db/{dbBranchName}/migrations/jobs",
2560
+ method: "get",
2561
+ ...variables,
2562
+ signal
2563
+ });
2564
+ const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
2565
+ url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
2566
+ method: "get",
2567
+ ...variables,
2568
+ signal
2569
+ });
2570
+ const getMigrationHistory = (variables, signal) => dataPlaneFetch({
2571
+ url: "/db/{dbBranchName}/migrations/history",
2572
+ method: "get",
2573
+ ...variables,
2574
+ signal
2575
+ });
860
2576
  const getBranchList = (variables, signal) => dataPlaneFetch({
861
2577
  url: "/dbs/{dbName}",
862
2578
  method: "get",
@@ -883,68 +2599,166 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
883
2599
  ...variables,
884
2600
  signal
885
2601
  });
886
- const getSchema = (variables, signal) => dataPlaneFetch({
887
- url: "/db/{dbBranchName}/schema",
888
- method: "get",
2602
+ const getSchema = (variables, signal) => dataPlaneFetch({
2603
+ url: "/db/{dbBranchName}/schema",
2604
+ method: "get",
2605
+ ...variables,
2606
+ signal
2607
+ });
2608
+ const getSchemas = (variables, signal) => dataPlaneFetch({
2609
+ url: "/db/{dbBranchName}/schemas",
2610
+ method: "get",
2611
+ ...variables,
2612
+ signal
2613
+ });
2614
+ const copyBranch = (variables, signal) => dataPlaneFetch({
2615
+ url: "/db/{dbBranchName}/copy",
2616
+ method: "post",
2617
+ ...variables,
2618
+ signal
2619
+ });
2620
+ const getBranchMoveStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/move", method: "get", ...variables, signal });
2621
+ const moveBranch = (variables, signal) => dataPlaneFetch({
2622
+ url: "/db/{dbBranchName}/move",
2623
+ method: "put",
2624
+ ...variables,
2625
+ signal
2626
+ });
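The two endpoints above pair a PUT that initiates a branch move with a GET that reports its progress. The PUT body is not shown in this diff, so the shape below is purely an assumption:

// Illustrative pairing of moveBranch and getBranchMoveStatus; the body shape is assumed.
await moveBranch({
  pathParams: { dbBranchName: "my-db:main" },
  body: {} // presumably identifies the target of the move, e.g. a cluster
});
const moveStatus = await getBranchMoveStatus({ pathParams: { dbBranchName: "my-db:main" } });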
2627
+ const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
2628
+ url: "/db/{dbBranchName}/metadata",
2629
+ method: "put",
2630
+ ...variables,
2631
+ signal
2632
+ });
2633
+ const getBranchMetadata = (variables, signal) => dataPlaneFetch({
2634
+ url: "/db/{dbBranchName}/metadata",
2635
+ method: "get",
2636
+ ...variables,
2637
+ signal
2638
+ });
2639
+ const getBranchStats = (variables, signal) => dataPlaneFetch({
2640
+ url: "/db/{dbBranchName}/stats",
2641
+ method: "get",
2642
+ ...variables,
2643
+ signal
2644
+ });
2645
+ const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
2646
+ const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
2647
+ const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
2648
+ url: "/dbs/{dbName}/gitBranches",
2649
+ method: "delete",
2650
+ ...variables,
2651
+ signal
2652
+ });
2653
+ const resolveBranch = (variables, signal) => dataPlaneFetch({
2654
+ url: "/dbs/{dbName}/resolveBranch",
2655
+ method: "get",
2656
+ ...variables,
2657
+ signal
2658
+ });
2659
+ const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
2660
+ url: "/db/{dbBranchName}/migrations",
2661
+ method: "get",
2662
+ ...variables,
2663
+ signal
2664
+ });
2665
+ const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
2666
+ url: "/db/{dbBranchName}/migrations/plan",
2667
+ method: "post",
2668
+ ...variables,
2669
+ signal
2670
+ });
2671
+ const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
2672
+ url: "/db/{dbBranchName}/migrations/execute",
2673
+ method: "post",
2674
+ ...variables,
2675
+ signal
2676
+ });
2677
+ const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
2678
+ url: "/dbs/{dbName}/migrations/query",
2679
+ method: "post",
2680
+ ...variables,
2681
+ signal
2682
+ });
2683
+ const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
2684
+ const getMigrationRequest = (variables, signal) => dataPlaneFetch({
2685
+ url: "/dbs/{dbName}/migrations/{mrNumber}",
2686
+ method: "get",
2687
+ ...variables,
2688
+ signal
2689
+ });
2690
+ const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
2691
+ url: "/dbs/{dbName}/migrations/{mrNumber}",
2692
+ method: "patch",
2693
+ ...variables,
2694
+ signal
2695
+ });
2696
+ const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
2697
+ url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
2698
+ method: "post",
2699
+ ...variables,
2700
+ signal
2701
+ });
2702
+ const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
2703
+ url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
2704
+ method: "post",
2705
+ ...variables,
2706
+ signal
2707
+ });
2708
+ const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
2709
+ url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2710
+ method: "get",
2711
+ ...variables,
2712
+ signal
2713
+ });
2714
+ const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
2715
+ url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2716
+ method: "post",
2717
+ ...variables,
2718
+ signal
2719
+ });
2720
+ const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
2721
+ url: "/db/{dbBranchName}/schema/history",
2722
+ method: "post",
889
2723
  ...variables,
890
2724
  signal
891
2725
  });
892
- const copyBranch = (variables, signal) => dataPlaneFetch({
893
- url: "/db/{dbBranchName}/copy",
2726
+ const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
2727
+ url: "/db/{dbBranchName}/schema/compare",
894
2728
  method: "post",
895
2729
  ...variables,
896
2730
  signal
897
2731
  });
898
- const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
899
- url: "/db/{dbBranchName}/metadata",
900
- method: "put",
2732
+ const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
2733
+ url: "/db/{dbBranchName}/schema/compare/{branchName}",
2734
+ method: "post",
901
2735
  ...variables,
902
2736
  signal
903
2737
  });
904
- const getBranchMetadata = (variables, signal) => dataPlaneFetch({
905
- url: "/db/{dbBranchName}/metadata",
906
- method: "get",
2738
+ const updateBranchSchema = (variables, signal) => dataPlaneFetch({
2739
+ url: "/db/{dbBranchName}/schema/update",
2740
+ method: "post",
907
2741
  ...variables,
908
2742
  signal
909
2743
  });
910
- const getBranchStats = (variables, signal) => dataPlaneFetch({
911
- url: "/db/{dbBranchName}/stats",
912
- method: "get",
2744
+ const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
2745
+ url: "/db/{dbBranchName}/schema/preview",
2746
+ method: "post",
913
2747
  ...variables,
914
2748
  signal
915
2749
  });
916
- const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
917
- const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
918
- const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "delete", ...variables, signal });
919
- const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
920
- const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
921
- const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
922
- const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
923
- const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
924
- const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
925
- const getMigrationRequest = (variables, signal) => dataPlaneFetch({
926
- url: "/dbs/{dbName}/migrations/{mrNumber}",
927
- method: "get",
2750
+ const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
2751
+ url: "/db/{dbBranchName}/schema/apply",
2752
+ method: "post",
928
2753
  ...variables,
929
2754
  signal
930
2755
  });
931
- const updateMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}", method: "patch", ...variables, signal });
932
- const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/commits", method: "post", ...variables, signal });
933
- const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
934
- const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
935
- const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
936
- url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2756
+ const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
2757
+ url: "/db/{dbBranchName}/schema/push",
937
2758
  method: "post",
938
2759
  ...variables,
939
2760
  signal
940
2761
  });
941
- const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
942
- const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
943
- const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
944
- const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
945
- const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
946
- const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
947
- const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
948
2762
  const createTable = (variables, signal) => dataPlaneFetch({
949
2763
  url: "/db/{dbBranchName}/tables/{tableName}",
950
2764
  method: "put",
@@ -957,14 +2771,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
957
2771
  ...variables,
958
2772
  signal
959
2773
  });
960
- const updateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}", method: "patch", ...variables, signal });
2774
+ const updateTable = (variables, signal) => dataPlaneFetch({
2775
+ url: "/db/{dbBranchName}/tables/{tableName}",
2776
+ method: "patch",
2777
+ ...variables,
2778
+ signal
2779
+ });
961
2780
  const getTableSchema = (variables, signal) => dataPlaneFetch({
962
2781
  url: "/db/{dbBranchName}/tables/{tableName}/schema",
963
2782
  method: "get",
964
2783
  ...variables,
965
2784
  signal
966
2785
  });
967
- const setTableSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/schema", method: "put", ...variables, signal });
2786
+ const setTableSchema = (variables, signal) => dataPlaneFetch({
2787
+ url: "/db/{dbBranchName}/tables/{tableName}/schema",
2788
+ method: "put",
2789
+ ...variables,
2790
+ signal
2791
+ });
968
2792
  const getTableColumns = (variables, signal) => dataPlaneFetch({
969
2793
  url: "/db/{dbBranchName}/tables/{tableName}/columns",
970
2794
  method: "get",
@@ -972,7 +2796,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
972
2796
  signal
973
2797
  });
974
2798
  const addTableColumn = (variables, signal) => dataPlaneFetch(
975
- { url: "/db/{dbBranchName}/tables/{tableName}/columns", method: "post", ...variables, signal }
2799
+ {
2800
+ url: "/db/{dbBranchName}/tables/{tableName}/columns",
2801
+ method: "post",
2802
+ ...variables,
2803
+ signal
2804
+ }
976
2805
  );
977
2806
  const getColumn = (variables, signal) => dataPlaneFetch({
978
2807
  url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
@@ -980,15 +2809,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
980
2809
  ...variables,
981
2810
  signal
982
2811
  });
983
- const updateColumn = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}", method: "patch", ...variables, signal });
2812
+ const updateColumn = (variables, signal) => dataPlaneFetch({
2813
+ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
2814
+ method: "patch",
2815
+ ...variables,
2816
+ signal
2817
+ });
984
2818
  const deleteColumn = (variables, signal) => dataPlaneFetch({
985
2819
  url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
986
2820
  method: "delete",
987
2821
  ...variables,
988
2822
  signal
989
2823
  });
990
- const branchTransaction = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/transaction", method: "post", ...variables, signal });
991
- const insertRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data", method: "post", ...variables, signal });
2824
+ const branchTransaction = (variables, signal) => dataPlaneFetch({
2825
+ url: "/db/{dbBranchName}/transaction",
2826
+ method: "post",
2827
+ ...variables,
2828
+ signal
2829
+ });
2830
+ const insertRecord = (variables, signal) => dataPlaneFetch({
2831
+ url: "/db/{dbBranchName}/tables/{tableName}/data",
2832
+ method: "post",
2833
+ ...variables,
2834
+ signal
2835
+ });
992
2836
  const getFileItem = (variables, signal) => dataPlaneFetch({
993
2837
  url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
994
2838
  method: "get",
@@ -1031,11 +2875,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
1031
2875
  ...variables,
1032
2876
  signal
1033
2877
  });
1034
- const insertRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "put", ...variables, signal });
1035
- const updateRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "patch", ...variables, signal });
1036
- const upsertRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "post", ...variables, signal });
1037
- const deleteRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "delete", ...variables, signal });
1038
- const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/bulk", method: "post", ...variables, signal });
2878
+ const insertRecordWithID = (variables, signal) => dataPlaneFetch({
2879
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2880
+ method: "put",
2881
+ ...variables,
2882
+ signal
2883
+ });
2884
+ const updateRecordWithID = (variables, signal) => dataPlaneFetch({
2885
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2886
+ method: "patch",
2887
+ ...variables,
2888
+ signal
2889
+ });
2890
+ const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
2891
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2892
+ method: "post",
2893
+ ...variables,
2894
+ signal
2895
+ });
2896
+ const deleteRecord = (variables, signal) => dataPlaneFetch({
2897
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2898
+ method: "delete",
2899
+ ...variables,
2900
+ signal
2901
+ });
2902
+ const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
2903
+ url: "/db/{dbBranchName}/tables/{tableName}/bulk",
2904
+ method: "post",
2905
+ ...variables,
2906
+ signal
2907
+ });
1039
2908
  const queryTable = (variables, signal) => dataPlaneFetch({
1040
2909
  url: "/db/{dbBranchName}/tables/{tableName}/query",
1041
2910
  method: "post",
@@ -1054,16 +2923,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
1054
2923
  ...variables,
1055
2924
  signal
1056
2925
  });
1057
- const vectorSearchTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch", method: "post", ...variables, signal });
2926
+ const vectorSearchTable = (variables, signal) => dataPlaneFetch({
2927
+ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
2928
+ method: "post",
2929
+ ...variables,
2930
+ signal
2931
+ });
1058
2932
  const askTable = (variables, signal) => dataPlaneFetch({
1059
2933
  url: "/db/{dbBranchName}/tables/{tableName}/ask",
1060
2934
  method: "post",
1061
2935
  ...variables,
1062
2936
  signal
1063
2937
  });
1064
- const askTableSession = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}", method: "post", ...variables, signal });
1065
- const summarizeTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/summarize", method: "post", ...variables, signal });
1066
- const aggregateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/aggregate", method: "post", ...variables, signal });
2938
+ const askTableSession = (variables, signal) => dataPlaneFetch({
2939
+ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
2940
+ method: "post",
2941
+ ...variables,
2942
+ signal
2943
+ });
2944
+ const summarizeTable = (variables, signal) => dataPlaneFetch({
2945
+ url: "/db/{dbBranchName}/tables/{tableName}/summarize",
2946
+ method: "post",
2947
+ ...variables,
2948
+ signal
2949
+ });
2950
+ const aggregateTable = (variables, signal) => dataPlaneFetch({
2951
+ url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
2952
+ method: "post",
2953
+ ...variables,
2954
+ signal
2955
+ });
1067
2956
  const fileAccess = (variables, signal) => dataPlaneFetch({
1068
2957
  url: "/file/{fileId}",
1069
2958
  method: "get",
@@ -1082,14 +2971,33 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
1082
2971
  ...variables,
1083
2972
  signal
1084
2973
  });
2974
+ const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
2975
+ url: "/db/{dbBranchName}/sql/batch",
2976
+ method: "post",
2977
+ ...variables,
2978
+ signal
2979
+ });
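Alongside the existing single-statement `sqlQuery`, the new `sqlBatchQuery` wrapper posts to `/db/{dbBranchName}/sql/batch` and is exposed below under `operationsByTag$2.sql`. A hedged usage sketch (the batch body shape is an assumption, not taken from this diff):

// Hedged sketch; the statements array shape is assumed.
const results = await sqlBatchQuery({
  pathParams: { dbBranchName: "my-db:main" },
  body: {
    statements: [
      { statement: "SELECT * FROM teams WHERE id = $1", params: ["team_1"] },
      { statement: "SELECT count(*) FROM users" }
    ]
  }
});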
1085
2980
  const operationsByTag$2 = {
2981
+ cluster: {
2982
+ listClusterBranches,
2983
+ listClusterExtensions,
2984
+ installClusterExtension,
2985
+ dropClusterExtension,
2986
+ getClusterMetrics
2987
+ },
1086
2988
  migrations: {
1087
2989
  applyMigration,
2990
+ startMigration,
2991
+ completeMigration,
2992
+ rollbackMigration,
1088
2993
  adaptTable,
2994
+ adaptAllTables,
1089
2995
  getBranchMigrationJobStatus,
2996
+ getMigrationJobs,
1090
2997
  getMigrationJobStatus,
1091
2998
  getMigrationHistory,
1092
2999
  getSchema,
3000
+ getSchemas,
1093
3001
  getBranchMigrationHistory,
1094
3002
  getBranchMigrationPlan,
1095
3003
  executeBranchMigrationPlan,
@@ -1107,6 +3015,8 @@ const operationsByTag$2 = {
1107
3015
  createBranch,
1108
3016
  deleteBranch,
1109
3017
  copyBranch,
3018
+ getBranchMoveStatus,
3019
+ moveBranch,
1110
3020
  updateBranchMetadata,
1111
3021
  getBranchMetadata,
1112
3022
  getBranchStats,
@@ -1148,7 +3058,16 @@ const operationsByTag$2 = {
1148
3058
  deleteRecord,
1149
3059
  bulkInsertTableRecords
1150
3060
  },
1151
- files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess, fileUpload },
3061
+ files: {
3062
+ getFileItem,
3063
+ putFileItem,
3064
+ deleteFileItem,
3065
+ getFile,
3066
+ putFile,
3067
+ deleteFile,
3068
+ fileAccess,
3069
+ fileUpload
3070
+ },
1152
3071
  searchAndFilter: {
1153
3072
  queryTable,
1154
3073
  searchBranch,
@@ -1159,7 +3078,7 @@ const operationsByTag$2 = {
1159
3078
  summarizeTable,
1160
3079
  aggregateTable
1161
3080
  },
1162
- sql: { sqlQuery }
3081
+ sql: { sqlQuery, sqlBatchQuery }
1163
3082
  };
1164
3083
 
1165
3084
  const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
@@ -1226,7 +3145,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
1226
3145
  ...variables,
1227
3146
  signal
1228
3147
  });
1229
- const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({ url: "/user/oauth/tokens/{token}", method: "patch", ...variables, signal });
3148
+ const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
3149
+ url: "/user/oauth/tokens/{token}",
3150
+ method: "patch",
3151
+ ...variables,
3152
+ signal
3153
+ });
1230
3154
  const getWorkspacesList = (variables, signal) => controlPlaneFetch({
1231
3155
  url: "/workspaces",
1232
3156
  method: "get",
@@ -1257,49 +3181,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
1257
3181
  ...variables,
1258
3182
  signal
1259
3183
  });
1260
- const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "get", ...variables, signal });
1261
- const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "patch", ...variables, signal });
1262
- const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members", method: "get", ...variables, signal });
1263
- const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members/{userId}", method: "put", ...variables, signal });
3184
+ const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
3185
+ url: "/workspaces/{workspaceId}/settings",
3186
+ method: "get",
3187
+ ...variables,
3188
+ signal
3189
+ });
3190
+ const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
3191
+ url: "/workspaces/{workspaceId}/settings",
3192
+ method: "patch",
3193
+ ...variables,
3194
+ signal
3195
+ });
3196
+ const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
3197
+ url: "/workspaces/{workspaceId}/members",
3198
+ method: "get",
3199
+ ...variables,
3200
+ signal
3201
+ });
3202
+ const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
3203
+ url: "/workspaces/{workspaceId}/members/{userId}",
3204
+ method: "put",
3205
+ ...variables,
3206
+ signal
3207
+ });
1264
3208
  const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
1265
3209
  url: "/workspaces/{workspaceId}/members/{userId}",
1266
3210
  method: "delete",
1267
3211
  ...variables,
1268
3212
  signal
1269
3213
  });
1270
- const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites", method: "post", ...variables, signal });
1271
- const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "patch", ...variables, signal });
1272
- const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "delete", ...variables, signal });
1273
- const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept", method: "post", ...variables, signal });
1274
- const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend", method: "post", ...variables, signal });
1275
- const listClusters = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "get", ...variables, signal });
1276
- const createCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "post", ...variables, signal });
3214
+ const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
3215
+ url: "/workspaces/{workspaceId}/invites",
3216
+ method: "post",
3217
+ ...variables,
3218
+ signal
3219
+ });
3220
+ const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3221
+ url: "/workspaces/{workspaceId}/invites/{inviteId}",
3222
+ method: "patch",
3223
+ ...variables,
3224
+ signal
3225
+ });
3226
+ const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3227
+ url: "/workspaces/{workspaceId}/invites/{inviteId}",
3228
+ method: "delete",
3229
+ ...variables,
3230
+ signal
3231
+ });
3232
+ const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3233
+ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
3234
+ method: "post",
3235
+ ...variables,
3236
+ signal
3237
+ });
3238
+ const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3239
+ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
3240
+ method: "post",
3241
+ ...variables,
3242
+ signal
3243
+ });
3244
+ const listClusters = (variables, signal) => controlPlaneFetch({
3245
+ url: "/workspaces/{workspaceId}/clusters",
3246
+ method: "get",
3247
+ ...variables,
3248
+ signal
3249
+ });
3250
+ const createCluster = (variables, signal) => controlPlaneFetch({
3251
+ url: "/workspaces/{workspaceId}/clusters",
3252
+ method: "post",
3253
+ ...variables,
3254
+ signal
3255
+ });
1277
3256
  const getCluster = (variables, signal) => controlPlaneFetch({
1278
3257
  url: "/workspaces/{workspaceId}/clusters/{clusterId}",
1279
3258
  method: "get",
1280
3259
  ...variables,
1281
3260
  signal
1282
3261
  });
1283
- const updateCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters/{clusterId}", method: "patch", ...variables, signal });
3262
+ const updateCluster = (variables, signal) => controlPlaneFetch({
3263
+ url: "/workspaces/{workspaceId}/clusters/{clusterId}",
3264
+ method: "patch",
3265
+ ...variables,
3266
+ signal
3267
+ });
3268
+ const deleteCluster = (variables, signal) => controlPlaneFetch({
3269
+ url: "/workspaces/{workspaceId}/clusters/{clusterId}",
3270
+ method: "delete",
3271
+ ...variables,
3272
+ signal
3273
+ });
1284
3274
  const getDatabaseList = (variables, signal) => controlPlaneFetch({
1285
3275
  url: "/workspaces/{workspaceId}/dbs",
1286
3276
  method: "get",
1287
3277
  ...variables,
1288
3278
  signal
1289
3279
  });
1290
- const createDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "put", ...variables, signal });
3280
+ const createDatabase = (variables, signal) => controlPlaneFetch({
3281
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3282
+ method: "put",
3283
+ ...variables,
3284
+ signal
3285
+ });
1291
3286
  const deleteDatabase = (variables, signal) => controlPlaneFetch({
1292
3287
  url: "/workspaces/{workspaceId}/dbs/{dbName}",
1293
3288
  method: "delete",
1294
3289
  ...variables,
1295
3290
  signal
1296
3291
  });
1297
- const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "get", ...variables, signal });
1298
- const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "patch", ...variables, signal });
1299
- const renameDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename", method: "post", ...variables, signal });
1300
- const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "get", ...variables, signal });
1301
- const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "put", ...variables, signal });
1302
- const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "delete", ...variables, signal });
3292
+ const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
3293
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3294
+ method: "get",
3295
+ ...variables,
3296
+ signal
3297
+ });
3298
+ const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
3299
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3300
+ method: "patch",
3301
+ ...variables,
3302
+ signal
3303
+ });
3304
+ const renameDatabase = (variables, signal) => controlPlaneFetch({
3305
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
3306
+ method: "post",
3307
+ ...variables,
3308
+ signal
3309
+ });
3310
+ const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3311
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3312
+ method: "get",
3313
+ ...variables,
3314
+ signal
3315
+ });
3316
+ const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3317
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3318
+ method: "put",
3319
+ ...variables,
3320
+ signal
3321
+ });
3322
+ const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3323
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3324
+ method: "delete",
3325
+ ...variables,
3326
+ signal
3327
+ });
1303
3328
  const listRegions = (variables, signal) => controlPlaneFetch({
1304
3329
  url: "/workspaces/{workspaceId}/regions",
1305
3330
  method: "get",
@@ -1337,7 +3362,13 @@ const operationsByTag$1 = {
1337
3362
  acceptWorkspaceMemberInvite,
1338
3363
  resendWorkspaceMemberInvite
1339
3364
  },
1340
- xbcontrolOther: { listClusters, createCluster, getCluster, updateCluster },
3365
+ xbcontrolOther: {
3366
+ listClusters,
3367
+ createCluster,
3368
+ getCluster,
3369
+ updateCluster,
3370
+ deleteCluster
3371
+ },
1341
3372
  databases: {
1342
3373
  getDatabaseList,
1343
3374
  createDatabase,
@@ -1357,7 +3388,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
1357
3388
  const buildApiClient = () => class {
1358
3389
  constructor(options = {}) {
1359
3390
  const provider = options.host ?? "production";
1360
- const apiKey = options.apiKey ?? getAPIKey();
3391
+ const apiKey = options.apiKey;
1361
3392
  const trace = options.trace ?? defaultTrace;
1362
3393
  const clientID = generateUUID();
1363
3394
  if (!apiKey) {
@@ -1424,8 +3455,7 @@ function buildTransformString(transformations) {
1424
3455
  ).join(",");
1425
3456
  }
1426
3457
  function transformImage(url, ...transformations) {
1427
- if (!isDefined(url))
1428
- return void 0;
3458
+ if (!isDefined(url)) return void 0;
1429
3459
  const newTransformations = buildTransformString(transformations);
1430
3460
  const { hostname, pathname, search } = new URL(url);
1431
3461
  const pathParts = pathname.split("/");
@@ -1538,8 +3568,7 @@ class XataFile {
1538
3568
  }
1539
3569
  }
1540
3570
  const parseInputFileEntry = async (entry) => {
1541
- if (!isDefined(entry))
1542
- return null;
3571
+ if (!isDefined(entry)) return null;
1543
3572
  const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
1544
3573
  return compactObject({
1545
3574
  id,
@@ -1554,24 +3583,19 @@ const parseInputFileEntry = async (entry) => {
1554
3583
  };
1555
3584
 
1556
3585
  function cleanFilter(filter) {
1557
- if (!isDefined(filter))
1558
- return void 0;
1559
- if (!isObject(filter))
1560
- return filter;
3586
+ if (!isDefined(filter)) return void 0;
3587
+ if (!isObject(filter)) return filter;
1561
3588
  const values = Object.fromEntries(
1562
3589
  Object.entries(filter).reduce((acc, [key, value]) => {
1563
- if (!isDefined(value))
1564
- return acc;
3590
+ if (!isDefined(value)) return acc;
1565
3591
  if (Array.isArray(value)) {
1566
3592
  const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
1567
- if (clean.length === 0)
1568
- return acc;
3593
+ if (clean.length === 0) return acc;
1569
3594
  return [...acc, [key, clean]];
1570
3595
  }
1571
3596
  if (isObject(value)) {
1572
3597
  const clean = cleanFilter(value);
1573
- if (!isDefined(clean))
1574
- return acc;
3598
+ if (!isDefined(clean)) return acc;
1575
3599
  return [...acc, [key, clean]];
1576
3600
  }
1577
3601
  return [...acc, [key, value]];
@@ -1581,10 +3605,8 @@ function cleanFilter(filter) {
1581
3605
  }
1582
3606
 
1583
3607
  function stringifyJson(value) {
1584
- if (!isDefined(value))
1585
- return value;
1586
- if (isString(value))
1587
- return value;
3608
+ if (!isDefined(value)) return value;
3609
+ if (isString(value)) return value;
1588
3610
  try {
1589
3611
  return JSON.stringify(value);
1590
3612
  } catch (e) {
@@ -1599,28 +3621,17 @@ function parseJson(value) {
1599
3621
  }
1600
3622
  }
1601
3623
 
1602
- var __accessCheck$5 = (obj, member, msg) => {
1603
- if (!member.has(obj))
1604
- throw TypeError("Cannot " + msg);
1605
- };
1606
- var __privateGet$4 = (obj, member, getter) => {
1607
- __accessCheck$5(obj, member, "read from private field");
1608
- return getter ? getter.call(obj) : member.get(obj);
1609
- };
1610
- var __privateAdd$5 = (obj, member, value) => {
1611
- if (member.has(obj))
1612
- throw TypeError("Cannot add the same private member more than once");
1613
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1614
- };
1615
- var __privateSet$3 = (obj, member, value, setter) => {
1616
- __accessCheck$5(obj, member, "write to private field");
1617
- setter ? setter.call(obj, value) : member.set(obj, value);
1618
- return value;
3624
+ var __typeError$5 = (msg) => {
3625
+ throw TypeError(msg);
1619
3626
  };
3627
+ var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
3628
+ var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
3629
+ var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3630
+ var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
1620
3631
  var _query, _page;
1621
3632
  class Page {
1622
3633
  constructor(query, meta, records = []) {
1623
- __privateAdd$5(this, _query, void 0);
3634
+ __privateAdd$5(this, _query);
1624
3635
  __privateSet$3(this, _query, query);
1625
3636
  this.meta = meta;
1626
3637
  this.records = new PageRecordArray(this, records);
@@ -1707,7 +3718,7 @@ class RecordArray extends Array {
1707
3718
  const _PageRecordArray = class _PageRecordArray extends Array {
1708
3719
  constructor(...args) {
1709
3720
  super(..._PageRecordArray.parseConstructorParams(...args));
1710
- __privateAdd$5(this, _page, void 0);
3721
+ __privateAdd$5(this, _page);
1711
3722
  __privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
1712
3723
  }
1713
3724
  static parseConstructorParams(...args) {
@@ -1778,34 +3789,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1778
3789
  _page = new WeakMap();
1779
3790
  let PageRecordArray = _PageRecordArray;
1780
3791
 
1781
- var __accessCheck$4 = (obj, member, msg) => {
1782
- if (!member.has(obj))
1783
- throw TypeError("Cannot " + msg);
1784
- };
1785
- var __privateGet$3 = (obj, member, getter) => {
1786
- __accessCheck$4(obj, member, "read from private field");
1787
- return getter ? getter.call(obj) : member.get(obj);
1788
- };
1789
- var __privateAdd$4 = (obj, member, value) => {
1790
- if (member.has(obj))
1791
- throw TypeError("Cannot add the same private member more than once");
1792
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1793
- };
1794
- var __privateSet$2 = (obj, member, value, setter) => {
1795
- __accessCheck$4(obj, member, "write to private field");
1796
- setter ? setter.call(obj, value) : member.set(obj, value);
1797
- return value;
1798
- };
1799
- var __privateMethod$3 = (obj, member, method) => {
1800
- __accessCheck$4(obj, member, "access private method");
1801
- return method;
3792
+ var __typeError$4 = (msg) => {
3793
+ throw TypeError(msg);
1802
3794
  };
1803
- var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
3795
+ var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
3796
+ var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
3797
+ var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3798
+ var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
3799
+ var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
3800
+ var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
1804
3801
  const _Query = class _Query {
1805
3802
  constructor(repository, table, data, rawParent) {
1806
- __privateAdd$4(this, _cleanFilterConstraint);
1807
- __privateAdd$4(this, _table$1, void 0);
1808
- __privateAdd$4(this, _repository, void 0);
3803
+ __privateAdd$4(this, _Query_instances);
3804
+ __privateAdd$4(this, _table$1);
3805
+ __privateAdd$4(this, _repository);
1809
3806
  __privateAdd$4(this, _data, { filter: {} });
1810
3807
  // Implements pagination
1811
3808
  this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
@@ -1883,12 +3880,12 @@ const _Query = class _Query {
1883
3880
  filter(a, b) {
1884
3881
  if (arguments.length === 1) {
1885
3882
  const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
1886
- [column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
3883
+ [column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
1887
3884
  }));
1888
3885
  const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
1889
3886
  return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
1890
3887
  } else {
1891
- const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
3888
+ const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
1892
3889
  const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
1893
3890
  return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
1894
3891
  }
@@ -1967,8 +3964,7 @@ const _Query = class _Query {
1967
3964
  }
1968
3965
  async getFirstOrThrow(options = {}) {
1969
3966
  const records = await this.getMany({ ...options, pagination: { size: 1 } });
1970
- if (records[0] === void 0)
1971
- throw new Error("No results found.");
3967
+ if (records[0] === void 0) throw new Error("No results found.");
1972
3968
  return records[0];
1973
3969
  }
1974
3970
  async summarize(params = {}) {
@@ -2023,7 +4019,7 @@ const _Query = class _Query {
2023
4019
  _table$1 = new WeakMap();
2024
4020
  _repository = new WeakMap();
2025
4021
  _data = new WeakMap();
2026
- _cleanFilterConstraint = new WeakSet();
4022
+ _Query_instances = new WeakSet();
2027
4023
  cleanFilterConstraint_fn = function(column, value) {
2028
4024
  const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
2029
4025
  if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
@@ -2084,8 +4080,7 @@ function isSortFilterString(value) {
2084
4080
  }
2085
4081
  function isSortFilterBase(filter) {
2086
4082
  return isObject(filter) && Object.entries(filter).every(([key, value]) => {
2087
- if (key === "*")
2088
- return value === "random";
4083
+ if (key === "*") return value === "random";
2089
4084
  return value === "asc" || value === "desc";
2090
4085
  });
2091
4086
  }
@@ -2106,29 +4101,15 @@ function buildSortFilter(filter) {
2106
4101
  }
2107
4102
  }
2108
4103
 
2109
- var __accessCheck$3 = (obj, member, msg) => {
2110
- if (!member.has(obj))
2111
- throw TypeError("Cannot " + msg);
4104
+ var __typeError$3 = (msg) => {
4105
+ throw TypeError(msg);
2112
4106
  };
2113
- var __privateGet$2 = (obj, member, getter) => {
2114
- __accessCheck$3(obj, member, "read from private field");
2115
- return getter ? getter.call(obj) : member.get(obj);
2116
- };
2117
- var __privateAdd$3 = (obj, member, value) => {
2118
- if (member.has(obj))
2119
- throw TypeError("Cannot add the same private member more than once");
2120
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2121
- };
2122
- var __privateSet$1 = (obj, member, value, setter) => {
2123
- __accessCheck$3(obj, member, "write to private field");
2124
- setter ? setter.call(obj, value) : member.set(obj, value);
2125
- return value;
2126
- };
2127
- var __privateMethod$2 = (obj, member, method) => {
2128
- __accessCheck$3(obj, member, "access private method");
2129
- return method;
2130
- };
2131
- var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
4107
+ var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
4108
+ var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
4109
+ var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
4110
+ var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
4111
+ var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
4112
+ var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
2132
4113
  const BULK_OPERATION_MAX_SIZE = 1e3;
2133
4114
  class Repository extends Query {
2134
4115
  }
@@ -2139,21 +4120,12 @@ class RestRepository extends Query {
2139
4120
  { name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
2140
4121
  {}
2141
4122
  );
2142
- __privateAdd$3(this, _insertRecordWithoutId);
2143
- __privateAdd$3(this, _insertRecordWithId);
2144
- __privateAdd$3(this, _insertRecords);
2145
- __privateAdd$3(this, _updateRecordWithID);
2146
- __privateAdd$3(this, _updateRecords);
2147
- __privateAdd$3(this, _upsertRecordWithID);
2148
- __privateAdd$3(this, _deleteRecord);
2149
- __privateAdd$3(this, _deleteRecords);
2150
- __privateAdd$3(this, _getSchemaTables);
2151
- __privateAdd$3(this, _transformObjectToApi);
2152
- __privateAdd$3(this, _table, void 0);
2153
- __privateAdd$3(this, _getFetchProps, void 0);
2154
- __privateAdd$3(this, _db, void 0);
2155
- __privateAdd$3(this, _schemaTables, void 0);
2156
- __privateAdd$3(this, _trace, void 0);
4123
+ __privateAdd$3(this, _RestRepository_instances);
4124
+ __privateAdd$3(this, _table);
4125
+ __privateAdd$3(this, _getFetchProps);
4126
+ __privateAdd$3(this, _db);
4127
+ __privateAdd$3(this, _schemaTables);
4128
+ __privateAdd$3(this, _trace);
2157
4129
  __privateSet$1(this, _table, options.table);
2158
4130
  __privateSet$1(this, _db, options.db);
2159
4131
  __privateSet$1(this, _schemaTables, options.schemaTables);
@@ -2172,31 +4144,28 @@ class RestRepository extends Query {
2172
4144
  return __privateGet$2(this, _trace).call(this, "create", async () => {
2173
4145
  const ifVersion = parseIfVersion(b, c, d);
2174
4146
  if (Array.isArray(a)) {
2175
- if (a.length === 0)
2176
- return [];
2177
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
4147
+ if (a.length === 0) return [];
4148
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
2178
4149
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2179
4150
  const result = await this.read(ids, columns);
2180
4151
  return result;
2181
4152
  }
2182
4153
  if (isString(a) && isObject(b)) {
2183
- if (a === "")
2184
- throw new Error("The id can't be empty");
4154
+ if (a === "") throw new Error("The id can't be empty");
2185
4155
  const columns = isValidSelectableColumns(c) ? c : void 0;
2186
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
4156
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
2187
4157
  }
2188
4158
  if (isObject(a) && isString(a.xata_id)) {
2189
- if (a.xata_id === "")
2190
- throw new Error("The id can't be empty");
4159
+ if (a.xata_id === "") throw new Error("The id can't be empty");
2191
4160
  const columns = isValidSelectableColumns(b) ? b : void 0;
2192
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
4161
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
2193
4162
  createOnly: true,
2194
4163
  ifVersion
2195
4164
  });
2196
4165
  }
2197
4166
  if (isObject(a)) {
2198
4167
  const columns = isValidSelectableColumns(b) ? b : void 0;
2199
- return __privateMethod$2(this, _insertRecordWithoutId, insertRecordWithoutId_fn).call(this, a, columns);
4168
+ return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
2200
4169
  }
2201
4170
  throw new Error("Invalid arguments for create method");
2202
4171
  });
@@ -2205,8 +4174,7 @@ class RestRepository extends Query {
2205
4174
  return __privateGet$2(this, _trace).call(this, "read", async () => {
2206
4175
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2207
4176
  if (Array.isArray(a)) {
2208
- if (a.length === 0)
2209
- return [];
4177
+ if (a.length === 0) return [];
2210
4178
  const ids = a.map((item) => extractId(item));
2211
4179
  const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
2212
4180
  const dictionary = finalObjects.reduce((acc, object) => {
@@ -2229,7 +4197,7 @@ class RestRepository extends Query {
2229
4197
  queryParams: { columns },
2230
4198
  ...__privateGet$2(this, _getFetchProps).call(this)
2231
4199
  });
2232
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4200
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2233
4201
  return initObject(
2234
4202
  __privateGet$2(this, _db),
2235
4203
  schemaTables,
@@ -2270,11 +4238,10 @@ class RestRepository extends Query {
2270
4238
  return __privateGet$2(this, _trace).call(this, "update", async () => {
2271
4239
  const ifVersion = parseIfVersion(b, c, d);
2272
4240
  if (Array.isArray(a)) {
2273
- if (a.length === 0)
2274
- return [];
4241
+ if (a.length === 0) return [];
2275
4242
  const existing = await this.read(a, ["xata_id"]);
2276
4243
  const updates = a.filter((_item, index) => existing[index] !== null);
2277
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, updates, {
4244
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
2278
4245
  ifVersion,
2279
4246
  upsert: false
2280
4247
  });
@@ -2285,15 +4252,14 @@ class RestRepository extends Query {
2285
4252
  try {
2286
4253
  if (isString(a) && isObject(b)) {
2287
4254
  const columns = isValidSelectableColumns(c) ? c : void 0;
2288
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
4255
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2289
4256
  }
2290
4257
  if (isObject(a) && isString(a.xata_id)) {
2291
4258
  const columns = isValidSelectableColumns(b) ? b : void 0;
2292
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
4259
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
2293
4260
  }
2294
4261
  } catch (error) {
2295
- if (error.status === 422)
2296
- return null;
4262
+ if (error.status === 422) return null;
2297
4263
  throw error;
2298
4264
  }
2299
4265
  throw new Error("Invalid arguments for update method");
@@ -2322,9 +4288,8 @@ class RestRepository extends Query {
2322
4288
  return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
2323
4289
  const ifVersion = parseIfVersion(b, c, d);
2324
4290
  if (Array.isArray(a)) {
2325
- if (a.length === 0)
2326
- return [];
2327
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
4291
+ if (a.length === 0) return [];
4292
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
2328
4293
  ifVersion,
2329
4294
  upsert: true
2330
4295
  });
@@ -2333,16 +4298,14 @@ class RestRepository extends Query {
2333
4298
  return result;
2334
4299
  }
2335
4300
  if (isString(a) && isObject(b)) {
2336
- if (a === "")
2337
- throw new Error("The id can't be empty");
4301
+ if (a === "") throw new Error("The id can't be empty");
2338
4302
  const columns = isValidSelectableColumns(c) ? c : void 0;
2339
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
4303
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2340
4304
  }
2341
4305
  if (isObject(a) && isString(a.xata_id)) {
2342
- if (a.xata_id === "")
2343
- throw new Error("The id can't be empty");
4306
+ if (a.xata_id === "") throw new Error("The id can't be empty");
2344
4307
  const columns = isValidSelectableColumns(c) ? c : void 0;
2345
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
4308
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
2346
4309
  }
2347
4310
  if (!isDefined(a) && isObject(b)) {
2348
4311
  return await this.create(b, c);
@@ -2357,24 +4320,21 @@ class RestRepository extends Query {
2357
4320
  return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
2358
4321
  const ifVersion = parseIfVersion(b, c, d);
2359
4322
  if (Array.isArray(a)) {
2360
- if (a.length === 0)
2361
- return [];
2362
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
4323
+ if (a.length === 0) return [];
4324
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
2363
4325
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2364
4326
  const result = await this.read(ids, columns);
2365
4327
  return result;
2366
4328
  }
2367
4329
  if (isString(a) && isObject(b)) {
2368
- if (a === "")
2369
- throw new Error("The id can't be empty");
4330
+ if (a === "") throw new Error("The id can't be empty");
2370
4331
  const columns = isValidSelectableColumns(c) ? c : void 0;
2371
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
4332
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
2372
4333
  }
2373
4334
  if (isObject(a) && isString(a.xata_id)) {
2374
- if (a.xata_id === "")
2375
- throw new Error("The id can't be empty");
4335
+ if (a.xata_id === "") throw new Error("The id can't be empty");
2376
4336
  const columns = isValidSelectableColumns(c) ? c : void 0;
2377
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
4337
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
2378
4338
  createOnly: false,
2379
4339
  ifVersion
2380
4340
  });
@@ -2391,25 +4351,22 @@ class RestRepository extends Query {
2391
4351
  async delete(a, b) {
2392
4352
  return __privateGet$2(this, _trace).call(this, "delete", async () => {
2393
4353
  if (Array.isArray(a)) {
2394
- if (a.length === 0)
2395
- return [];
4354
+ if (a.length === 0) return [];
2396
4355
  const ids = a.map((o) => {
2397
- if (isString(o))
2398
- return o;
2399
- if (isString(o.xata_id))
2400
- return o.xata_id;
4356
+ if (isString(o)) return o;
4357
+ if (isString(o.xata_id)) return o.xata_id;
2401
4358
  throw new Error("Invalid arguments for delete method");
2402
4359
  });
2403
4360
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2404
4361
  const result = await this.read(a, columns);
2405
- await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
4362
+ await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
2406
4363
  return result;
2407
4364
  }
2408
4365
  if (isString(a)) {
2409
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a, b);
4366
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
2410
4367
  }
2411
4368
  if (isObject(a) && isString(a.xata_id)) {
2412
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a.xata_id, b);
4369
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
2413
4370
  }
2414
4371
  throw new Error("Invalid arguments for delete method");
2415
4372
  });
@@ -2453,7 +4410,7 @@ class RestRepository extends Query {
2453
4410
  },
2454
4411
  ...__privateGet$2(this, _getFetchProps).call(this)
2455
4412
  });
2456
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4413
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2457
4414
  return {
2458
4415
  records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
2459
4416
  totalCount
@@ -2478,7 +4435,7 @@ class RestRepository extends Query {
2478
4435
  },
2479
4436
  ...__privateGet$2(this, _getFetchProps).call(this)
2480
4437
  });
2481
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4438
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2482
4439
  return {
2483
4440
  records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
2484
4441
  totalCount
@@ -2520,7 +4477,7 @@ class RestRepository extends Query {
2520
4477
  fetchOptions: data.fetchOptions,
2521
4478
  ...__privateGet$2(this, _getFetchProps).call(this)
2522
4479
  });
2523
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4480
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2524
4481
  const records = objects.map(
2525
4482
  (record) => initObject(
2526
4483
  __privateGet$2(this, _db),
@@ -2554,7 +4511,7 @@ class RestRepository extends Query {
2554
4511
  },
2555
4512
  ...__privateGet$2(this, _getFetchProps).call(this)
2556
4513
  });
2557
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4514
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2558
4515
  return {
2559
4516
  ...result,
2560
4517
  summaries: result.summaries.map(
@@ -2602,9 +4559,9 @@ _getFetchProps = new WeakMap();
2602
4559
  _db = new WeakMap();
2603
4560
  _schemaTables = new WeakMap();
2604
4561
  _trace = new WeakMap();
2605
- _insertRecordWithoutId = new WeakSet();
4562
+ _RestRepository_instances = new WeakSet();
2606
4563
  insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2607
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4564
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2608
4565
  const response = await insertRecord({
2609
4566
  pathParams: {
2610
4567
  workspace: "{workspaceId}",
@@ -2616,14 +4573,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2616
4573
  body: record,
2617
4574
  ...__privateGet$2(this, _getFetchProps).call(this)
2618
4575
  });
2619
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4576
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2620
4577
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2621
4578
  };
2622
- _insertRecordWithId = new WeakSet();
2623
4579
  insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
2624
- if (!recordId)
2625
- return null;
2626
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4580
+ if (!recordId) return null;
4581
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2627
4582
  const response = await insertRecordWithID({
2628
4583
  pathParams: {
2629
4584
  workspace: "{workspaceId}",
@@ -2636,13 +4591,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
2636
4591
  queryParams: { createOnly, columns, ifVersion },
2637
4592
  ...__privateGet$2(this, _getFetchProps).call(this)
2638
4593
  });
2639
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4594
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2640
4595
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2641
4596
  };
2642
- _insertRecords = new WeakSet();
2643
4597
  insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2644
4598
  const operations = await promiseMap(objects, async (object) => {
2645
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4599
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2646
4600
  return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
2647
4601
  });
2648
4602
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -2667,11 +4621,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2667
4621
  }
2668
4622
  return ids;
2669
4623
  };
2670
- _updateRecordWithID = new WeakSet();
2671
4624
  updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2672
- if (!recordId)
2673
- return null;
2674
- const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4625
+ if (!recordId) return null;
4626
+ const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2675
4627
  try {
2676
4628
  const response = await updateRecordWithID({
2677
4629
  pathParams: {
@@ -2685,7 +4637,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2685
4637
  body: record,
2686
4638
  ...__privateGet$2(this, _getFetchProps).call(this)
2687
4639
  });
2688
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4640
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2689
4641
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2690
4642
  } catch (e) {
2691
4643
  if (isObject(e) && e.status === 404) {
@@ -2694,10 +4646,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2694
4646
  throw e;
2695
4647
  }
2696
4648
  };
2697
- _updateRecords = new WeakSet();
2698
4649
  updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2699
4650
  const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
2700
- const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4651
+ const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2701
4652
  return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
2702
4653
  });
2703
4654
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -2722,10 +4673,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2722
4673
  }
2723
4674
  return ids;
2724
4675
  };
2725
- _upsertRecordWithID = new WeakSet();
2726
4676
  upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2727
- if (!recordId)
2728
- return null;
4677
+ if (!recordId) return null;
2729
4678
  const response = await upsertRecordWithID({
2730
4679
  pathParams: {
2731
4680
  workspace: "{workspaceId}",
@@ -2738,13 +4687,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2738
4687
  body: object,
2739
4688
  ...__privateGet$2(this, _getFetchProps).call(this)
2740
4689
  });
2741
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4690
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2742
4691
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2743
4692
  };
2744
- _deleteRecord = new WeakSet();
2745
4693
  deleteRecord_fn = async function(recordId, columns = ["*"]) {
2746
- if (!recordId)
2747
- return null;
4694
+ if (!recordId) return null;
2748
4695
  try {
2749
4696
  const response = await deleteRecord({
2750
4697
  pathParams: {
@@ -2757,7 +4704,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
2757
4704
  queryParams: { columns },
2758
4705
  ...__privateGet$2(this, _getFetchProps).call(this)
2759
4706
  });
2760
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4707
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2761
4708
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2762
4709
  } catch (e) {
2763
4710
  if (isObject(e) && e.status === 404) {
@@ -2766,7 +4713,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
2766
4713
  throw e;
2767
4714
  }
2768
4715
  };
2769
- _deleteRecords = new WeakSet();
2770
4716
  deleteRecords_fn = async function(recordIds) {
2771
4717
  const chunkedOperations = chunk(
2772
4718
  compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
@@ -2784,10 +4730,8 @@ deleteRecords_fn = async function(recordIds) {
2784
4730
  });
2785
4731
  }
2786
4732
  };
2787
- _getSchemaTables = new WeakSet();
2788
4733
  getSchemaTables_fn = async function() {
2789
- if (__privateGet$2(this, _schemaTables))
2790
- return __privateGet$2(this, _schemaTables);
4734
+ if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
2791
4735
  const { schema } = await getBranchDetails({
2792
4736
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
2793
4737
  ...__privateGet$2(this, _getFetchProps).call(this)
@@ -2795,16 +4739,13 @@ getSchemaTables_fn = async function() {
2795
4739
  __privateSet$1(this, _schemaTables, schema.tables);
2796
4740
  return schema.tables;
2797
4741
  };
2798
- _transformObjectToApi = new WeakSet();
2799
4742
  transformObjectToApi_fn = async function(object) {
2800
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4743
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2801
4744
  const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
2802
- if (!schema)
2803
- throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
4745
+ if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
2804
4746
  const result = {};
2805
4747
  for (const [key, value] of Object.entries(object)) {
2806
- if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
2807
- continue;
4748
+ if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
2808
4749
  const type = schema.columns.find((column) => column.name === key)?.type;
2809
4750
  switch (type) {
2810
4751
  case "link": {
@@ -2834,11 +4775,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
2834
4775
  const data = {};
2835
4776
  Object.assign(data, { ...object });
2836
4777
  const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
2837
- if (!columns)
2838
- console.error(`Table ${table} not found in schema`);
4778
+ if (!columns) console.error(`Table ${table} not found in schema`);
2839
4779
  for (const column of columns ?? []) {
2840
- if (!isValidColumn(selectedColumns, column))
2841
- continue;
4780
+ if (!isValidColumn(selectedColumns, column)) continue;
2842
4781
  const value = data[column.name];
2843
4782
  switch (column.type) {
2844
4783
  case "datetime": {
@@ -2924,15 +4863,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
2924
4863
  return record;
2925
4864
  };
2926
4865
  function extractId(value) {
2927
- if (isString(value))
2928
- return value;
2929
- if (isObject(value) && isString(value.xata_id))
2930
- return value.xata_id;
4866
+ if (isString(value)) return value;
4867
+ if (isObject(value) && isString(value.xata_id)) return value.xata_id;
2931
4868
  return void 0;
2932
4869
  }
2933
4870
  function isValidColumn(columns, column) {
2934
- if (columns.includes("*"))
2935
- return true;
4871
+ if (columns.includes("*")) return true;
2936
4872
  return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
2937
4873
  }
2938
4874
  function parseIfVersion(...args) {
@@ -2972,19 +4908,12 @@ const includesAll = (value) => ({ $includesAll: value });
2972
4908
  const includesNone = (value) => ({ $includesNone: value });
2973
4909
  const includesAny = (value) => ({ $includesAny: value });
2974
4910
 
2975
- var __accessCheck$2 = (obj, member, msg) => {
2976
- if (!member.has(obj))
2977
- throw TypeError("Cannot " + msg);
2978
- };
2979
- var __privateGet$1 = (obj, member, getter) => {
2980
- __accessCheck$2(obj, member, "read from private field");
2981
- return getter ? getter.call(obj) : member.get(obj);
2982
- };
2983
- var __privateAdd$2 = (obj, member, value) => {
2984
- if (member.has(obj))
2985
- throw TypeError("Cannot add the same private member more than once");
2986
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
4911
+ var __typeError$2 = (msg) => {
4912
+ throw TypeError(msg);
2987
4913
  };
4914
+ var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
4915
+ var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
4916
+ var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2988
4917
  var _tables;
2989
4918
  class SchemaPlugin extends XataPlugin {
2990
4919
  constructor() {
@@ -2996,8 +4925,7 @@ class SchemaPlugin extends XataPlugin {
2996
4925
  {},
2997
4926
  {
2998
4927
  get: (_target, table) => {
2999
- if (!isString(table))
3000
- throw new Error("Invalid table name");
4928
+ if (!isString(table)) throw new Error("Invalid table name");
3001
4929
  if (__privateGet$1(this, _tables)[table] === void 0) {
3002
4930
  __privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
3003
4931
  }
@@ -3088,30 +5016,23 @@ function getContentType(file) {
3088
5016
  return "application/octet-stream";
3089
5017
  }
3090
5018
 
3091
- var __accessCheck$1 = (obj, member, msg) => {
3092
- if (!member.has(obj))
3093
- throw TypeError("Cannot " + msg);
3094
- };
3095
- var __privateAdd$1 = (obj, member, value) => {
3096
- if (member.has(obj))
3097
- throw TypeError("Cannot add the same private member more than once");
3098
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5019
+ var __typeError$1 = (msg) => {
5020
+ throw TypeError(msg);
3099
5021
  };
3100
- var __privateMethod$1 = (obj, member, method) => {
3101
- __accessCheck$1(obj, member, "access private method");
3102
- return method;
3103
- };
3104
- var _search, search_fn;
5022
+ var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
5023
+ var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5024
+ var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
5025
+ var _SearchPlugin_instances, search_fn;
3105
5026
  class SearchPlugin extends XataPlugin {
3106
5027
  constructor(db) {
3107
5028
  super();
3108
5029
  this.db = db;
3109
- __privateAdd$1(this, _search);
5030
+ __privateAdd$1(this, _SearchPlugin_instances);
3110
5031
  }
3111
5032
  build(pluginOptions) {
3112
5033
  return {
3113
5034
  all: async (query, options = {}) => {
3114
- const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
5035
+ const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
3115
5036
  return {
3116
5037
  totalCount,
3117
5038
  records: records.map((record) => {
@@ -3121,7 +5042,7 @@ class SearchPlugin extends XataPlugin {
3121
5042
  };
3122
5043
  },
3123
5044
  byTable: async (query, options = {}) => {
3124
- const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
5045
+ const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
3125
5046
  const records = rawRecords.reduce((acc, record) => {
3126
5047
  const table = record.xata_table;
3127
5048
  const items = acc[table] ?? [];
@@ -3133,7 +5054,7 @@ class SearchPlugin extends XataPlugin {
3133
5054
  };
3134
5055
  }
3135
5056
  }
3136
- _search = new WeakSet();
5057
+ _SearchPlugin_instances = new WeakSet();
3137
5058
  search_fn = async function(query, options, pluginOptions) {
3138
5059
  const { tables, fuzziness, highlight, prefix, page } = options ?? {};
3139
5060
  const { records, totalCount } = await searchBranch({
@@ -3169,8 +5090,7 @@ function arrayString(val) {
3169
5090
  return result;
3170
5091
  }
3171
5092
  function prepareValue(value) {
3172
- if (!isDefined(value))
3173
- return null;
5093
+ if (!isDefined(value)) return null;
3174
5094
  if (value instanceof Date) {
3175
5095
  return value.toISOString();
3176
5096
  }
@@ -3197,8 +5117,8 @@ function prepareParams(param1, param2) {
3197
5117
  return { statement, params: param2?.map((value) => prepareValue(value)) };
3198
5118
  }
3199
5119
  if (isObject(param1)) {
3200
- const { statement, params, consistency } = param1;
3201
- return { statement, params: params?.map((value) => prepareValue(value)), consistency };
5120
+ const { statement, params, consistency, responseType } = param1;
5121
+ return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
3202
5122
  }
3203
5123
  throw new Error("Invalid query");
3204
5124
  }
@@ -3209,20 +5129,29 @@ class SQLPlugin extends XataPlugin {
3209
5129
  if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
3210
5130
  throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
3211
5131
  }
3212
- const { statement, params, consistency } = prepareParams(query, parameters);
3213
- const {
3214
- records,
3215
- rows,
3216
- warning,
3217
- columns = []
3218
- } = await sqlQuery({
5132
+ const { statement, params, consistency, responseType } = prepareParams(query, parameters);
5133
+ const { warning, columns, ...response } = await sqlQuery({
3219
5134
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
3220
- body: { statement, params, consistency },
5135
+ body: { statement, params, consistency, responseType },
3221
5136
  ...pluginOptions
3222
5137
  });
5138
+ const records = "records" in response ? response.records : void 0;
5139
+ const rows = "rows" in response ? response.rows : void 0;
3223
5140
  return { records, rows, warning, columns };
3224
5141
  };
3225
5142
  sqlFunction.connectionString = buildConnectionString(pluginOptions);
5143
+ sqlFunction.batch = async (query) => {
5144
+ const { results } = await sqlBatchQuery({
5145
+ pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
5146
+ body: {
5147
+ statements: query.statements.map(({ statement, params }) => ({ statement, params })),
5148
+ consistency: query.consistency,
5149
+ responseType: query.responseType
5150
+ },
5151
+ ...pluginOptions
5152
+ });
5153
+ return { results };
5154
+ };
3226
5155
  return sqlFunction;
3227
5156
  }
3228
5157
  }
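
The hunk above adds a `batch` entry point to the SQL plugin on top of the new `sqlBatchQuery` operation, and threads `responseType` through the single-query path. A usage sketch inferred only from the request body built above (assuming `xata` is a client instance whose `sql` property comes from this plugin):

// Hypothetical usage; option names mirror the body passed to sqlBatchQuery above.
const { results } = await xata.sql.batch({
  statements: [
    { statement: 'SELECT * FROM "users" WHERE name = $1', params: ['Alice'] },
    { statement: 'SELECT count(*) AS total FROM "teams"', params: [] }
  ],
  consistency: 'strong',  // forwarded as body.consistency
  responseType: 'json'    // forwarded as body.responseType, like the tagged-template path
});
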
@@ -3249,8 +5178,7 @@ function buildDomain(host, region) {
3249
5178
  function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
3250
5179
  const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
3251
5180
  const parts = parseWorkspacesUrlParts(url);
3252
- if (!parts)
3253
- throw new Error("Invalid workspaces URL");
5181
+ if (!parts) throw new Error("Invalid workspaces URL");
3254
5182
  const { workspace: workspaceSlug, region, database, host } = parts;
3255
5183
  const domain = buildDomain(host, region);
3256
5184
  const workspace = workspaceSlug.split("-").pop();
@@ -3275,39 +5203,24 @@ class TransactionPlugin extends XataPlugin {
3275
5203
  }
3276
5204
  }
3277
5205
 
3278
- var __accessCheck = (obj, member, msg) => {
3279
- if (!member.has(obj))
3280
- throw TypeError("Cannot " + msg);
3281
- };
3282
- var __privateGet = (obj, member, getter) => {
3283
- __accessCheck(obj, member, "read from private field");
3284
- return getter ? getter.call(obj) : member.get(obj);
3285
- };
3286
- var __privateAdd = (obj, member, value) => {
3287
- if (member.has(obj))
3288
- throw TypeError("Cannot add the same private member more than once");
3289
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3290
- };
3291
- var __privateSet = (obj, member, value, setter) => {
3292
- __accessCheck(obj, member, "write to private field");
3293
- setter ? setter.call(obj, value) : member.set(obj, value);
3294
- return value;
3295
- };
3296
- var __privateMethod = (obj, member, method) => {
3297
- __accessCheck(obj, member, "access private method");
3298
- return method;
5206
+ var __typeError = (msg) => {
5207
+ throw TypeError(msg);
3299
5208
  };
5209
+ var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
5210
+ var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
5211
+ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5212
+ var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
5213
+ var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
3300
5214
  const buildClient = (plugins) => {
3301
- var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
5215
+ var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
3302
5216
  return _a = class {
3303
5217
  constructor(options = {}, tables) {
3304
- __privateAdd(this, _parseOptions);
3305
- __privateAdd(this, _getFetchProps);
3306
- __privateAdd(this, _options, void 0);
3307
- const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
5218
+ __privateAdd(this, _instances);
5219
+ __privateAdd(this, _options);
5220
+ const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
3308
5221
  __privateSet(this, _options, safeOptions);
3309
5222
  const pluginOptions = {
3310
- ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
5223
+ ...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
3311
5224
  host: safeOptions.host,
3312
5225
  tables,
3313
5226
  branch: safeOptions.branch
@@ -3324,8 +5237,7 @@ const buildClient = (plugins) => {
3324
5237
  this.sql = sql;
3325
5238
  this.files = files;
3326
5239
  for (const [key, namespace] of Object.entries(plugins ?? {})) {
3327
- if (namespace === void 0)
3328
- continue;
5240
+ if (namespace === void 0) continue;
3329
5241
  this[key] = namespace.build(pluginOptions);
3330
5242
  }
3331
5243
  }
@@ -3334,8 +5246,8 @@ const buildClient = (plugins) => {
3334
5246
  const branch = __privateGet(this, _options).branch;
3335
5247
  return { databaseURL, branch };
3336
5248
  }
3337
- }, _options = new WeakMap(), _parseOptions = new WeakSet(), parseOptions_fn = function(options) {
3338
- const enableBrowser = options?.enableBrowser ?? getEnableBrowserVariable() ?? false;
5249
+ }, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
5250
+ const enableBrowser = options?.enableBrowser ?? false;
3339
5251
  const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
3340
5252
  if (isBrowser && !enableBrowser) {
3341
5253
  throw new Error(
@@ -3343,8 +5255,9 @@ const buildClient = (plugins) => {
3343
5255
  );
3344
5256
  }
3345
5257
  const fetch = getFetchImplementation(options?.fetch);
3346
- const databaseURL = options?.databaseURL || getDatabaseURL();
3347
- const apiKey = options?.apiKey || getAPIKey();
5258
+ const databaseURL = options?.databaseURL;
5259
+ const apiKey = options?.apiKey;
5260
+ const branch = options?.branch;
3348
5261
  const trace = options?.trace ?? defaultTrace;
3349
5262
  const clientName = options?.clientName;
3350
5263
  const host = options?.host ?? "production";
@@ -3355,25 +5268,8 @@ const buildClient = (plugins) => {
3355
5268
  if (!databaseURL) {
3356
5269
  throw new Error("Option databaseURL is required");
3357
5270
  }
3358
- const envBranch = getBranch();
3359
- const previewBranch = getPreviewBranch();
3360
- const branch = options?.branch || previewBranch || envBranch || "main";
3361
- if (!!previewBranch && branch !== previewBranch) {
3362
- console.warn(
3363
- `Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
3364
- );
3365
- } else if (!!envBranch && branch !== envBranch) {
3366
- console.warn(
3367
- `Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
3368
- );
3369
- } else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
3370
- console.warn(
3371
- `Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
3372
- );
3373
- } else if (!previewBranch && !envBranch && options?.branch === void 0) {
3374
- console.warn(
3375
- `No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
3376
- );
5271
+ if (!branch) {
5272
+ throw new Error("Option branch is required");
3377
5273
  }
3378
5274
  return {
3379
5275
  fetch,
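
The two hunks above drop the environment-variable fallbacks (`getDatabaseURL`, `getAPIKey`, `getBranch`, `getPreviewBranch`) from option parsing, so `databaseURL` and `branch` now throw when omitted and `apiKey` is no longer read implicitly. A minimal construction consistent with the checks above (values are placeholders, and `BaseClient` is assumed to be the exported client produced by `buildClient`):

// Sketch only: options must now be passed explicitly to the constructor.
import { BaseClient } from '@xata.io/client';

const xata = new BaseClient({
  databaseURL: 'https://my-workspace-1234.us-east-1.xata.sh/db/mydb', // placeholder
  apiKey: process.env.XATA_API_KEY,  // no implicit getAPIKey() fallback anymore
  branch: 'main'                     // otherwise: "Option branch is required"
});
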
@@ -3387,7 +5283,7 @@ const buildClient = (plugins) => {
3387
5283
  clientName,
3388
5284
  xataAgentExtra
3389
5285
  };
3390
- }, _getFetchProps = new WeakSet(), getFetchProps_fn = function({
5286
+ }, getFetchProps_fn = function({
3391
5287
  fetch,
3392
5288
  apiKey,
3393
5289
  databaseURL,
@@ -3428,26 +5324,19 @@ class Serializer {
3428
5324
  }
3429
5325
  toJSON(data) {
3430
5326
  function visit(obj) {
3431
- if (Array.isArray(obj))
3432
- return obj.map(visit);
5327
+ if (Array.isArray(obj)) return obj.map(visit);
3433
5328
  const type = typeof obj;
3434
- if (type === "undefined")
3435
- return { [META]: "undefined" };
3436
- if (type === "bigint")
3437
- return { [META]: "bigint", [VALUE]: obj.toString() };
3438
- if (obj === null || type !== "object")
3439
- return obj;
5329
+ if (type === "undefined") return { [META]: "undefined" };
5330
+ if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
5331
+ if (obj === null || type !== "object") return obj;
3440
5332
  const constructor = obj.constructor;
3441
5333
  const o = { [META]: constructor.name };
3442
5334
  for (const [key, value] of Object.entries(obj)) {
3443
5335
  o[key] = visit(value);
3444
5336
  }
3445
- if (constructor === Date)
3446
- o[VALUE] = obj.toISOString();
3447
- if (constructor === Map)
3448
- o[VALUE] = Object.fromEntries(obj);
3449
- if (constructor === Set)
3450
- o[VALUE] = [...obj];
5337
+ if (constructor === Date) o[VALUE] = obj.toISOString();
5338
+ if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
5339
+ if (constructor === Set) o[VALUE] = [...obj];
3451
5340
  return o;
3452
5341
  }
3453
5342
  return JSON.stringify(visit(data));
@@ -3460,16 +5349,11 @@ class Serializer {
3460
5349
  if (constructor) {
3461
5350
  return Object.assign(Object.create(constructor.prototype), rest);
3462
5351
  }
3463
- if (clazz === "Date")
3464
- return new Date(val);
3465
- if (clazz === "Set")
3466
- return new Set(val);
3467
- if (clazz === "Map")
3468
- return new Map(Object.entries(val));
3469
- if (clazz === "bigint")
3470
- return BigInt(val);
3471
- if (clazz === "undefined")
3472
- return void 0;
5352
+ if (clazz === "Date") return new Date(val);
5353
+ if (clazz === "Set") return new Set(val);
5354
+ if (clazz === "Map") return new Map(Object.entries(val));
5355
+ if (clazz === "bigint") return BigInt(val);
5356
+ if (clazz === "undefined") return void 0;
3473
5357
  return rest;
3474
5358
  }
3475
5359
  return value;
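
The serializer hunks above only reflow single-statement `if` bodies onto one line; behaviour is unchanged. For context, the visible `toJSON`/`fromJSON` branches round-trip values plain JSON cannot represent. A small sketch, assuming the exported `Serializer` class is used directly:

// Illustrative round-trip based on the branches shown above.
import { Serializer } from '@xata.io/client';

const serializer = new Serializer();
const json = serializer.toJSON({
  when: new Date('2024-01-01T00:00:00Z'), // re-created via new Date(...) on the way back
  tags: new Set(['a', 'b']),              // re-created via new Set(...)
  attrs: new Map([['k', 'v']]),           // re-created via new Map(Object.entries(...))
  big: 123n                               // re-created via BigInt(...)
});
const restored = serializer.fromJSON(json);
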
@@ -3484,6 +5368,47 @@ const deserialize = (json) => {
3484
5368
  return defaultSerializer.fromJSON(json);
3485
5369
  };
3486
5370
 
5371
+ function parseEnvironment(environment) {
5372
+ try {
5373
+ if (typeof environment === "function") {
5374
+ return new Proxy(
5375
+ {},
5376
+ {
5377
+ get(target) {
5378
+ return environment(target);
5379
+ }
5380
+ }
5381
+ );
5382
+ }
5383
+ if (isObject(environment)) {
5384
+ return environment;
5385
+ }
5386
+ } catch (error) {
5387
+ }
5388
+ return {};
5389
+ }
5390
+ function buildPreviewBranchName({ org, branch }) {
5391
+ return `preview-${org}-${branch}`;
5392
+ }
5393
+ function getDeployPreviewBranch(environment) {
5394
+ try {
5395
+ const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
5396
+ if (deployPreviewBranch) return deployPreviewBranch;
5397
+ switch (deployPreview) {
5398
+ case "vercel": {
5399
+ if (!vercelGitCommitRef || !vercelGitRepoOwner) {
5400
+ console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
5401
+ return void 0;
5402
+ }
5403
+ return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
5404
+ }
5405
+ }
5406
+ return void 0;
5407
+ } catch (err) {
5408
+ return void 0;
5409
+ }
5410
+ }
5411
+
3487
5412
  class XataError extends Error {
3488
5413
  constructor(message, status) {
3489
5414
  super(message);
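
The added `getDeployPreviewBranch` helper above derives a preview branch name from a deploy environment (an object or a lookup function). Based only on the code in that hunk, a Vercel-shaped environment object would resolve like this (values are illustrative):

// Inferred from parseEnvironment / buildPreviewBranchName / getDeployPreviewBranch above.
import { getDeployPreviewBranch } from '@xata.io/client';

const branch = getDeployPreviewBranch({
  deployPreview: 'vercel',
  vercelGitRepoOwner: 'acme',          // becomes the org part of the name
  vercelGitCommitRef: 'feature-login'  // becomes the branch part of the name
});
// branch === 'preview-acme-feature-login'
// If deployPreviewBranch is set in the environment, it is returned unchanged instead.
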
@@ -3491,5 +5416,5 @@ class XataError extends Error {
3491
5416
  }
3492
5417
  }
3493
5418
 
3494
- export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
5419
+ export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, completeMigration, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, dropClusterExtension, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAuthorizationCode, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchMoveStatus, getBranchSchemaHistory, getBranchStats, getCluster, getClusterMetrics, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDeployPreviewBranch, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationJobs, getMigrationRequest, getMigrationRequestIsMerged, getRecord, getSchema, getSchemas, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, installClusterExtension, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusterBranches, listClusterExtensions, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, moveBranch, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, rollbackMigration, searchBranch, searchTable, serialize, setTableSchema, sqlBatchQuery, sqlQuery, startMigration, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
3495
5420
  //# sourceMappingURL=index.mjs.map